majlis 0.7.0 → 0.7.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/cli.js +400 -233
  2. package/package.json +1 -1
package/dist/cli.js CHANGED
@@ -249,6 +249,13 @@ var init_migrations = __esm({
249
249
  );
250
250
 
251
251
  CREATE INDEX idx_swarm_members_run ON swarm_members(swarm_run_id);
252
+ `);
253
+ },
254
+ // Migration 006: v5 → v6 — Experiment dependencies and scoped context
255
+ (db) => {
256
+ db.exec(`
257
+ ALTER TABLE experiments ADD COLUMN depends_on TEXT;
258
+ ALTER TABLE experiments ADD COLUMN context_files TEXT;
252
259
  `);
253
260
  }
254
261
  ];
@@ -1261,7 +1268,7 @@ var init_config = __esm({
1261
1268
  path3 = __toESM(require("path"));
1262
1269
  DEFAULT_CONFIG = {
1263
1270
  project: { name: "", description: "", objective: "" },
1264
- metrics: { command: "", fixtures: [], tracked: {} },
1271
+ metrics: { command: "", fixtures: {}, tracked: {} },
1265
1272
  build: { pre_measure: null, post_measure: null },
1266
1273
  cycle: {
1267
1274
  compression_interval: 5,
@@ -2197,12 +2204,13 @@ var init_upgrade = __esm({
2197
2204
  });
2198
2205
 
2199
2206
  // src/db/queries.ts
2200
- function createExperiment(db, slug, branch, hypothesis, subType, classificationRef) {
2207
+ function createExperiment(db, slug, branch, hypothesis, subType, classificationRef, dependsOn = null, contextFiles = null) {
2201
2208
  const stmt = db.prepare(`
2202
- INSERT INTO experiments (slug, branch, hypothesis, sub_type, classification_ref, status)
2203
- VALUES (?, ?, ?, ?, ?, 'classified')
2209
+ INSERT INTO experiments (slug, branch, hypothesis, sub_type, classification_ref, status, depends_on, context_files)
2210
+ VALUES (?, ?, ?, ?, ?, 'classified', ?, ?)
2204
2211
  `);
2205
- const result = stmt.run(slug, branch, hypothesis, subType, classificationRef);
2212
+ const contextJson = contextFiles && contextFiles.length > 0 ? JSON.stringify(contextFiles) : null;
2213
+ const result = stmt.run(slug, branch, hypothesis, subType, classificationRef, dependsOn, contextJson);
2206
2214
  return getExperimentById(db, result.lastInsertRowid);
2207
2215
  }
2208
2216
  function getExperimentById(db, id) {
@@ -2711,6 +2719,34 @@ async function status(isJson) {
2711
2719
  console.log(`
2712
2720
  ${yellow(`${judgmentDecisions.length} judgment-level decisions`)} (provisional targets for doubt)`);
2713
2721
  }
2722
+ console.log();
2723
+ header("Project Readiness");
2724
+ const validation = (0, import_shared2.validateProject)({
2725
+ hasGitRepo: fs9.existsSync(path9.join(root, ".git")),
2726
+ hasClaudeMd: fs9.existsSync(path9.join(root, "CLAUDE.md")),
2727
+ metricsCommand: config.metrics.command,
2728
+ metricsCommandRunnable: checkCommandRunnable(config.metrics.command, root),
2729
+ fixtures: config.metrics.fixtures,
2730
+ tracked: config.metrics.tracked,
2731
+ preMeasure: config.build.pre_measure,
2732
+ hasObjective: !!(config.project.objective && config.project.objective.length > 0),
2733
+ hasSynthesis: (() => {
2734
+ const sp = path9.join(root, "docs", "synthesis", "current.md");
2735
+ if (!fs9.existsSync(sp)) return false;
2736
+ const content = fs9.readFileSync(sp, "utf-8");
2737
+ return content.length > 100 && !content.includes("No experiments yet");
2738
+ })()
2739
+ });
2740
+ console.log((0, import_shared2.formatValidation)(validation));
2741
+ }
2742
+ function checkCommandRunnable(command, cwd) {
2743
+ if (!command || command.includes(`echo '{"fixtures":{}}'`)) return false;
2744
+ try {
2745
+ (0, import_node_child_process3.execSync)(command, { cwd, encoding: "utf-8", timeout: 15e3, stdio: ["pipe", "pipe", "pipe"] });
2746
+ return true;
2747
+ } catch {
2748
+ return false;
2749
+ }
2714
2750
  }
2715
2751
  function buildSummary(expCount, activeSession, sessionsSinceCompression, config) {
2716
2752
  const parts = [];
@@ -2721,12 +2757,17 @@ function buildSummary(expCount, activeSession, sessionsSinceCompression, config)
2721
2757
  }
2722
2758
  return parts.join(". ");
2723
2759
  }
2760
+ var fs9, path9, import_node_child_process3, import_shared2;
2724
2761
  var init_status = __esm({
2725
2762
  "src/commands/status.ts"() {
2726
2763
  "use strict";
2764
+ fs9 = __toESM(require("fs"));
2765
+ path9 = __toESM(require("path"));
2766
+ import_node_child_process3 = require("child_process");
2727
2767
  init_connection();
2728
2768
  init_queries();
2729
2769
  init_config();
2770
+ import_shared2 = require("@majlis/shared");
2730
2771
  init_format();
2731
2772
  }
2732
2773
  });
@@ -2848,6 +2889,10 @@ var init_machine = __esm({
2848
2889
  });
2849
2890
 
2850
2891
  // src/metrics.ts
2892
+ function isGateFixture(fixtures, fixtureName) {
2893
+ if (Array.isArray(fixtures)) return false;
2894
+ return fixtures[fixtureName]?.gate === true;
2895
+ }
2851
2896
  function compareMetrics(db, experimentId, config) {
2852
2897
  const before = getMetricsByExperimentAndPhase(db, experimentId, "before");
2853
2898
  const after = getMetricsByExperimentAndPhase(db, experimentId, "after");
@@ -2869,13 +2914,17 @@ function compareMetrics(db, experimentId, config) {
2869
2914
  before: b.metric_value,
2870
2915
  after: a.metric_value,
2871
2916
  delta: a.metric_value - b.metric_value,
2872
- regression
2917
+ regression,
2918
+ gate: isGateFixture(config.metrics.fixtures, fixture)
2873
2919
  });
2874
2920
  }
2875
2921
  }
2876
2922
  }
2877
2923
  return comparisons;
2878
2924
  }
2925
+ function checkGateViolations(comparisons) {
2926
+ return comparisons.filter((c) => c.gate && c.regression);
2927
+ }
2879
2928
  function isRegression(before, after, direction, target) {
2880
2929
  switch (direction) {
2881
2930
  case "lower_is_better":
@@ -2939,7 +2988,7 @@ async function captureMetrics(phase, args) {
2939
2988
  if (config.build.pre_measure) {
2940
2989
  info(`Running pre-measure: ${config.build.pre_measure}`);
2941
2990
  try {
2942
- (0, import_node_child_process3.execSync)(config.build.pre_measure, { cwd: root, encoding: "utf-8", stdio: "inherit" });
2991
+ (0, import_node_child_process4.execSync)(config.build.pre_measure, { cwd: root, encoding: "utf-8", stdio: "inherit" });
2943
2992
  } catch {
2944
2993
  warn("Pre-measure command failed \u2014 continuing anyway.");
2945
2994
  }
@@ -2950,7 +2999,7 @@ async function captureMetrics(phase, args) {
2950
2999
  info(`Running metrics: ${config.metrics.command}`);
2951
3000
  let metricsOutput;
2952
3001
  try {
2953
- metricsOutput = (0, import_node_child_process3.execSync)(config.metrics.command, {
3002
+ metricsOutput = (0, import_node_child_process4.execSync)(config.metrics.command, {
2954
3003
  cwd: root,
2955
3004
  encoding: "utf-8",
2956
3005
  stdio: ["pipe", "pipe", "pipe"]
@@ -2969,7 +3018,7 @@ async function captureMetrics(phase, args) {
2969
3018
  success(`Captured ${parsed.length} metric(s) for ${exp.slug} (phase: ${phase})`);
2970
3019
  if (config.build.post_measure) {
2971
3020
  try {
2972
- (0, import_node_child_process3.execSync)(config.build.post_measure, { cwd: root, encoding: "utf-8", stdio: "inherit" });
3021
+ (0, import_node_child_process4.execSync)(config.build.post_measure, { cwd: root, encoding: "utf-8", stdio: "inherit" });
2973
3022
  } catch {
2974
3023
  warn("Post-measure command failed.");
2975
3024
  }
@@ -3020,11 +3069,11 @@ function formatDelta(delta) {
3020
3069
  const prefix = delta > 0 ? "+" : "";
3021
3070
  return `${prefix}${delta.toFixed(4)}`;
3022
3071
  }
3023
- var import_node_child_process3;
3072
+ var import_node_child_process4;
3024
3073
  var init_measure = __esm({
3025
3074
  "src/commands/measure.ts"() {
3026
3075
  "use strict";
3027
- import_node_child_process3 = require("child_process");
3076
+ import_node_child_process4 = require("child_process");
3028
3077
  init_connection();
3029
3078
  init_queries();
3030
3079
  init_metrics();
@@ -3057,7 +3106,7 @@ async function newExperiment(args) {
3057
3106
  const paddedNum = String(num).padStart(3, "0");
3058
3107
  const branch = `exp/${paddedNum}-${slug}`;
3059
3108
  try {
3060
- (0, import_node_child_process4.execFileSync)("git", ["checkout", "-b", branch], {
3109
+ (0, import_node_child_process5.execFileSync)("git", ["checkout", "-b", branch], {
3061
3110
  cwd: root,
3062
3111
  encoding: "utf-8",
3063
3112
  stdio: ["pipe", "pipe", "pipe"]
@@ -3067,15 +3116,28 @@ async function newExperiment(args) {
3067
3116
  warn(`Could not create branch ${branch} \u2014 continuing without git branch.`);
3068
3117
  }
3069
3118
  const subType = getFlagValue(args, "--sub-type") ?? null;
3070
- const exp = createExperiment(db, slug, branch, hypothesis, subType, null);
3119
+ const dependsOn = getFlagValue(args, "--depends-on") ?? null;
3120
+ const contextArg = getFlagValue(args, "--context") ?? null;
3121
+ const contextFiles = contextArg ? contextArg.split(",").map((f) => f.trim()) : null;
3122
+ if (dependsOn) {
3123
+ const depExp = getExperimentBySlug(db, dependsOn);
3124
+ if (!depExp) {
3125
+ throw new Error(`Dependency experiment not found: ${dependsOn}`);
3126
+ }
3127
+ info(`Depends on: ${dependsOn} (status: ${depExp.status})`);
3128
+ }
3129
+ const exp = createExperiment(db, slug, branch, hypothesis, subType, null, dependsOn, contextFiles);
3130
+ if (contextFiles) {
3131
+ info(`Context files: ${contextFiles.join(", ")}`);
3132
+ }
3071
3133
  success(`Created experiment #${exp.id}: ${exp.slug}`);
3072
- const docsDir = path9.join(root, "docs", "experiments");
3073
- const templatePath = path9.join(docsDir, "_TEMPLATE.md");
3074
- if (fs9.existsSync(templatePath)) {
3075
- const template = fs9.readFileSync(templatePath, "utf-8");
3134
+ const docsDir = path10.join(root, "docs", "experiments");
3135
+ const templatePath = path10.join(docsDir, "_TEMPLATE.md");
3136
+ if (fs10.existsSync(templatePath)) {
3137
+ const template = fs10.readFileSync(templatePath, "utf-8");
3076
3138
  const logContent = template.replace(/\{\{title\}\}/g, hypothesis).replace(/\{\{hypothesis\}\}/g, hypothesis).replace(/\{\{branch\}\}/g, branch).replace(/\{\{status\}\}/g, "classified").replace(/\{\{sub_type\}\}/g, subType ?? "unclassified").replace(/\{\{date\}\}/g, (/* @__PURE__ */ new Date()).toISOString().split("T")[0]);
3077
- const logPath = path9.join(docsDir, `${paddedNum}-${slug}.md`);
3078
- fs9.writeFileSync(logPath, logContent);
3139
+ const logPath = path10.join(docsDir, `${paddedNum}-${slug}.md`);
3140
+ fs10.writeFileSync(logPath, logContent);
3079
3141
  info(`Created experiment log: docs/experiments/${paddedNum}-${slug}.md`);
3080
3142
  }
3081
3143
  autoCommit(root, `new: ${slug}`);
@@ -3115,19 +3177,19 @@ async function revert(args) {
3115
3177
  );
3116
3178
  adminTransitionAndPersist(db, exp.id, exp.status, "dead_end" /* DEAD_END */, "revert");
3117
3179
  try {
3118
- const currentBranch = (0, import_node_child_process4.execFileSync)("git", ["rev-parse", "--abbrev-ref", "HEAD"], {
3180
+ const currentBranch = (0, import_node_child_process5.execFileSync)("git", ["rev-parse", "--abbrev-ref", "HEAD"], {
3119
3181
  cwd: root,
3120
3182
  encoding: "utf-8"
3121
3183
  }).trim();
3122
3184
  if (currentBranch === exp.branch) {
3123
3185
  try {
3124
- (0, import_node_child_process4.execFileSync)("git", ["checkout", "main"], {
3186
+ (0, import_node_child_process5.execFileSync)("git", ["checkout", "main"], {
3125
3187
  cwd: root,
3126
3188
  encoding: "utf-8",
3127
3189
  stdio: ["pipe", "pipe", "pipe"]
3128
3190
  });
3129
3191
  } catch {
3130
- (0, import_node_child_process4.execFileSync)("git", ["checkout", "master"], {
3192
+ (0, import_node_child_process5.execFileSync)("git", ["checkout", "master"], {
3131
3193
  cwd: root,
3132
3194
  encoding: "utf-8",
3133
3195
  stdio: ["pipe", "pipe", "pipe"]
@@ -3139,13 +3201,13 @@ async function revert(args) {
3139
3201
  }
3140
3202
  info(`Experiment ${exp.slug} reverted to dead-end. Reason: ${reason}`);
3141
3203
  }
3142
- var fs9, path9, import_node_child_process4;
3204
+ var fs10, path10, import_node_child_process5;
3143
3205
  var init_experiment = __esm({
3144
3206
  "src/commands/experiment.ts"() {
3145
3207
  "use strict";
3146
- fs9 = __toESM(require("fs"));
3147
- path9 = __toESM(require("path"));
3148
- import_node_child_process4 = require("child_process");
3208
+ fs10 = __toESM(require("fs"));
3209
+ path10 = __toESM(require("path"));
3210
+ import_node_child_process5 = require("child_process");
3149
3211
  init_connection();
3150
3212
  init_queries();
3151
3213
  init_machine();
@@ -3288,12 +3350,12 @@ function queryDeadEnds(db, args, isJson) {
3288
3350
  console.log(table(["ID", "Sub-Type", "Approach", "Constraint"], rows));
3289
3351
  }
3290
3352
  function queryFragility(root, isJson) {
3291
- const fragPath = path10.join(root, "docs", "synthesis", "fragility.md");
3292
- if (!fs10.existsSync(fragPath)) {
3353
+ const fragPath = path11.join(root, "docs", "synthesis", "fragility.md");
3354
+ if (!fs11.existsSync(fragPath)) {
3293
3355
  info("No fragility map found.");
3294
3356
  return;
3295
3357
  }
3296
- const content = fs10.readFileSync(fragPath, "utf-8");
3358
+ const content = fs11.readFileSync(fragPath, "utf-8");
3297
3359
  if (isJson) {
3298
3360
  console.log(JSON.stringify({ content }, null, 2));
3299
3361
  return;
@@ -3349,7 +3411,7 @@ function queryCircuitBreakers(db, root, isJson) {
3349
3411
  function checkCommit(db) {
3350
3412
  let stdinData = "";
3351
3413
  try {
3352
- stdinData = fs10.readFileSync(0, "utf-8");
3414
+ stdinData = fs11.readFileSync(0, "utf-8");
3353
3415
  } catch {
3354
3416
  }
3355
3417
  if (stdinData) {
@@ -3374,12 +3436,12 @@ function checkCommit(db) {
3374
3436
  process.exit(1);
3375
3437
  }
3376
3438
  }
3377
- var fs10, path10;
3439
+ var fs11, path11;
3378
3440
  var init_query = __esm({
3379
3441
  "src/commands/query.ts"() {
3380
3442
  "use strict";
3381
- fs10 = __toESM(require("fs"));
3382
- path10 = __toESM(require("path"));
3443
+ fs11 = __toESM(require("fs"));
3444
+ path11 = __toESM(require("path"));
3383
3445
  init_connection();
3384
3446
  init_queries();
3385
3447
  init_config();
@@ -3413,6 +3475,28 @@ async function resolve2(db, exp, projectRoot) {
3413
3475
  grades = getVerificationsByExperiment(db, exp.id);
3414
3476
  }
3415
3477
  const overallGrade = worstGrade(grades);
3478
+ const config = loadConfig(projectRoot);
3479
+ const metricComparisons = compareMetrics(db, exp.id, config);
3480
+ const gateViolations = checkGateViolations(metricComparisons);
3481
+ if (gateViolations.length > 0 && (overallGrade === "sound" || overallGrade === "good")) {
3482
+ warn("Gate fixture regression detected \u2014 blocking merge:");
3483
+ for (const v of gateViolations) {
3484
+ warn(` ${v.fixture} / ${v.metric}: ${v.before} \u2192 ${v.after} (${v.delta > 0 ? "+" : ""}${v.delta})`);
3485
+ }
3486
+ updateExperimentStatus(db, exp.id, "resolved");
3487
+ const guidanceText = `Gate fixture regression blocks merge. Fix these regressions before re-attempting:
3488
+ ` + gateViolations.map((v) => `- ${v.fixture} / ${v.metric}: was ${v.before}, now ${v.after}`).join("\n");
3489
+ transition("resolved" /* RESOLVED */, "building" /* BUILDING */);
3490
+ db.transaction(() => {
3491
+ storeBuilderGuidance(db, exp.id, guidanceText);
3492
+ updateExperimentStatus(db, exp.id, "building");
3493
+ if (exp.sub_type) {
3494
+ incrementSubTypeFailure(db, exp.sub_type, exp.id, "weak");
3495
+ }
3496
+ })();
3497
+ warn(`Experiment ${exp.slug} CYCLING BACK \u2014 gate fixture(s) regressed.`);
3498
+ return;
3499
+ }
3416
3500
  updateExperimentStatus(db, exp.id, "resolved");
3417
3501
  switch (overallGrade) {
3418
3502
  case "sound": {
@@ -3500,6 +3584,28 @@ async function resolveDbOnly(db, exp, projectRoot) {
3500
3584
  grades = getVerificationsByExperiment(db, exp.id);
3501
3585
  }
3502
3586
  const overallGrade = worstGrade(grades);
3587
+ const config = loadConfig(projectRoot);
3588
+ const metricComparisons = compareMetrics(db, exp.id, config);
3589
+ const gateViolations = checkGateViolations(metricComparisons);
3590
+ if (gateViolations.length > 0 && (overallGrade === "sound" || overallGrade === "good")) {
3591
+ warn("Gate fixture regression detected \u2014 blocking merge:");
3592
+ for (const v of gateViolations) {
3593
+ warn(` ${v.fixture} / ${v.metric}: ${v.before} \u2192 ${v.after} (${v.delta > 0 ? "+" : ""}${v.delta})`);
3594
+ }
3595
+ updateExperimentStatus(db, exp.id, "resolved");
3596
+ const guidanceText = `Gate fixture regression blocks merge. Fix these regressions before re-attempting:
3597
+ ` + gateViolations.map((v) => `- ${v.fixture} / ${v.metric}: was ${v.before}, now ${v.after}`).join("\n");
3598
+ transition("resolved" /* RESOLVED */, "building" /* BUILDING */);
3599
+ db.transaction(() => {
3600
+ storeBuilderGuidance(db, exp.id, guidanceText);
3601
+ updateExperimentStatus(db, exp.id, "building");
3602
+ if (exp.sub_type) {
3603
+ incrementSubTypeFailure(db, exp.sub_type, exp.id, "weak");
3604
+ }
3605
+ })();
3606
+ warn(`Experiment ${exp.slug} CYCLING BACK \u2014 gate fixture(s) regressed.`);
3607
+ return "weak";
3608
+ }
3503
3609
  updateExperimentStatus(db, exp.id, "resolved");
3504
3610
  switch (overallGrade) {
3505
3611
  case "sound":
@@ -3570,19 +3676,19 @@ async function resolveDbOnly(db, exp, projectRoot) {
3570
3676
  function gitMerge(branch, cwd) {
3571
3677
  try {
3572
3678
  try {
3573
- (0, import_node_child_process5.execFileSync)("git", ["checkout", "main"], {
3679
+ (0, import_node_child_process6.execFileSync)("git", ["checkout", "main"], {
3574
3680
  cwd,
3575
3681
  encoding: "utf-8",
3576
3682
  stdio: ["pipe", "pipe", "pipe"]
3577
3683
  });
3578
3684
  } catch {
3579
- (0, import_node_child_process5.execFileSync)("git", ["checkout", "master"], {
3685
+ (0, import_node_child_process6.execFileSync)("git", ["checkout", "master"], {
3580
3686
  cwd,
3581
3687
  encoding: "utf-8",
3582
3688
  stdio: ["pipe", "pipe", "pipe"]
3583
3689
  });
3584
3690
  }
3585
- (0, import_node_child_process5.execFileSync)("git", ["merge", branch, "--no-ff", "-m", `Merge experiment branch ${branch}`], {
3691
+ (0, import_node_child_process6.execFileSync)("git", ["merge", branch, "--no-ff", "-m", `Merge experiment branch ${branch}`], {
3586
3692
  cwd,
3587
3693
  encoding: "utf-8",
3588
3694
  stdio: ["pipe", "pipe", "pipe"]
@@ -3593,23 +3699,23 @@ function gitMerge(branch, cwd) {
3593
3699
  }
3594
3700
  function gitRevert(branch, cwd) {
3595
3701
  try {
3596
- const currentBranch = (0, import_node_child_process5.execFileSync)("git", ["rev-parse", "--abbrev-ref", "HEAD"], {
3702
+ const currentBranch = (0, import_node_child_process6.execFileSync)("git", ["rev-parse", "--abbrev-ref", "HEAD"], {
3597
3703
  cwd,
3598
3704
  encoding: "utf-8"
3599
3705
  }).trim();
3600
3706
  if (currentBranch === branch) {
3601
3707
  try {
3602
- (0, import_node_child_process5.execFileSync)("git", ["checkout", "--", "."], { cwd, encoding: "utf-8", stdio: ["pipe", "pipe", "pipe"] });
3708
+ (0, import_node_child_process6.execFileSync)("git", ["checkout", "--", "."], { cwd, encoding: "utf-8", stdio: ["pipe", "pipe", "pipe"] });
3603
3709
  } catch {
3604
3710
  }
3605
3711
  try {
3606
- (0, import_node_child_process5.execFileSync)("git", ["checkout", "main"], {
3712
+ (0, import_node_child_process6.execFileSync)("git", ["checkout", "main"], {
3607
3713
  cwd,
3608
3714
  encoding: "utf-8",
3609
3715
  stdio: ["pipe", "pipe", "pipe"]
3610
3716
  });
3611
3717
  } catch {
3612
- (0, import_node_child_process5.execFileSync)("git", ["checkout", "master"], {
3718
+ (0, import_node_child_process6.execFileSync)("git", ["checkout", "master"], {
3613
3719
  cwd,
3614
3720
  encoding: "utf-8",
3615
3721
  stdio: ["pipe", "pipe", "pipe"]
@@ -3621,28 +3727,30 @@ function gitRevert(branch, cwd) {
3621
3727
  }
3622
3728
  }
3623
3729
  function appendToFragilityMap(projectRoot, expSlug, gaps) {
3624
- const fragPath = path11.join(projectRoot, "docs", "synthesis", "fragility.md");
3730
+ const fragPath = path12.join(projectRoot, "docs", "synthesis", "fragility.md");
3625
3731
  let content = "";
3626
- if (fs11.existsSync(fragPath)) {
3627
- content = fs11.readFileSync(fragPath, "utf-8");
3732
+ if (fs12.existsSync(fragPath)) {
3733
+ content = fs12.readFileSync(fragPath, "utf-8");
3628
3734
  }
3629
3735
  const entry = `
3630
3736
  ## From experiment: ${expSlug}
3631
3737
  ${gaps}
3632
3738
  `;
3633
- fs11.writeFileSync(fragPath, content + entry);
3739
+ fs12.writeFileSync(fragPath, content + entry);
3634
3740
  }
3635
- var fs11, path11, import_node_child_process5;
3741
+ var fs12, path12, import_node_child_process6;
3636
3742
  var init_resolve = __esm({
3637
3743
  "src/resolve.ts"() {
3638
3744
  "use strict";
3639
- fs11 = __toESM(require("fs"));
3640
- path11 = __toESM(require("path"));
3745
+ fs12 = __toESM(require("fs"));
3746
+ path12 = __toESM(require("path"));
3641
3747
  init_types2();
3642
3748
  init_machine();
3643
3749
  init_queries();
3750
+ init_metrics();
3751
+ init_config();
3644
3752
  init_spawn();
3645
- import_node_child_process5 = require("child_process");
3753
+ import_node_child_process6 = require("child_process");
3646
3754
  init_git();
3647
3755
  init_format();
3648
3756
  }
@@ -3710,8 +3818,8 @@ async function runResolve(db, exp, root) {
3710
3818
  }
3711
3819
  async function doGate(db, exp, root) {
3712
3820
  transition(exp.status, "gated" /* GATED */);
3713
- const synthesis = truncateContext(readFileOrEmpty(path12.join(root, "docs", "synthesis", "current.md")), CONTEXT_LIMITS.synthesis);
3714
- const fragility = truncateContext(readFileOrEmpty(path12.join(root, "docs", "synthesis", "fragility.md")), CONTEXT_LIMITS.fragility);
3821
+ const synthesis = truncateContext(readFileOrEmpty(path13.join(root, "docs", "synthesis", "current.md")), CONTEXT_LIMITS.synthesis);
3822
+ const fragility = truncateContext(readFileOrEmpty(path13.join(root, "docs", "synthesis", "fragility.md")), CONTEXT_LIMITS.fragility);
3715
3823
  const structuralDeadEnds = exp.sub_type ? listStructuralDeadEndsBySubType(db, exp.sub_type) : listStructuralDeadEnds(db);
3716
3824
  const result = await spawnAgent("gatekeeper", {
3717
3825
  experiment: {
@@ -3754,17 +3862,25 @@ Output your gate_decision as "approve", "reject", or "flag" with reasoning.`
3754
3862
  }
3755
3863
  }
3756
3864
  async function doBuild(db, exp, root) {
3865
+ if (exp.depends_on) {
3866
+ const dep = getExperimentBySlug(db, exp.depends_on);
3867
+ if (!dep || dep.status !== "merged") {
3868
+ throw new Error(
3869
+ `Experiment "${exp.slug}" depends on "${exp.depends_on}" which is ${dep ? dep.status : "not found"}. Dependency must be merged before building.`
3870
+ );
3871
+ }
3872
+ }
3757
3873
  transition(exp.status, "building" /* BUILDING */);
3758
3874
  const deadEnds = exp.sub_type ? listDeadEndsBySubType(db, exp.sub_type) : listAllDeadEnds(db);
3759
3875
  const builderGuidance = getBuilderGuidance(db, exp.id);
3760
- const fragility = truncateContext(readFileOrEmpty(path12.join(root, "docs", "synthesis", "fragility.md")), CONTEXT_LIMITS.fragility);
3761
- const synthesis = truncateContext(readFileOrEmpty(path12.join(root, "docs", "synthesis", "current.md")), CONTEXT_LIMITS.synthesis);
3876
+ const fragility = truncateContext(readFileOrEmpty(path13.join(root, "docs", "synthesis", "fragility.md")), CONTEXT_LIMITS.fragility);
3877
+ const synthesis = truncateContext(readFileOrEmpty(path13.join(root, "docs", "synthesis", "current.md")), CONTEXT_LIMITS.synthesis);
3762
3878
  const confirmedDoubts = getConfirmedDoubts(db, exp.id);
3763
3879
  const config = loadConfig(root);
3764
3880
  const existingBaseline = getMetricsByExperimentAndPhase(db, exp.id, "before");
3765
3881
  if (config.metrics?.command && existingBaseline.length === 0) {
3766
3882
  try {
3767
- const output = (0, import_node_child_process6.execSync)(config.metrics.command, {
3883
+ const output = (0, import_node_child_process7.execSync)(config.metrics.command, {
3768
3884
  cwd: root,
3769
3885
  encoding: "utf-8",
3770
3886
  timeout: 6e4,
@@ -3792,6 +3908,7 @@ Build the experiment: ${exp.hypothesis}` : `Build the experiment: ${exp.hypothes
3792
3908
  }
3793
3909
  }
3794
3910
  taskPrompt += "\n\nNote: The framework captures metrics automatically. Do NOT claim specific numbers unless quoting framework output.";
3911
+ const supplementaryContext = loadExperimentContext(exp, root);
3795
3912
  const result = await spawnAgent("builder", {
3796
3913
  experiment: {
3797
3914
  id: exp.id,
@@ -3809,6 +3926,7 @@ Build the experiment: ${exp.hypothesis}` : `Build the experiment: ${exp.hypothes
3809
3926
  fragility,
3810
3927
  synthesis,
3811
3928
  confirmedDoubts,
3929
+ supplementaryContext: supplementaryContext || void 0,
3812
3930
  taskPrompt
3813
3931
  }, root);
3814
3932
  ingestStructuredOutput(db, exp.id, result.structured);
@@ -3821,7 +3939,7 @@ Build the experiment: ${exp.hypothesis}` : `Build the experiment: ${exp.hypothes
3821
3939
  } else {
3822
3940
  if (config.metrics?.command) {
3823
3941
  try {
3824
- const output = (0, import_node_child_process6.execSync)(config.metrics.command, {
3942
+ const output = (0, import_node_child_process7.execSync)(config.metrics.command, {
3825
3943
  cwd: root,
3826
3944
  encoding: "utf-8",
3827
3945
  timeout: 6e4,
@@ -3845,7 +3963,7 @@ async function doChallenge(db, exp, root) {
3845
3963
  transition(exp.status, "challenged" /* CHALLENGED */);
3846
3964
  let gitDiff = "";
3847
3965
  try {
3848
- gitDiff = (0, import_node_child_process6.execSync)('git diff main -- . ":!.majlis/"', {
3966
+ gitDiff = (0, import_node_child_process7.execSync)('git diff main -- . ":!.majlis/"', {
3849
3967
  cwd: root,
3850
3968
  encoding: "utf-8",
3851
3969
  stdio: ["pipe", "pipe", "pipe"]
@@ -3853,7 +3971,7 @@ async function doChallenge(db, exp, root) {
3853
3971
  } catch {
3854
3972
  }
3855
3973
  if (gitDiff.length > 8e3) gitDiff = gitDiff.slice(0, 8e3) + "\n[DIFF TRUNCATED]";
3856
- const synthesis = truncateContext(readFileOrEmpty(path12.join(root, "docs", "synthesis", "current.md")), CONTEXT_LIMITS.synthesis);
3974
+ const synthesis = truncateContext(readFileOrEmpty(path13.join(root, "docs", "synthesis", "current.md")), CONTEXT_LIMITS.synthesis);
3857
3975
  let taskPrompt = `Construct adversarial test cases for experiment ${exp.slug}: ${exp.hypothesis}`;
3858
3976
  if (gitDiff) {
3859
3977
  taskPrompt += `
@@ -3886,9 +4004,9 @@ ${gitDiff}
3886
4004
  async function doDoubt(db, exp, root) {
3887
4005
  transition(exp.status, "doubted" /* DOUBTED */);
3888
4006
  const paddedNum = String(exp.id).padStart(3, "0");
3889
- const expDocPath = path12.join(root, "docs", "experiments", `${paddedNum}-${exp.slug}.md`);
4007
+ const expDocPath = path13.join(root, "docs", "experiments", `${paddedNum}-${exp.slug}.md`);
3890
4008
  const experimentDoc = truncateContext(readFileOrEmpty(expDocPath), CONTEXT_LIMITS.experimentDoc);
3891
- const synthesis = truncateContext(readFileOrEmpty(path12.join(root, "docs", "synthesis", "current.md")), CONTEXT_LIMITS.synthesis);
4009
+ const synthesis = truncateContext(readFileOrEmpty(path13.join(root, "docs", "synthesis", "current.md")), CONTEXT_LIMITS.synthesis);
3892
4010
  const deadEnds = exp.sub_type ? listDeadEndsBySubType(db, exp.sub_type) : listAllDeadEnds(db);
3893
4011
  let taskPrompt = `Doubt the work in experiment ${exp.slug}: ${exp.hypothesis}. Produce a doubt document with evidence for each doubt.`;
3894
4012
  if (experimentDoc) {
@@ -3927,8 +4045,8 @@ ${experimentDoc}
3927
4045
  }
3928
4046
  async function doScout(db, exp, root) {
3929
4047
  transition(exp.status, "scouted" /* SCOUTED */);
3930
- const synthesis = truncateContext(readFileOrEmpty(path12.join(root, "docs", "synthesis", "current.md")), CONTEXT_LIMITS.synthesis);
3931
- const fragility = truncateContext(readFileOrEmpty(path12.join(root, "docs", "synthesis", "fragility.md")), CONTEXT_LIMITS.fragility);
4048
+ const synthesis = truncateContext(readFileOrEmpty(path13.join(root, "docs", "synthesis", "current.md")), CONTEXT_LIMITS.synthesis);
4049
+ const fragility = truncateContext(readFileOrEmpty(path13.join(root, "docs", "synthesis", "fragility.md")), CONTEXT_LIMITS.fragility);
3932
4050
  const deadEnds = exp.sub_type ? listDeadEndsBySubType(db, exp.sub_type) : listAllDeadEnds(db);
3933
4051
  const deadEndsSummary = deadEnds.map(
3934
4052
  (d) => `- [${d.category ?? "structural"}] ${d.approach}: ${d.why_failed}`
@@ -3975,31 +4093,49 @@ ${fragility}`;
3975
4093
  async function doVerify(db, exp, root) {
3976
4094
  transition(exp.status, "verifying" /* VERIFYING */);
3977
4095
  const doubts = getDoubtsByExperiment(db, exp.id);
3978
- const challengeDir = path12.join(root, "docs", "challenges");
4096
+ const challengeDir = path13.join(root, "docs", "challenges");
3979
4097
  let challenges = "";
3980
- if (fs12.existsSync(challengeDir)) {
3981
- const files = fs12.readdirSync(challengeDir).filter((f) => f.includes(exp.slug) && f.endsWith(".md"));
4098
+ if (fs13.existsSync(challengeDir)) {
4099
+ const files = fs13.readdirSync(challengeDir).filter((f) => f.includes(exp.slug) && f.endsWith(".md"));
3982
4100
  for (const f of files) {
3983
- challenges += fs12.readFileSync(path12.join(challengeDir, f), "utf-8") + "\n\n";
4101
+ challenges += fs13.readFileSync(path13.join(challengeDir, f), "utf-8") + "\n\n";
3984
4102
  }
3985
4103
  }
3986
- const beforeMetrics = getMetricsByExperimentAndPhase(db, exp.id, "before");
3987
- const afterMetrics = getMetricsByExperimentAndPhase(db, exp.id, "after");
4104
+ const config = loadConfig(root);
4105
+ const metricComparisons = compareMetrics(db, exp.id, config);
3988
4106
  let metricsSection = "";
3989
- if (beforeMetrics.length > 0 || afterMetrics.length > 0) {
4107
+ if (metricComparisons.length > 0) {
3990
4108
  metricsSection = "\n\n## Framework-Captured Metrics (GROUND TRUTH \u2014 not self-reported by builder)\n";
3991
- if (beforeMetrics.length > 0) {
3992
- metricsSection += "### Before Build\n";
3993
- for (const m of beforeMetrics) {
3994
- metricsSection += `- ${m.fixture} / ${m.metric_name}: ${m.metric_value}
4109
+ metricsSection += "| Fixture | Metric | Before | After | Delta | Regression | Gate |\n";
4110
+ metricsSection += "|---------|--------|--------|-------|-------|------------|------|\n";
4111
+ for (const c of metricComparisons) {
4112
+ metricsSection += `| ${c.fixture} | ${c.metric} | ${c.before} | ${c.after} | ${c.delta > 0 ? "+" : ""}${c.delta} | ${c.regression ? "YES" : "no"} | ${c.gate ? "GATE" : "-"} |
4113
+ `;
4114
+ }
4115
+ const gateViolations = metricComparisons.filter((c) => c.gate && c.regression);
4116
+ if (gateViolations.length > 0) {
4117
+ metricsSection += `
4118
+ **GATE VIOLATION**: ${gateViolations.length} gate fixture(s) regressed. This MUST be addressed \u2014 gate regressions block merge.
3995
4119
  `;
3996
- }
3997
4120
  }
3998
- if (afterMetrics.length > 0) {
3999
- metricsSection += "### After Build\n";
4000
- for (const m of afterMetrics) {
4001
- metricsSection += `- ${m.fixture} / ${m.metric_name}: ${m.metric_value}
4121
+ } else {
4122
+ const beforeMetrics = getMetricsByExperimentAndPhase(db, exp.id, "before");
4123
+ const afterMetrics = getMetricsByExperimentAndPhase(db, exp.id, "after");
4124
+ if (beforeMetrics.length > 0 || afterMetrics.length > 0) {
4125
+ metricsSection = "\n\n## Framework-Captured Metrics (GROUND TRUTH \u2014 not self-reported by builder)\n";
4126
+ if (beforeMetrics.length > 0) {
4127
+ metricsSection += "### Before Build\n";
4128
+ for (const m of beforeMetrics) {
4129
+ metricsSection += `- ${m.fixture} / ${m.metric_name}: ${m.metric_value}
4002
4130
  `;
4131
+ }
4132
+ }
4133
+ if (afterMetrics.length > 0) {
4134
+ metricsSection += "### After Build\n";
4135
+ for (const m of afterMetrics) {
4136
+ metricsSection += `- ${m.fixture} / ${m.metric_name}: ${m.metric_value}
4137
+ `;
4138
+ }
4003
4139
  }
4004
4140
  }
4005
4141
  }
@@ -4013,6 +4149,7 @@ async function doVerify(db, exp, root) {
4013
4149
  doubtReference += "\nWhen resolving doubts, use the DOUBT-{id} number as the doubt_id value in your doubt_resolutions output.";
4014
4150
  }
4015
4151
  updateExperimentStatus(db, exp.id, "verifying");
4152
+ const verifierSupplementaryContext = loadExperimentContext(exp, root);
4016
4153
  const result = await spawnAgent("verifier", {
4017
4154
  experiment: {
4018
4155
  id: exp.id,
@@ -4024,6 +4161,8 @@ async function doVerify(db, exp, root) {
4024
4161
  },
4025
4162
  doubts,
4026
4163
  challenges,
4164
+ metricComparisons: metricComparisons.length > 0 ? metricComparisons : void 0,
4165
+ supplementaryContext: verifierSupplementaryContext || void 0,
4027
4166
  taskPrompt: `Verify experiment ${exp.slug}: ${exp.hypothesis}. Check provenance and content. Test the ${doubts.length} doubt(s) and any adversarial challenges.` + metricsSection + doubtReference
4028
4167
  }, root);
4029
4168
  ingestStructuredOutput(db, exp.id, result.structured);
@@ -4048,22 +4187,22 @@ async function doVerify(db, exp, root) {
4048
4187
  success(`Verification complete for ${exp.slug}. Run \`majlis resolve\` next.`);
4049
4188
  }
4050
4189
  async function doCompress(db, root) {
4051
- const synthesisPath = path12.join(root, "docs", "synthesis", "current.md");
4052
- const sizeBefore = fs12.existsSync(synthesisPath) ? fs12.statSync(synthesisPath).size : 0;
4190
+ const synthesisPath = path13.join(root, "docs", "synthesis", "current.md");
4191
+ const sizeBefore = fs13.existsSync(synthesisPath) ? fs13.statSync(synthesisPath).size : 0;
4053
4192
  const sessionCount = getSessionsSinceCompression(db);
4054
4193
  const dbExport = exportForCompressor(db);
4055
4194
  const result = await spawnAgent("compressor", {
4056
4195
  taskPrompt: "## Structured Data (CANONICAL \u2014 from SQLite database)\nThe database export below is the source of truth. docs/ files are agent artifacts that may contain stale or incorrect information. Cross-reference everything against this data.\n\n" + dbExport + "\n\n## Your Task\nRead ALL experiments, decisions, doubts, challenges, verification reports, reframes, and recent diffs. Cross-reference for contradictions, redundancies, and patterns. REWRITE docs/synthesis/current.md \u2014 shorter and denser. Update docs/synthesis/fragility.md with current weak areas. Update docs/synthesis/dead-ends.md with structural constraints from rejected experiments."
4057
4196
  }, root);
4058
- const sizeAfter = fs12.existsSync(synthesisPath) ? fs12.statSync(synthesisPath).size : 0;
4197
+ const sizeAfter = fs13.existsSync(synthesisPath) ? fs13.statSync(synthesisPath).size : 0;
4059
4198
  recordCompression(db, sessionCount, sizeBefore, sizeAfter);
4060
4199
  autoCommit(root, "compress: update synthesis");
4061
4200
  success(`Compression complete. Synthesis: ${sizeBefore}B \u2192 ${sizeAfter}B`);
4062
4201
  }
4063
4202
  function gitCommitBuild(exp, cwd) {
4064
4203
  try {
4065
- (0, import_node_child_process6.execSync)('git add -A -- ":!.majlis/"', { cwd, encoding: "utf-8", stdio: ["pipe", "pipe", "pipe"] });
4066
- const diff = (0, import_node_child_process6.execSync)("git diff --cached --stat", { cwd, encoding: "utf-8", stdio: ["pipe", "pipe", "pipe"] }).trim();
4204
+ (0, import_node_child_process7.execSync)('git add -A -- ":!.majlis/"', { cwd, encoding: "utf-8", stdio: ["pipe", "pipe", "pipe"] });
4205
+ const diff = (0, import_node_child_process7.execSync)("git diff --cached --stat", { cwd, encoding: "utf-8", stdio: ["pipe", "pipe", "pipe"] }).trim();
4067
4206
  if (!diff) {
4068
4207
  info("No code changes to commit.");
4069
4208
  return;
@@ -4071,12 +4210,37 @@ function gitCommitBuild(exp, cwd) {
4071
4210
  const msg = `EXP-${String(exp.id).padStart(3, "0")}: ${exp.slug}
4072
4211
 
4073
4212
  ${exp.hypothesis ?? ""}`;
4074
- (0, import_node_child_process6.execFileSync)("git", ["commit", "-m", msg], { cwd, encoding: "utf-8", stdio: ["pipe", "pipe", "pipe"] });
4213
+ (0, import_node_child_process7.execFileSync)("git", ["commit", "-m", msg], { cwd, encoding: "utf-8", stdio: ["pipe", "pipe", "pipe"] });
4075
4214
  info(`Committed builder changes on ${exp.branch}.`);
4076
4215
  } catch {
4077
4216
  warn("Could not auto-commit builder changes \u2014 commit manually before resolving.");
4078
4217
  }
4079
4218
  }
4219
+ function loadExperimentContext(exp, root) {
4220
+ if (!exp.context_files) return "";
4221
+ let files;
4222
+ try {
4223
+ files = JSON.parse(exp.context_files);
4224
+ } catch {
4225
+ return "";
4226
+ }
4227
+ if (!Array.isArray(files) || files.length === 0) return "";
4228
+ const sections = ["## Experiment-Scoped Reference Material"];
4229
+ for (const relPath of files) {
4230
+ const absPath = path13.join(root, relPath);
4231
+ try {
4232
+ const content = fs13.readFileSync(absPath, "utf-8");
4233
+ sections.push(`### ${relPath}
4234
+ \`\`\`
4235
+ ${content.slice(0, 8e3)}
4236
+ \`\`\``);
4237
+ } catch {
4238
+ sections.push(`### ${relPath}
4239
+ *(file not found)*`);
4240
+ }
4241
+ }
4242
+ return sections.join("\n\n");
4243
+ }
4080
4244
  function resolveExperimentArg(db, args) {
4081
4245
  const slugArg = args.filter((a) => !a.startsWith("--"))[0];
4082
4246
  let exp;
@@ -4149,13 +4313,13 @@ function ingestStructuredOutput(db, experimentId, structured) {
4149
4313
  }
4150
4314
  })();
4151
4315
  }
4152
- var fs12, path12, import_node_child_process6;
4316
+ var fs13, path13, import_node_child_process7;
4153
4317
  var init_cycle = __esm({
4154
4318
  "src/commands/cycle.ts"() {
4155
4319
  "use strict";
4156
- fs12 = __toESM(require("fs"));
4157
- path12 = __toESM(require("path"));
4158
- import_node_child_process6 = require("child_process");
4320
+ fs13 = __toESM(require("fs"));
4321
+ path13 = __toESM(require("path"));
4322
+ import_node_child_process7 = require("child_process");
4159
4323
  init_connection();
4160
4324
  init_queries();
4161
4325
  init_machine();
@@ -4182,10 +4346,10 @@ async function classify(args) {
4182
4346
  if (!domain) {
4183
4347
  throw new Error('Usage: majlis classify "domain description"');
4184
4348
  }
4185
- const synthesisPath = path13.join(root, "docs", "synthesis", "current.md");
4186
- const synthesis = fs13.existsSync(synthesisPath) ? fs13.readFileSync(synthesisPath, "utf-8") : "";
4187
- const deadEndsPath = path13.join(root, "docs", "synthesis", "dead-ends.md");
4188
- const deadEnds = fs13.existsSync(deadEndsPath) ? fs13.readFileSync(deadEndsPath, "utf-8") : "";
4349
+ const synthesisPath = path14.join(root, "docs", "synthesis", "current.md");
4350
+ const synthesis = fs14.existsSync(synthesisPath) ? fs14.readFileSync(synthesisPath, "utf-8") : "";
4351
+ const deadEndsPath = path14.join(root, "docs", "synthesis", "dead-ends.md");
4352
+ const deadEnds = fs14.existsSync(deadEndsPath) ? fs14.readFileSync(deadEndsPath, "utf-8") : "";
4189
4353
  info(`Classifying problem domain: ${domain}`);
4190
4354
  const result = await spawnAgent("builder", {
4191
4355
  synthesis,
@@ -4204,22 +4368,22 @@ Write the classification to docs/classification/ following the template.`
4204
4368
  async function reframe(args) {
4205
4369
  const root = findProjectRoot();
4206
4370
  if (!root) throw new Error("Not in a Majlis project. Run `majlis init` first.");
4207
- const classificationDir = path13.join(root, "docs", "classification");
4371
+ const classificationDir = path14.join(root, "docs", "classification");
4208
4372
  let classificationContent = "";
4209
- if (fs13.existsSync(classificationDir)) {
4210
- const files = fs13.readdirSync(classificationDir).filter((f) => f.endsWith(".md") && !f.startsWith("_"));
4373
+ if (fs14.existsSync(classificationDir)) {
4374
+ const files = fs14.readdirSync(classificationDir).filter((f) => f.endsWith(".md") && !f.startsWith("_"));
4211
4375
  for (const f of files) {
4212
- classificationContent += fs13.readFileSync(path13.join(classificationDir, f), "utf-8") + "\n\n";
4376
+ classificationContent += fs14.readFileSync(path14.join(classificationDir, f), "utf-8") + "\n\n";
4213
4377
  }
4214
4378
  }
4215
- const synthesisPath = path13.join(root, "docs", "synthesis", "current.md");
4216
- const synthesis = fs13.existsSync(synthesisPath) ? fs13.readFileSync(synthesisPath, "utf-8") : "";
4217
- const deadEndsPath = path13.join(root, "docs", "synthesis", "dead-ends.md");
4218
- const deadEnds = fs13.existsSync(deadEndsPath) ? fs13.readFileSync(deadEndsPath, "utf-8") : "";
4219
- const configPath = path13.join(root, ".majlis", "config.json");
4379
+ const synthesisPath = path14.join(root, "docs", "synthesis", "current.md");
4380
+ const synthesis = fs14.existsSync(synthesisPath) ? fs14.readFileSync(synthesisPath, "utf-8") : "";
4381
+ const deadEndsPath = path14.join(root, "docs", "synthesis", "dead-ends.md");
4382
+ const deadEnds = fs14.existsSync(deadEndsPath) ? fs14.readFileSync(deadEndsPath, "utf-8") : "";
4383
+ const configPath = path14.join(root, ".majlis", "config.json");
4220
4384
  let problemStatement = "";
4221
- if (fs13.existsSync(configPath)) {
4222
- const config = JSON.parse(fs13.readFileSync(configPath, "utf-8"));
4385
+ if (fs14.existsSync(configPath)) {
4386
+ const config = JSON.parse(fs14.readFileSync(configPath, "utf-8"));
4223
4387
  problemStatement = `${config.project?.description ?? ""}
4224
4388
  Objective: ${config.project?.objective ?? ""}`;
4225
4389
  }
@@ -4244,12 +4408,12 @@ Write to docs/reframes/.`
4244
4408
  autoCommit(root, `reframe: ${target.slice(0, 60)}`);
4245
4409
  success("Reframe complete. Check docs/reframes/ for the output.");
4246
4410
  }
4247
- var fs13, path13;
4411
+ var fs14, path14;
4248
4412
  var init_classify = __esm({
4249
4413
  "src/commands/classify.ts"() {
4250
4414
  "use strict";
4251
- fs13 = __toESM(require("fs"));
4252
- path13 = __toESM(require("path"));
4415
+ fs14 = __toESM(require("fs"));
4416
+ path14 = __toESM(require("path"));
4253
4417
  init_connection();
4254
4418
  init_spawn();
4255
4419
  init_git();
@@ -4271,15 +4435,15 @@ async function audit(args) {
4271
4435
  const experiments = listAllExperiments(db);
4272
4436
  const deadEnds = listAllDeadEnds(db);
4273
4437
  const circuitBreakers = getAllCircuitBreakerStates(db, config.cycle.circuit_breaker_threshold);
4274
- const classificationDir = path14.join(root, "docs", "classification");
4438
+ const classificationDir = path15.join(root, "docs", "classification");
4275
4439
  let classification = "";
4276
- if (fs14.existsSync(classificationDir)) {
4277
- const files = fs14.readdirSync(classificationDir).filter((f) => f.endsWith(".md") && !f.startsWith("_"));
4440
+ if (fs15.existsSync(classificationDir)) {
4441
+ const files = fs15.readdirSync(classificationDir).filter((f) => f.endsWith(".md") && !f.startsWith("_"));
4278
4442
  for (const f of files) {
4279
- classification += fs14.readFileSync(path14.join(classificationDir, f), "utf-8") + "\n\n";
4443
+ classification += fs15.readFileSync(path15.join(classificationDir, f), "utf-8") + "\n\n";
4280
4444
  }
4281
4445
  }
4282
- const synthesis = readFileOrEmpty(path14.join(root, "docs", "synthesis", "current.md"));
4446
+ const synthesis = readFileOrEmpty(path15.join(root, "docs", "synthesis", "current.md"));
4283
4447
  header("Maqasid Check \u2014 Purpose Audit");
4284
4448
  const trippedBreakers = circuitBreakers.filter((cb) => cb.tripped);
4285
4449
  if (trippedBreakers.length > 0) {
@@ -4323,12 +4487,12 @@ Output: either "classification confirmed \u2014 continue" or "re-classify from X
4323
4487
  }, root);
4324
4488
  success("Purpose audit complete. Review the output above.");
4325
4489
  }
4326
- var fs14, path14;
4490
+ var fs15, path15;
4327
4491
  var init_audit = __esm({
4328
4492
  "src/commands/audit.ts"() {
4329
4493
  "use strict";
4330
- fs14 = __toESM(require("fs"));
4331
- path14 = __toESM(require("path"));
4494
+ fs15 = __toESM(require("fs"));
4495
+ path15 = __toESM(require("path"));
4332
4496
  init_connection();
4333
4497
  init_queries();
4334
4498
  init_spawn();
@@ -4619,16 +4783,16 @@ async function run(args) {
4619
4783
  info("Run `majlis status` to see final state.");
4620
4784
  }
4621
4785
  async function deriveNextHypothesis(goal, root, db) {
4622
- const synthesis = truncateContext(readFileOrEmpty(path15.join(root, "docs", "synthesis", "current.md")), CONTEXT_LIMITS.synthesis);
4623
- const fragility = truncateContext(readFileOrEmpty(path15.join(root, "docs", "synthesis", "fragility.md")), CONTEXT_LIMITS.fragility);
4624
- const deadEndsDoc = truncateContext(readFileOrEmpty(path15.join(root, "docs", "synthesis", "dead-ends.md")), CONTEXT_LIMITS.deadEnds);
4786
+ const synthesis = truncateContext(readFileOrEmpty(path16.join(root, "docs", "synthesis", "current.md")), CONTEXT_LIMITS.synthesis);
4787
+ const fragility = truncateContext(readFileOrEmpty(path16.join(root, "docs", "synthesis", "fragility.md")), CONTEXT_LIMITS.fragility);
4788
+ const deadEndsDoc = truncateContext(readFileOrEmpty(path16.join(root, "docs", "synthesis", "dead-ends.md")), CONTEXT_LIMITS.deadEnds);
4625
4789
  const diagnosis = truncateContext(readLatestDiagnosis(root), CONTEXT_LIMITS.synthesis);
4626
4790
  const deadEnds = listAllDeadEnds(db);
4627
4791
  const config = loadConfig(root);
4628
4792
  let metricsOutput = "";
4629
4793
  if (config.metrics?.command) {
4630
4794
  try {
4631
- metricsOutput = (0, import_node_child_process7.execSync)(config.metrics.command, {
4795
+ metricsOutput = (0, import_node_child_process8.execSync)(config.metrics.command, {
4632
4796
  cwd: root,
4633
4797
  encoding: "utf-8",
4634
4798
  timeout: 6e4,
@@ -4727,7 +4891,7 @@ async function createNewExperiment(db, root, hypothesis) {
4727
4891
  const paddedNum = String(num).padStart(3, "0");
4728
4892
  const branch = `exp/${paddedNum}-${finalSlug}`;
4729
4893
  try {
4730
- (0, import_node_child_process7.execFileSync)("git", ["checkout", "-b", branch], {
4894
+ (0, import_node_child_process8.execFileSync)("git", ["checkout", "-b", branch], {
4731
4895
  cwd: root,
4732
4896
  encoding: "utf-8",
4733
4897
  stdio: ["pipe", "pipe", "pipe"]
@@ -4739,24 +4903,24 @@ async function createNewExperiment(db, root, hypothesis) {
4739
4903
  const exp = createExperiment(db, finalSlug, branch, hypothesis, null, null);
4740
4904
  adminTransitionAndPersist(db, exp.id, exp.status, "reframed" /* REFRAMED */, "bootstrap");
4741
4905
  exp.status = "reframed";
4742
- const docsDir = path15.join(root, "docs", "experiments");
4743
- const templatePath = path15.join(docsDir, "_TEMPLATE.md");
4744
- if (fs15.existsSync(templatePath)) {
4745
- const template = fs15.readFileSync(templatePath, "utf-8");
4906
+ const docsDir = path16.join(root, "docs", "experiments");
4907
+ const templatePath = path16.join(docsDir, "_TEMPLATE.md");
4908
+ if (fs16.existsSync(templatePath)) {
4909
+ const template = fs16.readFileSync(templatePath, "utf-8");
4746
4910
  const logContent = template.replace(/\{\{title\}\}/g, hypothesis).replace(/\{\{hypothesis\}\}/g, hypothesis).replace(/\{\{branch\}\}/g, branch).replace(/\{\{status\}\}/g, "classified").replace(/\{\{sub_type\}\}/g, "unclassified").replace(/\{\{date\}\}/g, (/* @__PURE__ */ new Date()).toISOString().split("T")[0]);
4747
- const logPath = path15.join(docsDir, `${paddedNum}-${finalSlug}.md`);
4748
- fs15.writeFileSync(logPath, logContent);
4911
+ const logPath = path16.join(docsDir, `${paddedNum}-${finalSlug}.md`);
4912
+ fs16.writeFileSync(logPath, logContent);
4749
4913
  info(`Created experiment log: docs/experiments/${paddedNum}-${finalSlug}.md`);
4750
4914
  }
4751
4915
  return exp;
4752
4916
  }
4753
- var fs15, path15, import_node_child_process7;
4917
+ var fs16, path16, import_node_child_process8;
4754
4918
  var init_run = __esm({
4755
4919
  "src/commands/run.ts"() {
4756
4920
  "use strict";
4757
- fs15 = __toESM(require("fs"));
4758
- path15 = __toESM(require("path"));
4759
- import_node_child_process7 = require("child_process");
4921
+ fs16 = __toESM(require("fs"));
4922
+ path16 = __toESM(require("path"));
4923
+ import_node_child_process8 = require("child_process");
4760
4924
  init_connection();
4761
4925
  init_queries();
4762
4926
  init_machine();
@@ -4773,11 +4937,11 @@ var init_run = __esm({
4773
4937
 
4774
4938
  // src/swarm/worktree.ts
4775
4939
  function createWorktree(mainRoot, slug, paddedNum) {
4776
- const projectName = path16.basename(mainRoot);
4940
+ const projectName = path17.basename(mainRoot);
4777
4941
  const worktreeName = `${projectName}-swarm-${paddedNum}-${slug}`;
4778
- const worktreePath = path16.join(path16.dirname(mainRoot), worktreeName);
4942
+ const worktreePath = path17.join(path17.dirname(mainRoot), worktreeName);
4779
4943
  const branch = `swarm/${paddedNum}-${slug}`;
4780
- (0, import_node_child_process8.execFileSync)("git", ["worktree", "add", worktreePath, "-b", branch], {
4944
+ (0, import_node_child_process9.execFileSync)("git", ["worktree", "add", worktreePath, "-b", branch], {
4781
4945
  cwd: mainRoot,
4782
4946
  encoding: "utf-8",
4783
4947
  stdio: ["pipe", "pipe", "pipe"]
@@ -4792,43 +4956,43 @@ function createWorktree(mainRoot, slug, paddedNum) {
4792
4956
  };
4793
4957
  }
4794
4958
  function initializeWorktree(mainRoot, worktreePath) {
4795
- const majlisDir = path16.join(worktreePath, ".majlis");
4796
- fs16.mkdirSync(majlisDir, { recursive: true });
4797
- const configSrc = path16.join(mainRoot, ".majlis", "config.json");
4798
- if (fs16.existsSync(configSrc)) {
4799
- fs16.copyFileSync(configSrc, path16.join(majlisDir, "config.json"));
4800
- }
4801
- const agentsSrc = path16.join(mainRoot, ".majlis", "agents");
4802
- if (fs16.existsSync(agentsSrc)) {
4803
- const agentsDst = path16.join(majlisDir, "agents");
4804
- fs16.mkdirSync(agentsDst, { recursive: true });
4805
- for (const file of fs16.readdirSync(agentsSrc)) {
4806
- fs16.copyFileSync(path16.join(agentsSrc, file), path16.join(agentsDst, file));
4807
- }
4808
- }
4809
- const synthSrc = path16.join(mainRoot, "docs", "synthesis");
4810
- if (fs16.existsSync(synthSrc)) {
4811
- const synthDst = path16.join(worktreePath, "docs", "synthesis");
4812
- fs16.mkdirSync(synthDst, { recursive: true });
4813
- for (const file of fs16.readdirSync(synthSrc)) {
4814
- const srcFile = path16.join(synthSrc, file);
4815
- if (fs16.statSync(srcFile).isFile()) {
4816
- fs16.copyFileSync(srcFile, path16.join(synthDst, file));
4959
+ const majlisDir = path17.join(worktreePath, ".majlis");
4960
+ fs17.mkdirSync(majlisDir, { recursive: true });
4961
+ const configSrc = path17.join(mainRoot, ".majlis", "config.json");
4962
+ if (fs17.existsSync(configSrc)) {
4963
+ fs17.copyFileSync(configSrc, path17.join(majlisDir, "config.json"));
4964
+ }
4965
+ const agentsSrc = path17.join(mainRoot, ".majlis", "agents");
4966
+ if (fs17.existsSync(agentsSrc)) {
4967
+ const agentsDst = path17.join(majlisDir, "agents");
4968
+ fs17.mkdirSync(agentsDst, { recursive: true });
4969
+ for (const file of fs17.readdirSync(agentsSrc)) {
4970
+ fs17.copyFileSync(path17.join(agentsSrc, file), path17.join(agentsDst, file));
4971
+ }
4972
+ }
4973
+ const synthSrc = path17.join(mainRoot, "docs", "synthesis");
4974
+ if (fs17.existsSync(synthSrc)) {
4975
+ const synthDst = path17.join(worktreePath, "docs", "synthesis");
4976
+ fs17.mkdirSync(synthDst, { recursive: true });
4977
+ for (const file of fs17.readdirSync(synthSrc)) {
4978
+ const srcFile = path17.join(synthSrc, file);
4979
+ if (fs17.statSync(srcFile).isFile()) {
4980
+ fs17.copyFileSync(srcFile, path17.join(synthDst, file));
4817
4981
  }
4818
4982
  }
4819
4983
  }
4820
- const templateSrc = path16.join(mainRoot, "docs", "experiments", "_TEMPLATE.md");
4821
- if (fs16.existsSync(templateSrc)) {
4822
- const expDir = path16.join(worktreePath, "docs", "experiments");
4823
- fs16.mkdirSync(expDir, { recursive: true });
4824
- fs16.copyFileSync(templateSrc, path16.join(expDir, "_TEMPLATE.md"));
4984
+ const templateSrc = path17.join(mainRoot, "docs", "experiments", "_TEMPLATE.md");
4985
+ if (fs17.existsSync(templateSrc)) {
4986
+ const expDir = path17.join(worktreePath, "docs", "experiments");
4987
+ fs17.mkdirSync(expDir, { recursive: true });
4988
+ fs17.copyFileSync(templateSrc, path17.join(expDir, "_TEMPLATE.md"));
4825
4989
  }
4826
4990
  const db = openDbAt(worktreePath);
4827
4991
  db.close();
4828
4992
  }
4829
4993
  function cleanupWorktree(mainRoot, wt) {
4830
4994
  try {
4831
- (0, import_node_child_process8.execFileSync)("git", ["worktree", "remove", wt.path, "--force"], {
4995
+ (0, import_node_child_process9.execFileSync)("git", ["worktree", "remove", wt.path, "--force"], {
4832
4996
  cwd: mainRoot,
4833
4997
  encoding: "utf-8",
4834
4998
  stdio: ["pipe", "pipe", "pipe"]
@@ -4837,7 +5001,7 @@ function cleanupWorktree(mainRoot, wt) {
4837
5001
  warn(`Could not remove worktree ${wt.path} \u2014 remove manually.`);
4838
5002
  }
4839
5003
  try {
4840
- (0, import_node_child_process8.execFileSync)("git", ["branch", "-D", wt.branch], {
5004
+ (0, import_node_child_process9.execFileSync)("git", ["branch", "-D", wt.branch], {
4841
5005
  cwd: mainRoot,
4842
5006
  encoding: "utf-8",
4843
5007
  stdio: ["pipe", "pipe", "pipe"]
@@ -4845,7 +5009,7 @@ function cleanupWorktree(mainRoot, wt) {
4845
5009
  } catch {
4846
5010
  }
4847
5011
  try {
4848
- (0, import_node_child_process8.execSync)("git worktree prune", {
5012
+ (0, import_node_child_process9.execSync)("git worktree prune", {
4849
5013
  cwd: mainRoot,
4850
5014
  encoding: "utf-8",
4851
5015
  stdio: ["pipe", "pipe", "pipe"]
@@ -4853,13 +5017,13 @@ function cleanupWorktree(mainRoot, wt) {
4853
5017
  } catch {
4854
5018
  }
4855
5019
  }
4856
- var fs16, path16, import_node_child_process8;
5020
+ var fs17, path17, import_node_child_process9;
4857
5021
  var init_worktree = __esm({
4858
5022
  "src/swarm/worktree.ts"() {
4859
5023
  "use strict";
4860
- fs16 = __toESM(require("fs"));
4861
- path16 = __toESM(require("path"));
4862
- import_node_child_process8 = require("child_process");
5024
+ fs17 = __toESM(require("fs"));
5025
+ path17 = __toESM(require("path"));
5026
+ import_node_child_process9 = require("child_process");
4863
5027
  init_connection();
4864
5028
  init_format();
4865
5029
  }
@@ -4877,12 +5041,12 @@ async function runExperimentInWorktree(wt) {
4877
5041
  exp = createExperiment(db, wt.slug, wt.branch, wt.hypothesis, null, null);
4878
5042
  adminTransitionAndPersist(db, exp.id, exp.status, "reframed" /* REFRAMED */, "bootstrap");
4879
5043
  exp.status = "reframed";
4880
- const templatePath = path17.join(wt.path, "docs", "experiments", "_TEMPLATE.md");
4881
- if (fs17.existsSync(templatePath)) {
4882
- const template = fs17.readFileSync(templatePath, "utf-8");
5044
+ const templatePath = path18.join(wt.path, "docs", "experiments", "_TEMPLATE.md");
5045
+ if (fs18.existsSync(templatePath)) {
5046
+ const template = fs18.readFileSync(templatePath, "utf-8");
4883
5047
  const logContent = template.replace(/\{\{title\}\}/g, wt.hypothesis).replace(/\{\{hypothesis\}\}/g, wt.hypothesis).replace(/\{\{branch\}\}/g, wt.branch).replace(/\{\{status\}\}/g, "classified").replace(/\{\{sub_type\}\}/g, "unclassified").replace(/\{\{date\}\}/g, (/* @__PURE__ */ new Date()).toISOString().split("T")[0]);
4884
- const logPath = path17.join(wt.path, "docs", "experiments", `${wt.paddedNum}-${wt.slug}.md`);
4885
- fs17.writeFileSync(logPath, logContent);
5048
+ const logPath = path18.join(wt.path, "docs", "experiments", `${wt.paddedNum}-${wt.slug}.md`);
5049
+ fs18.writeFileSync(logPath, logContent);
4886
5050
  }
4887
5051
  info(`${label} Starting: ${wt.hypothesis}`);
4888
5052
  while (stepCount < MAX_STEPS) {
@@ -5005,12 +5169,12 @@ function statusToStepName(status2) {
5005
5169
  return null;
5006
5170
  }
5007
5171
  }
5008
- var fs17, path17, MAX_STEPS;
5172
+ var fs18, path18, MAX_STEPS;
5009
5173
  var init_runner = __esm({
5010
5174
  "src/swarm/runner.ts"() {
5011
5175
  "use strict";
5012
- fs17 = __toESM(require("fs"));
5013
- path17 = __toESM(require("path"));
5176
+ fs18 = __toESM(require("fs"));
5177
+ path18 = __toESM(require("path"));
5014
5178
  init_connection();
5015
5179
  init_queries();
5016
5180
  init_machine();
@@ -5164,7 +5328,7 @@ async function swarm(args) {
5164
5328
  MAX_PARALLEL
5165
5329
  );
5166
5330
  try {
5167
- const status2 = (0, import_node_child_process9.execSync)("git status --porcelain", {
5331
+ const status2 = (0, import_node_child_process10.execSync)("git status --porcelain", {
5168
5332
  cwd: root,
5169
5333
  encoding: "utf-8",
5170
5334
  stdio: ["pipe", "pipe", "pipe"]
@@ -5192,20 +5356,20 @@ async function swarm(args) {
5192
5356
  info(` ${i + 1}. ${hypotheses[i]}`);
5193
5357
  }
5194
5358
  try {
5195
- const worktreeList = (0, import_node_child_process9.execFileSync)("git", ["worktree", "list", "--porcelain"], {
5359
+ const worktreeList = (0, import_node_child_process10.execFileSync)("git", ["worktree", "list", "--porcelain"], {
5196
5360
  cwd: root,
5197
5361
  encoding: "utf-8"
5198
5362
  });
5199
5363
  const orphaned = worktreeList.split("\n").filter((line) => line.startsWith("worktree ")).map((line) => line.replace("worktree ", "")).filter((p) => p.includes("-swarm-"));
5200
5364
  for (const orphanPath of orphaned) {
5201
5365
  try {
5202
- (0, import_node_child_process9.execFileSync)("git", ["worktree", "remove", orphanPath, "--force"], { cwd: root, encoding: "utf-8" });
5203
- info(`Cleaned up orphaned worktree: ${path18.basename(orphanPath)}`);
5366
+ (0, import_node_child_process10.execFileSync)("git", ["worktree", "remove", orphanPath, "--force"], { cwd: root, encoding: "utf-8" });
5367
+ info(`Cleaned up orphaned worktree: ${path19.basename(orphanPath)}`);
5204
5368
  } catch {
5205
5369
  }
5206
5370
  }
5207
5371
  if (orphaned.length > 0) {
5208
- (0, import_node_child_process9.execFileSync)("git", ["worktree", "prune"], { cwd: root, encoding: "utf-8" });
5372
+ (0, import_node_child_process10.execFileSync)("git", ["worktree", "prune"], { cwd: root, encoding: "utf-8" });
5209
5373
  }
5210
5374
  } catch {
5211
5375
  }
@@ -5269,7 +5433,7 @@ async function swarm(args) {
5269
5433
  const best = summary.bestExperiment;
5270
5434
  info(`Best experiment: ${best.worktree.slug} (${best.overallGrade})`);
5271
5435
  try {
5272
- (0, import_node_child_process9.execFileSync)(
5436
+ (0, import_node_child_process10.execFileSync)(
5273
5437
  "git",
5274
5438
  ["merge", best.worktree.branch, "--no-ff", "-m", `Merge swarm winner: ${best.worktree.slug}`],
5275
5439
  { cwd: root, encoding: "utf-8", stdio: ["pipe", "pipe", "pipe"] }
@@ -5320,15 +5484,15 @@ function isMergeable(grade) {
5320
5484
  }
5321
5485
  async function deriveMultipleHypotheses(goal, root, count) {
5322
5486
  const synthesis = truncateContext(
5323
- readFileOrEmpty(path18.join(root, "docs", "synthesis", "current.md")),
5487
+ readFileOrEmpty(path19.join(root, "docs", "synthesis", "current.md")),
5324
5488
  CONTEXT_LIMITS.synthesis
5325
5489
  );
5326
5490
  const fragility = truncateContext(
5327
- readFileOrEmpty(path18.join(root, "docs", "synthesis", "fragility.md")),
5491
+ readFileOrEmpty(path19.join(root, "docs", "synthesis", "fragility.md")),
5328
5492
  CONTEXT_LIMITS.fragility
5329
5493
  );
5330
5494
  const deadEndsDoc = truncateContext(
5331
- readFileOrEmpty(path18.join(root, "docs", "synthesis", "dead-ends.md")),
5495
+ readFileOrEmpty(path19.join(root, "docs", "synthesis", "dead-ends.md")),
5332
5496
  CONTEXT_LIMITS.deadEnds
5333
5497
  );
5334
5498
  const diagnosis = truncateContext(readLatestDiagnosis(root), CONTEXT_LIMITS.synthesis);
@@ -5338,7 +5502,7 @@ async function deriveMultipleHypotheses(goal, root, count) {
5338
5502
  let metricsOutput = "";
5339
5503
  if (config.metrics?.command) {
5340
5504
  try {
5341
- metricsOutput = (0, import_node_child_process9.execSync)(config.metrics.command, {
5505
+ metricsOutput = (0, import_node_child_process10.execSync)(config.metrics.command, {
5342
5506
  cwd: root,
5343
5507
  encoding: "utf-8",
5344
5508
  timeout: 6e4,
@@ -5418,12 +5582,12 @@ If the goal is met:
5418
5582
  warn("Planner did not return structured hypotheses. Using goal as single hypothesis.");
5419
5583
  return [goal];
5420
5584
  }
5421
- var path18, import_node_child_process9, MAX_PARALLEL, DEFAULT_PARALLEL;
5585
+ var path19, import_node_child_process10, MAX_PARALLEL, DEFAULT_PARALLEL;
5422
5586
  var init_swarm = __esm({
5423
5587
  "src/commands/swarm.ts"() {
5424
5588
  "use strict";
5425
- path18 = __toESM(require("path"));
5426
- import_node_child_process9 = require("child_process");
5589
+ path19 = __toESM(require("path"));
5590
+ import_node_child_process10 = require("child_process");
5427
5591
  init_connection();
5428
5592
  init_queries();
5429
5593
  init_machine();
@@ -5450,21 +5614,21 @@ async function diagnose(args) {
5450
5614
  const db = getDb(root);
5451
5615
  const focus = args.filter((a) => !a.startsWith("--")).join(" ");
5452
5616
  const keepScripts = args.includes("--keep-scripts");
5453
- const scriptsDir = path19.join(root, ".majlis", "scripts");
5454
- if (!fs18.existsSync(scriptsDir)) {
5455
- fs18.mkdirSync(scriptsDir, { recursive: true });
5617
+ const scriptsDir = path20.join(root, ".majlis", "scripts");
5618
+ if (!fs19.existsSync(scriptsDir)) {
5619
+ fs19.mkdirSync(scriptsDir, { recursive: true });
5456
5620
  }
5457
5621
  header("Deep Diagnosis");
5458
5622
  if (focus) info(`Focus: ${focus}`);
5459
5623
  const dbExport = exportForDiagnostician(db);
5460
- const synthesis = readFileOrEmpty(path19.join(root, "docs", "synthesis", "current.md"));
5461
- const fragility = readFileOrEmpty(path19.join(root, "docs", "synthesis", "fragility.md"));
5462
- const deadEndsDoc = readFileOrEmpty(path19.join(root, "docs", "synthesis", "dead-ends.md"));
5624
+ const synthesis = readFileOrEmpty(path20.join(root, "docs", "synthesis", "current.md"));
5625
+ const fragility = readFileOrEmpty(path20.join(root, "docs", "synthesis", "fragility.md"));
5626
+ const deadEndsDoc = readFileOrEmpty(path20.join(root, "docs", "synthesis", "dead-ends.md"));
5463
5627
  const config = loadConfig(root);
5464
5628
  let metricsOutput = "";
5465
5629
  if (config.metrics?.command) {
5466
5630
  try {
5467
- metricsOutput = (0, import_node_child_process10.execSync)(config.metrics.command, {
5631
+ metricsOutput = (0, import_node_child_process11.execSync)(config.metrics.command, {
5468
5632
  cwd: root,
5469
5633
  encoding: "utf-8",
5470
5634
  timeout: 6e4,
@@ -5510,13 +5674,13 @@ Perform a deep diagnostic analysis of this project. Identify root causes, recurr
5510
5674
  Remember: you may write files ONLY to .majlis/scripts/. You cannot modify project code.`;
5511
5675
  info("Spawning diagnostician (60 turns, full DB access)...");
5512
5676
  const result = await spawnAgent("diagnostician", { taskPrompt }, root);
5513
- const diagnosisDir = path19.join(root, "docs", "diagnosis");
5514
- if (!fs18.existsSync(diagnosisDir)) {
5515
- fs18.mkdirSync(diagnosisDir, { recursive: true });
5677
+ const diagnosisDir = path20.join(root, "docs", "diagnosis");
5678
+ if (!fs19.existsSync(diagnosisDir)) {
5679
+ fs19.mkdirSync(diagnosisDir, { recursive: true });
5516
5680
  }
5517
5681
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-").slice(0, 19);
5518
- const artifactPath = path19.join(diagnosisDir, `diagnosis-${timestamp}.md`);
5519
- fs18.writeFileSync(artifactPath, result.output);
5682
+ const artifactPath = path20.join(diagnosisDir, `diagnosis-${timestamp}.md`);
5683
+ fs19.writeFileSync(artifactPath, result.output);
5520
5684
  info(`Diagnostic report: docs/diagnosis/diagnosis-${timestamp}.md`);
5521
5685
  if (result.structured?.diagnosis) {
5522
5686
  const d = result.structured.diagnosis;
@@ -5529,11 +5693,11 @@ Remember: you may write files ONLY to .majlis/scripts/. You cannot modify projec
5529
5693
  }
5530
5694
  if (!keepScripts) {
5531
5695
  try {
5532
- const files = fs18.readdirSync(scriptsDir);
5696
+ const files = fs19.readdirSync(scriptsDir);
5533
5697
  for (const f of files) {
5534
- fs18.unlinkSync(path19.join(scriptsDir, f));
5698
+ fs19.unlinkSync(path20.join(scriptsDir, f));
5535
5699
  }
5536
- fs18.rmdirSync(scriptsDir);
5700
+ fs19.rmdirSync(scriptsDir);
5537
5701
  info("Cleaned up .majlis/scripts/");
5538
5702
  } catch {
5539
5703
  }
@@ -5546,13 +5710,13 @@ Remember: you may write files ONLY to .majlis/scripts/. You cannot modify projec
5546
5710
  autoCommit(root, `diagnosis: ${focus || "general"}`);
5547
5711
  success("Diagnosis complete.");
5548
5712
  }
5549
- var fs18, path19, import_node_child_process10;
5713
+ var fs19, path20, import_node_child_process11;
5550
5714
  var init_diagnose = __esm({
5551
5715
  "src/commands/diagnose.ts"() {
5552
5716
  "use strict";
5553
- fs18 = __toESM(require("fs"));
5554
- path19 = __toESM(require("path"));
5555
- import_node_child_process10 = require("child_process");
5717
+ fs19 = __toESM(require("fs"));
5718
+ path20 = __toESM(require("path"));
5719
+ import_node_child_process11 = require("child_process");
5556
5720
  init_connection();
5557
5721
  init_queries();
5558
5722
  init_spawn();
@@ -5591,7 +5755,7 @@ function getLastActivityTimestamp(db) {
5591
5755
  }
5592
5756
  function getCommitsSince(root, timestamp) {
5593
5757
  try {
5594
- const output = (0, import_node_child_process11.execFileSync)(
5758
+ const output = (0, import_node_child_process12.execFileSync)(
5595
5759
  "git",
5596
5760
  ["log", `--since=${timestamp}`, "--oneline", "--", ".", ":!.majlis/", ":!docs/"],
5597
5761
  { cwd: root, encoding: "utf-8", stdio: ["pipe", "pipe", "pipe"] }
@@ -5604,13 +5768,13 @@ function getCommitsSince(root, timestamp) {
5604
5768
  }
5605
5769
  function getGitDiffStat(root, timestamp) {
5606
5770
  try {
5607
- const baseRef = (0, import_node_child_process11.execFileSync)(
5771
+ const baseRef = (0, import_node_child_process12.execFileSync)(
5608
5772
  "git",
5609
5773
  ["rev-list", "-1", `--before=${timestamp}`, "HEAD"],
5610
5774
  { cwd: root, encoding: "utf-8", stdio: ["pipe", "pipe", "pipe"] }
5611
5775
  ).trim();
5612
5776
  if (!baseRef) return { stat: "", filesChanged: 0 };
5613
- const stat = (0, import_node_child_process11.execFileSync)(
5777
+ const stat = (0, import_node_child_process12.execFileSync)(
5614
5778
  "git",
5615
5779
  ["diff", "--stat", baseRef, "--", ".", ":!.majlis/", ":!docs/"],
5616
5780
  { cwd: root, encoding: "utf-8", stdio: ["pipe", "pipe", "pipe"] }
@@ -5647,11 +5811,11 @@ function checkMetrics(root, config) {
5647
5811
  const command = config.metrics?.command;
5648
5812
  if (!command) return { working: false, error: "No metrics command configured" };
5649
5813
  try {
5650
- const scriptPath = path20.join(root, command);
5651
- if (command.includes("/") && !fs19.existsSync(scriptPath)) {
5814
+ const scriptPath = path21.join(root, command);
5815
+ if (command.includes("/") && !fs20.existsSync(scriptPath)) {
5652
5816
  return { working: false, error: `Script not found: ${command}` };
5653
5817
  }
5654
- const output = (0, import_node_child_process11.execSync)(command, {
5818
+ const output = (0, import_node_child_process12.execSync)(command, {
5655
5819
  cwd: root,
5656
5820
  encoding: "utf-8",
5657
5821
  timeout: 6e4,
@@ -5679,10 +5843,10 @@ function assessStaleness(db, root, profile, config) {
5679
5843
  filesChanged = diffResult.filesChanged;
5680
5844
  }
5681
5845
  const configDrift = detectConfigDrift(config, profile);
5682
- const synthesisPath = path20.join(root, "docs", "synthesis", "current.md");
5846
+ const synthesisPath = path21.join(root, "docs", "synthesis", "current.md");
5683
5847
  let synthesisSize = 0;
5684
5848
  try {
5685
- synthesisSize = fs19.statSync(synthesisPath).size;
5849
+ synthesisSize = fs20.statSync(synthesisPath).size;
5686
5850
  } catch {
5687
5851
  }
5688
5852
  const unresolvedDoubts = db.prepare(`
@@ -5740,13 +5904,13 @@ function printStalenessReport(report) {
5740
5904
  success(" Already up to date.");
5741
5905
  }
5742
5906
  }
5743
- var fs19, path20, import_node_child_process11;
5907
+ var fs20, path21, import_node_child_process12;
5744
5908
  var init_staleness = __esm({
5745
5909
  "src/scan/staleness.ts"() {
5746
5910
  "use strict";
5747
- fs19 = __toESM(require("fs"));
5748
- path20 = __toESM(require("path"));
5749
- import_node_child_process11 = require("child_process");
5911
+ fs20 = __toESM(require("fs"));
5912
+ path21 = __toESM(require("path"));
5913
+ import_node_child_process12 = require("child_process");
5750
5914
  init_queries();
5751
5915
  init_format();
5752
5916
  }
@@ -5796,13 +5960,13 @@ async function resync(args) {
5796
5960
  return;
5797
5961
  }
5798
5962
  info("Phase 1: Deep re-scan...");
5799
- const synthesisDir = path21.join(root, "docs", "synthesis");
5800
- const scriptsDir = path21.join(root, ".majlis", "scripts");
5801
- if (!fs20.existsSync(synthesisDir)) fs20.mkdirSync(synthesisDir, { recursive: true });
5802
- if (!fs20.existsSync(scriptsDir)) fs20.mkdirSync(scriptsDir, { recursive: true });
5963
+ const synthesisDir = path22.join(root, "docs", "synthesis");
5964
+ const scriptsDir = path22.join(root, ".majlis", "scripts");
5965
+ if (!fs21.existsSync(synthesisDir)) fs21.mkdirSync(synthesisDir, { recursive: true });
5966
+ if (!fs21.existsSync(scriptsDir)) fs21.mkdirSync(scriptsDir, { recursive: true });
5803
5967
  const profileJson = JSON.stringify(profile, null, 2);
5804
- const oldSynthesis = readFileOrEmpty(path21.join(root, "docs", "synthesis", "current.md"));
5805
- const oldFragility = readFileOrEmpty(path21.join(root, "docs", "synthesis", "fragility.md"));
5968
+ const oldSynthesis = readFileOrEmpty(path22.join(root, "docs", "synthesis", "current.md"));
5969
+ const oldFragility = readFileOrEmpty(path22.join(root, "docs", "synthesis", "fragility.md"));
5806
5970
  const dbExport = exportForCompressor(db);
5807
5971
  const stalenessSummary = `Last Majlis activity: ${report.daysSinceActivity} days ago (${report.lastActivitySource}).
5808
5972
  Commits since: ${report.commitsSinceActivity}. Files changed: ${report.filesChanged}.
@@ -5928,10 +6092,10 @@ You may ONLY write to .majlis/scripts/. Output your structured JSON when done.`;
5928
6092
  info("Updated .majlis/config.json with resync results.");
5929
6093
  if (toolsmithOutput.metrics_command) {
5930
6094
  try {
5931
- const metricsPath = path21.join(root, toolsmithOutput.metrics_command);
5932
- if (fs20.existsSync(metricsPath)) {
6095
+ const metricsPath = path22.join(root, toolsmithOutput.metrics_command);
6096
+ if (fs21.existsSync(metricsPath)) {
5933
6097
  try {
5934
- fs20.chmodSync(metricsPath, 493);
6098
+ fs21.chmodSync(metricsPath, 493);
5935
6099
  } catch {
5936
6100
  }
5937
6101
  }
@@ -5968,8 +6132,8 @@ You may ONLY write to .majlis/scripts/. Output your structured JSON when done.`;
5968
6132
  }
5969
6133
  if (cartographerOk) {
5970
6134
  const sessionCount = getSessionsSinceCompression(db);
5971
- const newSynthesisPath = path21.join(root, "docs", "synthesis", "current.md");
5972
- const newSynthesisSize = fs20.existsSync(newSynthesisPath) ? fs20.statSync(newSynthesisPath).size : 0;
6135
+ const newSynthesisPath = path22.join(root, "docs", "synthesis", "current.md");
6136
+ const newSynthesisSize = fs21.existsSync(newSynthesisPath) ? fs21.statSync(newSynthesisPath).size : 0;
5973
6137
  recordCompression(db, sessionCount, report.synthesisSize, newSynthesisSize);
5974
6138
  info("Recorded compression in DB.");
5975
6139
  }
@@ -5979,12 +6143,12 @@ You may ONLY write to .majlis/scripts/. Output your structured JSON when done.`;
5979
6143
  if (toolsmithOk) info(" \u2192 .majlis/scripts/metrics.sh + .majlis/config.json");
5980
6144
  info("Run `majlis status` to see project state.");
5981
6145
  }
5982
- var fs20, path21;
6146
+ var fs21, path22;
5983
6147
  var init_resync = __esm({
5984
6148
  "src/commands/resync.ts"() {
5985
6149
  "use strict";
5986
- fs20 = __toESM(require("fs"));
5987
- path21 = __toESM(require("path"));
6150
+ fs21 = __toESM(require("fs"));
6151
+ path22 = __toESM(require("path"));
5988
6152
  init_connection();
5989
6153
  init_connection();
5990
6154
  init_queries();
@@ -5999,10 +6163,10 @@ var init_resync = __esm({
5999
6163
  });
6000
6164
 
6001
6165
  // src/cli.ts
6002
- var fs21 = __toESM(require("fs"));
6003
- var path22 = __toESM(require("path"));
6166
+ var fs22 = __toESM(require("fs"));
6167
+ var path23 = __toESM(require("path"));
6004
6168
  var VERSION2 = JSON.parse(
6005
- fs21.readFileSync(path22.join(__dirname, "..", "package.json"), "utf-8")
6169
+ fs22.readFileSync(path23.join(__dirname, "..", "package.json"), "utf-8")
6006
6170
  ).version;
6007
6171
  async function main() {
6008
6172
  let sigintCount = 0;
@@ -6169,6 +6333,9 @@ Lifecycle:
6169
6333
 
6170
6334
  Experiments:
6171
6335
  new "hypothesis" Create experiment, branch, log, DB entry
6336
+ --sub-type TYPE Classify by problem sub-type
6337
+ --depends-on SLUG Block building until dependency is merged
6338
+ --context FILE,FILE Inject domain-specific docs into agent context
6172
6339
  baseline Capture metrics snapshot (before)
6173
6340
  measure Capture metrics snapshot (after)
6174
6341
  compare [--json] Compare before/after, detect regressions