majlis 0.6.0 → 0.6.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/cli.js +451 -212
  2. package/package.json +1 -1
package/dist/cli.js CHANGED
@@ -435,6 +435,10 @@ var init_format = __esm({
435
435
  // src/commands/init.ts
436
436
  var init_exports = {};
437
437
  __export(init_exports, {
438
+ AGENT_DEFINITIONS: () => AGENT_DEFINITIONS,
439
+ CLAUDE_MD_SECTION: () => CLAUDE_MD_SECTION,
440
+ HOOKS_CONFIG: () => HOOKS_CONFIG,
441
+ SLASH_COMMANDS: () => SLASH_COMMANDS,
438
442
  init: () => init
439
443
  });
440
444
  async function init(_args) {
@@ -1435,6 +1439,193 @@ Run \`majlis status\` for live experiment state and cycle position.
1435
1439
  }
1436
1440
  });
1437
1441
 
1442
+ // src/git.ts
1443
+ function autoCommit(root, message) {
1444
+ try {
1445
+ (0, import_node_child_process.execSync)("git add docs/ .majlis/scripts/ 2>/dev/null; true", {
1446
+ cwd: root,
1447
+ encoding: "utf-8",
1448
+ stdio: ["pipe", "pipe", "pipe"]
1449
+ });
1450
+ const diff = (0, import_node_child_process.execSync)("git diff --cached --stat", {
1451
+ cwd: root,
1452
+ encoding: "utf-8",
1453
+ stdio: ["pipe", "pipe", "pipe"]
1454
+ }).trim();
1455
+ if (!diff) return;
1456
+ (0, import_node_child_process.execSync)(`git commit -m ${JSON.stringify(`[majlis] ${message}`)}`, {
1457
+ cwd: root,
1458
+ encoding: "utf-8",
1459
+ stdio: ["pipe", "pipe", "pipe"]
1460
+ });
1461
+ info(`Auto-committed: ${message}`);
1462
+ } catch {
1463
+ }
1464
+ }
1465
+ var import_node_child_process;
1466
+ var init_git = __esm({
1467
+ "src/git.ts"() {
1468
+ "use strict";
1469
+ import_node_child_process = require("child_process");
1470
+ init_format();
1471
+ }
1472
+ });
1473
+
1474
+ // src/commands/upgrade.ts
1475
+ var upgrade_exports = {};
1476
+ __export(upgrade_exports, {
1477
+ upgrade: () => upgrade
1478
+ });
1479
+ async function upgrade(_args) {
1480
+ const root = findProjectRoot();
1481
+ if (!root) throw new Error("Not in a Majlis project. Run `majlis init` first.");
1482
+ header(`Upgrading to Majlis v${VERSION}`);
1483
+ let updated = 0;
1484
+ let added = 0;
1485
+ const majlisAgentsDir = path3.join(root, ".majlis", "agents");
1486
+ const claudeAgentsDir = path3.join(root, ".claude", "agents");
1487
+ mkdirSafe2(majlisAgentsDir);
1488
+ mkdirSafe2(claudeAgentsDir);
1489
+ for (const [name, content] of Object.entries(AGENT_DEFINITIONS)) {
1490
+ const majlisPath = path3.join(majlisAgentsDir, `${name}.md`);
1491
+ const claudePath = path3.join(claudeAgentsDir, `${name}.md`);
1492
+ const existed = fs3.existsSync(majlisPath);
1493
+ const current = existed ? fs3.readFileSync(majlisPath, "utf-8") : "";
1494
+ if (current !== content) {
1495
+ fs3.writeFileSync(majlisPath, content);
1496
+ fs3.writeFileSync(claudePath, content);
1497
+ if (existed) {
1498
+ info(` Updated agent: ${name}`);
1499
+ updated++;
1500
+ } else {
1501
+ info(` Added agent: ${name}`);
1502
+ added++;
1503
+ }
1504
+ }
1505
+ }
1506
+ try {
1507
+ for (const file of fs3.readdirSync(majlisAgentsDir)) {
1508
+ const name = file.replace(".md", "");
1509
+ if (!AGENT_DEFINITIONS[name]) {
1510
+ fs3.unlinkSync(path3.join(majlisAgentsDir, file));
1511
+ try {
1512
+ fs3.unlinkSync(path3.join(claudeAgentsDir, file));
1513
+ } catch {
1514
+ }
1515
+ info(` Removed deprecated agent: ${name}`);
1516
+ updated++;
1517
+ }
1518
+ }
1519
+ } catch {
1520
+ }
1521
+ const commandsDir = path3.join(root, ".claude", "commands");
1522
+ mkdirSafe2(commandsDir);
1523
+ for (const [name, cmd] of Object.entries(SLASH_COMMANDS)) {
1524
+ const cmdPath = path3.join(commandsDir, `${name}.md`);
1525
+ const content = `---
1526
+ description: ${cmd.description}
1527
+ ---
1528
+ ${cmd.body}
1529
+ `;
1530
+ const existed = fs3.existsSync(cmdPath);
1531
+ const current = existed ? fs3.readFileSync(cmdPath, "utf-8") : "";
1532
+ if (current !== content) {
1533
+ fs3.writeFileSync(cmdPath, content);
1534
+ if (existed) {
1535
+ updated++;
1536
+ } else {
1537
+ added++;
1538
+ }
1539
+ info(` ${existed ? "Updated" : "Added"} command: /${name}`);
1540
+ }
1541
+ }
1542
+ const settingsPath = path3.join(root, ".claude", "settings.json");
1543
+ try {
1544
+ if (fs3.existsSync(settingsPath)) {
1545
+ const existing = JSON.parse(fs3.readFileSync(settingsPath, "utf-8"));
1546
+ const before = JSON.stringify(existing.hooks);
1547
+ existing.hooks = { ...existing.hooks, ...HOOKS_CONFIG.hooks };
1548
+ if (JSON.stringify(existing.hooks) !== before) {
1549
+ fs3.writeFileSync(settingsPath, JSON.stringify(existing, null, 2));
1550
+ info(" Updated hooks in .claude/settings.json");
1551
+ updated++;
1552
+ }
1553
+ } else {
1554
+ fs3.writeFileSync(settingsPath, JSON.stringify(HOOKS_CONFIG, null, 2));
1555
+ info(" Created .claude/settings.json");
1556
+ added++;
1557
+ }
1558
+ } catch {
1559
+ warn(" Could not update .claude/settings.json");
1560
+ }
1561
+ const docDirs = [
1562
+ "inbox",
1563
+ "experiments",
1564
+ "decisions",
1565
+ "classification",
1566
+ "doubts",
1567
+ "challenges",
1568
+ "verification",
1569
+ "reframes",
1570
+ "rihla",
1571
+ "synthesis",
1572
+ "diagnosis"
1573
+ ];
1574
+ for (const dir of docDirs) {
1575
+ const dirPath = path3.join(root, "docs", dir);
1576
+ if (!fs3.existsSync(dirPath)) {
1577
+ fs3.mkdirSync(dirPath, { recursive: true });
1578
+ info(` Added docs/${dir}/`);
1579
+ added++;
1580
+ }
1581
+ }
1582
+ const claudeMdPath = path3.join(root, "CLAUDE.md");
1583
+ if (fs3.existsSync(claudeMdPath)) {
1584
+ const existing = fs3.readFileSync(claudeMdPath, "utf-8");
1585
+ if (existing.includes("## Majlis Protocol")) {
1586
+ const replaced = existing.replace(
1587
+ /## Majlis Protocol[\s\S]*?(?=\n## [^M]|\n## $|$)/,
1588
+ CLAUDE_MD_SECTION.trim()
1589
+ );
1590
+ if (replaced !== existing) {
1591
+ fs3.writeFileSync(claudeMdPath, replaced);
1592
+ info(" Updated Majlis Protocol in CLAUDE.md");
1593
+ updated++;
1594
+ }
1595
+ } else {
1596
+ fs3.writeFileSync(claudeMdPath, existing + "\n" + CLAUDE_MD_SECTION);
1597
+ info(" Appended Majlis Protocol to CLAUDE.md");
1598
+ added++;
1599
+ }
1600
+ }
1601
+ if (updated === 0 && added === 0) {
1602
+ success(`Already up to date (v${VERSION}).`);
1603
+ } else {
1604
+ autoCommit(root, `upgrade to v${VERSION}`);
1605
+ success(`Upgraded to v${VERSION}: ${updated} updated, ${added} added.`);
1606
+ }
1607
+ }
1608
+ function mkdirSafe2(dir) {
1609
+ if (!fs3.existsSync(dir)) {
1610
+ fs3.mkdirSync(dir, { recursive: true });
1611
+ }
1612
+ }
1613
+ var fs3, path3, VERSION;
1614
+ var init_upgrade = __esm({
1615
+ "src/commands/upgrade.ts"() {
1616
+ "use strict";
1617
+ fs3 = __toESM(require("fs"));
1618
+ path3 = __toESM(require("path"));
1619
+ init_connection();
1620
+ init_init();
1621
+ init_git();
1622
+ init_format();
1623
+ VERSION = JSON.parse(
1624
+ fs3.readFileSync(path3.join(__dirname, "..", "package.json"), "utf-8")
1625
+ ).version;
1626
+ }
1627
+ });
1628
+
1438
1629
  // src/db/queries.ts
1439
1630
  function createExperiment(db, slug, branch, hypothesis, subType, classificationRef) {
1440
1631
  const stmt = db.prepare(`
@@ -1869,13 +2060,13 @@ var init_queries = __esm({
1869
2060
  // src/config.ts
1870
2061
  function loadConfig(projectRoot) {
1871
2062
  if (_cachedConfig && _cachedRoot === projectRoot) return _cachedConfig;
1872
- const configPath = path3.join(projectRoot, ".majlis", "config.json");
1873
- if (!fs3.existsSync(configPath)) {
2063
+ const configPath = path4.join(projectRoot, ".majlis", "config.json");
2064
+ if (!fs4.existsSync(configPath)) {
1874
2065
  _cachedConfig = { ...DEFAULT_CONFIG2 };
1875
2066
  _cachedRoot = projectRoot;
1876
2067
  return _cachedConfig;
1877
2068
  }
1878
- const loaded = JSON.parse(fs3.readFileSync(configPath, "utf-8"));
2069
+ const loaded = JSON.parse(fs4.readFileSync(configPath, "utf-8"));
1879
2070
  _cachedConfig = {
1880
2071
  ...DEFAULT_CONFIG2,
1881
2072
  ...loaded,
@@ -1889,7 +2080,7 @@ function loadConfig(projectRoot) {
1889
2080
  }
1890
2081
  function readFileOrEmpty(filePath) {
1891
2082
  try {
1892
- return fs3.readFileSync(filePath, "utf-8");
2083
+ return fs4.readFileSync(filePath, "utf-8");
1893
2084
  } catch {
1894
2085
  return "";
1895
2086
  }
@@ -1903,12 +2094,22 @@ function truncateContext(content, limit) {
1903
2094
  if (content.length <= limit) return content;
1904
2095
  return content.slice(0, limit) + "\n[TRUNCATED]";
1905
2096
  }
1906
- var fs3, path3, DEFAULT_CONFIG2, _cachedConfig, _cachedRoot, CONTEXT_LIMITS;
2097
+ function readLatestDiagnosis(projectRoot) {
2098
+ const dir = path4.join(projectRoot, "docs", "diagnosis");
2099
+ try {
2100
+ const files = fs4.readdirSync(dir).filter((f) => f.startsWith("diagnosis-") && f.endsWith(".md")).sort().reverse();
2101
+ if (files.length === 0) return "";
2102
+ return fs4.readFileSync(path4.join(dir, files[0]), "utf-8");
2103
+ } catch {
2104
+ return "";
2105
+ }
2106
+ }
2107
+ var fs4, path4, DEFAULT_CONFIG2, _cachedConfig, _cachedRoot, CONTEXT_LIMITS;
1907
2108
  var init_config = __esm({
1908
2109
  "src/config.ts"() {
1909
2110
  "use strict";
1910
- fs3 = __toESM(require("fs"));
1911
- path3 = __toESM(require("path"));
2111
+ fs4 = __toESM(require("fs"));
2112
+ path4 = __toESM(require("path"));
1912
2113
  DEFAULT_CONFIG2 = {
1913
2114
  project: { name: "", description: "", objective: "" },
1914
2115
  metrics: { command: "", fixtures: [], tracked: {} },
@@ -2237,11 +2438,11 @@ var init_parse = __esm({
2237
2438
  // src/agents/spawn.ts
2238
2439
  function loadAgentDefinition(role, projectRoot) {
2239
2440
  const root = projectRoot ?? findProjectRoot() ?? process.cwd();
2240
- const filePath = path4.join(root, ".majlis", "agents", `${role}.md`);
2241
- if (!fs4.existsSync(filePath)) {
2441
+ const filePath = path5.join(root, ".majlis", "agents", `${role}.md`);
2442
+ if (!fs5.existsSync(filePath)) {
2242
2443
  throw new Error(`Agent definition not found: ${filePath}`);
2243
2444
  }
2244
- const content = fs4.readFileSync(filePath, "utf-8");
2445
+ const content = fs5.readFileSync(filePath, "utf-8");
2245
2446
  const frontmatterMatch = content.match(/^---\n([\s\S]*?)\n---\n([\s\S]*)$/);
2246
2447
  if (!frontmatterMatch) {
2247
2448
  throw new Error(`Invalid agent definition (missing YAML frontmatter): ${filePath}`);
@@ -2417,8 +2618,10 @@ ${taskPrompt}`;
2417
2618
  }
2418
2619
  return { output: markdown, structured, truncated };
2419
2620
  }
2420
- async function spawnSynthesiser(context, projectRoot) {
2621
+ async function spawnSynthesiser(context, projectRoot, opts) {
2421
2622
  const root = projectRoot ?? findProjectRoot() ?? process.cwd();
2623
+ const maxTurns = opts?.maxTurns ?? 5;
2624
+ const tools = opts?.tools ?? ["Read", "Glob", "Grep"];
2422
2625
  const contextJson = JSON.stringify(context);
2423
2626
  const taskPrompt = context.taskPrompt ?? "Synthesise the findings into actionable builder guidance.";
2424
2627
  const prompt = `Here is your context:
@@ -2429,14 +2632,14 @@ ${contextJson}
2429
2632
 
2430
2633
  ${taskPrompt}`;
2431
2634
  const systemPrompt = 'You are a Synthesis Agent. Be concrete: which decisions failed, which assumptions broke, what constraints must the next approach satisfy. CRITICAL: Your LAST line of output MUST be a <!-- majlis-json --> block. The framework parses this programmatically \u2014 if you omit it, the pipeline breaks. Format: <!-- majlis-json {"guidance": "your guidance here"} -->';
2432
- console.log(`[synthesiser] Spawning (maxTurns: 5)...`);
2635
+ console.log(`[synthesiser] Spawning (maxTurns: ${maxTurns})...`);
2433
2636
  const { text: markdown, costUsd, truncated } = await runQuery({
2434
2637
  prompt,
2435
2638
  model: "sonnet",
2436
- tools: ["Read", "Glob", "Grep"],
2639
+ tools,
2437
2640
  systemPrompt,
2438
2641
  cwd: root,
2439
- maxTurns: 5,
2642
+ maxTurns,
2440
2643
  label: "synthesiser",
2441
2644
  role: "synthesiser"
2442
2645
  });
@@ -2447,15 +2650,15 @@ async function spawnRecovery(role, partialOutput, context, projectRoot) {
2447
2650
  const root = projectRoot ?? findProjectRoot() ?? process.cwd();
2448
2651
  const expSlug = context.experiment?.slug ?? "unknown";
2449
2652
  console.log(`[recovery] Cleaning up after truncated ${role} for ${expSlug}...`);
2450
- const expDocPath = path4.join(
2653
+ const expDocPath = path5.join(
2451
2654
  root,
2452
2655
  "docs",
2453
2656
  "experiments",
2454
2657
  `${String(context.experiment?.id ?? 0).padStart(3, "0")}-${expSlug}.md`
2455
2658
  );
2456
- const templatePath = path4.join(root, "docs", "experiments", "_TEMPLATE.md");
2457
- const template = fs4.existsSync(templatePath) ? fs4.readFileSync(templatePath, "utf-8") : "";
2458
- const currentDoc = fs4.existsSync(expDocPath) ? fs4.readFileSync(expDocPath, "utf-8") : "";
2659
+ const templatePath = path5.join(root, "docs", "experiments", "_TEMPLATE.md");
2660
+ const template = fs5.existsSync(templatePath) ? fs5.readFileSync(templatePath, "utf-8") : "";
2661
+ const currentDoc = fs5.existsSync(expDocPath) ? fs5.readFileSync(expDocPath, "utf-8") : "";
2459
2662
  const prompt = `The ${role} agent was truncated (hit max turns) while working on experiment "${expSlug}".
2460
2663
 
2461
2664
  Here is the partial agent output (reasoning + tool calls):
@@ -2618,23 +2821,23 @@ function writeArtifact(role, context, markdown, projectRoot) {
2618
2821
  const dir = dirMap[role];
2619
2822
  if (!dir) return null;
2620
2823
  if (role === "builder" || role === "compressor" || role === "diagnostician") return null;
2621
- const fullDir = path4.join(projectRoot, dir);
2622
- if (!fs4.existsSync(fullDir)) {
2623
- fs4.mkdirSync(fullDir, { recursive: true });
2824
+ const fullDir = path5.join(projectRoot, dir);
2825
+ if (!fs5.existsSync(fullDir)) {
2826
+ fs5.mkdirSync(fullDir, { recursive: true });
2624
2827
  }
2625
2828
  const expSlug = context.experiment?.slug ?? "general";
2626
2829
  const nextNum = String(context.experiment?.id ?? 1).padStart(3, "0");
2627
2830
  const filename = `${nextNum}-${role}-${expSlug}.md`;
2628
- const target = path4.join(fullDir, filename);
2629
- fs4.writeFileSync(target, markdown);
2831
+ const target = path5.join(fullDir, filename);
2832
+ fs5.writeFileSync(target, markdown);
2630
2833
  return target;
2631
2834
  }
2632
- var fs4, path4, import_claude_agent_sdk2, ROLE_MAX_TURNS, CHECKPOINT_INTERVAL, DIM2, RESET2, CYAN2;
2835
+ var fs5, path5, import_claude_agent_sdk2, ROLE_MAX_TURNS, CHECKPOINT_INTERVAL, DIM2, RESET2, CYAN2;
2633
2836
  var init_spawn = __esm({
2634
2837
  "src/agents/spawn.ts"() {
2635
2838
  "use strict";
2636
- fs4 = __toESM(require("fs"));
2637
- path4 = __toESM(require("path"));
2839
+ fs5 = __toESM(require("fs"));
2840
+ path5 = __toESM(require("path"));
2638
2841
  import_claude_agent_sdk2 = require("@anthropic-ai/claude-agent-sdk");
2639
2842
  init_parse();
2640
2843
  init_connection();
@@ -2752,7 +2955,7 @@ async function captureMetrics(phase, args) {
2752
2955
  if (config.build.pre_measure) {
2753
2956
  info(`Running pre-measure: ${config.build.pre_measure}`);
2754
2957
  try {
2755
- (0, import_node_child_process.execSync)(config.build.pre_measure, { cwd: root, encoding: "utf-8", stdio: "inherit" });
2958
+ (0, import_node_child_process2.execSync)(config.build.pre_measure, { cwd: root, encoding: "utf-8", stdio: "inherit" });
2756
2959
  } catch {
2757
2960
  warn("Pre-measure command failed \u2014 continuing anyway.");
2758
2961
  }
@@ -2763,7 +2966,7 @@ async function captureMetrics(phase, args) {
2763
2966
  info(`Running metrics: ${config.metrics.command}`);
2764
2967
  let metricsOutput;
2765
2968
  try {
2766
- metricsOutput = (0, import_node_child_process.execSync)(config.metrics.command, {
2969
+ metricsOutput = (0, import_node_child_process2.execSync)(config.metrics.command, {
2767
2970
  cwd: root,
2768
2971
  encoding: "utf-8",
2769
2972
  stdio: ["pipe", "pipe", "pipe"]
@@ -2782,7 +2985,7 @@ async function captureMetrics(phase, args) {
2782
2985
  success(`Captured ${parsed.length} metric(s) for ${exp.slug} (phase: ${phase})`);
2783
2986
  if (config.build.post_measure) {
2784
2987
  try {
2785
- (0, import_node_child_process.execSync)(config.build.post_measure, { cwd: root, encoding: "utf-8", stdio: "inherit" });
2988
+ (0, import_node_child_process2.execSync)(config.build.post_measure, { cwd: root, encoding: "utf-8", stdio: "inherit" });
2786
2989
  } catch {
2787
2990
  warn("Post-measure command failed.");
2788
2991
  }
@@ -2833,11 +3036,11 @@ function formatDelta(delta) {
2833
3036
  const prefix = delta > 0 ? "+" : "";
2834
3037
  return `${prefix}${delta.toFixed(4)}`;
2835
3038
  }
2836
- var import_node_child_process;
3039
+ var import_node_child_process2;
2837
3040
  var init_measure = __esm({
2838
3041
  "src/commands/measure.ts"() {
2839
3042
  "use strict";
2840
- import_node_child_process = require("child_process");
3043
+ import_node_child_process2 = require("child_process");
2841
3044
  init_connection();
2842
3045
  init_queries();
2843
3046
  init_metrics();
@@ -2870,7 +3073,7 @@ async function newExperiment(args) {
2870
3073
  const paddedNum = String(num).padStart(3, "0");
2871
3074
  const branch = `exp/${paddedNum}-${slug}`;
2872
3075
  try {
2873
- (0, import_node_child_process2.execSync)(`git checkout -b ${branch}`, {
3076
+ (0, import_node_child_process3.execSync)(`git checkout -b ${branch}`, {
2874
3077
  cwd: root,
2875
3078
  encoding: "utf-8",
2876
3079
  stdio: ["pipe", "pipe", "pipe"]
@@ -2882,15 +3085,16 @@ async function newExperiment(args) {
2882
3085
  const subType = getFlagValue(args, "--sub-type") ?? null;
2883
3086
  const exp = createExperiment(db, slug, branch, hypothesis, subType, null);
2884
3087
  success(`Created experiment #${exp.id}: ${exp.slug}`);
2885
- const docsDir = path5.join(root, "docs", "experiments");
2886
- const templatePath = path5.join(docsDir, "_TEMPLATE.md");
2887
- if (fs5.existsSync(templatePath)) {
2888
- const template = fs5.readFileSync(templatePath, "utf-8");
3088
+ const docsDir = path6.join(root, "docs", "experiments");
3089
+ const templatePath = path6.join(docsDir, "_TEMPLATE.md");
3090
+ if (fs6.existsSync(templatePath)) {
3091
+ const template = fs6.readFileSync(templatePath, "utf-8");
2889
3092
  const logContent = template.replace(/\{\{title\}\}/g, hypothesis).replace(/\{\{hypothesis\}\}/g, hypothesis).replace(/\{\{branch\}\}/g, branch).replace(/\{\{status\}\}/g, "classified").replace(/\{\{sub_type\}\}/g, subType ?? "unclassified").replace(/\{\{date\}\}/g, (/* @__PURE__ */ new Date()).toISOString().split("T")[0]);
2890
- const logPath = path5.join(docsDir, `${paddedNum}-${slug}.md`);
2891
- fs5.writeFileSync(logPath, logContent);
3093
+ const logPath = path6.join(docsDir, `${paddedNum}-${slug}.md`);
3094
+ fs6.writeFileSync(logPath, logContent);
2892
3095
  info(`Created experiment log: docs/experiments/${paddedNum}-${slug}.md`);
2893
3096
  }
3097
+ autoCommit(root, `new: ${slug}`);
2894
3098
  if (config.cycle.auto_baseline_on_new_experiment && config.metrics.command) {
2895
3099
  info("Auto-baselining... (run `majlis baseline` to do this manually)");
2896
3100
  try {
@@ -2927,12 +3131,12 @@ async function revert(args) {
2927
3131
  );
2928
3132
  updateExperimentStatus(db, exp.id, "dead_end");
2929
3133
  try {
2930
- const currentBranch = (0, import_node_child_process2.execSync)("git rev-parse --abbrev-ref HEAD", {
3134
+ const currentBranch = (0, import_node_child_process3.execSync)("git rev-parse --abbrev-ref HEAD", {
2931
3135
  cwd: root,
2932
3136
  encoding: "utf-8"
2933
3137
  }).trim();
2934
3138
  if (currentBranch === exp.branch) {
2935
- (0, import_node_child_process2.execSync)("git checkout main 2>/dev/null || git checkout master", {
3139
+ (0, import_node_child_process3.execSync)("git checkout main 2>/dev/null || git checkout master", {
2936
3140
  cwd: root,
2937
3141
  encoding: "utf-8",
2938
3142
  stdio: ["pipe", "pipe", "pipe"]
@@ -2943,17 +3147,18 @@ async function revert(args) {
2943
3147
  }
2944
3148
  info(`Experiment ${exp.slug} reverted to dead-end. Reason: ${reason}`);
2945
3149
  }
2946
- var fs5, path5, import_node_child_process2;
3150
+ var fs6, path6, import_node_child_process3;
2947
3151
  var init_experiment = __esm({
2948
3152
  "src/commands/experiment.ts"() {
2949
3153
  "use strict";
2950
- fs5 = __toESM(require("fs"));
2951
- path5 = __toESM(require("path"));
2952
- import_node_child_process2 = require("child_process");
3154
+ fs6 = __toESM(require("fs"));
3155
+ path6 = __toESM(require("path"));
3156
+ import_node_child_process3 = require("child_process");
2953
3157
  init_connection();
2954
3158
  init_queries();
2955
3159
  init_config();
2956
3160
  init_spawn();
3161
+ init_git();
2957
3162
  init_format();
2958
3163
  }
2959
3164
  });
@@ -3089,12 +3294,12 @@ function queryDeadEnds(db, args, isJson) {
3089
3294
  console.log(table(["ID", "Sub-Type", "Approach", "Constraint"], rows));
3090
3295
  }
3091
3296
  function queryFragility(root, isJson) {
3092
- const fragPath = path6.join(root, "docs", "synthesis", "fragility.md");
3093
- if (!fs6.existsSync(fragPath)) {
3297
+ const fragPath = path7.join(root, "docs", "synthesis", "fragility.md");
3298
+ if (!fs7.existsSync(fragPath)) {
3094
3299
  info("No fragility map found.");
3095
3300
  return;
3096
3301
  }
3097
- const content = fs6.readFileSync(fragPath, "utf-8");
3302
+ const content = fs7.readFileSync(fragPath, "utf-8");
3098
3303
  if (isJson) {
3099
3304
  console.log(JSON.stringify({ content }, null, 2));
3100
3305
  return;
@@ -3150,7 +3355,7 @@ function queryCircuitBreakers(db, root, isJson) {
3150
3355
  function checkCommit(db) {
3151
3356
  let stdinData = "";
3152
3357
  try {
3153
- stdinData = fs6.readFileSync(0, "utf-8");
3358
+ stdinData = fs7.readFileSync(0, "utf-8");
3154
3359
  } catch {
3155
3360
  }
3156
3361
  if (stdinData) {
@@ -3175,12 +3380,12 @@ function checkCommit(db) {
3175
3380
  process.exit(1);
3176
3381
  }
3177
3382
  }
3178
- var fs6, path6;
3383
+ var fs7, path7;
3179
3384
  var init_query = __esm({
3180
3385
  "src/commands/query.ts"() {
3181
3386
  "use strict";
3182
- fs6 = __toESM(require("fs"));
3183
- path6 = __toESM(require("path"));
3387
+ fs7 = __toESM(require("fs"));
3388
+ path7 = __toESM(require("path"));
3184
3389
  init_connection();
3185
3390
  init_queries();
3186
3391
  init_config();
@@ -3301,6 +3506,7 @@ async function resolve(db, exp, projectRoot) {
3301
3506
  gitMerge(exp.branch, projectRoot);
3302
3507
  const gaps = grades.filter((g) => g.grade === "good").map((g) => `- **${g.component}**: ${g.notes ?? "minor gaps"}`).join("\n");
3303
3508
  appendToFragilityMap(projectRoot, exp.slug, gaps);
3509
+ autoCommit(projectRoot, `resolve: fragility gaps from ${exp.slug}`);
3304
3510
  updateExperimentStatus(db, exp.id, "merged");
3305
3511
  success(`Experiment ${exp.slug} MERGED (good, ${grades.filter((g) => g.grade === "good").length} gaps added to fragility map).`);
3306
3512
  break;
@@ -3435,7 +3641,12 @@ async function resolveDbOnly(db, exp, projectRoot) {
3435
3641
  }
3436
3642
  function gitMerge(branch, cwd) {
3437
3643
  try {
3438
- (0, import_node_child_process3.execSync)(`git merge ${branch} --no-ff -m "Merge experiment branch ${branch}"`, {
3644
+ (0, import_node_child_process4.execSync)("git checkout main 2>/dev/null || git checkout master", {
3645
+ cwd,
3646
+ encoding: "utf-8",
3647
+ stdio: ["pipe", "pipe", "pipe"]
3648
+ });
3649
+ (0, import_node_child_process4.execSync)(`git merge ${branch} --no-ff -m "Merge experiment branch ${branch}"`, {
3439
3650
  cwd,
3440
3651
  encoding: "utf-8",
3441
3652
  stdio: ["pipe", "pipe", "pipe"]
@@ -3446,16 +3657,16 @@ function gitMerge(branch, cwd) {
3446
3657
  }
3447
3658
  function gitRevert(branch, cwd) {
3448
3659
  try {
3449
- const currentBranch = (0, import_node_child_process3.execSync)("git rev-parse --abbrev-ref HEAD", {
3660
+ const currentBranch = (0, import_node_child_process4.execSync)("git rev-parse --abbrev-ref HEAD", {
3450
3661
  cwd,
3451
3662
  encoding: "utf-8"
3452
3663
  }).trim();
3453
3664
  if (currentBranch === branch) {
3454
3665
  try {
3455
- (0, import_node_child_process3.execSync)("git checkout -- .", { cwd, encoding: "utf-8", stdio: ["pipe", "pipe", "pipe"] });
3666
+ (0, import_node_child_process4.execSync)("git checkout -- .", { cwd, encoding: "utf-8", stdio: ["pipe", "pipe", "pipe"] });
3456
3667
  } catch {
3457
3668
  }
3458
- (0, import_node_child_process3.execSync)("git checkout main 2>/dev/null || git checkout master", {
3669
+ (0, import_node_child_process4.execSync)("git checkout main 2>/dev/null || git checkout master", {
3459
3670
  cwd,
3460
3671
  encoding: "utf-8",
3461
3672
  stdio: ["pipe", "pipe", "pipe"]
@@ -3466,27 +3677,28 @@ function gitRevert(branch, cwd) {
3466
3677
  }
3467
3678
  }
3468
3679
  function appendToFragilityMap(projectRoot, expSlug, gaps) {
3469
- const fragPath = path7.join(projectRoot, "docs", "synthesis", "fragility.md");
3680
+ const fragPath = path8.join(projectRoot, "docs", "synthesis", "fragility.md");
3470
3681
  let content = "";
3471
- if (fs7.existsSync(fragPath)) {
3472
- content = fs7.readFileSync(fragPath, "utf-8");
3682
+ if (fs8.existsSync(fragPath)) {
3683
+ content = fs8.readFileSync(fragPath, "utf-8");
3473
3684
  }
3474
3685
  const entry = `
3475
3686
  ## From experiment: ${expSlug}
3476
3687
  ${gaps}
3477
3688
  `;
3478
- fs7.writeFileSync(fragPath, content + entry);
3689
+ fs8.writeFileSync(fragPath, content + entry);
3479
3690
  }
3480
- var fs7, path7, import_node_child_process3;
3691
+ var fs8, path8, import_node_child_process4;
3481
3692
  var init_resolve = __esm({
3482
3693
  "src/resolve.ts"() {
3483
3694
  "use strict";
3484
- fs7 = __toESM(require("fs"));
3485
- path7 = __toESM(require("path"));
3695
+ fs8 = __toESM(require("fs"));
3696
+ path8 = __toESM(require("path"));
3486
3697
  init_types2();
3487
3698
  init_queries();
3488
3699
  init_spawn();
3489
- import_node_child_process3 = require("child_process");
3700
+ import_node_child_process4 = require("child_process");
3701
+ init_git();
3490
3702
  init_format();
3491
3703
  }
3492
3704
  });
@@ -3553,8 +3765,8 @@ async function runResolve(db, exp, root) {
3553
3765
  }
3554
3766
  async function doGate(db, exp, root) {
3555
3767
  transition(exp.status, "gated" /* GATED */);
3556
- const synthesis = truncateContext(readFileOrEmpty(path8.join(root, "docs", "synthesis", "current.md")), CONTEXT_LIMITS.synthesis);
3557
- const fragility = truncateContext(readFileOrEmpty(path8.join(root, "docs", "synthesis", "fragility.md")), CONTEXT_LIMITS.fragility);
3768
+ const synthesis = truncateContext(readFileOrEmpty(path9.join(root, "docs", "synthesis", "current.md")), CONTEXT_LIMITS.synthesis);
3769
+ const fragility = truncateContext(readFileOrEmpty(path9.join(root, "docs", "synthesis", "fragility.md")), CONTEXT_LIMITS.fragility);
3558
3770
  const structuralDeadEnds = exp.sub_type ? listStructuralDeadEndsBySubType(db, exp.sub_type) : listStructuralDeadEnds(db);
3559
3771
  const result = await spawnAgent("gatekeeper", {
3560
3772
  experiment: {
@@ -3598,14 +3810,14 @@ async function doBuild(db, exp, root) {
3598
3810
  transition(exp.status, "building" /* BUILDING */);
3599
3811
  const deadEnds = exp.sub_type ? listDeadEndsBySubType(db, exp.sub_type) : listAllDeadEnds(db);
3600
3812
  const builderGuidance = getBuilderGuidance(db, exp.id);
3601
- const fragility = truncateContext(readFileOrEmpty(path8.join(root, "docs", "synthesis", "fragility.md")), CONTEXT_LIMITS.fragility);
3602
- const synthesis = truncateContext(readFileOrEmpty(path8.join(root, "docs", "synthesis", "current.md")), CONTEXT_LIMITS.synthesis);
3813
+ const fragility = truncateContext(readFileOrEmpty(path9.join(root, "docs", "synthesis", "fragility.md")), CONTEXT_LIMITS.fragility);
3814
+ const synthesis = truncateContext(readFileOrEmpty(path9.join(root, "docs", "synthesis", "current.md")), CONTEXT_LIMITS.synthesis);
3603
3815
  const confirmedDoubts = getConfirmedDoubts(db, exp.id);
3604
3816
  const config = loadConfig(root);
3605
3817
  const existingBaseline = getMetricsByExperimentAndPhase(db, exp.id, "before");
3606
3818
  if (config.metrics?.command && existingBaseline.length === 0) {
3607
3819
  try {
3608
- const output = (0, import_node_child_process4.execSync)(config.metrics.command, {
3820
+ const output = (0, import_node_child_process5.execSync)(config.metrics.command, {
3609
3821
  cwd: root,
3610
3822
  encoding: "utf-8",
3611
3823
  timeout: 6e4,
@@ -3662,7 +3874,7 @@ Build the experiment: ${exp.hypothesis}` : `Build the experiment: ${exp.hypothes
3662
3874
  } else {
3663
3875
  if (config.metrics?.command) {
3664
3876
  try {
3665
- const output = (0, import_node_child_process4.execSync)(config.metrics.command, {
3877
+ const output = (0, import_node_child_process5.execSync)(config.metrics.command, {
3666
3878
  cwd: root,
3667
3879
  encoding: "utf-8",
3668
3880
  timeout: 6e4,
@@ -3686,7 +3898,7 @@ async function doChallenge(db, exp, root) {
3686
3898
  transition(exp.status, "challenged" /* CHALLENGED */);
3687
3899
  let gitDiff = "";
3688
3900
  try {
3689
- gitDiff = (0, import_node_child_process4.execSync)('git diff main -- . ":!.majlis/"', {
3901
+ gitDiff = (0, import_node_child_process5.execSync)('git diff main -- . ":!.majlis/"', {
3690
3902
  cwd: root,
3691
3903
  encoding: "utf-8",
3692
3904
  stdio: ["pipe", "pipe", "pipe"]
@@ -3694,7 +3906,7 @@ async function doChallenge(db, exp, root) {
3694
3906
  } catch {
3695
3907
  }
3696
3908
  if (gitDiff.length > 8e3) gitDiff = gitDiff.slice(0, 8e3) + "\n[DIFF TRUNCATED]";
3697
- const synthesis = truncateContext(readFileOrEmpty(path8.join(root, "docs", "synthesis", "current.md")), CONTEXT_LIMITS.synthesis);
3909
+ const synthesis = truncateContext(readFileOrEmpty(path9.join(root, "docs", "synthesis", "current.md")), CONTEXT_LIMITS.synthesis);
3698
3910
  let taskPrompt = `Construct adversarial test cases for experiment ${exp.slug}: ${exp.hypothesis}`;
3699
3911
  if (gitDiff) {
3700
3912
  taskPrompt += `
@@ -3727,9 +3939,9 @@ ${gitDiff}
3727
3939
  async function doDoubt(db, exp, root) {
3728
3940
  transition(exp.status, "doubted" /* DOUBTED */);
3729
3941
  const paddedNum = String(exp.id).padStart(3, "0");
3730
- const expDocPath = path8.join(root, "docs", "experiments", `${paddedNum}-${exp.slug}.md`);
3942
+ const expDocPath = path9.join(root, "docs", "experiments", `${paddedNum}-${exp.slug}.md`);
3731
3943
  const experimentDoc = truncateContext(readFileOrEmpty(expDocPath), CONTEXT_LIMITS.experimentDoc);
3732
- const synthesis = truncateContext(readFileOrEmpty(path8.join(root, "docs", "synthesis", "current.md")), CONTEXT_LIMITS.synthesis);
3944
+ const synthesis = truncateContext(readFileOrEmpty(path9.join(root, "docs", "synthesis", "current.md")), CONTEXT_LIMITS.synthesis);
3733
3945
  const deadEnds = exp.sub_type ? listDeadEndsBySubType(db, exp.sub_type) : listAllDeadEnds(db);
3734
3946
  let taskPrompt = `Doubt the work in experiment ${exp.slug}: ${exp.hypothesis}. Produce a doubt document with evidence for each doubt.`;
3735
3947
  if (experimentDoc) {
@@ -3768,8 +3980,8 @@ ${experimentDoc}
3768
3980
  }
3769
3981
  async function doScout(db, exp, root) {
3770
3982
  transition(exp.status, "scouted" /* SCOUTED */);
3771
- const synthesis = truncateContext(readFileOrEmpty(path8.join(root, "docs", "synthesis", "current.md")), CONTEXT_LIMITS.synthesis);
3772
- const fragility = truncateContext(readFileOrEmpty(path8.join(root, "docs", "synthesis", "fragility.md")), CONTEXT_LIMITS.fragility);
3983
+ const synthesis = truncateContext(readFileOrEmpty(path9.join(root, "docs", "synthesis", "current.md")), CONTEXT_LIMITS.synthesis);
3984
+ const fragility = truncateContext(readFileOrEmpty(path9.join(root, "docs", "synthesis", "fragility.md")), CONTEXT_LIMITS.fragility);
3773
3985
  const deadEnds = exp.sub_type ? listDeadEndsBySubType(db, exp.sub_type) : listAllDeadEnds(db);
3774
3986
  const deadEndsSummary = deadEnds.map(
3775
3987
  (d) => `- [${d.category ?? "structural"}] ${d.approach}: ${d.why_failed}`
@@ -3816,12 +4028,12 @@ ${fragility}`;
3816
4028
  async function doVerify(db, exp, root) {
3817
4029
  transition(exp.status, "verifying" /* VERIFYING */);
3818
4030
  const doubts = getDoubtsByExperiment(db, exp.id);
3819
- const challengeDir = path8.join(root, "docs", "challenges");
4031
+ const challengeDir = path9.join(root, "docs", "challenges");
3820
4032
  let challenges = "";
3821
- if (fs8.existsSync(challengeDir)) {
3822
- const files = fs8.readdirSync(challengeDir).filter((f) => f.includes(exp.slug) && f.endsWith(".md"));
4033
+ if (fs9.existsSync(challengeDir)) {
4034
+ const files = fs9.readdirSync(challengeDir).filter((f) => f.includes(exp.slug) && f.endsWith(".md"));
3823
4035
  for (const f of files) {
3824
- challenges += fs8.readFileSync(path8.join(challengeDir, f), "utf-8") + "\n\n";
4036
+ challenges += fs9.readFileSync(path9.join(challengeDir, f), "utf-8") + "\n\n";
3825
4037
  }
3826
4038
  }
3827
4039
  const beforeMetrics = getMetricsByExperimentAndPhase(db, exp.id, "before");
@@ -3889,21 +4101,22 @@ async function doVerify(db, exp, root) {
3889
4101
  success(`Verification complete for ${exp.slug}. Run \`majlis resolve\` next.`);
3890
4102
  }
3891
4103
  async function doCompress(db, root) {
3892
- const synthesisPath = path8.join(root, "docs", "synthesis", "current.md");
3893
- const sizeBefore = fs8.existsSync(synthesisPath) ? fs8.statSync(synthesisPath).size : 0;
4104
+ const synthesisPath = path9.join(root, "docs", "synthesis", "current.md");
4105
+ const sizeBefore = fs9.existsSync(synthesisPath) ? fs9.statSync(synthesisPath).size : 0;
3894
4106
  const sessionCount = getSessionsSinceCompression(db);
3895
4107
  const dbExport = exportForCompressor(db);
3896
4108
  const result = await spawnAgent("compressor", {
3897
4109
  taskPrompt: "## Structured Data (CANONICAL \u2014 from SQLite database)\nThe database export below is the source of truth. docs/ files are agent artifacts that may contain stale or incorrect information. Cross-reference everything against this data.\n\n" + dbExport + "\n\n## Your Task\nRead ALL experiments, decisions, doubts, challenges, verification reports, reframes, and recent diffs. Cross-reference for contradictions, redundancies, and patterns. REWRITE docs/synthesis/current.md \u2014 shorter and denser. Update docs/synthesis/fragility.md with current weak areas. Update docs/synthesis/dead-ends.md with structural constraints from rejected experiments."
3898
4110
  }, root);
3899
- const sizeAfter = fs8.existsSync(synthesisPath) ? fs8.statSync(synthesisPath).size : 0;
4111
+ const sizeAfter = fs9.existsSync(synthesisPath) ? fs9.statSync(synthesisPath).size : 0;
3900
4112
  recordCompression(db, sessionCount, sizeBefore, sizeAfter);
4113
+ autoCommit(root, "compress: update synthesis");
3901
4114
  success(`Compression complete. Synthesis: ${sizeBefore}B \u2192 ${sizeAfter}B`);
3902
4115
  }
3903
4116
  function gitCommitBuild(exp, cwd) {
3904
4117
  try {
3905
- (0, import_node_child_process4.execSync)('git add -A -- ":!.majlis/"', { cwd, encoding: "utf-8", stdio: ["pipe", "pipe", "pipe"] });
3906
- const diff = (0, import_node_child_process4.execSync)("git diff --cached --stat", { cwd, encoding: "utf-8", stdio: ["pipe", "pipe", "pipe"] }).trim();
4118
+ (0, import_node_child_process5.execSync)('git add -A -- ":!.majlis/"', { cwd, encoding: "utf-8", stdio: ["pipe", "pipe", "pipe"] });
4119
+ const diff = (0, import_node_child_process5.execSync)("git diff --cached --stat", { cwd, encoding: "utf-8", stdio: ["pipe", "pipe", "pipe"] }).trim();
3907
4120
  if (!diff) {
3908
4121
  info("No code changes to commit.");
3909
4122
  return;
@@ -3911,7 +4124,7 @@ function gitCommitBuild(exp, cwd) {
3911
4124
  const msg = `EXP-${String(exp.id).padStart(3, "0")}: ${exp.slug}
3912
4125
 
3913
4126
  ${exp.hypothesis ?? ""}`;
3914
- (0, import_node_child_process4.execSync)(`git commit -m ${JSON.stringify(msg)}`, { cwd, encoding: "utf-8", stdio: ["pipe", "pipe", "pipe"] });
4127
+ (0, import_node_child_process5.execSync)(`git commit -m ${JSON.stringify(msg)}`, { cwd, encoding: "utf-8", stdio: ["pipe", "pipe", "pipe"] });
3915
4128
  info(`Committed builder changes on ${exp.branch}.`);
3916
4129
  } catch {
3917
4130
  warn("Could not auto-commit builder changes \u2014 commit manually before resolving.");
@@ -3987,13 +4200,13 @@ function ingestStructuredOutput(db, experimentId, structured) {
3987
4200
  info(`Ingested ${structured.findings.length} finding(s)`);
3988
4201
  }
3989
4202
  }
3990
- var fs8, path8, import_node_child_process4;
4203
+ var fs9, path9, import_node_child_process5;
3991
4204
  var init_cycle = __esm({
3992
4205
  "src/commands/cycle.ts"() {
3993
4206
  "use strict";
3994
- fs8 = __toESM(require("fs"));
3995
- path8 = __toESM(require("path"));
3996
- import_node_child_process4 = require("child_process");
4207
+ fs9 = __toESM(require("fs"));
4208
+ path9 = __toESM(require("path"));
4209
+ import_node_child_process5 = require("child_process");
3997
4210
  init_connection();
3998
4211
  init_queries();
3999
4212
  init_machine();
@@ -4002,6 +4215,7 @@ var init_cycle = __esm({
4002
4215
  init_resolve();
4003
4216
  init_config();
4004
4217
  init_metrics();
4218
+ init_git();
4005
4219
  init_format();
4006
4220
  }
4007
4221
  });
@@ -4019,10 +4233,10 @@ async function classify(args) {
4019
4233
  if (!domain) {
4020
4234
  throw new Error('Usage: majlis classify "domain description"');
4021
4235
  }
4022
- const synthesisPath = path9.join(root, "docs", "synthesis", "current.md");
4023
- const synthesis = fs9.existsSync(synthesisPath) ? fs9.readFileSync(synthesisPath, "utf-8") : "";
4024
- const deadEndsPath = path9.join(root, "docs", "synthesis", "dead-ends.md");
4025
- const deadEnds = fs9.existsSync(deadEndsPath) ? fs9.readFileSync(deadEndsPath, "utf-8") : "";
4236
+ const synthesisPath = path10.join(root, "docs", "synthesis", "current.md");
4237
+ const synthesis = fs10.existsSync(synthesisPath) ? fs10.readFileSync(synthesisPath, "utf-8") : "";
4238
+ const deadEndsPath = path10.join(root, "docs", "synthesis", "dead-ends.md");
4239
+ const deadEnds = fs10.existsSync(deadEndsPath) ? fs10.readFileSync(deadEndsPath, "utf-8") : "";
4026
4240
  info(`Classifying problem domain: ${domain}`);
4027
4241
  const result = await spawnAgent("builder", {
4028
4242
  synthesis,
@@ -4035,27 +4249,28 @@ ${deadEnds}
4035
4249
 
4036
4250
  Write the classification to docs/classification/ following the template.`
4037
4251
  }, root);
4252
+ autoCommit(root, `classify: ${domain.slice(0, 60)}`);
4038
4253
  success("Classification complete. Check docs/classification/ for the output.");
4039
4254
  }
4040
4255
  async function reframe(args) {
4041
4256
  const root = findProjectRoot();
4042
4257
  if (!root) throw new Error("Not in a Majlis project. Run `majlis init` first.");
4043
- const classificationDir = path9.join(root, "docs", "classification");
4258
+ const classificationDir = path10.join(root, "docs", "classification");
4044
4259
  let classificationContent = "";
4045
- if (fs9.existsSync(classificationDir)) {
4046
- const files = fs9.readdirSync(classificationDir).filter((f) => f.endsWith(".md") && !f.startsWith("_"));
4260
+ if (fs10.existsSync(classificationDir)) {
4261
+ const files = fs10.readdirSync(classificationDir).filter((f) => f.endsWith(".md") && !f.startsWith("_"));
4047
4262
  for (const f of files) {
4048
- classificationContent += fs9.readFileSync(path9.join(classificationDir, f), "utf-8") + "\n\n";
4263
+ classificationContent += fs10.readFileSync(path10.join(classificationDir, f), "utf-8") + "\n\n";
4049
4264
  }
4050
4265
  }
4051
- const synthesisPath = path9.join(root, "docs", "synthesis", "current.md");
4052
- const synthesis = fs9.existsSync(synthesisPath) ? fs9.readFileSync(synthesisPath, "utf-8") : "";
4053
- const deadEndsPath = path9.join(root, "docs", "synthesis", "dead-ends.md");
4054
- const deadEnds = fs9.existsSync(deadEndsPath) ? fs9.readFileSync(deadEndsPath, "utf-8") : "";
4055
- const configPath = path9.join(root, ".majlis", "config.json");
4266
+ const synthesisPath = path10.join(root, "docs", "synthesis", "current.md");
4267
+ const synthesis = fs10.existsSync(synthesisPath) ? fs10.readFileSync(synthesisPath, "utf-8") : "";
4268
+ const deadEndsPath = path10.join(root, "docs", "synthesis", "dead-ends.md");
4269
+ const deadEnds = fs10.existsSync(deadEndsPath) ? fs10.readFileSync(deadEndsPath, "utf-8") : "";
4270
+ const configPath = path10.join(root, ".majlis", "config.json");
4056
4271
  let problemStatement = "";
4057
- if (fs9.existsSync(configPath)) {
4058
- const config = JSON.parse(fs9.readFileSync(configPath, "utf-8"));
4272
+ if (fs10.existsSync(configPath)) {
4273
+ const config = JSON.parse(fs10.readFileSync(configPath, "utf-8"));
4059
4274
  problemStatement = `${config.project?.description ?? ""}
4060
4275
  Objective: ${config.project?.objective ?? ""}`;
4061
4276
  }
@@ -4077,16 +4292,18 @@ ${deadEnds}
4077
4292
  Independently propose a decomposition. Compare with the existing classification. Flag structural divergences \u2014 these are the most valuable signals.
4078
4293
  Write to docs/reframes/.`
4079
4294
  }, root);
4295
+ autoCommit(root, `reframe: ${target.slice(0, 60)}`);
4080
4296
  success("Reframe complete. Check docs/reframes/ for the output.");
4081
4297
  }
4082
- var fs9, path9;
4298
+ var fs10, path10;
4083
4299
  var init_classify = __esm({
4084
4300
  "src/commands/classify.ts"() {
4085
4301
  "use strict";
4086
- fs9 = __toESM(require("fs"));
4087
- path9 = __toESM(require("path"));
4302
+ fs10 = __toESM(require("fs"));
4303
+ path10 = __toESM(require("path"));
4088
4304
  init_connection();
4089
4305
  init_spawn();
4306
+ init_git();
4090
4307
  init_format();
4091
4308
  }
4092
4309
  });
@@ -4105,15 +4322,15 @@ async function audit(args) {
4105
4322
  const experiments = listAllExperiments(db);
4106
4323
  const deadEnds = listAllDeadEnds(db);
4107
4324
  const circuitBreakers = getAllCircuitBreakerStates(db, config.cycle.circuit_breaker_threshold);
4108
- const classificationDir = path10.join(root, "docs", "classification");
4325
+ const classificationDir = path11.join(root, "docs", "classification");
4109
4326
  let classification = "";
4110
- if (fs10.existsSync(classificationDir)) {
4111
- const files = fs10.readdirSync(classificationDir).filter((f) => f.endsWith(".md") && !f.startsWith("_"));
4327
+ if (fs11.existsSync(classificationDir)) {
4328
+ const files = fs11.readdirSync(classificationDir).filter((f) => f.endsWith(".md") && !f.startsWith("_"));
4112
4329
  for (const f of files) {
4113
- classification += fs10.readFileSync(path10.join(classificationDir, f), "utf-8") + "\n\n";
4330
+ classification += fs11.readFileSync(path11.join(classificationDir, f), "utf-8") + "\n\n";
4114
4331
  }
4115
4332
  }
4116
- const synthesis = readFileOrEmpty(path10.join(root, "docs", "synthesis", "current.md"));
4333
+ const synthesis = readFileOrEmpty(path11.join(root, "docs", "synthesis", "current.md"));
4117
4334
  header("Maqasid Check \u2014 Purpose Audit");
4118
4335
  const trippedBreakers = circuitBreakers.filter((cb) => cb.tripped);
4119
4336
  if (trippedBreakers.length > 0) {
@@ -4157,12 +4374,12 @@ Output: either "classification confirmed \u2014 continue" or "re-classify from X
4157
4374
  }, root);
4158
4375
  success("Purpose audit complete. Review the output above.");
4159
4376
  }
4160
- var fs10, path10;
4377
+ var fs11, path11;
4161
4378
  var init_audit = __esm({
4162
4379
  "src/commands/audit.ts"() {
4163
4380
  "use strict";
4164
- fs10 = __toESM(require("fs"));
4165
- path10 = __toESM(require("path"));
4381
+ fs11 = __toESM(require("fs"));
4382
+ path11 = __toESM(require("path"));
4166
4383
  init_connection();
4167
4384
  init_queries();
4168
4385
  init_spawn();
@@ -4398,6 +4615,7 @@ async function run(args) {
4398
4615
  usedHypotheses.add(hypothesis);
4399
4616
  info(`Next hypothesis: ${hypothesis}`);
4400
4617
  exp = await createNewExperiment(db, root, hypothesis);
4618
+ autoCommit(root, `new: ${exp.slug}`);
4401
4619
  success(`Created experiment #${exp.id}: ${exp.slug}`);
4402
4620
  }
4403
4621
  if (isTerminal(exp.status)) {
@@ -4446,15 +4664,16 @@ async function run(args) {
4446
4664
  info("Run `majlis status` to see final state.");
4447
4665
  }
4448
4666
  async function deriveNextHypothesis(goal, root, db) {
4449
- const synthesis = truncateContext(readFileOrEmpty(path11.join(root, "docs", "synthesis", "current.md")), CONTEXT_LIMITS.synthesis);
4450
- const fragility = truncateContext(readFileOrEmpty(path11.join(root, "docs", "synthesis", "fragility.md")), CONTEXT_LIMITS.fragility);
4451
- const deadEndsDoc = truncateContext(readFileOrEmpty(path11.join(root, "docs", "synthesis", "dead-ends.md")), CONTEXT_LIMITS.deadEnds);
4667
+ const synthesis = truncateContext(readFileOrEmpty(path12.join(root, "docs", "synthesis", "current.md")), CONTEXT_LIMITS.synthesis);
4668
+ const fragility = truncateContext(readFileOrEmpty(path12.join(root, "docs", "synthesis", "fragility.md")), CONTEXT_LIMITS.fragility);
4669
+ const deadEndsDoc = truncateContext(readFileOrEmpty(path12.join(root, "docs", "synthesis", "dead-ends.md")), CONTEXT_LIMITS.deadEnds);
4670
+ const diagnosis = truncateContext(readLatestDiagnosis(root), CONTEXT_LIMITS.synthesis);
4452
4671
  const deadEnds = listAllDeadEnds(db);
4453
4672
  const config = loadConfig(root);
4454
4673
  let metricsOutput = "";
4455
4674
  if (config.metrics?.command) {
4456
4675
  try {
4457
- metricsOutput = (0, import_node_child_process5.execSync)(config.metrics.command, {
4676
+ metricsOutput = (0, import_node_child_process6.execSync)(config.metrics.command, {
4458
4677
  cwd: root,
4459
4678
  encoding: "utf-8",
4460
4679
  timeout: 6e4,
@@ -4469,7 +4688,10 @@ async function deriveNextHypothesis(goal, root, db) {
4469
4688
 
4470
4689
  ## Goal
4471
4690
  ${goal}
4472
-
4691
+ ${diagnosis ? `
4692
+ ## Latest Diagnosis Report (PRIORITISE \u2014 deep analysis from diagnostician agent)
4693
+ ${diagnosis}
4694
+ ` : ""}
4473
4695
  ## Current Metrics
4474
4696
  ${metricsOutput || "(no metrics configured)"}
4475
4697
 
@@ -4489,6 +4711,8 @@ Note: [structural] dead ends are HARD CONSTRAINTS \u2014 your hypothesis MUST NO
4489
4711
  [procedural] dead ends are process failures \u2014 the approach may still be valid if executed differently.
4490
4712
 
4491
4713
  ## Your Task
4714
+ DO NOT read source code or use tools. All context you need is above. Plan from the synthesis and dead-end registry.
4715
+
4492
4716
  1. Assess: based on the metrics and synthesis, has the goal been met? Be specific.
4493
4717
  2. If YES \u2014 output the JSON block below with goal_met: true.
4494
4718
  3. If NO \u2014 propose the SINGLE most promising next experiment hypothesis.
@@ -4504,7 +4728,7 @@ CRITICAL: Your LAST line of output MUST be EXACTLY this format (on its own line,
4504
4728
 
4505
4729
  If the goal is met:
4506
4730
  <!-- majlis-json {"goal_met": true, "hypothesis": null} -->`
4507
- }, root);
4731
+ }, root, { maxTurns: 2, tools: [] });
4508
4732
  const structured = result.structured;
4509
4733
  if (structured?.goal_met === true) {
4510
4734
  return null;
@@ -4530,7 +4754,7 @@ If the goal is met:
4530
4754
  ${result.output.slice(-2e3)}
4531
4755
 
4532
4756
  <!-- majlis-json {"goal_met": false, "hypothesis": "your hypothesis"} -->`
4533
- }, root);
4757
+ }, root, { maxTurns: 1, tools: [] });
4534
4758
  if (retry.structured?.hypothesis) return retry.structured.hypothesis;
4535
4759
  warn("Could not extract hypothesis. Using goal as fallback.");
4536
4760
  return goal;
@@ -4548,7 +4772,7 @@ async function createNewExperiment(db, root, hypothesis) {
4548
4772
  const paddedNum = String(num).padStart(3, "0");
4549
4773
  const branch = `exp/${paddedNum}-${finalSlug}`;
4550
4774
  try {
4551
- (0, import_node_child_process5.execSync)(`git checkout -b ${branch}`, {
4775
+ (0, import_node_child_process6.execSync)(`git checkout -b ${branch}`, {
4552
4776
  cwd: root,
4553
4777
  encoding: "utf-8",
4554
4778
  stdio: ["pipe", "pipe", "pipe"]
@@ -4560,24 +4784,24 @@ async function createNewExperiment(db, root, hypothesis) {
4560
4784
  const exp = createExperiment(db, finalSlug, branch, hypothesis, null, null);
4561
4785
  updateExperimentStatus(db, exp.id, "reframed");
4562
4786
  exp.status = "reframed";
4563
- const docsDir = path11.join(root, "docs", "experiments");
4564
- const templatePath = path11.join(docsDir, "_TEMPLATE.md");
4565
- if (fs11.existsSync(templatePath)) {
4566
- const template = fs11.readFileSync(templatePath, "utf-8");
4787
+ const docsDir = path12.join(root, "docs", "experiments");
4788
+ const templatePath = path12.join(docsDir, "_TEMPLATE.md");
4789
+ if (fs12.existsSync(templatePath)) {
4790
+ const template = fs12.readFileSync(templatePath, "utf-8");
4567
4791
  const logContent = template.replace(/\{\{title\}\}/g, hypothesis).replace(/\{\{hypothesis\}\}/g, hypothesis).replace(/\{\{branch\}\}/g, branch).replace(/\{\{status\}\}/g, "classified").replace(/\{\{sub_type\}\}/g, "unclassified").replace(/\{\{date\}\}/g, (/* @__PURE__ */ new Date()).toISOString().split("T")[0]);
4568
- const logPath = path11.join(docsDir, `${paddedNum}-${finalSlug}.md`);
4569
- fs11.writeFileSync(logPath, logContent);
4792
+ const logPath = path12.join(docsDir, `${paddedNum}-${finalSlug}.md`);
4793
+ fs12.writeFileSync(logPath, logContent);
4570
4794
  info(`Created experiment log: docs/experiments/${paddedNum}-${finalSlug}.md`);
4571
4795
  }
4572
4796
  return exp;
4573
4797
  }
4574
- var fs11, path11, import_node_child_process5;
4798
+ var fs12, path12, import_node_child_process6;
4575
4799
  var init_run = __esm({
4576
4800
  "src/commands/run.ts"() {
4577
4801
  "use strict";
4578
- fs11 = __toESM(require("fs"));
4579
- path11 = __toESM(require("path"));
4580
- import_node_child_process5 = require("child_process");
4802
+ fs12 = __toESM(require("fs"));
4803
+ path12 = __toESM(require("path"));
4804
+ import_node_child_process6 = require("child_process");
4581
4805
  init_connection();
4582
4806
  init_queries();
4583
4807
  init_machine();
@@ -4586,17 +4810,18 @@ var init_run = __esm({
4586
4810
  init_spawn();
4587
4811
  init_config();
4588
4812
  init_shutdown();
4813
+ init_git();
4589
4814
  init_format();
4590
4815
  }
4591
4816
  });
4592
4817
 
4593
4818
  // src/swarm/worktree.ts
4594
4819
  function createWorktree(mainRoot, slug, paddedNum) {
4595
- const projectName = path12.basename(mainRoot);
4820
+ const projectName = path13.basename(mainRoot);
4596
4821
  const worktreeName = `${projectName}-swarm-${paddedNum}-${slug}`;
4597
- const worktreePath = path12.join(path12.dirname(mainRoot), worktreeName);
4822
+ const worktreePath = path13.join(path13.dirname(mainRoot), worktreeName);
4598
4823
  const branch = `swarm/${paddedNum}-${slug}`;
4599
- (0, import_node_child_process6.execSync)(`git worktree add ${JSON.stringify(worktreePath)} -b ${branch}`, {
4824
+ (0, import_node_child_process7.execSync)(`git worktree add ${JSON.stringify(worktreePath)} -b ${branch}`, {
4600
4825
  cwd: mainRoot,
4601
4826
  encoding: "utf-8",
4602
4827
  stdio: ["pipe", "pipe", "pipe"]
@@ -4611,43 +4836,43 @@ function createWorktree(mainRoot, slug, paddedNum) {
4611
4836
  };
4612
4837
  }
4613
4838
  function initializeWorktree(mainRoot, worktreePath) {
4614
- const majlisDir = path12.join(worktreePath, ".majlis");
4615
- fs12.mkdirSync(majlisDir, { recursive: true });
4616
- const configSrc = path12.join(mainRoot, ".majlis", "config.json");
4617
- if (fs12.existsSync(configSrc)) {
4618
- fs12.copyFileSync(configSrc, path12.join(majlisDir, "config.json"));
4619
- }
4620
- const agentsSrc = path12.join(mainRoot, ".majlis", "agents");
4621
- if (fs12.existsSync(agentsSrc)) {
4622
- const agentsDst = path12.join(majlisDir, "agents");
4623
- fs12.mkdirSync(agentsDst, { recursive: true });
4624
- for (const file of fs12.readdirSync(agentsSrc)) {
4625
- fs12.copyFileSync(path12.join(agentsSrc, file), path12.join(agentsDst, file));
4839
+ const majlisDir = path13.join(worktreePath, ".majlis");
4840
+ fs13.mkdirSync(majlisDir, { recursive: true });
4841
+ const configSrc = path13.join(mainRoot, ".majlis", "config.json");
4842
+ if (fs13.existsSync(configSrc)) {
4843
+ fs13.copyFileSync(configSrc, path13.join(majlisDir, "config.json"));
4844
+ }
4845
+ const agentsSrc = path13.join(mainRoot, ".majlis", "agents");
4846
+ if (fs13.existsSync(agentsSrc)) {
4847
+ const agentsDst = path13.join(majlisDir, "agents");
4848
+ fs13.mkdirSync(agentsDst, { recursive: true });
4849
+ for (const file of fs13.readdirSync(agentsSrc)) {
4850
+ fs13.copyFileSync(path13.join(agentsSrc, file), path13.join(agentsDst, file));
4626
4851
  }
4627
4852
  }
4628
- const synthSrc = path12.join(mainRoot, "docs", "synthesis");
4629
- if (fs12.existsSync(synthSrc)) {
4630
- const synthDst = path12.join(worktreePath, "docs", "synthesis");
4631
- fs12.mkdirSync(synthDst, { recursive: true });
4632
- for (const file of fs12.readdirSync(synthSrc)) {
4633
- const srcFile = path12.join(synthSrc, file);
4634
- if (fs12.statSync(srcFile).isFile()) {
4635
- fs12.copyFileSync(srcFile, path12.join(synthDst, file));
4853
+ const synthSrc = path13.join(mainRoot, "docs", "synthesis");
4854
+ if (fs13.existsSync(synthSrc)) {
4855
+ const synthDst = path13.join(worktreePath, "docs", "synthesis");
4856
+ fs13.mkdirSync(synthDst, { recursive: true });
4857
+ for (const file of fs13.readdirSync(synthSrc)) {
4858
+ const srcFile = path13.join(synthSrc, file);
4859
+ if (fs13.statSync(srcFile).isFile()) {
4860
+ fs13.copyFileSync(srcFile, path13.join(synthDst, file));
4636
4861
  }
4637
4862
  }
4638
4863
  }
4639
- const templateSrc = path12.join(mainRoot, "docs", "experiments", "_TEMPLATE.md");
4640
- if (fs12.existsSync(templateSrc)) {
4641
- const expDir = path12.join(worktreePath, "docs", "experiments");
4642
- fs12.mkdirSync(expDir, { recursive: true });
4643
- fs12.copyFileSync(templateSrc, path12.join(expDir, "_TEMPLATE.md"));
4864
+ const templateSrc = path13.join(mainRoot, "docs", "experiments", "_TEMPLATE.md");
4865
+ if (fs13.existsSync(templateSrc)) {
4866
+ const expDir = path13.join(worktreePath, "docs", "experiments");
4867
+ fs13.mkdirSync(expDir, { recursive: true });
4868
+ fs13.copyFileSync(templateSrc, path13.join(expDir, "_TEMPLATE.md"));
4644
4869
  }
4645
4870
  const db = openDbAt(worktreePath);
4646
4871
  db.close();
4647
4872
  }
4648
4873
  function cleanupWorktree(mainRoot, wt) {
4649
4874
  try {
4650
- (0, import_node_child_process6.execSync)(`git worktree remove ${JSON.stringify(wt.path)} --force`, {
4875
+ (0, import_node_child_process7.execSync)(`git worktree remove ${JSON.stringify(wt.path)} --force`, {
4651
4876
  cwd: mainRoot,
4652
4877
  encoding: "utf-8",
4653
4878
  stdio: ["pipe", "pipe", "pipe"]
@@ -4656,7 +4881,7 @@ function cleanupWorktree(mainRoot, wt) {
4656
4881
  warn(`Could not remove worktree ${wt.path} \u2014 remove manually.`);
4657
4882
  }
4658
4883
  try {
4659
- (0, import_node_child_process6.execSync)(`git branch -D ${wt.branch}`, {
4884
+ (0, import_node_child_process7.execSync)(`git branch -D ${wt.branch}`, {
4660
4885
  cwd: mainRoot,
4661
4886
  encoding: "utf-8",
4662
4887
  stdio: ["pipe", "pipe", "pipe"]
@@ -4664,7 +4889,7 @@ function cleanupWorktree(mainRoot, wt) {
4664
4889
  } catch {
4665
4890
  }
4666
4891
  try {
4667
- (0, import_node_child_process6.execSync)("git worktree prune", {
4892
+ (0, import_node_child_process7.execSync)("git worktree prune", {
4668
4893
  cwd: mainRoot,
4669
4894
  encoding: "utf-8",
4670
4895
  stdio: ["pipe", "pipe", "pipe"]
@@ -4672,13 +4897,13 @@ function cleanupWorktree(mainRoot, wt) {
4672
4897
  } catch {
4673
4898
  }
4674
4899
  }
4675
- var fs12, path12, import_node_child_process6;
4900
+ var fs13, path13, import_node_child_process7;
4676
4901
  var init_worktree = __esm({
4677
4902
  "src/swarm/worktree.ts"() {
4678
4903
  "use strict";
4679
- fs12 = __toESM(require("fs"));
4680
- path12 = __toESM(require("path"));
4681
- import_node_child_process6 = require("child_process");
4904
+ fs13 = __toESM(require("fs"));
4905
+ path13 = __toESM(require("path"));
4906
+ import_node_child_process7 = require("child_process");
4682
4907
  init_connection();
4683
4908
  init_format();
4684
4909
  }
@@ -4696,12 +4921,12 @@ async function runExperimentInWorktree(wt) {
4696
4921
  exp = createExperiment(db, wt.slug, wt.branch, wt.hypothesis, null, null);
4697
4922
  updateExperimentStatus(db, exp.id, "reframed");
4698
4923
  exp.status = "reframed";
4699
- const templatePath = path13.join(wt.path, "docs", "experiments", "_TEMPLATE.md");
4700
- if (fs13.existsSync(templatePath)) {
4701
- const template = fs13.readFileSync(templatePath, "utf-8");
4924
+ const templatePath = path14.join(wt.path, "docs", "experiments", "_TEMPLATE.md");
4925
+ if (fs14.existsSync(templatePath)) {
4926
+ const template = fs14.readFileSync(templatePath, "utf-8");
4702
4927
  const logContent = template.replace(/\{\{title\}\}/g, wt.hypothesis).replace(/\{\{hypothesis\}\}/g, wt.hypothesis).replace(/\{\{branch\}\}/g, wt.branch).replace(/\{\{status\}\}/g, "classified").replace(/\{\{sub_type\}\}/g, "unclassified").replace(/\{\{date\}\}/g, (/* @__PURE__ */ new Date()).toISOString().split("T")[0]);
4703
- const logPath = path13.join(wt.path, "docs", "experiments", `${wt.paddedNum}-${wt.slug}.md`);
4704
- fs13.writeFileSync(logPath, logContent);
4928
+ const logPath = path14.join(wt.path, "docs", "experiments", `${wt.paddedNum}-${wt.slug}.md`);
4929
+ fs14.writeFileSync(logPath, logContent);
4705
4930
  }
4706
4931
  info(`${label} Starting: ${wt.hypothesis}`);
4707
4932
  while (stepCount < MAX_STEPS) {
@@ -4824,12 +5049,12 @@ function statusToStepName(status2) {
4824
5049
  return null;
4825
5050
  }
4826
5051
  }
4827
- var fs13, path13, MAX_STEPS;
5052
+ var fs14, path14, MAX_STEPS;
4828
5053
  var init_runner = __esm({
4829
5054
  "src/swarm/runner.ts"() {
4830
5055
  "use strict";
4831
- fs13 = __toESM(require("fs"));
4832
- path13 = __toESM(require("path"));
5056
+ fs14 = __toESM(require("fs"));
5057
+ path14 = __toESM(require("path"));
4833
5058
  init_connection();
4834
5059
  init_queries();
4835
5060
  init_machine();
@@ -4983,7 +5208,7 @@ async function swarm(args) {
4983
5208
  MAX_PARALLEL
4984
5209
  );
4985
5210
  try {
4986
- const status2 = (0, import_node_child_process7.execSync)("git status --porcelain", {
5211
+ const status2 = (0, import_node_child_process8.execSync)("git status --porcelain", {
4987
5212
  cwd: root,
4988
5213
  encoding: "utf-8",
4989
5214
  stdio: ["pipe", "pipe", "pipe"]
@@ -5067,7 +5292,7 @@ async function swarm(args) {
5067
5292
  const best = summary.bestExperiment;
5068
5293
  info(`Best experiment: ${best.worktree.slug} (${best.overallGrade})`);
5069
5294
  try {
5070
- (0, import_node_child_process7.execSync)(
5295
+ (0, import_node_child_process8.execSync)(
5071
5296
  `git merge ${best.worktree.branch} --no-ff -m "Merge swarm winner: ${best.worktree.slug}"`,
5072
5297
  { cwd: root, encoding: "utf-8", stdio: ["pipe", "pipe", "pipe"] }
5073
5298
  );
@@ -5115,24 +5340,25 @@ function isMergeable(grade) {
5115
5340
  }
5116
5341
  async function deriveMultipleHypotheses(goal, root, count) {
5117
5342
  const synthesis = truncateContext(
5118
- readFileOrEmpty(path14.join(root, "docs", "synthesis", "current.md")),
5343
+ readFileOrEmpty(path15.join(root, "docs", "synthesis", "current.md")),
5119
5344
  CONTEXT_LIMITS.synthesis
5120
5345
  );
5121
5346
  const fragility = truncateContext(
5122
- readFileOrEmpty(path14.join(root, "docs", "synthesis", "fragility.md")),
5347
+ readFileOrEmpty(path15.join(root, "docs", "synthesis", "fragility.md")),
5123
5348
  CONTEXT_LIMITS.fragility
5124
5349
  );
5125
5350
  const deadEndsDoc = truncateContext(
5126
- readFileOrEmpty(path14.join(root, "docs", "synthesis", "dead-ends.md")),
5351
+ readFileOrEmpty(path15.join(root, "docs", "synthesis", "dead-ends.md")),
5127
5352
  CONTEXT_LIMITS.deadEnds
5128
5353
  );
5354
+ const diagnosis = truncateContext(readLatestDiagnosis(root), CONTEXT_LIMITS.synthesis);
5129
5355
  const db = getDb(root);
5130
5356
  const deadEnds = listAllDeadEnds(db);
5131
5357
  const config = loadConfig(root);
5132
5358
  let metricsOutput = "";
5133
5359
  if (config.metrics?.command) {
5134
5360
  try {
5135
- metricsOutput = (0, import_node_child_process7.execSync)(config.metrics.command, {
5361
+ metricsOutput = (0, import_node_child_process8.execSync)(config.metrics.command, {
5136
5362
  cwd: root,
5137
5363
  encoding: "utf-8",
5138
5364
  timeout: 6e4,
@@ -5147,7 +5373,10 @@ async function deriveMultipleHypotheses(goal, root, count) {
5147
5373
 
5148
5374
  ## Goal
5149
5375
  ${goal}
5150
-
5376
+ ${diagnosis ? `
5377
+ ## Latest Diagnosis Report (PRIORITISE \u2014 deep analysis from diagnostician agent)
5378
+ ${diagnosis}
5379
+ ` : ""}
5151
5380
  ## Current Metrics
5152
5381
  ${metricsOutput || "(no metrics configured)"}
5153
5382
 
@@ -5167,6 +5396,8 @@ Note: [structural] dead ends are HARD CONSTRAINTS \u2014 hypotheses MUST NOT rep
5167
5396
  [procedural] dead ends are process failures \u2014 the approach may still be valid if executed differently.
5168
5397
 
5169
5398
  ## Your Task
5399
+ DO NOT read source code or use tools. All context you need is above. Plan from the synthesis and dead-end registry.
5400
+
5170
5401
  1. Assess: based on the metrics and synthesis, has the goal been met? Be specific.
5171
5402
  2. If YES \u2014 output the JSON block below with goal_met: true.
5172
5403
  3. If NO \u2014 generate exactly ${count} DIVERSE hypotheses for parallel testing.
@@ -5184,7 +5415,7 @@ CRITICAL: Your LAST line of output MUST be EXACTLY this format (on its own line,
5184
5415
 
5185
5416
  If the goal is met:
5186
5417
  <!-- majlis-json {"goal_met": true, "hypotheses": []} -->`
5187
- }, root);
5418
+ }, root, { maxTurns: 2, tools: [] });
5188
5419
  if (result.structured?.goal_met === true) return [];
5189
5420
  if (result.structured?.hypotheses && Array.isArray(result.structured.hypotheses)) {
5190
5421
  return result.structured.hypotheses.filter(
@@ -5207,12 +5438,12 @@ If the goal is met:
5207
5438
  warn("Planner did not return structured hypotheses. Using goal as single hypothesis.");
5208
5439
  return [goal];
5209
5440
  }
5210
- var path14, import_node_child_process7, MAX_PARALLEL, DEFAULT_PARALLEL;
5441
+ var path15, import_node_child_process8, MAX_PARALLEL, DEFAULT_PARALLEL;
5211
5442
  var init_swarm = __esm({
5212
5443
  "src/commands/swarm.ts"() {
5213
5444
  "use strict";
5214
- path14 = __toESM(require("path"));
5215
- import_node_child_process7 = require("child_process");
5445
+ path15 = __toESM(require("path"));
5446
+ import_node_child_process8 = require("child_process");
5216
5447
  init_connection();
5217
5448
  init_queries();
5218
5449
  init_spawn();
@@ -5237,21 +5468,21 @@ async function diagnose(args) {
5237
5468
  const db = getDb(root);
5238
5469
  const focus = args.filter((a) => !a.startsWith("--")).join(" ");
5239
5470
  const keepScripts = args.includes("--keep-scripts");
5240
- const scriptsDir = path15.join(root, ".majlis", "scripts");
5241
- if (!fs14.existsSync(scriptsDir)) {
5242
- fs14.mkdirSync(scriptsDir, { recursive: true });
5471
+ const scriptsDir = path16.join(root, ".majlis", "scripts");
5472
+ if (!fs15.existsSync(scriptsDir)) {
5473
+ fs15.mkdirSync(scriptsDir, { recursive: true });
5243
5474
  }
5244
5475
  header("Deep Diagnosis");
5245
5476
  if (focus) info(`Focus: ${focus}`);
5246
5477
  const dbExport = exportForDiagnostician(db);
5247
- const synthesis = readFileOrEmpty(path15.join(root, "docs", "synthesis", "current.md"));
5248
- const fragility = readFileOrEmpty(path15.join(root, "docs", "synthesis", "fragility.md"));
5249
- const deadEndsDoc = readFileOrEmpty(path15.join(root, "docs", "synthesis", "dead-ends.md"));
5478
+ const synthesis = readFileOrEmpty(path16.join(root, "docs", "synthesis", "current.md"));
5479
+ const fragility = readFileOrEmpty(path16.join(root, "docs", "synthesis", "fragility.md"));
5480
+ const deadEndsDoc = readFileOrEmpty(path16.join(root, "docs", "synthesis", "dead-ends.md"));
5250
5481
  const config = loadConfig(root);
5251
5482
  let metricsOutput = "";
5252
5483
  if (config.metrics?.command) {
5253
5484
  try {
5254
- metricsOutput = (0, import_node_child_process8.execSync)(config.metrics.command, {
5485
+ metricsOutput = (0, import_node_child_process9.execSync)(config.metrics.command, {
5255
5486
  cwd: root,
5256
5487
  encoding: "utf-8",
5257
5488
  timeout: 6e4,
@@ -5297,13 +5528,13 @@ Perform a deep diagnostic analysis of this project. Identify root causes, recurr
5297
5528
  Remember: you may write files ONLY to .majlis/scripts/. You cannot modify project code.`;
5298
5529
  info("Spawning diagnostician (60 turns, full DB access)...");
5299
5530
  const result = await spawnAgent("diagnostician", { taskPrompt }, root);
5300
- const diagnosisDir = path15.join(root, "docs", "diagnosis");
5301
- if (!fs14.existsSync(diagnosisDir)) {
5302
- fs14.mkdirSync(diagnosisDir, { recursive: true });
5531
+ const diagnosisDir = path16.join(root, "docs", "diagnosis");
5532
+ if (!fs15.existsSync(diagnosisDir)) {
5533
+ fs15.mkdirSync(diagnosisDir, { recursive: true });
5303
5534
  }
5304
5535
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-").slice(0, 19);
5305
- const artifactPath = path15.join(diagnosisDir, `diagnosis-${timestamp}.md`);
5306
- fs14.writeFileSync(artifactPath, result.output);
5536
+ const artifactPath = path16.join(diagnosisDir, `diagnosis-${timestamp}.md`);
5537
+ fs15.writeFileSync(artifactPath, result.output);
5307
5538
  info(`Diagnostic report: docs/diagnosis/diagnosis-${timestamp}.md`);
5308
5539
  if (result.structured?.diagnosis) {
5309
5540
  const d = result.structured.diagnosis;
@@ -5316,11 +5547,11 @@ Remember: you may write files ONLY to .majlis/scripts/. You cannot modify projec
5316
5547
  }
5317
5548
  if (!keepScripts) {
5318
5549
  try {
5319
- const files = fs14.readdirSync(scriptsDir);
5550
+ const files = fs15.readdirSync(scriptsDir);
5320
5551
  for (const f of files) {
5321
- fs14.unlinkSync(path15.join(scriptsDir, f));
5552
+ fs15.unlinkSync(path16.join(scriptsDir, f));
5322
5553
  }
5323
- fs14.rmdirSync(scriptsDir);
5554
+ fs15.rmdirSync(scriptsDir);
5324
5555
  info("Cleaned up .majlis/scripts/");
5325
5556
  } catch {
5326
5557
  }
@@ -5330,28 +5561,30 @@ Remember: you may write files ONLY to .majlis/scripts/. You cannot modify projec
5330
5561
  if (result.truncated) {
5331
5562
  warn("Diagnostician was truncated (hit 60 turn limit).");
5332
5563
  }
5564
+ autoCommit(root, `diagnosis: ${focus || "general"}`);
5333
5565
  success("Diagnosis complete.");
5334
5566
  }
5335
- var fs14, path15, import_node_child_process8;
5567
+ var fs15, path16, import_node_child_process9;
5336
5568
  var init_diagnose = __esm({
5337
5569
  "src/commands/diagnose.ts"() {
5338
5570
  "use strict";
5339
- fs14 = __toESM(require("fs"));
5340
- path15 = __toESM(require("path"));
5341
- import_node_child_process8 = require("child_process");
5571
+ fs15 = __toESM(require("fs"));
5572
+ path16 = __toESM(require("path"));
5573
+ import_node_child_process9 = require("child_process");
5342
5574
  init_connection();
5343
5575
  init_queries();
5344
5576
  init_spawn();
5345
5577
  init_config();
5578
+ init_git();
5346
5579
  init_format();
5347
5580
  }
5348
5581
  });
5349
5582
 
5350
5583
  // src/cli.ts
5351
- var fs15 = __toESM(require("fs"));
5352
- var path16 = __toESM(require("path"));
5353
- var VERSION = JSON.parse(
5354
- fs15.readFileSync(path16.join(__dirname, "..", "package.json"), "utf-8")
5584
+ var fs16 = __toESM(require("fs"));
5585
+ var path17 = __toESM(require("path"));
5586
+ var VERSION2 = JSON.parse(
5587
+ fs16.readFileSync(path17.join(__dirname, "..", "package.json"), "utf-8")
5355
5588
  ).version;
5356
5589
  async function main() {
5357
5590
  let sigintCount = 0;
@@ -5364,7 +5597,7 @@ async function main() {
5364
5597
  });
5365
5598
  const args = process.argv.slice(2);
5366
5599
  if (args.includes("--version") || args.includes("-v")) {
5367
- console.log(VERSION);
5600
+ console.log(VERSION2);
5368
5601
  return;
5369
5602
  }
5370
5603
  if (args.includes("--help") || args.includes("-h") || args.length === 0) {
@@ -5381,6 +5614,11 @@ async function main() {
5381
5614
  await init2(rest);
5382
5615
  break;
5383
5616
  }
5617
+ case "upgrade": {
5618
+ const { upgrade: upgrade2 } = await Promise.resolve().then(() => (init_upgrade(), upgrade_exports));
5619
+ await upgrade2(rest);
5620
+ break;
5621
+ }
5384
5622
  case "status": {
5385
5623
  const { status: status2 } = await Promise.resolve().then(() => (init_status(), status_exports));
5386
5624
  await status2(isJson);
@@ -5489,12 +5727,13 @@ async function main() {
5489
5727
  }
5490
5728
  function printHelp() {
5491
5729
  console.log(`
5492
- majlis v${VERSION} \u2014 Structured multi-agent problem solving
5730
+ majlis v${VERSION2} \u2014 Structured multi-agent problem solving
5493
5731
 
5494
5732
  Usage: majlis <command> [options]
5495
5733
 
5496
5734
  Lifecycle:
5497
5735
  init Initialize Majlis in current project
5736
+ upgrade Sync agents, commands, hooks from CLI version
5498
5737
  status [--json] Show experiment states and cycle position
5499
5738
 
5500
5739
  Experiments: