oh-my-opencode 2.12.4 → 2.13.1

This diff shows the changes between publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
package/dist/index.js CHANGED
@@ -7925,12 +7925,12 @@ var require_dist = __commonJS((exports, module) => {
7925
7925
  throw new Error(`Unknown format "${name}"`);
7926
7926
  return f;
7927
7927
  };
7928
- function addFormats(ajv, list, fs10, exportName) {
7928
+ function addFormats(ajv, list, fs11, exportName) {
7929
7929
  var _a;
7930
7930
  var _b;
7931
7931
  (_a = (_b = ajv.opts.code).formats) !== null && _a !== undefined || (_b.formats = (0, codegen_1._)`require("ajv-formats/dist/formats").${exportName}`);
7932
7932
  for (const f of list)
7933
- ajv.addFormat(f, fs10[f]);
7933
+ ajv.addFormat(f, fs11[f]);
7934
7934
  }
7935
7935
  module.exports = exports = formatsPlugin;
7936
7936
  Object.defineProperty(exports, "__esModule", { value: true });
@@ -7941,7 +7941,7 @@ var require_dist = __commonJS((exports, module) => {
7941
7941
  var require_windows = __commonJS((exports, module) => {
7942
7942
  module.exports = isexe;
7943
7943
  isexe.sync = sync;
7944
- var fs10 = __require("fs");
7944
+ var fs11 = __require("fs");
7945
7945
  function checkPathExt(path7, options) {
7946
7946
  var pathext = options.pathExt !== undefined ? options.pathExt : process.env.PATHEXT;
7947
7947
  if (!pathext) {
@@ -7966,12 +7966,12 @@ var require_windows = __commonJS((exports, module) => {
7966
7966
  return checkPathExt(path7, options);
7967
7967
  }
7968
7968
  function isexe(path7, options, cb) {
7969
- fs10.stat(path7, function(er, stat2) {
7969
+ fs11.stat(path7, function(er, stat2) {
7970
7970
  cb(er, er ? false : checkStat(stat2, path7, options));
7971
7971
  });
7972
7972
  }
7973
7973
  function sync(path7, options) {
7974
- return checkStat(fs10.statSync(path7), path7, options);
7974
+ return checkStat(fs11.statSync(path7), path7, options);
7975
7975
  }
7976
7976
  });
7977
7977
 
@@ -7979,14 +7979,14 @@ var require_windows = __commonJS((exports, module) => {
7979
7979
  var require_mode = __commonJS((exports, module) => {
7980
7980
  module.exports = isexe;
7981
7981
  isexe.sync = sync;
7982
- var fs10 = __require("fs");
7982
+ var fs11 = __require("fs");
7983
7983
  function isexe(path7, options, cb) {
7984
- fs10.stat(path7, function(er, stat2) {
7984
+ fs11.stat(path7, function(er, stat2) {
7985
7985
  cb(er, er ? false : checkStat(stat2, options));
7986
7986
  });
7987
7987
  }
7988
7988
  function sync(path7, options) {
7989
- return checkStat(fs10.statSync(path7), options);
7989
+ return checkStat(fs11.statSync(path7), options);
7990
7990
  }
7991
7991
  function checkStat(stat2, options) {
7992
7992
  return stat2.isFile() && checkMode(stat2, options);
@@ -8008,7 +8008,7 @@ var require_mode = __commonJS((exports, module) => {
8008
8008
 
8009
8009
  // node_modules/isexe/index.js
8010
8010
  var require_isexe = __commonJS((exports, module) => {
8011
- var fs10 = __require("fs");
8011
+ var fs11 = __require("fs");
8012
8012
  var core3;
8013
8013
  if (process.platform === "win32" || global.TESTING_WINDOWS) {
8014
8014
  core3 = require_windows();
@@ -8247,16 +8247,16 @@ var require_shebang_command = __commonJS((exports, module) => {
8247
8247
 
8248
8248
  // node_modules/cross-spawn/lib/util/readShebang.js
8249
8249
  var require_readShebang = __commonJS((exports, module) => {
8250
- var fs10 = __require("fs");
8250
+ var fs11 = __require("fs");
8251
8251
  var shebangCommand = require_shebang_command();
8252
8252
  function readShebang(command) {
8253
8253
  const size = 150;
8254
8254
  const buffer = Buffer.alloc(size);
8255
8255
  let fd;
8256
8256
  try {
8257
- fd = fs10.openSync(command, "r");
8258
- fs10.readSync(fd, buffer, 0, size, 0);
8259
- fs10.closeSync(fd);
8257
+ fd = fs11.openSync(command, "r");
8258
+ fs11.readSync(fd, buffer, 0, size, 0);
8259
+ fs11.closeSync(fd);
8260
8260
  } catch (e) {}
8261
8261
  return shebangCommand(buffer.toString());
8262
8262
  }
@@ -15544,6 +15544,7 @@ function isHookDisabled(config, hookType) {
15544
15544
  }
15545
15545
  // src/shared/file-utils.ts
15546
15546
  import { lstatSync, readlinkSync } from "fs";
15547
+ import { promises as fs4 } from "fs";
15547
15548
  import { resolve as resolve3 } from "path";
15548
15549
  function isMarkdownFile(entry) {
15549
15550
  return !entry.name.startsWith(".") && entry.name.endsWith(".md") && entry.isFile();
@@ -15559,20 +15560,32 @@ function resolveSymlink(filePath) {
15559
15560
  return filePath;
15560
15561
  }
15561
15562
  }
15563
+ async function resolveSymlinkAsync(filePath) {
15564
+ try {
15565
+ const stats = await fs4.lstat(filePath);
15566
+ if (stats.isSymbolicLink()) {
15567
+ const linkTarget = await fs4.readlink(filePath);
15568
+ return resolve3(filePath, "..", linkTarget);
15569
+ }
15570
+ return filePath;
15571
+ } catch {
15572
+ return filePath;
15573
+ }
15574
+ }
15562
15575
  // src/shared/config-path.ts
15563
15576
  import * as path3 from "path";
15564
15577
  import * as os3 from "os";
15565
- import * as fs4 from "fs";
15578
+ import * as fs5 from "fs";
15566
15579
  function getUserConfigDir() {
15567
15580
  if (process.platform === "win32") {
15568
15581
  const crossPlatformDir = path3.join(os3.homedir(), ".config");
15569
15582
  const crossPlatformConfigPath = path3.join(crossPlatformDir, "opencode", "oh-my-opencode.json");
15570
15583
  const appdataDir = process.env.APPDATA || path3.join(os3.homedir(), "AppData", "Roaming");
15571
15584
  const appdataConfigPath = path3.join(appdataDir, "opencode", "oh-my-opencode.json");
15572
- if (fs4.existsSync(crossPlatformConfigPath)) {
15585
+ if (fs5.existsSync(crossPlatformConfigPath)) {
15573
15586
  return crossPlatformDir;
15574
15587
  }
15575
- if (fs4.existsSync(appdataConfigPath)) {
15588
+ if (fs5.existsSync(appdataConfigPath)) {
15576
15589
  return appdataDir;
15577
15590
  }
15578
15591
  return crossPlatformDir;
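
The hunk above adds an fs.promises-based counterpart to the synchronous resolveSymlink in src/shared/file-utils.ts. A minimal, self-contained TypeScript sketch of that behavior follows; the resolveAll helper is hypothetical and only illustrates how the async variant composes with Promise.all:

```ts
import { promises as fs } from "fs";
import { resolve } from "path";

// Follow one level of symlink; on any error (missing file, permissions,
// broken link) fall back to the original path, mirroring the sync version.
async function resolveSymlinkAsync(filePath: string): Promise<string> {
  try {
    const stats = await fs.lstat(filePath);
    if (stats.isSymbolicLink()) {
      const linkTarget = await fs.readlink(filePath);
      // A relative link target is resolved against the symlink's parent directory.
      return resolve(filePath, "..", linkTarget);
    }
    return filePath;
  } catch {
    return filePath;
  }
}

// Hypothetical helper (not part of the package): resolve several entries concurrently.
async function resolveAll(paths: string[]): Promise<string[]> {
  return Promise.all(paths.map(resolveSymlinkAsync));
}
```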
@@ -16469,7 +16482,7 @@ function detectConfigFile(basePath) {
16469
16482
  return { format: "none", path: jsonPath };
16470
16483
  }
16471
16484
  // src/shared/migration.ts
16472
- import * as fs5 from "fs";
16485
+ import * as fs6 from "fs";
16473
16486
  var AGENT_NAME_MAP = {
16474
16487
  omo: "Sisyphus",
16475
16488
  OmO: "Sisyphus",
@@ -16535,7 +16548,7 @@ function migrateConfigFile(configPath, rawConfig) {
16535
16548
  }
16536
16549
  if (needsWrite) {
16537
16550
  try {
16538
- fs5.writeFileSync(configPath, JSON.stringify(rawConfig, null, 2) + `
16551
+ fs6.writeFileSync(configPath, JSON.stringify(rawConfig, null, 2) + `
16539
16552
  `, "utf-8");
16540
16553
  log(`Migrated config file: ${configPath}`);
16541
16554
  } catch (err) {
@@ -17522,96 +17535,186 @@ setInterval(() => {
17522
17535
  // src/hooks/keyword-detector/constants.ts
17523
17536
  var CODE_BLOCK_PATTERN2 = /```[\s\S]*?```/g;
17524
17537
  var INLINE_CODE_PATTERN2 = /`[^`]+`/g;
17525
- var KEYWORD_DETECTORS = [
17526
- {
17527
- pattern: /(ultrawork|ulw)/i,
17528
- message: `<ultrawork-mode>
17538
+ var ULTRAWORK_PLANNER_SECTION = `## CRITICAL: YOU ARE A PLANNER, NOT AN IMPLEMENTER
17539
+
17540
+ **IDENTITY CONSTRAINT (NON-NEGOTIABLE):**
17541
+ You ARE the planner. You ARE NOT an implementer. You DO NOT write code. You DO NOT execute tasks.
17542
+
17543
+ **TOOL RESTRICTIONS (SYSTEM-ENFORCED):**
17544
+ | Tool | Allowed | Blocked |
17545
+ |------|---------|---------|
17546
+ | Write/Edit | \`.sisyphus/**/*.md\` ONLY | Everything else |
17547
+ | Read | All files | - |
17548
+ | Bash | Research commands only | Implementation commands |
17549
+ | sisyphus_task | explore, librarian | - |
17550
+
17551
+ **IF YOU TRY TO WRITE/EDIT OUTSIDE \`.sisyphus/\`:**
17552
+ - System will BLOCK your action
17553
+ - You will receive an error
17554
+ - DO NOT retry - you are not supposed to implement
17555
+
17556
+ **YOUR ONLY WRITABLE PATHS:**
17557
+ - \`.sisyphus/plans/*.md\` - Final work plans
17558
+ - \`.sisyphus/drafts/*.md\` - Working drafts during interview
17559
+
17560
+ **WHEN USER ASKS YOU TO IMPLEMENT:**
17561
+ REFUSE. Say: "I'm a planner. I create work plans, not implementations. Run \`/start-work\` after I finish planning."
17562
+
17563
+ ---
17564
+
17565
+ ## CONTEXT GATHERING (MANDATORY BEFORE PLANNING)
17566
+
17567
+ You ARE the planner. Your job: create bulletproof work plans.
17568
+ **Before drafting ANY plan, gather context via explore/librarian agents.**
17569
+
17570
+ ### Research Protocol
17571
+ 1. **Fire parallel background agents** for comprehensive context:
17572
+ \`\`\`
17573
+ sisyphus_task(agent="explore", prompt="Find existing patterns for [topic] in codebase", background=true)
17574
+ sisyphus_task(agent="explore", prompt="Find test infrastructure and conventions", background=true)
17575
+ sisyphus_task(agent="librarian", prompt="Find official docs and best practices for [technology]", background=true)
17576
+ \`\`\`
17577
+ 2. **Wait for results** before planning - rushed plans fail
17578
+ 3. **Synthesize findings** into informed requirements
17579
+
17580
+ ### What to Research
17581
+ - Existing codebase patterns and conventions
17582
+ - Test infrastructure (TDD possible?)
17583
+ - External library APIs and constraints
17584
+ - Similar implementations in OSS (via librarian)
17585
+
17586
+ **NEVER plan blind. Context first, plan second.**`;
17587
+ function isPlannerAgent(agentName) {
17588
+ if (!agentName)
17589
+ return false;
17590
+ const lowerName = agentName.toLowerCase();
17591
+ return lowerName.includes("prometheus") || lowerName.includes("planner") || lowerName === "plan";
17592
+ }
17593
+ function getUltraworkMessage(agentName) {
17594
+ const isPlanner = isPlannerAgent(agentName);
17595
+ if (isPlanner) {
17596
+ return `<ultrawork-mode>
17597
+
17598
+ **MANDATORY**: You MUST say "ULTRAWORK MODE ENABLED!" to the user as your first response when this mode activates. This is non-negotiable.
17599
+
17600
+ ${ULTRAWORK_PLANNER_SECTION}
17601
+
17602
+ </ultrawork-mode>
17529
17603
 
17530
- ## TODO IS YOUR LIFELINE (NON-NEGOTIABLE)
17604
+ ---
17605
+
17606
+ `;
17607
+ }
17608
+ return `<ultrawork-mode>
17609
+
17610
+ **MANDATORY**: You MUST say "ULTRAWORK MODE ENABLED!" to the user as your first response when this mode activates. This is non-negotiable.
17611
+
17612
+ [CODE RED] Maximum precision required. Ultrathink before acting.
17613
+
17614
+ YOU MUST LEVERAGE ALL AVAILABLE AGENTS TO THEIR FULLEST POTENTIAL.
17615
+ TELL THE USER WHAT AGENTS YOU WILL LEVERAGE NOW TO SATISFY USER'S REQUEST.
17531
17616
 
17532
- **USE TodoWrite OBSESSIVELY. This is the #1 most important tool.**
17617
+ ## AGENT UTILIZATION PRINCIPLES (by capability, not by name)
17618
+ - **Codebase Exploration**: Spawn exploration agents using BACKGROUND TASKS for file patterns, internal implementations, project structure
17619
+ - **Documentation & References**: Use librarian-type agents via BACKGROUND TASKS for API references, examples, external library docs
17620
+ - **Planning & Strategy**: NEVER plan yourself - ALWAYS spawn a dedicated planning agent for work breakdown
17621
+ - **High-IQ Reasoning**: Leverage specialized agents for architecture decisions, code review, strategic planning
17622
+ - **Frontend/UI Tasks**: Delegate to UI-specialized agents for design and implementation
17533
17623
 
17534
- ### TODO Rules
17535
- 1. **BEFORE any action**: Create TODOs FIRST. Break down into atomic, granular steps.
17536
- 2. **Be excessively detailed**: 10 small TODOs > 3 vague TODOs. Err on the side of too many.
17537
- 3. **Real-time updates**: Mark \`in_progress\` before starting, \`completed\` IMMEDIATELY after. NEVER batch.
17538
- 4. **One at a time**: Only ONE TODO should be \`in_progress\` at any moment.
17539
- 5. **Sub-tasks**: Complex TODO? Break it into sub-TODOs. Keep granularity high.
17540
- 6. **Questions too**: User asks a question? TODO: "Answer with evidence: [question]"
17624
+ ## EXECUTION RULES
17625
+ - **TODO**: Track EVERY step. Mark complete IMMEDIATELY after each.
17626
+ - **PARALLEL**: Fire independent agent calls simultaneously via background_task - NEVER wait sequentially.
17627
+ - **BACKGROUND FIRST**: Use background_task for exploration/research agents (10+ concurrent if needed).
17628
+ - **VERIFY**: Re-read request after completion. Check ALL requirements met before reporting done.
17629
+ - **DELEGATE**: Don't do everything yourself - orchestrate specialized agents for their strengths.
17541
17630
 
17542
- ### Example TODO Granularity
17543
- BAD: "Implement user auth"
17544
- GOOD:
17545
- - "Read existing auth patterns in codebase"
17546
- - "Create auth schema types"
17547
- - "Implement login endpoint"
17548
- - "Implement token validation middleware"
17549
- - "Add auth tests - login success case"
17550
- - "Add auth tests - login failure case"
17551
- - "Verify LSP diagnostics clean"
17631
+ ## WORKFLOW
17632
+ 1. Analyze the request and identify required capabilities
17633
+ 2. Spawn exploration/librarian agents via background_task in PARALLEL (10+ if needed)
17634
+ 3. Always Use Plan agent with gathered context to create detailed work breakdown
17635
+ 4. Execute with continuous verification against original requirements
17552
17636
 
17553
- **YOUR WORK IS INVISIBLE WITHOUT TODOs. USE THEM.**
17637
+ ## VERIFICATION GUARANTEE (NON-NEGOTIABLE)
17554
17638
 
17555
- ## TDD WORKFLOW (MANDATORY when tests exist)
17639
+ **NOTHING is "done" without PROOF it works.**
17556
17640
 
17557
- Check for test infrastructure FIRST. If exists, follow strictly:
17641
+ ### Pre-Implementation: Define Success Criteria
17558
17642
 
17559
- 1. **RED**: Write failing test FIRST \u2192 \`bun test\` must FAIL
17560
- 2. **GREEN**: Write MINIMAL code to pass \u2192 \`bun test\` must PASS
17561
- 3. **REFACTOR**: Clean up, tests stay green \u2192 \`bun test\` still PASS
17562
- 4. **REPEAT**: Next test case, loop until complete
17643
+ BEFORE writing ANY code, you MUST define:
17563
17644
 
17564
- **NEVER write implementation before test. NEVER delete failing tests.**
17645
+ | Criteria Type | Description | Example |
17646
+ |---------------|-------------|---------|
17647
+ | **Functional** | What specific behavior must work | "Button click triggers API call" |
17648
+ | **Observable** | What can be measured/seen | "Console shows 'success', no errors" |
17649
+ | **Pass/Fail** | Binary, no ambiguity | "Returns 200 OK" not "should work" |
17565
17650
 
17566
- ## AGENT DEPLOYMENT
17651
+ Write these criteria explicitly. Share with user if scope is non-trivial.
17567
17652
 
17568
- Fire available agents in PARALLEL via background tasks. Use explore/librarian agents liberally (multiple concurrent if needed).
17653
+ ### Test Plan Template (MANDATORY for non-trivial tasks)
17569
17654
 
17570
- ## EVIDENCE-BASED ANSWERS
17655
+ \`\`\`
17656
+ ## Test Plan
17657
+ ### Objective: [What we're verifying]
17658
+ ### Prerequisites: [Setup needed]
17659
+ ### Test Cases:
17660
+ 1. [Test Name]: [Input] \u2192 [Expected Output] \u2192 [How to verify]
17661
+ 2. ...
17662
+ ### Success Criteria: ALL test cases pass
17663
+ ### How to Execute: [Exact commands/steps]
17664
+ \`\`\`
17571
17665
 
17572
- - Every claim: code snippet + file path + line number
17573
- - No "I think..." - find and SHOW actual code
17574
- - Local search fails? \u2192 librarian for external sources
17575
- - **NEVER acceptable**: "I couldn't find it"
17666
+ ### Execution & Evidence Requirements
17576
17667
 
17577
- ## ZERO TOLERANCE FOR SHORTCUTS (RIGOROUS & HONEST EXECUTION)
17668
+ | Phase | Action | Required Evidence |
17669
+ |-------|--------|-------------------|
17670
+ | **Build** | Run build command | Exit code 0, no errors |
17671
+ | **Test** | Execute test suite | All tests pass (screenshot/output) |
17672
+ | **Manual Verify** | Test the actual feature | Demonstrate it works (describe what you observed) |
17673
+ | **Regression** | Ensure nothing broke | Existing tests still pass |
17578
17674
 
17579
- **CORE PRINCIPLE**: Execute user's ORIGINAL INTENT with maximum rigor. No shortcuts. No compromises. No matter how large the task.
17675
+ **WITHOUT evidence = NOT verified = NOT done.**
17580
17676
 
17581
- ### ABSOLUTE PROHIBITIONS
17582
- | Violation | Why It's Forbidden |
17583
- |-----------|-------------------|
17584
- | **Mocking/Stubbing** | Never use mocks, stubs, or fake implementations unless explicitly requested. Real implementation only. |
17585
- | **Scope Reduction** | Never make "demo", "skeleton", "simplified", "basic", "minimal" versions. Deliver FULL implementation. |
17586
- | **Partial Completion** | Never stop at 60-80% saying "you can extend this...", "as an exercise...", "you can add...". Finish 100%. |
17587
- | **Lazy Placeholders** | Never use "// TODO", "...", "etc.", "and so on" in actual code. Complete everything. |
17588
- | **Assumed Shortcuts** | Never skip requirements deemed "optional" or "can be added later". All requirements are mandatory. |
17589
- | **Test Deletion** | Never delete or skip failing tests. Fix the code, not the tests. |
17590
- | **Evidence-Free Claims** | Never say "I think...", "probably...", "should work...". Show actual code/output. |
17677
+ ### TDD Workflow (when test infrastructure exists)
17591
17678
 
17592
- ### RIGOROUS EXECUTION MANDATE
17593
- 1. **Parse Original Intent**: What did the user ACTUALLY want? Not what's convenient. The REAL, COMPLETE request.
17594
- 2. **No Task Too Large**: If the task requires 100 files, modify 100 files. If it needs 1000 lines, write 1000 lines. Size is irrelevant.
17595
- 3. **Honest Assessment**: If you cannot complete something, say so BEFORE starting. Don't fake completion.
17596
- 4. **Evidence-Based Verification**: Every claim backed by code snippets, file paths, line numbers, and actual outputs.
17597
- 5. **Complete Verification**: Re-read original request after completion. Check EVERY requirement was met.
17679
+ 1. **SPEC**: Define what "working" means (success criteria above)
17680
+ 2. **RED**: Write failing test \u2192 Run it \u2192 Confirm it FAILS
17681
+ 3. **GREEN**: Write minimal code \u2192 Run test \u2192 Confirm it PASSES
17682
+ 4. **REFACTOR**: Clean up \u2192 Tests MUST stay green
17683
+ 5. **VERIFY**: Run full test suite, confirm no regressions
17684
+ 6. **EVIDENCE**: Report what you ran and what output you saw
17598
17685
 
17599
- ### FAILURE RECOVERY
17600
- If you realize you've taken shortcuts:
17601
- 1. STOP immediately
17602
- 2. Identify what you skipped/faked
17603
- 3. Create TODOs for ALL remaining work
17604
- 4. Execute to TRUE completion - not "good enough"
17686
+ ### Verification Anti-Patterns (BLOCKING)
17687
+
17688
+ | Violation | Why It Fails |
17689
+ |-----------|--------------|
17690
+ | "It should work now" | No evidence. Run it. |
17691
+ | "I added the tests" | Did they pass? Show output. |
17692
+ | "Fixed the bug" | How do you know? What did you test? |
17693
+ | "Implementation complete" | Did you verify against success criteria? |
17694
+ | Skipping test execution | Tests exist to be RUN, not just written |
17605
17695
 
17606
- **THE USER ASKED FOR X. DELIVER EXACTLY X. COMPLETELY. HONESTLY. NO MATTER THE SIZE.**
17696
+ **CLAIM NOTHING WITHOUT PROOF. EXECUTE. VERIFY. SHOW EVIDENCE.**
17607
17697
 
17608
- ## SUCCESS = All TODOs Done + All Requirements Met + Evidence Provided
17698
+ ## ZERO TOLERANCE FAILURES
17699
+ - **NO Scope Reduction**: Never make "demo", "skeleton", "simplified", "basic" versions - deliver FULL implementation
17700
+ - **NO MockUp Work**: When user asked you to do "port A", you must "port A", fully, 100%. No Extra feature, No reduced feature, no mock data, fully working 100% port.
17701
+ - **NO Partial Completion**: Never stop at 60-80% saying "you can extend this..." - finish 100%
17702
+ - **NO Assumed Shortcuts**: Never skip requirements you deem "optional" or "can be added later"
17703
+ - **NO Premature Stopping**: Never declare done until ALL TODOs are completed and verified
17704
+ - **NO TEST DELETION**: Never delete or skip failing tests to make the build pass. Fix the code, not the tests.
17705
+
17706
+ THE USER ASKED FOR X. DELIVER EXACTLY X. NOT A SUBSET. NOT A DEMO. NOT A STARTING POINT.
17609
17707
 
17610
17708
  </ultrawork-mode>
17611
17709
 
17612
17710
  ---
17613
17711
 
17614
- `
17712
+ `;
17713
+ }
17714
+ var KEYWORD_DETECTORS = [
17715
+ {
17716
+ pattern: /(ultrawork|ulw)/i,
17717
+ message: getUltraworkMessage
17615
17718
  },
17616
17719
  {
17617
17720
  pattern: /\b(search|find|locate|lookup|look\s*up|explore|discover|scan|grep|query|browse|detect|trace|seek|track|pinpoint|hunt)\b|where\s+is|show\s+me|list\s+all|\uAC80\uC0C9|\uCC3E\uC544|\uD0D0\uC0C9|\uC870\uD68C|\uC2A4\uCE94|\uC11C\uCE58|\uB4A4\uC838|\uCC3E\uAE30|\uC5B4\uB514|\uCD94\uC801|\uD0D0\uC9C0|\uCC3E\uC544\uBD10|\uCC3E\uC544\uB0B4|\uBCF4\uC5EC\uC918|\uBAA9\uB85D|\u691C\u7D22|\u63A2\u3057\u3066|\u898B\u3064\u3051\u3066|\u30B5\u30FC\u30C1|\u63A2\u7D22|\u30B9\u30AD\u30E3\u30F3|\u3069\u3053|\u767A\u898B|\u635C\u7D22|\u898B\u3064\u3051\u51FA\u3059|\u4E00\u89A7|\u641C\u7D22|\u67E5\u627E|\u5BFB\u627E|\u67E5\u8BE2|\u68C0\u7D22|\u5B9A\u4F4D|\u626B\u63CF|\u53D1\u73B0|\u5728\u54EA\u91CC|\u627E\u51FA\u6765|\u5217\u51FA|t\u00ECm ki\u1EBFm|tra c\u1EE9u|\u0111\u1ECBnh v\u1ECB|qu\u00E9t|ph\u00E1t hi\u1EC7n|truy t\u00ECm|t\u00ECm ra|\u1EDF \u0111\u00E2u|li\u1EC7t k\u00EA/i,
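
The rewritten keyword-detector hunk above routes the ultrawork message through isPlannerAgent, which switches between the planner and executor variants by agent name. A small sketch of that matching logic, with hypothetical example calls:

```ts
// Planner detection as added in this hunk: planner-style agents get the
// planner-specific ultrawork message, everything else gets the executor variant.
function isPlannerAgent(agentName?: string): boolean {
  if (!agentName) return false;
  const lowerName = agentName.toLowerCase();
  return lowerName.includes("prometheus") || lowerName.includes("planner") || lowerName === "plan";
}

// Hypothetical checks:
console.log(isPlannerAgent("Prometheus")); // true
console.log(isPlannerAgent("plan"));       // true
console.log(isPlannerAgent("Sisyphus"));   // false
console.log(isPlannerAgent(undefined));    // false
```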
@@ -17643,13 +17746,16 @@ SYNTHESIZE findings before proceeding.`
17643
17746
  function removeCodeBlocks2(text) {
17644
17747
  return text.replace(CODE_BLOCK_PATTERN2, "").replace(INLINE_CODE_PATTERN2, "");
17645
17748
  }
17646
- function detectKeywordsWithType(text) {
17749
+ function resolveMessage(message, agentName) {
17750
+ return typeof message === "function" ? message(agentName) : message;
17751
+ }
17752
+ function detectKeywordsWithType(text, agentName) {
17647
17753
  const textWithoutCode = removeCodeBlocks2(text);
17648
17754
  const types3 = ["ultrawork", "search", "analyze"];
17649
17755
  return KEYWORD_DETECTORS.map(({ pattern, message }, index) => ({
17650
17756
  matches: pattern.test(textWithoutCode),
17651
17757
  type: types3[index],
17652
- message
17758
+ message: resolveMessage(message, agentName)
17653
17759
  })).filter((result) => result.matches).map(({ type: type2, message }) => ({ type: type2, message }));
17654
17760
  }
17655
17761
  function extractPromptText2(parts) {
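
This hunk lets a detector's message be either a static string or a function of the current agent name, resolved via resolveMessage. A typed sketch of that shape, assuming simplified types (the bundled source is plain JavaScript) and hypothetical example detectors:

```ts
type KeywordMessage = string | ((agentName?: string) => string);

interface KeywordDetector {
  pattern: RegExp;
  message: KeywordMessage;
}

// Resolve a message that may depend on the active agent.
function resolveMessage(message: KeywordMessage, agentName?: string): string {
  return typeof message === "function" ? message(agentName) : message;
}

// Hypothetical detectors, not the package's real list.
const detectors: KeywordDetector[] = [
  { pattern: /(ultrawork|ulw)/i, message: (agent?: string) => `ultrawork guidance for ${agent ?? "default agent"}` },
  { pattern: /\bsearch\b/i, message: "static search guidance" },
];

// Return the resolved messages for every detector whose pattern matches.
function detect(text: string, agentName?: string): string[] {
  return detectors
    .filter(({ pattern }) => pattern.test(text))
    .map(({ message }) => resolveMessage(message, agentName));
}
```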
@@ -17661,13 +17767,14 @@ function createKeywordDetectorHook(ctx) {
17661
17767
  return {
17662
17768
  "chat.message": async (input, output) => {
17663
17769
  const promptText = extractPromptText2(output.parts);
17664
- const detectedKeywords = detectKeywordsWithType(removeCodeBlocks2(promptText));
17770
+ const detectedKeywords = detectKeywordsWithType(removeCodeBlocks2(promptText), input.agent);
17665
17771
  if (detectedKeywords.length === 0) {
17666
17772
  return;
17667
17773
  }
17668
17774
  const hasUltrawork = detectedKeywords.some((k) => k.type === "ultrawork");
17669
17775
  if (hasUltrawork) {
17670
17776
  log(`[keyword-detector] Ultrawork mode activated`, { sessionID: input.sessionID });
17777
+ output.message.variant = "max";
17671
17778
  ctx.client.tui.showToast({
17672
17779
  body: {
17673
17780
  title: "Ultrawork Mode Activated",
@@ -17759,7 +17866,7 @@ function createClaudeCodeHooksHook(ctx, config = {}) {
17759
17866
  log("chat.message injection skipped - interrupted during hooks", { sessionID: input.sessionID });
17760
17867
  return;
17761
17868
  }
17762
- const detectedKeywords = detectKeywordsWithType(removeCodeBlocks2(prompt));
17869
+ const detectedKeywords = detectKeywordsWithType(removeCodeBlocks2(prompt), input.agent);
17763
17870
  const keywordMessages = detectedKeywords.map((k) => k.message);
17764
17871
  if (keywordMessages.length > 0) {
17765
17872
  log("[claude-code-hooks] Detected keywords", {
@@ -18490,14 +18597,14 @@ function createBackgroundNotificationHook(manager) {
18490
18597
  };
18491
18598
  }
18492
18599
  // src/hooks/auto-update-checker/checker.ts
18493
- import * as fs7 from "fs";
18600
+ import * as fs8 from "fs";
18494
18601
  import * as path5 from "path";
18495
18602
  import { fileURLToPath } from "url";
18496
18603
 
18497
18604
  // src/hooks/auto-update-checker/constants.ts
18498
18605
  import * as path4 from "path";
18499
18606
  import * as os4 from "os";
18500
- import * as fs6 from "fs";
18607
+ import * as fs7 from "fs";
18501
18608
  var PACKAGE_NAME = "oh-my-opencode";
18502
18609
  var NPM_REGISTRY_URL = `https://registry.npmjs.org/-/package/${PACKAGE_NAME}/dist-tags`;
18503
18610
  var NPM_FETCH_TIMEOUT = 5000;
@@ -18516,7 +18623,7 @@ function getUserConfigDir2() {
18516
18623
  const appdataDir = process.env.APPDATA ?? path4.join(os4.homedir(), "AppData", "Roaming");
18517
18624
  const crossPlatformConfig = path4.join(crossPlatformDir, "opencode", "opencode.json");
18518
18625
  const crossPlatformConfigJsonc = path4.join(crossPlatformDir, "opencode", "opencode.jsonc");
18519
- if (fs6.existsSync(crossPlatformConfig) || fs6.existsSync(crossPlatformConfigJsonc)) {
18626
+ if (fs7.existsSync(crossPlatformConfig) || fs7.existsSync(crossPlatformConfigJsonc)) {
18520
18627
  return crossPlatformDir;
18521
18628
  }
18522
18629
  return appdataDir;
@@ -18564,9 +18671,9 @@ function getConfigPaths(directory) {
18564
18671
  function getLocalDevPath(directory) {
18565
18672
  for (const configPath of getConfigPaths(directory)) {
18566
18673
  try {
18567
- if (!fs7.existsSync(configPath))
18674
+ if (!fs8.existsSync(configPath))
18568
18675
  continue;
18569
- const content = fs7.readFileSync(configPath, "utf-8");
18676
+ const content = fs8.readFileSync(configPath, "utf-8");
18570
18677
  const config = JSON.parse(stripJsonComments(content));
18571
18678
  const plugins = config.plugin ?? [];
18572
18679
  for (const entry of plugins) {
@@ -18586,13 +18693,13 @@ function getLocalDevPath(directory) {
18586
18693
  }
18587
18694
  function findPackageJsonUp(startPath) {
18588
18695
  try {
18589
- const stat = fs7.statSync(startPath);
18696
+ const stat = fs8.statSync(startPath);
18590
18697
  let dir = stat.isDirectory() ? startPath : path5.dirname(startPath);
18591
18698
  for (let i2 = 0;i2 < 10; i2++) {
18592
18699
  const pkgPath = path5.join(dir, "package.json");
18593
- if (fs7.existsSync(pkgPath)) {
18700
+ if (fs8.existsSync(pkgPath)) {
18594
18701
  try {
18595
- const content = fs7.readFileSync(pkgPath, "utf-8");
18702
+ const content = fs8.readFileSync(pkgPath, "utf-8");
18596
18703
  const pkg = JSON.parse(content);
18597
18704
  if (pkg.name === PACKAGE_NAME)
18598
18705
  return pkgPath;
@@ -18614,7 +18721,7 @@ function getLocalDevVersion(directory) {
18614
18721
  const pkgPath = findPackageJsonUp(localPath);
18615
18722
  if (!pkgPath)
18616
18723
  return null;
18617
- const content = fs7.readFileSync(pkgPath, "utf-8");
18724
+ const content = fs8.readFileSync(pkgPath, "utf-8");
18618
18725
  const pkg = JSON.parse(content);
18619
18726
  return pkg.version ?? null;
18620
18727
  } catch {
@@ -18624,9 +18731,9 @@ function getLocalDevVersion(directory) {
18624
18731
  function findPluginEntry(directory) {
18625
18732
  for (const configPath of getConfigPaths(directory)) {
18626
18733
  try {
18627
- if (!fs7.existsSync(configPath))
18734
+ if (!fs8.existsSync(configPath))
18628
18735
  continue;
18629
- const content = fs7.readFileSync(configPath, "utf-8");
18736
+ const content = fs8.readFileSync(configPath, "utf-8");
18630
18737
  const config = JSON.parse(stripJsonComments(content));
18631
18738
  const plugins = config.plugin ?? [];
18632
18739
  for (const entry of plugins) {
@@ -18647,8 +18754,8 @@ function findPluginEntry(directory) {
18647
18754
  }
18648
18755
  function getCachedVersion() {
18649
18756
  try {
18650
- if (fs7.existsSync(INSTALLED_PACKAGE_JSON)) {
18651
- const content = fs7.readFileSync(INSTALLED_PACKAGE_JSON, "utf-8");
18757
+ if (fs8.existsSync(INSTALLED_PACKAGE_JSON)) {
18758
+ const content = fs8.readFileSync(INSTALLED_PACKAGE_JSON, "utf-8");
18652
18759
  const pkg = JSON.parse(content);
18653
18760
  if (pkg.version)
18654
18761
  return pkg.version;
@@ -18658,7 +18765,7 @@ function getCachedVersion() {
18658
18765
  const currentDir = path5.dirname(fileURLToPath(import.meta.url));
18659
18766
  const pkgPath = findPackageJsonUp(currentDir);
18660
18767
  if (pkgPath) {
18661
- const content = fs7.readFileSync(pkgPath, "utf-8");
18768
+ const content = fs8.readFileSync(pkgPath, "utf-8");
18662
18769
  const pkg = JSON.parse(content);
18663
18770
  if (pkg.version)
18664
18771
  return pkg.version;
@@ -18670,7 +18777,7 @@ function getCachedVersion() {
18670
18777
  }
18671
18778
  function updatePinnedVersion(configPath, oldEntry, newVersion) {
18672
18779
  try {
18673
- const content = fs7.readFileSync(configPath, "utf-8");
18780
+ const content = fs8.readFileSync(configPath, "utf-8");
18674
18781
  const newEntry = `${PACKAGE_NAME}@${newVersion}`;
18675
18782
  const pluginMatch = content.match(/"plugin"\s*:\s*\[/);
18676
18783
  if (!pluginMatch || pluginMatch.index === undefined) {
@@ -18702,7 +18809,7 @@ function updatePinnedVersion(configPath, oldEntry, newVersion) {
18702
18809
  log(`[auto-update-checker] No changes made to ${configPath}`);
18703
18810
  return false;
18704
18811
  }
18705
- fs7.writeFileSync(configPath, updatedContent, "utf-8");
18812
+ fs8.writeFileSync(configPath, updatedContent, "utf-8");
18706
18813
  log(`[auto-update-checker] Updated ${configPath}: ${oldEntry} \u2192 ${newEntry}`);
18707
18814
  return true;
18708
18815
  } catch (err) {
@@ -18730,17 +18837,17 @@ async function getLatestVersion() {
18730
18837
  }
18731
18838
 
18732
18839
  // src/hooks/auto-update-checker/cache.ts
18733
- import * as fs8 from "fs";
18840
+ import * as fs9 from "fs";
18734
18841
  import * as path6 from "path";
18735
18842
  function stripTrailingCommas(json2) {
18736
18843
  return json2.replace(/,(\s*[}\]])/g, "$1");
18737
18844
  }
18738
18845
  function removeFromBunLock(packageName) {
18739
18846
  const lockPath = path6.join(CACHE_DIR, "bun.lock");
18740
- if (!fs8.existsSync(lockPath))
18847
+ if (!fs9.existsSync(lockPath))
18741
18848
  return false;
18742
18849
  try {
18743
- const content = fs8.readFileSync(lockPath, "utf-8");
18850
+ const content = fs9.readFileSync(lockPath, "utf-8");
18744
18851
  const lock = JSON.parse(stripTrailingCommas(content));
18745
18852
  let modified = false;
18746
18853
  if (lock.workspaces?.[""]?.dependencies?.[packageName]) {
@@ -18752,7 +18859,7 @@ function removeFromBunLock(packageName) {
18752
18859
  modified = true;
18753
18860
  }
18754
18861
  if (modified) {
18755
- fs8.writeFileSync(lockPath, JSON.stringify(lock, null, 2));
18862
+ fs9.writeFileSync(lockPath, JSON.stringify(lock, null, 2));
18756
18863
  log(`[auto-update-checker] Removed from bun.lock: ${packageName}`);
18757
18864
  }
18758
18865
  return modified;
@@ -18767,17 +18874,17 @@ function invalidatePackage(packageName = PACKAGE_NAME) {
18767
18874
  let packageRemoved = false;
18768
18875
  let dependencyRemoved = false;
18769
18876
  let lockRemoved = false;
18770
- if (fs8.existsSync(pkgDir)) {
18771
- fs8.rmSync(pkgDir, { recursive: true, force: true });
18877
+ if (fs9.existsSync(pkgDir)) {
18878
+ fs9.rmSync(pkgDir, { recursive: true, force: true });
18772
18879
  log(`[auto-update-checker] Package removed: ${pkgDir}`);
18773
18880
  packageRemoved = true;
18774
18881
  }
18775
- if (fs8.existsSync(pkgJsonPath)) {
18776
- const content = fs8.readFileSync(pkgJsonPath, "utf-8");
18882
+ if (fs9.existsSync(pkgJsonPath)) {
18883
+ const content = fs9.readFileSync(pkgJsonPath, "utf-8");
18777
18884
  const pkgJson = JSON.parse(content);
18778
18885
  if (pkgJson.dependencies?.[packageName]) {
18779
18886
  delete pkgJson.dependencies[packageName];
18780
- fs8.writeFileSync(pkgJsonPath, JSON.stringify(pkgJson, null, 2));
18887
+ fs9.writeFileSync(pkgJsonPath, JSON.stringify(pkgJson, null, 2));
18781
18888
  log(`[auto-update-checker] Dependency removed from package.json: ${packageName}`);
18782
18889
  dependencyRemoved = true;
18783
18890
  }
@@ -19040,7 +19147,6 @@ var TARGET_TOOLS = new Set([
19040
19147
  "webfetch",
19041
19148
  "context7_resolve-library-id",
19042
19149
  "context7_get-library-docs",
19043
- "websearch_exa_web_search_exa",
19044
19150
  "grep_app_searchgithub"
19045
19151
  ]);
19046
19152
  var AGENT_TOOLS = new Set([
@@ -20073,12 +20179,11 @@ function extractPromptText3(parts) {
20073
20179
  }
20074
20180
 
20075
20181
  // src/hooks/auto-slash-command/executor.ts
20076
- import { existsSync as existsSync36, readdirSync as readdirSync12, readFileSync as readFileSync24 } from "fs";
20182
+ import { existsSync as existsSync35, readdirSync as readdirSync11, readFileSync as readFileSync23 } from "fs";
20077
20183
  import { join as join43, basename as basename2, dirname as dirname8 } from "path";
20078
20184
  import { homedir as homedir13 } from "os";
20079
20185
  // src/features/opencode-skill-loader/loader.ts
20080
- import { existsSync as existsSync34, readdirSync as readdirSync11, readFileSync as readFileSync22 } from "fs";
20081
- import { promises as fs9 } from "fs";
20186
+ import { promises as fs10 } from "fs";
20082
20187
  import { join as join42, basename } from "path";
20083
20188
  import { homedir as homedir11 } from "os";
20084
20189
  function parseSkillMcpConfigFromFrontmatter(content) {
@@ -20095,12 +20200,10 @@ function parseSkillMcpConfigFromFrontmatter(content) {
20095
20200
  }
20096
20201
  return;
20097
20202
  }
20098
- function loadMcpJsonFromDir(skillDir) {
20203
+ async function loadMcpJsonFromDir(skillDir) {
20099
20204
  const mcpJsonPath = join42(skillDir, "mcp.json");
20100
- if (!existsSync34(mcpJsonPath))
20101
- return;
20102
20205
  try {
20103
- const content = readFileSync22(mcpJsonPath, "utf-8");
20206
+ const content = await fs10.readFile(mcpJsonPath, "utf-8");
20104
20207
  const parsed = JSON.parse(content);
20105
20208
  if (parsed && typeof parsed === "object" && "mcpServers" in parsed && parsed.mcpServers) {
20106
20209
  return parsed.mcpServers;
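
loadMcpJsonFromDir now reads mcp.json asynchronously and relies on try/catch instead of an existsSync pre-check. A standalone TypeScript sketch of that pattern, with McpServers as an assumed placeholder type:

```ts
import { promises as fs } from "fs";
import { join } from "path";

type McpServers = Record<string, unknown>;

// Read <skillDir>/mcp.json; any failure (missing file, unreadable, bad JSON)
// simply yields undefined instead of being checked up front with existsSync.
async function loadMcpJsonFromDir(skillDir: string): Promise<McpServers | undefined> {
  const mcpJsonPath = join(skillDir, "mcp.json");
  try {
    const content = await fs.readFile(mcpJsonPath, "utf-8");
    const parsed = JSON.parse(content);
    if (parsed && typeof parsed === "object" && "mcpServers" in parsed && parsed.mcpServers) {
      return parsed.mcpServers as McpServers;
    }
  } catch {
    // Fall through: treat errors as "no MCP config for this skill".
  }
  return undefined;
}
```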
@@ -20121,71 +20224,12 @@ function parseAllowedTools(allowedTools) {
20121
20224
  return;
20122
20225
  return allowedTools.split(/\s+/).filter(Boolean);
20123
20226
  }
20124
- function loadSkillFromPath(skillPath, resolvedPath, defaultName, scope) {
20125
- try {
20126
- const content = readFileSync22(skillPath, "utf-8");
20127
- const { data } = parseFrontmatter(content);
20128
- const frontmatterMcp = parseSkillMcpConfigFromFrontmatter(content);
20129
- const mcpJsonMcp = loadMcpJsonFromDir(resolvedPath);
20130
- const mcpConfig = mcpJsonMcp || frontmatterMcp;
20131
- const skillName = data.name || defaultName;
20132
- const originalDescription = data.description || "";
20133
- const isOpencodeSource = scope === "opencode" || scope === "opencode-project";
20134
- const formattedDescription = `(${scope} - Skill) ${originalDescription}`;
20135
- const lazyContent = {
20136
- loaded: false,
20137
- content: undefined,
20138
- load: async () => {
20139
- if (!lazyContent.loaded) {
20140
- const fileContent = await fs9.readFile(skillPath, "utf-8");
20141
- const { body } = parseFrontmatter(fileContent);
20142
- lazyContent.content = `<skill-instruction>
20143
- Base directory for this skill: ${resolvedPath}/
20144
- File references (@path) in this skill are relative to this directory.
20145
-
20146
- ${body.trim()}
20147
- </skill-instruction>
20148
-
20149
- <user-request>
20150
- $ARGUMENTS
20151
- </user-request>`;
20152
- lazyContent.loaded = true;
20153
- }
20154
- return lazyContent.content;
20155
- }
20156
- };
20157
- const definition = {
20158
- name: skillName,
20159
- description: formattedDescription,
20160
- template: "",
20161
- model: sanitizeModelField(data.model, isOpencodeSource ? "opencode" : "claude-code"),
20162
- agent: data.agent,
20163
- subtask: data.subtask,
20164
- argumentHint: data["argument-hint"]
20165
- };
20166
- return {
20167
- name: skillName,
20168
- path: skillPath,
20169
- resolvedPath,
20170
- definition,
20171
- scope,
20172
- license: data.license,
20173
- compatibility: data.compatibility,
20174
- metadata: data.metadata,
20175
- allowedTools: parseAllowedTools(data["allowed-tools"]),
20176
- mcpConfig,
20177
- lazyContent
20178
- };
20179
- } catch {
20180
- return null;
20181
- }
20182
- }
20183
- async function loadSkillFromPathAsync(skillPath, resolvedPath, defaultName, scope) {
20227
+ async function loadSkillFromPath(skillPath, resolvedPath, defaultName, scope) {
20184
20228
  try {
20185
- const content = await fs9.readFile(skillPath, "utf-8");
20229
+ const content = await fs10.readFile(skillPath, "utf-8");
20186
20230
  const { data } = parseFrontmatter(content);
20187
20231
  const frontmatterMcp = parseSkillMcpConfigFromFrontmatter(content);
20188
- const mcpJsonMcp = loadMcpJsonFromDir(resolvedPath);
20232
+ const mcpJsonMcp = await loadMcpJsonFromDir(resolvedPath);
20189
20233
  const mcpConfig = mcpJsonMcp || frontmatterMcp;
20190
20234
  const skillName = data.name || defaultName;
20191
20235
  const originalDescription = data.description || "";
@@ -20196,7 +20240,7 @@ async function loadSkillFromPathAsync(skillPath, resolvedPath, defaultName, scop
20196
20240
  content: undefined,
20197
20241
  load: async () => {
20198
20242
  if (!lazyContent.loaded) {
20199
- const fileContent = await fs9.readFile(skillPath, "utf-8");
20243
+ const fileContent = await fs10.readFile(skillPath, "utf-8");
20200
20244
  const { body } = parseFrontmatter(fileContent);
20201
20245
  lazyContent.content = `<skill-instruction>
20202
20246
  Base directory for this skill: ${resolvedPath}/
@@ -20239,66 +20283,28 @@ $ARGUMENTS
20239
20283
  return null;
20240
20284
  }
20241
20285
  }
20242
- function loadSkillsFromDir(skillsDir, scope) {
20243
- if (!existsSync34(skillsDir)) {
20244
- return [];
20245
- }
20246
- const entries = readdirSync11(skillsDir, { withFileTypes: true });
20247
- const skills = [];
20248
- for (const entry of entries) {
20249
- if (entry.name.startsWith("."))
20250
- continue;
20251
- const entryPath = join42(skillsDir, entry.name);
20252
- if (entry.isDirectory() || entry.isSymbolicLink()) {
20253
- const resolvedPath = resolveSymlink(entryPath);
20254
- const dirName = entry.name;
20255
- const skillMdPath = join42(resolvedPath, "SKILL.md");
20256
- if (existsSync34(skillMdPath)) {
20257
- const skill = loadSkillFromPath(skillMdPath, resolvedPath, dirName, scope);
20258
- if (skill)
20259
- skills.push(skill);
20260
- continue;
20261
- }
20262
- const namedSkillMdPath = join42(resolvedPath, `${dirName}.md`);
20263
- if (existsSync34(namedSkillMdPath)) {
20264
- const skill = loadSkillFromPath(namedSkillMdPath, resolvedPath, dirName, scope);
20265
- if (skill)
20266
- skills.push(skill);
20267
- continue;
20268
- }
20269
- continue;
20270
- }
20271
- if (isMarkdownFile(entry)) {
20272
- const skillName = basename(entry.name, ".md");
20273
- const skill = loadSkillFromPath(entryPath, skillsDir, skillName, scope);
20274
- if (skill)
20275
- skills.push(skill);
20276
- }
20277
- }
20278
- return skills;
20279
- }
20280
- async function loadSkillsFromDirAsync(skillsDir, scope) {
20281
- const entries = await fs9.readdir(skillsDir, { withFileTypes: true }).catch(() => []);
20286
+ async function loadSkillsFromDir(skillsDir, scope) {
20287
+ const entries = await fs10.readdir(skillsDir, { withFileTypes: true }).catch(() => []);
20282
20288
  const skills = [];
20283
20289
  for (const entry of entries) {
20284
20290
  if (entry.name.startsWith("."))
20285
20291
  continue;
20286
20292
  const entryPath = join42(skillsDir, entry.name);
20287
20293
  if (entry.isDirectory() || entry.isSymbolicLink()) {
20288
- const resolvedPath = resolveSymlink(entryPath);
20294
+ const resolvedPath = await resolveSymlinkAsync(entryPath);
20289
20295
  const dirName = entry.name;
20290
20296
  const skillMdPath = join42(resolvedPath, "SKILL.md");
20291
20297
  try {
20292
- await fs9.access(skillMdPath);
20293
- const skill = await loadSkillFromPathAsync(skillMdPath, resolvedPath, dirName, scope);
20298
+ await fs10.access(skillMdPath);
20299
+ const skill = await loadSkillFromPath(skillMdPath, resolvedPath, dirName, scope);
20294
20300
  if (skill)
20295
20301
  skills.push(skill);
20296
20302
  continue;
20297
20303
  } catch {}
20298
20304
  const namedSkillMdPath = join42(resolvedPath, `${dirName}.md`);
20299
20305
  try {
20300
- await fs9.access(namedSkillMdPath);
20301
- const skill = await loadSkillFromPathAsync(namedSkillMdPath, resolvedPath, dirName, scope);
20306
+ await fs10.access(namedSkillMdPath);
20307
+ const skill = await loadSkillFromPath(namedSkillMdPath, resolvedPath, dirName, scope);
20302
20308
  if (skill)
20303
20309
  skills.push(skill);
20304
20310
  continue;
@@ -20307,7 +20313,7 @@ async function loadSkillsFromDirAsync(skillsDir, scope) {
20307
20313
  }
20308
20314
  if (isMarkdownFile(entry)) {
20309
20315
  const skillName = basename(entry.name, ".md");
20310
- const skill = await loadSkillFromPathAsync(entryPath, skillsDir, skillName, scope);
20316
+ const skill = await loadSkillFromPath(entryPath, skillsDir, skillName, scope);
20311
20317
  if (skill)
20312
20318
  skills.push(skill);
20313
20319
  }
@@ -20322,70 +20328,68 @@ function skillsToRecord(skills) {
20322
20328
  }
20323
20329
  return result;
20324
20330
  }
20325
- function loadUserSkills() {
20331
+ async function loadUserSkills() {
20326
20332
  const userSkillsDir = join42(getClaudeConfigDir(), "skills");
20327
- const skills = loadSkillsFromDir(userSkillsDir, "user");
20333
+ const skills = await loadSkillsFromDir(userSkillsDir, "user");
20328
20334
  return skillsToRecord(skills);
20329
20335
  }
20330
- function loadProjectSkills() {
20336
+ async function loadProjectSkills() {
20331
20337
  const projectSkillsDir = join42(process.cwd(), ".claude", "skills");
20332
- const skills = loadSkillsFromDir(projectSkillsDir, "project");
20338
+ const skills = await loadSkillsFromDir(projectSkillsDir, "project");
20333
20339
  return skillsToRecord(skills);
20334
20340
  }
20335
- function loadOpencodeGlobalSkills() {
20341
+ async function loadOpencodeGlobalSkills() {
20336
20342
  const opencodeSkillsDir = join42(homedir11(), ".config", "opencode", "skill");
20337
- const skills = loadSkillsFromDir(opencodeSkillsDir, "opencode");
20343
+ const skills = await loadSkillsFromDir(opencodeSkillsDir, "opencode");
20338
20344
  return skillsToRecord(skills);
20339
20345
  }
20340
- function loadOpencodeProjectSkills() {
20346
+ async function loadOpencodeProjectSkills() {
20341
20347
  const opencodeProjectDir = join42(process.cwd(), ".opencode", "skill");
20342
- const skills = loadSkillsFromDir(opencodeProjectDir, "opencode-project");
20348
+ const skills = await loadSkillsFromDir(opencodeProjectDir, "opencode-project");
20343
20349
  return skillsToRecord(skills);
20344
20350
  }
20345
- function discoverAllSkills() {
20346
- const opencodeProjectDir = join42(process.cwd(), ".opencode", "skill");
20347
- const projectDir = join42(process.cwd(), ".claude", "skills");
20348
- const opencodeGlobalDir = join42(homedir11(), ".config", "opencode", "skill");
20349
- const userDir = join42(getClaudeConfigDir(), "skills");
20350
- const opencodeProjectSkills = loadSkillsFromDir(opencodeProjectDir, "opencode-project");
20351
- const projectSkills = loadSkillsFromDir(projectDir, "project");
20352
- const opencodeGlobalSkills = loadSkillsFromDir(opencodeGlobalDir, "opencode");
20353
- const userSkills = loadSkillsFromDir(userDir, "user");
20351
+ async function discoverAllSkills() {
20352
+ const [opencodeProjectSkills, projectSkills, opencodeGlobalSkills, userSkills] = await Promise.all([
20353
+ discoverOpencodeProjectSkills(),
20354
+ discoverProjectClaudeSkills(),
20355
+ discoverOpencodeGlobalSkills(),
20356
+ discoverUserClaudeSkills()
20357
+ ]);
20354
20358
  return [...opencodeProjectSkills, ...projectSkills, ...opencodeGlobalSkills, ...userSkills];
20355
20359
  }
20356
- function discoverSkills(options = {}) {
20360
+ async function discoverSkills(options = {}) {
20357
20361
  const { includeClaudeCodePaths = true } = options;
20358
- const opencodeProjectDir = join42(process.cwd(), ".opencode", "skill");
20359
- const opencodeGlobalDir = join42(homedir11(), ".config", "opencode", "skill");
20360
- const opencodeProjectSkills = loadSkillsFromDir(opencodeProjectDir, "opencode-project");
20361
- const opencodeGlobalSkills = loadSkillsFromDir(opencodeGlobalDir, "opencode");
20362
+ const [opencodeProjectSkills, opencodeGlobalSkills] = await Promise.all([
20363
+ discoverOpencodeProjectSkills(),
20364
+ discoverOpencodeGlobalSkills()
20365
+ ]);
20362
20366
  if (!includeClaudeCodePaths) {
20363
20367
  return [...opencodeProjectSkills, ...opencodeGlobalSkills];
20364
20368
  }
20365
- const projectDir = join42(process.cwd(), ".claude", "skills");
20366
- const userDir = join42(getClaudeConfigDir(), "skills");
20367
- const projectSkills = loadSkillsFromDir(projectDir, "project");
20368
- const userSkills = loadSkillsFromDir(userDir, "user");
20369
+ const [projectSkills, userSkills] = await Promise.all([
20370
+ discoverProjectClaudeSkills(),
20371
+ discoverUserClaudeSkills()
20372
+ ]);
20369
20373
  return [...opencodeProjectSkills, ...projectSkills, ...opencodeGlobalSkills, ...userSkills];
20370
20374
  }
20371
- async function discoverUserClaudeSkillsAsync() {
20375
+ async function discoverUserClaudeSkills() {
20372
20376
  const userSkillsDir = join42(getClaudeConfigDir(), "skills");
20373
- return loadSkillsFromDirAsync(userSkillsDir, "user");
20377
+ return loadSkillsFromDir(userSkillsDir, "user");
20374
20378
  }
20375
- async function discoverProjectClaudeSkillsAsync() {
20379
+ async function discoverProjectClaudeSkills() {
20376
20380
  const projectSkillsDir = join42(process.cwd(), ".claude", "skills");
20377
- return loadSkillsFromDirAsync(projectSkillsDir, "project");
20381
+ return loadSkillsFromDir(projectSkillsDir, "project");
20378
20382
  }
20379
- async function discoverOpencodeGlobalSkillsAsync() {
20383
+ async function discoverOpencodeGlobalSkills() {
20380
20384
  const opencodeSkillsDir = join42(homedir11(), ".config", "opencode", "skill");
20381
- return loadSkillsFromDirAsync(opencodeSkillsDir, "opencode");
20385
+ return loadSkillsFromDir(opencodeSkillsDir, "opencode");
20382
20386
  }
20383
- async function discoverOpencodeProjectSkillsAsync() {
20387
+ async function discoverOpencodeProjectSkills() {
20384
20388
  const opencodeProjectDir = join42(process.cwd(), ".opencode", "skill");
20385
- return loadSkillsFromDirAsync(opencodeProjectDir, "opencode-project");
20389
+ return loadSkillsFromDir(opencodeProjectDir, "opencode-project");
20386
20390
  }
20387
20391
  // src/features/opencode-skill-loader/merger.ts
20388
- import { readFileSync as readFileSync23, existsSync as existsSync35 } from "fs";
20392
+ import { readFileSync as readFileSync22, existsSync as existsSync34 } from "fs";
20389
20393
  import { dirname as dirname7, resolve as resolve5, isAbsolute as isAbsolute2 } from "path";
20390
20394
  import { homedir as homedir12 } from "os";
20391
20395
  var SCOPE_PRIORITY = {
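
discoverAllSkills and discoverSkills now load the four skill sources concurrently with Promise.all instead of sequentially. A minimal sketch of that shape; the loader stubs below are stand-ins, not the package's real implementations:

```ts
interface Skill {
  name: string;
  scope: string;
}

// Stand-in loaders for the four skill sources.
const discoverOpencodeProjectSkills = async (): Promise<Skill[]> => [];
const discoverProjectClaudeSkills = async (): Promise<Skill[]> => [];
const discoverOpencodeGlobalSkills = async (): Promise<Skill[]> => [];
const discoverUserClaudeSkills = async (): Promise<Skill[]> => [];

// Load all four sources concurrently, then concatenate in the same order
// the previous sequential implementation used.
async function discoverAllSkills(): Promise<Skill[]> {
  const [opencodeProject, project, opencodeGlobal, user] = await Promise.all([
    discoverOpencodeProjectSkills(),
    discoverProjectClaudeSkills(),
    discoverOpencodeGlobalSkills(),
    discoverUserClaudeSkills(),
  ]);
  return [...opencodeProject, ...project, ...opencodeGlobal, ...user];
}
```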
@@ -20433,9 +20437,9 @@ function resolveFilePath2(from, configDir) {
20433
20437
  }
20434
20438
  function loadSkillFromFile(filePath) {
20435
20439
  try {
20436
- if (!existsSync35(filePath))
20440
+ if (!existsSync34(filePath))
20437
20441
  return null;
20438
- const content = readFileSync23(filePath, "utf-8");
20442
+ const content = readFileSync22(filePath, "utf-8");
20439
20443
  const { data, body } = parseFrontmatter(content);
20440
20444
  return { template: body, metadata: data };
20441
20445
  } catch {
@@ -20589,10 +20593,10 @@ function mergeSkills(builtinSkills, config, userClaudeSkills, userOpencodeSkills
20589
20593
  }
20590
20594
  // src/hooks/auto-slash-command/executor.ts
20591
20595
  function discoverCommandsFromDir(commandsDir, scope) {
20592
- if (!existsSync36(commandsDir)) {
20596
+ if (!existsSync35(commandsDir)) {
20593
20597
  return [];
20594
20598
  }
20595
- const entries = readdirSync12(commandsDir, { withFileTypes: true });
20599
+ const entries = readdirSync11(commandsDir, { withFileTypes: true });
20596
20600
  const commands = [];
20597
20601
  for (const entry of entries) {
20598
20602
  if (!isMarkdownFile(entry))
@@ -20600,7 +20604,7 @@ function discoverCommandsFromDir(commandsDir, scope) {
20600
20604
  const commandPath = join43(commandsDir, entry.name);
20601
20605
  const commandName = basename2(entry.name, ".md");
20602
20606
  try {
20603
- const content = readFileSync24(commandPath, "utf-8");
20607
+ const content = readFileSync23(commandPath, "utf-8");
20604
20608
  const { data, body } = parseFrontmatter(content);
20605
20609
  const isOpencodeSource = scope === "opencode" || scope === "opencode-project";
20606
20610
  const metadata = {
@@ -20640,7 +20644,7 @@ function skillToCommandInfo(skill) {
20640
20644
  scope: "skill"
20641
20645
  };
20642
20646
  }
20643
- function discoverAllCommands() {
20647
+ async function discoverAllCommands() {
20644
20648
  const userCommandsDir = join43(getClaudeConfigDir(), "commands");
20645
20649
  const projectCommandsDir = join43(process.cwd(), ".claude", "commands");
20646
20650
  const opencodeGlobalDir = join43(homedir13(), ".config", "opencode", "command");
@@ -20649,7 +20653,7 @@ function discoverAllCommands() {
20649
20653
  const opencodeGlobalCommands = discoverCommandsFromDir(opencodeGlobalDir, "opencode");
20650
20654
  const projectCommands = discoverCommandsFromDir(projectCommandsDir, "project");
20651
20655
  const opencodeProjectCommands = discoverCommandsFromDir(opencodeProjectDir, "opencode-project");
20652
- const skills = discoverAllSkills();
20656
+ const skills = await discoverAllSkills();
20653
20657
  const skillCommands = skills.map(skillToCommandInfo);
20654
20658
  return [
20655
20659
  ...opencodeProjectCommands,
@@ -20659,8 +20663,8 @@ function discoverAllCommands() {
20659
20663
  ...skillCommands
20660
20664
  ];
20661
20665
  }
20662
- function findCommand2(commandName) {
20663
- const allCommands = discoverAllCommands();
20666
+ async function findCommand2(commandName) {
20667
+ const allCommands = await discoverAllCommands();
20664
20668
  return allCommands.find((cmd) => cmd.name.toLowerCase() === commandName.toLowerCase()) ?? null;
20665
20669
  }
20666
20670
  async function formatCommandTemplate(cmd, args) {
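
findCommand2 is now async because command discovery (including skill-backed commands) is awaited. A typed sketch of the case-insensitive lookup, with CommandInfo and the stubbed discoverAllCommands as assumed shapes rather than the package's exports:

```ts
interface CommandInfo {
  name: string;
  template: string;
}

// Stand-in for the package's async command discovery.
const discoverAllCommands = async (): Promise<CommandInfo[]> => [];

// Await discovery once, then match the requested name case-insensitively.
async function findCommand(commandName: string): Promise<CommandInfo | null> {
  const allCommands = await discoverAllCommands();
  const needle = commandName.toLowerCase();
  return allCommands.find((cmd) => cmd.name.toLowerCase() === needle) ?? null;
}
```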
@@ -20706,7 +20710,7 @@ async function formatCommandTemplate(cmd, args) {
20706
20710
  `);
20707
20711
  }
20708
20712
  async function executeSlashCommand(parsed) {
20709
- const command = findCommand2(parsed.command);
20713
+ const command = await findCommand2(parsed.command);
20710
20714
  if (!command) {
20711
20715
  return {
20712
20716
  success: false,
@@ -22800,7 +22804,7 @@ function createBuiltinSkills() {
22800
22804
  return [playwrightSkill];
22801
22805
  }
22802
22806
  // src/features/claude-code-mcp-loader/loader.ts
22803
- import { existsSync as existsSync37, readFileSync as readFileSync25 } from "fs";
22807
+ import { existsSync as existsSync36, readFileSync as readFileSync24 } from "fs";
22804
22808
  import { join as join44 } from "path";
22805
22809
 
22806
22810
  // src/features/claude-code-mcp-loader/env-expander.ts
@@ -22876,7 +22880,7 @@ function getMcpConfigPaths() {
22876
22880
  ];
22877
22881
  }
22878
22882
  async function loadMcpConfigFile(filePath) {
22879
- if (!existsSync37(filePath)) {
22883
+ if (!existsSync36(filePath)) {
22880
22884
  return null;
22881
22885
  }
22882
22886
  try {
@@ -22891,10 +22895,10 @@ function getSystemMcpServerNames() {
22891
22895
  const names = new Set;
22892
22896
  const paths = getMcpConfigPaths();
22893
22897
  for (const { path: path7 } of paths) {
22894
- if (!existsSync37(path7))
22898
+ if (!existsSync36(path7))
22895
22899
  continue;
22896
22900
  try {
22897
- const content = readFileSync25(path7, "utf-8");
22901
+ const content = readFileSync24(path7, "utf-8");
22898
22902
  const config = JSON.parse(content);
22899
22903
  if (!config?.mcpServers)
22900
22904
  continue;
@@ -23326,14 +23330,14 @@ var EXT_TO_LANG = {
23326
23330
  ".gql": "graphql"
23327
23331
  };
23328
23332
  // src/tools/lsp/config.ts
23329
- import { existsSync as existsSync38, readFileSync as readFileSync26 } from "fs";
23333
+ import { existsSync as existsSync37, readFileSync as readFileSync25 } from "fs";
23330
23334
  import { join as join45 } from "path";
23331
23335
  import { homedir as homedir14 } from "os";
23332
23336
  function loadJsonFile(path7) {
23333
- if (!existsSync38(path7))
23337
+ if (!existsSync37(path7))
23334
23338
  return null;
23335
23339
  try {
23336
- return JSON.parse(readFileSync26(path7, "utf-8"));
23340
+ return JSON.parse(readFileSync25(path7, "utf-8"));
23337
23341
  } catch {
23338
23342
  return null;
23339
23343
  }
@@ -23456,7 +23460,7 @@ function isServerInstalled(command) {
23456
23460
  return false;
23457
23461
  const cmd = command[0];
23458
23462
  if (cmd.includes("/") || cmd.includes("\\")) {
23459
- if (existsSync38(cmd))
23463
+ if (existsSync37(cmd))
23460
23464
  return true;
23461
23465
  }
23462
23466
  const isWindows2 = process.platform === "win32";
@@ -23465,7 +23469,7 @@ function isServerInstalled(command) {
23465
23469
  const pathSeparator = isWindows2 ? ";" : ":";
23466
23470
  const paths = pathEnv.split(pathSeparator);
23467
23471
  for (const p of paths) {
23468
- if (existsSync38(join45(p, cmd)) || existsSync38(join45(p, cmd + ext))) {
23472
+ if (existsSync37(join45(p, cmd)) || existsSync37(join45(p, cmd + ext))) {
23469
23473
  return true;
23470
23474
  }
23471
23475
  }
@@ -23479,7 +23483,7 @@ function isServerInstalled(command) {
23479
23483
  join45(homedir14(), ".config", "opencode", "node_modules", ".bin", cmd + ext)
23480
23484
  ];
23481
23485
  for (const p of additionalPaths) {
23482
- if (existsSync38(p)) {
23486
+ if (existsSync37(p)) {
23483
23487
  return true;
23484
23488
  }
23485
23489
  }
@@ -23532,7 +23536,7 @@ function getAllServers() {
23532
23536
  }
23533
23537
  // src/tools/lsp/client.ts
23534
23538
  var {spawn: spawn5 } = globalThis.Bun;
23535
- import { readFileSync as readFileSync27 } from "fs";
23539
+ import { readFileSync as readFileSync26 } from "fs";
23536
23540
  import { extname, resolve as resolve6 } from "path";
23537
23541
  class LSPServerManager {
23538
23542
  static instance;
@@ -23962,7 +23966,7 @@ ${msg}`);
23962
23966
  const absPath = resolve6(filePath);
23963
23967
  if (this.openedFiles.has(absPath))
23964
23968
  return;
23965
- const text = readFileSync27(absPath, "utf-8");
23969
+ const text = readFileSync26(absPath, "utf-8");
23966
23970
  const ext = extname(absPath);
23967
23971
  const languageId = getLanguageId(ext);
23968
23972
  this.notify("textDocument/didOpen", {
@@ -24078,16 +24082,16 @@ ${msg}`);
24078
24082
  // src/tools/lsp/utils.ts
24079
24083
  import { extname as extname2, resolve as resolve7 } from "path";
24080
24084
  import { fileURLToPath as fileURLToPath2 } from "url";
24081
- import { existsSync as existsSync39, readFileSync as readFileSync28, writeFileSync as writeFileSync15 } from "fs";
24085
+ import { existsSync as existsSync38, readFileSync as readFileSync27, writeFileSync as writeFileSync15 } from "fs";
24082
24086
  function findWorkspaceRoot(filePath) {
24083
24087
  let dir = resolve7(filePath);
24084
- if (!existsSync39(dir) || !__require("fs").statSync(dir).isDirectory()) {
24088
+ if (!existsSync38(dir) || !__require("fs").statSync(dir).isDirectory()) {
24085
24089
  dir = __require("path").dirname(dir);
24086
24090
  }
24087
24091
  const markers = [".git", "package.json", "pyproject.toml", "Cargo.toml", "go.mod", "pom.xml", "build.gradle"];
24088
24092
  while (dir !== "/") {
24089
24093
  for (const marker of markers) {
24090
- if (existsSync39(__require("path").join(dir, marker))) {
24094
+ if (existsSync38(__require("path").join(dir, marker))) {
24091
24095
  return dir;
24092
24096
  }
24093
24097
  }
@@ -24283,7 +24287,7 @@ function formatCodeActions(actions) {
24283
24287
  }
24284
24288
  function applyTextEditsToFile(filePath, edits) {
24285
24289
  try {
24286
- let content = readFileSync28(filePath, "utf-8");
24290
+ let content = readFileSync27(filePath, "utf-8");
24287
24291
  const lines = content.split(`
24288
24292
  `);
24289
24293
  const sortedEdits = [...edits].sort((a, b) => {
@@ -24349,7 +24353,7 @@ function applyWorkspaceEdit(edit) {
24349
24353
  try {
24350
24354
  const oldPath = uriToPath(change.oldUri);
24351
24355
  const newPath = uriToPath(change.newUri);
24352
- const content = readFileSync28(oldPath, "utf-8");
24356
+ const content = readFileSync27(oldPath, "utf-8");
24353
24357
  writeFileSync15(newPath, content, "utf-8");
24354
24358
  __require("fs").unlinkSync(oldPath);
24355
24359
  result.filesModified.push(newPath);
@@ -37051,11 +37055,11 @@ var lsp_code_action_resolve = tool({
37051
37055
  // src/tools/ast-grep/constants.ts
37052
37056
  import { createRequire as createRequire4 } from "module";
37053
37057
  import { dirname as dirname9, join as join47 } from "path";
37054
- import { existsSync as existsSync41, statSync as statSync4 } from "fs";
37058
+ import { existsSync as existsSync40, statSync as statSync4 } from "fs";
37055
37059
 
37056
37060
  // src/tools/ast-grep/downloader.ts
37057
37061
  var {spawn: spawn6 } = globalThis.Bun;
37058
- import { existsSync as existsSync40, mkdirSync as mkdirSync11, chmodSync as chmodSync2, unlinkSync as unlinkSync10 } from "fs";
37062
+ import { existsSync as existsSync39, mkdirSync as mkdirSync11, chmodSync as chmodSync2, unlinkSync as unlinkSync10 } from "fs";
37059
37063
  import { join as join46 } from "path";
37060
37064
  import { homedir as homedir15 } from "os";
37061
37065
  import { createRequire as createRequire3 } from "module";
@@ -37094,7 +37098,7 @@ function getBinaryName3() {
37094
37098
  }
37095
37099
  function getCachedBinaryPath2() {
37096
37100
  const binaryPath = join46(getCacheDir3(), getBinaryName3());
37097
- return existsSync40(binaryPath) ? binaryPath : null;
37101
+ return existsSync39(binaryPath) ? binaryPath : null;
37098
37102
  }
37099
37103
  async function extractZip2(archivePath, destDir) {
37100
37104
  const proc = process.platform === "win32" ? spawn6([
@@ -37121,7 +37125,7 @@ async function downloadAstGrep(version2 = DEFAULT_VERSION) {
37121
37125
  const cacheDir = getCacheDir3();
37122
37126
  const binaryName = getBinaryName3();
37123
37127
  const binaryPath = join46(cacheDir, binaryName);
37124
- if (existsSync40(binaryPath)) {
37128
+ if (existsSync39(binaryPath)) {
37125
37129
  return binaryPath;
37126
37130
  }
37127
37131
  const { arch, os: os6 } = platformInfo;
@@ -37129,7 +37133,7 @@ async function downloadAstGrep(version2 = DEFAULT_VERSION) {
37129
37133
  const downloadUrl = `https://github.com/${REPO2}/releases/download/${version2}/${assetName}`;
37130
37134
  console.log(`[oh-my-opencode] Downloading ast-grep binary...`);
37131
37135
  try {
37132
- if (!existsSync40(cacheDir)) {
37136
+ if (!existsSync39(cacheDir)) {
37133
37137
  mkdirSync11(cacheDir, { recursive: true });
37134
37138
  }
37135
37139
  const response2 = await fetch(downloadUrl, { redirect: "follow" });
@@ -37140,10 +37144,10 @@ async function downloadAstGrep(version2 = DEFAULT_VERSION) {
37140
37144
  const arrayBuffer = await response2.arrayBuffer();
37141
37145
  await Bun.write(archivePath, arrayBuffer);
37142
37146
  await extractZip2(archivePath, cacheDir);
37143
- if (existsSync40(archivePath)) {
37147
+ if (existsSync39(archivePath)) {
37144
37148
  unlinkSync10(archivePath);
37145
37149
  }
37146
- if (process.platform !== "win32" && existsSync40(binaryPath)) {
37150
+ if (process.platform !== "win32" && existsSync39(binaryPath)) {
37147
37151
  chmodSync2(binaryPath, 493);
37148
37152
  }
37149
37153
  console.log(`[oh-my-opencode] ast-grep binary ready.`);
@@ -37195,7 +37199,7 @@ function findSgCliPathSync() {
37195
37199
  const cliPkgPath = require2.resolve("@ast-grep/cli/package.json");
37196
37200
  const cliDir = dirname9(cliPkgPath);
37197
37201
  const sgPath = join47(cliDir, binaryName);
37198
- if (existsSync41(sgPath) && isValidBinary(sgPath)) {
37202
+ if (existsSync40(sgPath) && isValidBinary(sgPath)) {
37199
37203
  return sgPath;
37200
37204
  }
37201
37205
  } catch {}
@@ -37207,7 +37211,7 @@ function findSgCliPathSync() {
37207
37211
  const pkgDir = dirname9(pkgPath);
37208
37212
  const astGrepName = process.platform === "win32" ? "ast-grep.exe" : "ast-grep";
37209
37213
  const binaryPath = join47(pkgDir, astGrepName);
37210
- if (existsSync41(binaryPath) && isValidBinary(binaryPath)) {
37214
+ if (existsSync40(binaryPath) && isValidBinary(binaryPath)) {
37211
37215
  return binaryPath;
37212
37216
  }
37213
37217
  } catch {}
@@ -37215,7 +37219,7 @@ function findSgCliPathSync() {
37215
37219
  if (process.platform === "darwin") {
37216
37220
  const homebrewPaths = ["/opt/homebrew/bin/sg", "/usr/local/bin/sg"];
37217
37221
  for (const path7 of homebrewPaths) {
37218
- if (existsSync41(path7) && isValidBinary(path7)) {
37222
+ if (existsSync40(path7) && isValidBinary(path7)) {
37219
37223
  return path7;
37220
37224
  }
37221
37225
  }
@@ -37270,11 +37274,11 @@ var DEFAULT_MAX_MATCHES = 500;
37270
37274
 
37271
37275
  // src/tools/ast-grep/cli.ts
37272
37276
  var {spawn: spawn7 } = globalThis.Bun;
37273
- import { existsSync as existsSync42 } from "fs";
37277
+ import { existsSync as existsSync41 } from "fs";
37274
37278
  var resolvedCliPath3 = null;
37275
37279
  var initPromise2 = null;
37276
37280
  async function getAstGrepPath() {
37277
- if (resolvedCliPath3 !== null && existsSync42(resolvedCliPath3)) {
37281
+ if (resolvedCliPath3 !== null && existsSync41(resolvedCliPath3)) {
37278
37282
  return resolvedCliPath3;
37279
37283
  }
37280
37284
  if (initPromise2) {
@@ -37282,7 +37286,7 @@ async function getAstGrepPath() {
37282
37286
  }
37283
37287
  initPromise2 = (async () => {
37284
37288
  const syncPath = findSgCliPathSync();
37285
- if (syncPath && existsSync42(syncPath)) {
37289
+ if (syncPath && existsSync41(syncPath)) {
37286
37290
  resolvedCliPath3 = syncPath;
37287
37291
  setSgCliPath(syncPath);
37288
37292
  return syncPath;
@@ -37316,7 +37320,7 @@ async function runSg(options) {
37316
37320
  const paths = options.paths && options.paths.length > 0 ? options.paths : ["."];
37317
37321
  args.push(...paths);
37318
37322
  let cliPath = getSgCliPath();
37319
- if (!existsSync42(cliPath) && cliPath !== "sg") {
37323
+ if (!existsSync41(cliPath) && cliPath !== "sg") {
37320
37324
  const downloadedPath = await getAstGrepPath();
37321
37325
  if (downloadedPath) {
37322
37326
  cliPath = downloadedPath;
@@ -37580,17 +37584,17 @@ var ast_grep_replace = tool({
37580
37584
  var {spawn: spawn9 } = globalThis.Bun;
37581
37585
 
37582
37586
  // src/tools/grep/constants.ts
37583
- import { existsSync as existsSync44 } from "fs";
37587
+ import { existsSync as existsSync43 } from "fs";
37584
37588
  import { join as join49, dirname as dirname10 } from "path";
37585
37589
  import { spawnSync } from "child_process";
37586
37590
 
37587
37591
  // src/tools/grep/downloader.ts
37588
- import { existsSync as existsSync43, mkdirSync as mkdirSync12, chmodSync as chmodSync3, unlinkSync as unlinkSync11, readdirSync as readdirSync13 } from "fs";
37592
+ import { existsSync as existsSync42, mkdirSync as mkdirSync12, chmodSync as chmodSync3, unlinkSync as unlinkSync11, readdirSync as readdirSync12 } from "fs";
37589
37593
  import { join as join48 } from "path";
37590
37594
  var {spawn: spawn8 } = globalThis.Bun;
37591
37595
  function findFileRecursive(dir, filename) {
37592
37596
  try {
37593
- const entries = readdirSync13(dir, { withFileTypes: true, recursive: true });
37597
+ const entries = readdirSync12(dir, { withFileTypes: true, recursive: true });
37594
37598
  for (const entry of entries) {
37595
37599
  if (entry.isFile() && entry.name === filename) {
37596
37600
  return join48(entry.parentPath ?? dir, entry.name);
@@ -37695,7 +37699,7 @@ async function downloadAndInstallRipgrep() {
37695
37699
  }
37696
37700
  const installDir = getInstallDir();
37697
37701
  const rgPath = getRgPath();
37698
- if (existsSync43(rgPath)) {
37702
+ if (existsSync42(rgPath)) {
37699
37703
  return rgPath;
37700
37704
  }
37701
37705
  mkdirSync12(installDir, { recursive: true });
@@ -37712,12 +37716,12 @@ async function downloadAndInstallRipgrep() {
37712
37716
  if (process.platform !== "win32") {
37713
37717
  chmodSync3(rgPath, 493);
37714
37718
  }
37715
- if (!existsSync43(rgPath)) {
37719
+ if (!existsSync42(rgPath)) {
37716
37720
  throw new Error("ripgrep binary not found after extraction");
37717
37721
  }
37718
37722
  return rgPath;
37719
37723
  } finally {
37720
- if (existsSync43(archivePath)) {
37724
+ if (existsSync42(archivePath)) {
37721
37725
  try {
37722
37726
  unlinkSync11(archivePath);
37723
37727
  } catch {}
@@ -37726,7 +37730,7 @@ async function downloadAndInstallRipgrep() {
37726
37730
  }
37727
37731
  function getInstalledRipgrepPath() {
37728
37732
  const rgPath = getRgPath();
37729
- return existsSync43(rgPath) ? rgPath : null;
37733
+ return existsSync42(rgPath) ? rgPath : null;
37730
37734
  }
37731
37735
 
37732
37736
  // src/tools/grep/constants.ts
@@ -37757,7 +37761,7 @@ function getOpenCodeBundledRg() {
37757
37761
  join49(execDir, "..", "libexec", rgName)
37758
37762
  ];
37759
37763
  for (const candidate of candidates) {
37760
- if (existsSync44(candidate)) {
37764
+ if (existsSync43(candidate)) {
37761
37765
  return candidate;
37762
37766
  }
37763
37767
  }
@@ -38210,13 +38214,13 @@ var glob = tool({
38210
38214
  }
38211
38215
  });
38212
38216
  // src/tools/slashcommand/tools.ts
38213
- import { existsSync as existsSync45, readdirSync as readdirSync14, readFileSync as readFileSync29 } from "fs";
38217
+ import { existsSync as existsSync44, readdirSync as readdirSync13, readFileSync as readFileSync28 } from "fs";
38214
38218
  import { join as join50, basename as basename3, dirname as dirname11 } from "path";
38215
38219
  function discoverCommandsFromDir2(commandsDir, scope) {
38216
- if (!existsSync45(commandsDir)) {
38220
+ if (!existsSync44(commandsDir)) {
38217
38221
  return [];
38218
38222
  }
38219
- const entries = readdirSync14(commandsDir, { withFileTypes: true });
38223
+ const entries = readdirSync13(commandsDir, { withFileTypes: true });
38220
38224
  const commands = [];
38221
38225
  for (const entry of entries) {
38222
38226
  if (!isMarkdownFile(entry))
@@ -38224,7 +38228,7 @@ function discoverCommandsFromDir2(commandsDir, scope) {
38224
38228
  const commandPath = join50(commandsDir, entry.name);
38225
38229
  const commandName = basename3(entry.name, ".md");
38226
38230
  try {
38227
- const content = readFileSync29(commandPath, "utf-8");
38231
+ const content = readFileSync28(commandPath, "utf-8");
38228
38232
  const { data, body } = parseFrontmatter(content);
38229
38233
  const isOpencodeSource = scope === "opencode" || scope === "opencode-project";
38230
38234
  const metadata = {
@@ -38276,17 +38280,6 @@ function skillToCommandInfo2(skill) {
38276
38280
  scope: skill.scope
38277
38281
  };
38278
38282
  }
38279
- var availableCommands = discoverCommandsSync();
38280
- var availableSkills = discoverAllSkills();
38281
- var availableItems = [
38282
- ...availableCommands,
38283
- ...availableSkills.map(skillToCommandInfo2)
38284
- ];
38285
- var commandListForDescription = availableItems.map((cmd) => {
38286
- const hint = cmd.metadata.argumentHint ? ` ${cmd.metadata.argumentHint}` : "";
38287
- return `- /${cmd.name}${hint}: ${cmd.metadata.description} (${cmd.scope})`;
38288
- }).join(`
38289
- `);
38290
38283
  async function formatLoadedCommand(cmd) {
38291
38284
  const sections = [];
38292
38285
  sections.push(`# /${cmd.name} Command
@@ -38339,49 +38332,86 @@ function formatCommandList(items) {
38339
38332
  return lines.join(`
38340
38333
  `);
38341
38334
  }
38342
- var slashcommand = tool({
38343
- description: `Load a skill to get detailed instructions for a specific task.
38335
+ var TOOL_DESCRIPTION_PREFIX = `Load a skill to get detailed instructions for a specific task.
38344
38336
 
38345
38337
  Skills provide specialized knowledge and step-by-step guidance.
38346
38338
  Use this when a task matches an available skill's description.
38347
-
38339
+ `;
38340
+ function buildDescriptionFromItems(items) {
38341
+ const commandListForDescription = items.map((cmd) => {
38342
+ const hint = cmd.metadata.argumentHint ? ` ${cmd.metadata.argumentHint}` : "";
38343
+ return `- /${cmd.name}${hint}: ${cmd.metadata.description} (${cmd.scope})`;
38344
+ }).join(`
38345
+ `);
38346
+ return `${TOOL_DESCRIPTION_PREFIX}
38348
38347
  <available_skills>
38349
38348
  ${commandListForDescription}
38350
- </available_skills>`,
38351
- args: {
38352
- command: tool.schema.string().describe("The slash command to execute (without the leading slash). E.g., 'commit', 'plan', 'execute'.")
38353
- },
38354
- async execute(args) {
38355
- const commands = discoverCommandsSync();
38356
- const skills = discoverAllSkills();
38357
- const allItems = [
38358
- ...commands,
38359
- ...skills.map(skillToCommandInfo2)
38360
- ];
38361
- if (!args.command) {
38362
- return formatCommandList(allItems) + `
38349
+ </available_skills>`;
38350
+ }
38351
+ function createSlashcommandTool(options = {}) {
38352
+ let cachedCommands = options.commands ?? null;
38353
+ let cachedSkills = options.skills ?? null;
38354
+ let cachedDescription = null;
38355
+ const getCommands = () => {
38356
+ if (cachedCommands)
38357
+ return cachedCommands;
38358
+ cachedCommands = discoverCommandsSync();
38359
+ return cachedCommands;
38360
+ };
38361
+ const getSkills = async () => {
38362
+ if (cachedSkills)
38363
+ return cachedSkills;
38364
+ cachedSkills = await discoverAllSkills();
38365
+ return cachedSkills;
38366
+ };
38367
+ const getAllItems = async () => {
38368
+ const commands = getCommands();
38369
+ const skills = await getSkills();
38370
+ return [...commands, ...skills.map(skillToCommandInfo2)];
38371
+ };
38372
+ const buildDescription = async () => {
38373
+ if (cachedDescription)
38374
+ return cachedDescription;
38375
+ const allItems = await getAllItems();
38376
+ cachedDescription = buildDescriptionFromItems(allItems);
38377
+ return cachedDescription;
38378
+ };
38379
+ buildDescription();
38380
+ return tool({
38381
+ get description() {
38382
+ return cachedDescription ?? TOOL_DESCRIPTION_PREFIX;
38383
+ },
38384
+ args: {
38385
+ command: tool.schema.string().describe("The slash command to execute (without the leading slash). E.g., 'commit', 'plan', 'execute'.")
38386
+ },
38387
+ async execute(args) {
38388
+ const allItems = await getAllItems();
38389
+ if (!args.command) {
38390
+ return formatCommandList(allItems) + `
38363
38391
 
38364
38392
  Provide a command or skill name to execute.`;
38365
- }
38366
- const cmdName = args.command.replace(/^\//, "");
38367
- const exactMatch = allItems.find((cmd) => cmd.name.toLowerCase() === cmdName.toLowerCase());
38368
- if (exactMatch) {
38369
- return await formatLoadedCommand(exactMatch);
38370
- }
38371
- const partialMatches = allItems.filter((cmd) => cmd.name.toLowerCase().includes(cmdName.toLowerCase()));
38372
- if (partialMatches.length > 0) {
38373
- const matchList = partialMatches.map((cmd) => `/${cmd.name}`).join(", ");
38374
- return `No exact match for "/${cmdName}". Did you mean: ${matchList}?
38393
+ }
38394
+ const cmdName = args.command.replace(/^\//, "");
38395
+ const exactMatch = allItems.find((cmd) => cmd.name.toLowerCase() === cmdName.toLowerCase());
38396
+ if (exactMatch) {
38397
+ return await formatLoadedCommand(exactMatch);
38398
+ }
38399
+ const partialMatches = allItems.filter((cmd) => cmd.name.toLowerCase().includes(cmdName.toLowerCase()));
38400
+ if (partialMatches.length > 0) {
38401
+ const matchList = partialMatches.map((cmd) => `/${cmd.name}`).join(", ");
38402
+ return `No exact match for "/${cmdName}". Did you mean: ${matchList}?
38375
38403
 
38376
38404
  ` + formatCommandList(allItems);
38377
- }
38378
- return `Command or skill "/${cmdName}" not found.
38405
+ }
38406
+ return `Command or skill "/${cmdName}" not found.
38379
38407
 
38380
38408
  ` + formatCommandList(allItems) + `
38381
38409
 
38382
38410
  Try a different name.`;
38383
- }
38384
- });
38411
+ }
38412
+ });
38413
+ }
38414
+ var slashcommand = createSlashcommandTool();
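The hunk above replaces module-load-time command/skill discovery with a `createSlashcommandTool` factory that caches discovery results and exposes the tool description through a getter, so the full skill list shows up once the async discovery resolves. A minimal sketch of that lazy-description pattern, with `discoverItems` and `Item` as hypothetical stand-ins for the real discovery calls and simplified types:

```typescript
// Sketch only: `discoverItems` and `Item` are illustrative, not part of the bundle.
type Item = { name: string; description: string };

function createLazyTool(discoverItems: () => Promise<Item[]>) {
  let cachedDescription: string | null = null;

  const build = async () => {
    const items = await discoverItems();
    cachedDescription =
      "Available items:\n" +
      items.map((i) => `- /${i.name}: ${i.description}`).join("\n");
  };
  void build(); // fire-and-forget warm-up, mirroring buildDescription() above

  return {
    // Callers that read the description before discovery finishes only see the
    // placeholder, matching the `cachedDescription ?? TOOL_DESCRIPTION_PREFIX`
    // fallback in the diff above.
    get description(): string {
      return cachedDescription ?? "Loading available items...";
    },
  };
}
```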
38385
38415
  // src/tools/session-manager/constants.ts
38386
38416
  import { join as join51 } from "path";
38387
38417
  var OPENCODE_STORAGE9 = getOpenCodeStorageDir();
@@ -38462,11 +38492,11 @@ Has Todos: Yes (12 items, 8 completed)
38462
38492
  Has Transcript: Yes (234 entries)`;
38463
38493
 
38464
38494
  // src/tools/session-manager/storage.ts
38465
- import { existsSync as existsSync46, readdirSync as readdirSync15 } from "fs";
38495
+ import { existsSync as existsSync45, readdirSync as readdirSync14 } from "fs";
38466
38496
  import { readdir, readFile } from "fs/promises";
38467
38497
  import { join as join52 } from "path";
38468
38498
  async function getMainSessions(options) {
38469
- if (!existsSync46(SESSION_STORAGE))
38499
+ if (!existsSync45(SESSION_STORAGE))
38470
38500
  return [];
38471
38501
  const sessions = [];
38472
38502
  try {
@@ -38498,7 +38528,7 @@ async function getMainSessions(options) {
38498
38528
  return sessions.sort((a, b) => b.time.updated - a.time.updated);
38499
38529
  }
38500
38530
  async function getAllSessions() {
38501
- if (!existsSync46(MESSAGE_STORAGE4))
38531
+ if (!existsSync45(MESSAGE_STORAGE4))
38502
38532
  return [];
38503
38533
  const sessions = [];
38504
38534
  async function scanDirectory(dir) {
@@ -38523,16 +38553,16 @@ async function getAllSessions() {
38523
38553
  return [...new Set(sessions)];
38524
38554
  }
38525
38555
  function getMessageDir9(sessionID) {
38526
- if (!existsSync46(MESSAGE_STORAGE4))
38556
+ if (!existsSync45(MESSAGE_STORAGE4))
38527
38557
  return "";
38528
38558
  const directPath = join52(MESSAGE_STORAGE4, sessionID);
38529
- if (existsSync46(directPath)) {
38559
+ if (existsSync45(directPath)) {
38530
38560
  return directPath;
38531
38561
  }
38532
38562
  try {
38533
- for (const dir of readdirSync15(MESSAGE_STORAGE4)) {
38563
+ for (const dir of readdirSync14(MESSAGE_STORAGE4)) {
38534
38564
  const sessionPath = join52(MESSAGE_STORAGE4, dir, sessionID);
38535
- if (existsSync46(sessionPath)) {
38565
+ if (existsSync45(sessionPath)) {
38536
38566
  return sessionPath;
38537
38567
  }
38538
38568
  }
@@ -38546,7 +38576,7 @@ function sessionExists(sessionID) {
38546
38576
  }
38547
38577
  async function readSessionMessages(sessionID) {
38548
38578
  const messageDir = getMessageDir9(sessionID);
38549
- if (!messageDir || !existsSync46(messageDir))
38579
+ if (!messageDir || !existsSync45(messageDir))
38550
38580
  return [];
38551
38581
  const messages = [];
38552
38582
  try {
@@ -38582,7 +38612,7 @@ async function readSessionMessages(sessionID) {
38582
38612
  }
38583
38613
  async function readParts2(messageID) {
38584
38614
  const partDir = join52(PART_STORAGE4, messageID);
38585
- if (!existsSync46(partDir))
38615
+ if (!existsSync45(partDir))
38586
38616
  return [];
38587
38617
  const parts = [];
38588
38618
  try {
@@ -38603,7 +38633,7 @@ async function readParts2(messageID) {
38603
38633
  return parts.sort((a, b) => a.id.localeCompare(b.id));
38604
38634
  }
38605
38635
  async function readSessionTodos(sessionID) {
38606
- if (!existsSync46(TODO_DIR2))
38636
+ if (!existsSync45(TODO_DIR2))
38607
38637
  return [];
38608
38638
  try {
38609
38639
  const allFiles = await readdir(TODO_DIR2);
@@ -38630,10 +38660,10 @@ async function readSessionTodos(sessionID) {
38630
38660
  return [];
38631
38661
  }
38632
38662
  async function readSessionTranscript(sessionID) {
38633
- if (!existsSync46(TRANSCRIPT_DIR2))
38663
+ if (!existsSync45(TRANSCRIPT_DIR2))
38634
38664
  return 0;
38635
38665
  const transcriptFile = join52(TRANSCRIPT_DIR2, `${sessionID}.jsonl`);
38636
- if (!existsSync46(transcriptFile))
38666
+ if (!existsSync45(transcriptFile))
38637
38667
  return 0;
38638
38668
  try {
38639
38669
  const content = await readFile(transcriptFile, "utf-8");
@@ -39104,13 +39134,13 @@ var interactive_bash = tool({
39104
39134
  });
39105
39135
  // src/tools/skill/constants.ts
39106
39136
  var TOOL_DESCRIPTION_NO_SKILLS = "Load a skill to get detailed instructions for a specific task. No skills are currently available.";
39107
- var TOOL_DESCRIPTION_PREFIX = `Load a skill to get detailed instructions for a specific task.
39137
+ var TOOL_DESCRIPTION_PREFIX2 = `Load a skill to get detailed instructions for a specific task.
39108
39138
 
39109
39139
  Skills provide specialized knowledge and step-by-step guidance.
39110
39140
  Use this when a task matches an available skill's description.`;
39111
39141
  // src/tools/skill/tools.ts
39112
39142
  import { dirname as dirname12 } from "path";
39113
- import { readFileSync as readFileSync30 } from "fs";
39143
+ import { readFileSync as readFileSync29 } from "fs";
39114
39144
  function loadedSkillToInfo(skill) {
39115
39145
  return {
39116
39146
  name: skill.name,
@@ -39153,7 +39183,7 @@ async function extractSkillBody(skill) {
39153
39183
  return templateMatch2 ? templateMatch2[1].trim() : fullTemplate;
39154
39184
  }
39155
39185
  if (skill.path) {
39156
- const content = readFileSync30(skill.path, "utf-8");
39186
+ const content = readFileSync29(skill.path, "utf-8");
39157
39187
  const { body } = parseFrontmatter(content);
39158
39188
  return body.trim();
39159
39189
  }
@@ -39221,16 +39251,35 @@ async function formatMcpCapabilities(skill, manager, sessionID) {
39221
39251
  `);
39222
39252
  }
39223
39253
  function createSkillTool(options = {}) {
39224
- const skills = options.skills ?? discoverSkills({ includeClaudeCodePaths: !options.opencodeOnly });
39225
- const skillInfos = skills.map(loadedSkillToInfo);
39226
- const description = skillInfos.length === 0 ? TOOL_DESCRIPTION_NO_SKILLS : TOOL_DESCRIPTION_PREFIX + formatSkillsXml(skillInfos);
39254
+ let cachedSkills = null;
39255
+ let cachedDescription = null;
39256
+ const getSkills = async () => {
39257
+ if (options.skills)
39258
+ return options.skills;
39259
+ if (cachedSkills)
39260
+ return cachedSkills;
39261
+ cachedSkills = await discoverSkills({ includeClaudeCodePaths: !options.opencodeOnly });
39262
+ return cachedSkills;
39263
+ };
39264
+ const getDescription = async () => {
39265
+ if (cachedDescription)
39266
+ return cachedDescription;
39267
+ const skills = await getSkills();
39268
+ const skillInfos = skills.map(loadedSkillToInfo);
39269
+ cachedDescription = skillInfos.length === 0 ? TOOL_DESCRIPTION_NO_SKILLS : TOOL_DESCRIPTION_PREFIX2 + formatSkillsXml(skillInfos);
39270
+ return cachedDescription;
39271
+ };
39272
+ getDescription();
39227
39273
  return tool({
39228
- description,
39274
+ get description() {
39275
+ return cachedDescription ?? TOOL_DESCRIPTION_PREFIX2;
39276
+ },
39229
39277
  args: {
39230
39278
  name: tool.schema.string().describe("The skill identifier from available_skills (e.g., 'code-review')")
39231
39279
  },
39232
39280
  async execute(args) {
39233
- const skill = options.skills ? skills.find((s) => s.name === args.name) : skills.find((s) => s.name === args.name);
39281
+ const skills = await getSkills();
39282
+ const skill = skills.find((s) => s.name === args.name);
39234
39283
  if (!skill) {
39235
39284
  const available = skills.map((s) => s.name).join(", ");
39236
39285
  throw new Error(`Skill "${args.name}" not found. Available skills: ${available || "none"}`);
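In `createSkillTool` above, `cachedSkills` stores the resolved array, so two callers that reach `getSkills()` before the first discovery resolves could each run `discoverSkills` once. If that ever mattered, the in-flight promise could be memoized instead, the same shape `getAstGrepPath` uses with `initPromise2` earlier in this bundle. A hedged sketch of that alternative, not what this code does:

```typescript
// Illustrative helper, not part of the package: memoize the promise itself so
// concurrent first calls share a single discovery pass.
function memoizeAsync<T>(fn: () => Promise<T>): () => Promise<T> {
  let inflight: Promise<T> | null = null;
  return () => (inflight ??= fn());
}

// e.g. const getSkills = memoizeAsync(() => discoverSkills({ includeClaudeCodePaths: true }));
```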
@@ -39401,7 +39450,7 @@ function createSkillMcpTool(options) {
39401
39450
  });
39402
39451
  }
39403
39452
  // src/tools/background-task/tools.ts
39404
- import { existsSync as existsSync47, readdirSync as readdirSync16 } from "fs";
39453
+ import { existsSync as existsSync46, readdirSync as readdirSync15 } from "fs";
39405
39454
  import { join as join53 } from "path";
39406
39455
 
39407
39456
  // src/tools/background-task/constants.ts
@@ -39413,14 +39462,14 @@ var BACKGROUND_CANCEL_DESCRIPTION = `Cancel running background task(s). Use all=
39413
39462
 
39414
39463
  // src/tools/background-task/tools.ts
39415
39464
  function getMessageDir10(sessionID) {
39416
- if (!existsSync47(MESSAGE_STORAGE))
39465
+ if (!existsSync46(MESSAGE_STORAGE))
39417
39466
  return null;
39418
39467
  const directPath = join53(MESSAGE_STORAGE, sessionID);
39419
- if (existsSync47(directPath))
39468
+ if (existsSync46(directPath))
39420
39469
  return directPath;
39421
- for (const dir of readdirSync16(MESSAGE_STORAGE)) {
39470
+ for (const dir of readdirSync15(MESSAGE_STORAGE)) {
39422
39471
  const sessionPath = join53(MESSAGE_STORAGE, dir, sessionID);
39423
- if (existsSync47(sessionPath))
39472
+ if (existsSync46(sessionPath))
39424
39473
  return sessionPath;
39425
39474
  }
39426
39475
  return null;
@@ -39999,25 +40048,24 @@ var builtinTools = {
39999
40048
  ast_grep_replace,
40000
40049
  grep,
40001
40050
  glob,
40002
- slashcommand,
40003
40051
  session_list,
40004
40052
  session_read,
40005
40053
  session_search,
40006
40054
  session_info
40007
40055
  };
40008
40056
  // src/features/background-agent/manager.ts
40009
- import { existsSync as existsSync48, readdirSync as readdirSync17 } from "fs";
40057
+ import { existsSync as existsSync47, readdirSync as readdirSync16 } from "fs";
40010
40058
  import { join as join54 } from "path";
40011
40059
  var TASK_TTL_MS = 30 * 60 * 1000;
40012
40060
  function getMessageDir11(sessionID) {
40013
- if (!existsSync48(MESSAGE_STORAGE))
40061
+ if (!existsSync47(MESSAGE_STORAGE))
40014
40062
  return null;
40015
40063
  const directPath = join54(MESSAGE_STORAGE, sessionID);
40016
- if (existsSync48(directPath))
40064
+ if (existsSync47(directPath))
40017
40065
  return directPath;
40018
- for (const dir of readdirSync17(MESSAGE_STORAGE)) {
40066
+ for (const dir of readdirSync16(MESSAGE_STORAGE)) {
40019
40067
  const sessionPath = join54(MESSAGE_STORAGE, dir, sessionID);
40020
- if (existsSync48(sessionPath))
40068
+ if (existsSync47(sessionPath))
40021
40069
  return sessionPath;
40022
40070
  }
40023
40071
  return null;
@@ -43178,11 +43226,12 @@ class SkillMcpManager {
43178
43226
  }
43179
43227
  }
43180
43228
  // src/plugin-config.ts
43181
- import * as fs10 from "fs";
43229
+ import * as fs11 from "fs";
43182
43230
  import * as path7 from "path";
43183
43231
 
43184
43232
  // src/mcp/types.ts
43185
- var McpNameSchema = exports_external.enum(["websearch_exa", "context7", "grep_app"]);
43233
+ var McpNameSchema = exports_external.enum(["context7", "grep_app"]);
43234
+ var AnyMcpNameSchema = exports_external.string().min(1);
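Here the `McpNameSchema` enum shrinks to the two remaining built-in MCPs and a permissive `AnyMcpNameSchema` (any non-empty string) is added; a later hunk switches `disabled_mcps` to the permissive schema, so configs that disable arbitrary MCP names, including the removed `websearch_exa`, keep validating. A minimal sketch of the before/after behaviour, assuming `exports_external` is the bundled zod namespace (it looks like it, but that is an inference):

```typescript
import { z } from "zod";

// Before: only known built-in names validated.
const McpName = z.enum(["context7", "grep_app"]);
// After: disabled_mcps accepts any non-empty string.
const AnyMcpName = z.string().min(1);

const Config = z.object({
  disabled_mcps: z.array(AnyMcpName).optional(),
});

console.log(Config.safeParse({ disabled_mcps: ["websearch_exa"] }).success); // true
console.log(McpName.safeParse("websearch_exa").success); // false
```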
43186
43235
 
43187
43236
  // src/config/schema.ts
43188
43237
  var PermissionValue = exports_external.enum(["ask", "allow", "deny"]);
@@ -43380,7 +43429,7 @@ var RalphLoopConfigSchema = exports_external.object({
43380
43429
  });
43381
43430
  var OhMyOpenCodeConfigSchema = exports_external.object({
43382
43431
  $schema: exports_external.string().optional(),
43383
- disabled_mcps: exports_external.array(McpNameSchema).optional(),
43432
+ disabled_mcps: exports_external.array(AnyMcpNameSchema).optional(),
43384
43433
  disabled_agents: exports_external.array(BuiltinAgentNameSchema).optional(),
43385
43434
  disabled_skills: exports_external.array(BuiltinSkillNameSchema).optional(),
43386
43435
  disabled_hooks: exports_external.array(HookNameSchema).optional(),
@@ -43398,8 +43447,8 @@ var OhMyOpenCodeConfigSchema = exports_external.object({
43398
43447
  // src/plugin-config.ts
43399
43448
  function loadConfigFromPath2(configPath, ctx) {
43400
43449
  try {
43401
- if (fs10.existsSync(configPath)) {
43402
- const content = fs10.readFileSync(configPath, "utf-8");
43450
+ if (fs11.existsSync(configPath)) {
43451
+ const content = fs11.readFileSync(configPath, "utf-8");
43403
43452
  const rawConfig = parseJsonc(content);
43404
43453
  migrateConfigFile(configPath, rawConfig);
43405
43454
  const result = OhMyOpenCodeConfigSchema.safeParse(rawConfig);
@@ -44115,9 +44164,9 @@ var SISYPHUS_SOFT_GUIDELINES = `## Soft Guidelines
44115
44164
  </Constraints>
44116
44165
 
44117
44166
  `;
44118
- function buildDynamicSisyphusPrompt(availableAgents, availableTools = [], availableSkills2 = []) {
44119
- const keyTriggers = buildKeyTriggersSection(availableAgents, availableSkills2);
44120
- const toolSelection = buildToolSelectionTable(availableAgents, availableTools, availableSkills2);
44167
+ function buildDynamicSisyphusPrompt(availableAgents, availableTools = [], availableSkills = []) {
44168
+ const keyTriggers = buildKeyTriggersSection(availableAgents, availableSkills);
44169
+ const toolSelection = buildToolSelectionTable(availableAgents, availableTools, availableSkills);
44121
44170
  const exploreSection = buildExploreSection(availableAgents);
44122
44171
  const librarianSection = buildLibrarianSection(availableAgents);
44123
44172
  const frontendSection = buildFrontendSection(availableAgents);
@@ -44191,9 +44240,9 @@ function buildDynamicSisyphusPrompt(availableAgents, availableTools = [], availa
44191
44240
  return sections.filter((s) => s !== "").join(`
44192
44241
  `);
44193
44242
  }
44194
- function createSisyphusAgent(model = DEFAULT_MODEL, availableAgents, availableToolNames, availableSkills2) {
44243
+ function createSisyphusAgent(model = DEFAULT_MODEL, availableAgents, availableToolNames, availableSkills) {
44195
44244
  const tools4 = availableToolNames ? categorizeTools(availableToolNames) : [];
44196
- const skills = availableSkills2 ?? [];
44245
+ const skills = availableSkills ?? [];
44197
44246
  const prompt = availableAgents ? buildDynamicSisyphusPrompt(availableAgents, tools4, skills) : buildDynamicSisyphusPrompt([], tools4, skills);
44198
44247
  const base = {
44199
44248
  description: "Sisyphus - Powerful AI orchestrator from OhMyOpenCode. Plans obsessively with todos, assesses search complexity before exploration, delegates strategically to specialized agents. Uses explore for internal code (parallel-friendly), librarian only for external docs, and always delegates UI work to frontend engineer.",
@@ -44376,10 +44425,10 @@ Classify EVERY request into one of these categories before taking action:
44376
44425
 
44377
44426
  | Type | Trigger Examples | Tools |
44378
44427
  |------|------------------|-------|
44379
- | **TYPE A: CONCEPTUAL** | "How do I use X?", "Best practice for Y?" | context7 + websearch_exa (parallel) |
44428
+ | **TYPE A: CONCEPTUAL** | "How do I use X?", "Best practice for Y?" | context7 + web search (if available) in parallel |
44380
44429
  | **TYPE B: IMPLEMENTATION** | "How does X implement Y?", "Show me source of Z" | gh clone + read + blame |
44381
- | **TYPE C: CONTEXT** | "Why was this changed?", "History of X?" | gh issues/prs + git log/blame |
44382
- | **TYPE D: COMPREHENSIVE** | Complex/ambiguous requests | ALL tools in parallel |
44430
+ | **TYPE C: CONTEXT** | "Why was this changed?", "What's the history?", "Related issues/PRs?" | gh issues/prs + git log/blame |
44431
+ | **TYPE D: COMPREHENSIVE** | Complex/ambiguous requests | ALL available tools in parallel |
44383
44432
 
44384
44433
  ---
44385
44434
 
@@ -44388,12 +44437,12 @@ Classify EVERY request into one of these categories before taking action:
44388
44437
  ### TYPE A: CONCEPTUAL QUESTION
44389
44438
  **Trigger**: "How do I...", "What is...", "Best practice for...", rough/general questions
44390
44439
 
44391
- **Execute in parallel (3+ calls)**:
44440
+ **Execute in parallel (2+ calls)**:
44392
44441
  \`\`\`
44393
44442
  Tool 1: context7_resolve-library-id("library-name")
44394
44443
  \u2192 then context7_get-library-docs(id, topic: "specific-topic")
44395
- Tool 2: websearch_exa_web_search_exa("library-name topic 2025")
44396
- Tool 3: grep_app_searchGitHub(query: "usage pattern", language: ["TypeScript"])
44444
+ Tool 2: grep_app_searchGitHub(query: "usage pattern", language: ["TypeScript"])
44445
+ Tool 3 (optional): If web search is available, search "library-name topic 2025"
44397
44446
  \`\`\`
44398
44447
 
44399
44448
  **Output**: Summarize findings with links to official docs and real-world examples.
@@ -44455,21 +44504,22 @@ gh api repos/owner/repo/pulls/<number>/files
44455
44504
  ### TYPE D: COMPREHENSIVE RESEARCH
44456
44505
  **Trigger**: Complex questions, ambiguous requests, "deep dive into..."
44457
44506
 
44458
- **Execute ALL in parallel (6+ calls)**:
44507
+ **Execute ALL available tools in parallel (5+ calls)**:
44459
44508
  \`\`\`
44460
- // Documentation & Web
44509
+ // Documentation
44461
44510
  Tool 1: context7_resolve-library-id \u2192 context7_get-library-docs
44462
- Tool 2: websearch_exa_web_search_exa("topic recent updates")
44463
44511
 
44464
44512
  // Code Search
44465
- Tool 3: grep_app_searchGitHub(query: "pattern1", language: [...])
44466
- Tool 4: grep_app_searchGitHub(query: "pattern2", useRegexp: true)
44513
+ Tool 2: grep_app_searchGitHub(query: "pattern1", language: [...])
44514
+ Tool 3: grep_app_searchGitHub(query: "pattern2", useRegexp: true)
44467
44515
 
44468
44516
  // Source Analysis
44469
- Tool 5: gh repo clone owner/repo \${TMPDIR:-/tmp}/repo -- --depth 1
44517
+ Tool 4: gh repo clone owner/repo \${TMPDIR:-/tmp}/repo -- --depth 1
44470
44518
 
44471
44519
  // Context
44472
- Tool 6: gh search issues "topic" --repo owner/repo
44520
+ Tool 5: gh search issues "topic" --repo owner/repo
44521
+
44522
+ // Optional: If web search is available, search for recent updates
44473
44523
  \`\`\`
44474
44524
 
44475
44525
  ---
@@ -44515,7 +44565,6 @@ https://github.com/tanstack/query/blob/abc123def/packages/react-query/src/useQue
44515
44565
  | Purpose | Tool | Command/Usage |
44516
44566
  |---------|------|---------------|
44517
44567
  | **Official Docs** | context7 | \`context7_resolve-library-id\` \u2192 \`context7_get-library-docs\` |
44518
- | **Latest Info** | websearch_exa | \`websearch_exa_web_search_exa("query 2025")\` |
44519
44568
  | **Fast Code Search** | grep_app | \`grep_app_searchGitHub(query, language, useRegexp)\` |
44520
44569
  | **Deep Code Search** | gh CLI | \`gh search code "query" --repo owner/repo\` |
44521
44570
  | **Clone Repo** | gh CLI | \`gh repo clone owner/repo \${TMPDIR:-/tmp}/name -- --depth 1\` |
@@ -44524,6 +44573,7 @@ https://github.com/tanstack/query/blob/abc123def/packages/react-query/src/useQue
44524
44573
  | **Release Info** | gh CLI | \`gh api repos/owner/repo/releases/latest\` |
44525
44574
  | **Git History** | git | \`git log\`, \`git blame\`, \`git show\` |
44526
44575
  | **Read URL** | webfetch | \`webfetch(url)\` for blog posts, SO threads |
44576
+ | **Web Search** | (if available) | Use any available web search tool for latest info |
44527
44577
 
44528
44578
  ### Temp Directory
44529
44579
 
@@ -45170,15 +45220,18 @@ function createBuiltinAgents(disabledAgents = [], agentOverrides = {}, directory
45170
45220
  return result;
45171
45221
  }
45172
45222
  // src/features/claude-code-command-loader/loader.ts
45173
- import { existsSync as existsSync50, readdirSync as readdirSync18, readFileSync as readFileSync32, realpathSync as realpathSync2 } from "fs";
45223
+ import { promises as fs12 } from "fs";
45174
45224
  import { join as join56, basename as basename5 } from "path";
45175
- function loadCommandsFromDir(commandsDir, scope, visited = new Set, prefix = "") {
45176
- if (!existsSync50(commandsDir)) {
45225
+ import { homedir as homedir16 } from "os";
45226
+ async function loadCommandsFromDir(commandsDir, scope, visited = new Set, prefix = "") {
45227
+ try {
45228
+ await fs12.access(commandsDir);
45229
+ } catch {
45177
45230
  return [];
45178
45231
  }
45179
45232
  let realPath;
45180
45233
  try {
45181
- realPath = realpathSync2(commandsDir);
45234
+ realPath = await fs12.realpath(commandsDir);
45182
45235
  } catch (error45) {
45183
45236
  log(`Failed to resolve command directory: ${commandsDir}`, error45);
45184
45237
  return [];
@@ -45189,7 +45242,7 @@ function loadCommandsFromDir(commandsDir, scope, visited = new Set, prefix = "")
45189
45242
  visited.add(realPath);
45190
45243
  let entries;
45191
45244
  try {
45192
- entries = readdirSync18(commandsDir, { withFileTypes: true });
45245
+ entries = await fs12.readdir(commandsDir, { withFileTypes: true });
45193
45246
  } catch (error45) {
45194
45247
  log(`Failed to read command directory: ${commandsDir}`, error45);
45195
45248
  return [];
@@ -45201,7 +45254,8 @@ function loadCommandsFromDir(commandsDir, scope, visited = new Set, prefix = "")
45201
45254
  continue;
45202
45255
  const subDirPath = join56(commandsDir, entry.name);
45203
45256
  const subPrefix = prefix ? `${prefix}:${entry.name}` : entry.name;
45204
- commands.push(...loadCommandsFromDir(subDirPath, scope, visited, subPrefix));
45257
+ const subCommands = await loadCommandsFromDir(subDirPath, scope, visited, subPrefix);
45258
+ commands.push(...subCommands);
45205
45259
  continue;
45206
45260
  }
45207
45261
  if (!isMarkdownFile(entry))
@@ -45210,7 +45264,7 @@ function loadCommandsFromDir(commandsDir, scope, visited = new Set, prefix = "")
45210
45264
  const baseCommandName = basename5(entry.name, ".md");
45211
45265
  const commandName = prefix ? `${prefix}:${baseCommandName}` : baseCommandName;
45212
45266
  try {
45213
- const content = readFileSync32(commandPath, "utf-8");
45267
+ const content = await fs12.readFile(commandPath, "utf-8");
45214
45268
  const { data, body } = parseFrontmatter(content);
45215
45269
  const wrappedTemplate = `<command-instruction>
45216
45270
  ${body.trim()}
@@ -45252,25 +45306,24 @@ function commandsToRecord(commands) {
45252
45306
  }
45253
45307
  return result;
45254
45308
  }
45255
- function loadUserCommands() {
45309
+ async function loadUserCommands() {
45256
45310
  const userCommandsDir = join56(getClaudeConfigDir(), "commands");
45257
- const commands = loadCommandsFromDir(userCommandsDir, "user");
45311
+ const commands = await loadCommandsFromDir(userCommandsDir, "user");
45258
45312
  return commandsToRecord(commands);
45259
45313
  }
45260
- function loadProjectCommands() {
45314
+ async function loadProjectCommands() {
45261
45315
  const projectCommandsDir = join56(process.cwd(), ".claude", "commands");
45262
- const commands = loadCommandsFromDir(projectCommandsDir, "project");
45316
+ const commands = await loadCommandsFromDir(projectCommandsDir, "project");
45263
45317
  return commandsToRecord(commands);
45264
45318
  }
45265
- function loadOpencodeGlobalCommands() {
45266
- const { homedir: homedir16 } = __require("os");
45319
+ async function loadOpencodeGlobalCommands() {
45267
45320
  const opencodeCommandsDir = join56(homedir16(), ".config", "opencode", "command");
45268
- const commands = loadCommandsFromDir(opencodeCommandsDir, "opencode");
45321
+ const commands = await loadCommandsFromDir(opencodeCommandsDir, "opencode");
45269
45322
  return commandsToRecord(commands);
45270
45323
  }
45271
- function loadOpencodeProjectCommands() {
45324
+ async function loadOpencodeProjectCommands() {
45272
45325
  const opencodeProjectDir = join56(process.cwd(), ".opencode", "command");
45273
- const commands = loadCommandsFromDir(opencodeProjectDir, "opencode-project");
45326
+ const commands = await loadCommandsFromDir(opencodeProjectDir, "opencode-project");
45274
45327
  return commandsToRecord(commands);
45275
45328
  }
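These loader hunks convert the Claude command loader from sync `fs` calls to `fs.promises`: the `existsSync` guard becomes an `fs.access` try/catch, `realpathSync`/`readdirSync`/`readFileSync` become their awaited counterparts, and the directory recursion is awaited. A minimal, simplified sketch of that pattern (no symlink-cycle tracking or frontmatter parsing; the `.md` filter is an assumption standing in for `isMarkdownFile`):

```typescript
import { promises as fs } from "fs";
import { join } from "path";

async function listMarkdownFiles(dir: string): Promise<string[]> {
  try {
    await fs.access(dir); // replaces the old existsSync(dir) guard
  } catch {
    return []; // directory missing: same early return as above
  }
  const entries = await fs.readdir(dir, { withFileTypes: true });
  const files: string[] = [];
  for (const entry of entries) {
    const full = join(dir, entry.name);
    if (entry.isDirectory()) {
      files.push(...(await listMarkdownFiles(full))); // awaited recursion
    } else if (entry.name.endsWith(".md")) {
      files.push(full);
    }
  }
  return files;
}
```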
45276
45329
  // src/features/builtin-commands/templates/init-deep.ts
@@ -45614,6 +45667,632 @@ This will:
45614
45667
 
45615
45668
  Check if a loop is active and cancel it. Inform the user of the result.`;
45616
45669
 
45670
+ // src/features/builtin-commands/templates/refactor.ts
45671
+ var REFACTOR_TEMPLATE = `# Intelligent Refactor Command
45672
+
45673
+ ## Usage
45674
+ \`\`\`
45675
+ /refactor <refactoring-target> [--scope=<file|module|project>] [--strategy=<safe|aggressive>]
45676
+
45677
+ Arguments:
45678
+ refactoring-target: What to refactor. Can be:
45679
+ - File path: src/auth/handler.ts
45680
+ - Symbol name: "AuthService class"
45681
+ - Pattern: "all functions using deprecated API"
45682
+ - Description: "extract validation logic into separate module"
45683
+
45684
+ Options:
45685
+ --scope: Refactoring scope (default: module)
45686
+ - file: Single file only
45687
+ - module: Module/directory scope
45688
+ - project: Entire codebase
45689
+
45690
+ --strategy: Risk tolerance (default: safe)
45691
+ - safe: Conservative, maximum test coverage required
45692
+ - aggressive: Allow broader changes with adequate coverage
45693
+ \`\`\`
45694
+
45695
+ ## What This Command Does
45696
+
45697
+ Performs intelligent, deterministic refactoring with full codebase awareness. Unlike blind search-and-replace, this command:
45698
+
45699
+ 1. **Understands your intent** - Analyzes what you actually want to achieve
45700
+ 2. **Maps the codebase** - Builds a definitive codemap before touching anything
45701
+ 3. **Assesses risk** - Evaluates test coverage and determines verification strategy
45702
+ 4. **Plans meticulously** - Creates a detailed plan with Plan agent
45703
+ 5. **Executes precisely** - Step-by-step refactoring with LSP and AST-grep
45704
+ 6. **Verifies constantly** - Runs tests after each change to ensure zero regression
45705
+
45706
+ ---
45707
+
45708
+ # PHASE 0: INTENT GATE (MANDATORY FIRST STEP)
45709
+
45710
+ **BEFORE ANY ACTION, classify and validate the request.**
45711
+
45712
+ ## Step 0.1: Parse Request Type
45713
+
45714
+ | Signal | Classification | Action |
45715
+ |--------|----------------|--------|
45716
+ | Specific file/symbol | Explicit | Proceed to codebase analysis |
45717
+ | "Refactor X to Y" | Clear transformation | Proceed to codebase analysis |
45718
+ | "Improve", "Clean up" | Open-ended | **MUST ask**: "What specific improvement?" |
45719
+ | Ambiguous scope | Uncertain | **MUST ask**: "Which modules/files?" |
45720
+ | Missing context | Incomplete | **MUST ask**: "What's the desired outcome?" |
45721
+
45722
+ ## Step 0.2: Validate Understanding
45723
+
45724
+ Before proceeding, confirm:
45725
+ - [ ] Target is clearly identified
45726
+ - [ ] Desired outcome is understood
45727
+ - [ ] Scope is defined (file/module/project)
45728
+ - [ ] Success criteria can be articulated
45729
+
45730
+ **If ANY of above is unclear, ASK CLARIFYING QUESTION:**
45731
+
45732
+ \`\`\`
45733
+ I want to make sure I understand the refactoring goal correctly.
45734
+
45735
+ **What I understood**: [interpretation]
45736
+ **What I'm unsure about**: [specific ambiguity]
45737
+
45738
+ Options I see:
45739
+ 1. [Option A] - [implications]
45740
+ 2. [Option B] - [implications]
45741
+
45742
+ **My recommendation**: [suggestion with reasoning]
45743
+
45744
+ Should I proceed with [recommendation], or would you prefer differently?
45745
+ \`\`\`
45746
+
45747
+ ## Step 0.3: Create Initial Todos
45748
+
45749
+ **IMMEDIATELY after understanding the request, create todos:**
45750
+
45751
+ \`\`\`
45752
+ TodoWrite([
45753
+ {"id": "phase-1", "content": "PHASE 1: Codebase Analysis - launch parallel explore agents", "status": "pending", "priority": "high"},
45754
+ {"id": "phase-2", "content": "PHASE 2: Build Codemap - map dependencies and impact zones", "status": "pending", "priority": "high"},
45755
+ {"id": "phase-3", "content": "PHASE 3: Test Assessment - analyze test coverage and verification strategy", "status": "pending", "priority": "high"},
45756
+ {"id": "phase-4", "content": "PHASE 4: Plan Generation - invoke Plan agent for detailed refactoring plan", "status": "pending", "priority": "high"},
45757
+ {"id": "phase-5", "content": "PHASE 5: Execute Refactoring - step-by-step with continuous verification", "status": "pending", "priority": "high"},
45758
+ {"id": "phase-6", "content": "PHASE 6: Final Verification - full test suite and regression check", "status": "pending", "priority": "high"}
45759
+ ])
45760
+ \`\`\`
45761
+
45762
+ ---
45763
+
45764
+ # PHASE 1: CODEBASE ANALYSIS (PARALLEL EXPLORATION)
45765
+
45766
+ **Mark phase-1 as in_progress.**
45767
+
45768
+ ## 1.1: Launch Parallel Explore Agents (BACKGROUND)
45769
+
45770
+ Fire ALL of these simultaneously using \`call_omo_agent\`:
45771
+
45772
+ \`\`\`
45773
+ // Agent 1: Find the refactoring target
45774
+ call_omo_agent(
45775
+ subagent_type="explore",
45776
+ run_in_background=true,
45777
+ prompt="Find all occurrences and definitions of [TARGET].
45778
+ Report: file paths, line numbers, usage patterns."
45779
+ )
45780
+
45781
+ // Agent 2: Find related code
45782
+ call_omo_agent(
45783
+ subagent_type="explore",
45784
+ run_in_background=true,
45785
+ prompt="Find all code that imports, uses, or depends on [TARGET].
45786
+ Report: dependency chains, import graphs."
45787
+ )
45788
+
45789
+ // Agent 3: Find similar patterns
45790
+ call_omo_agent(
45791
+ subagent_type="explore",
45792
+ run_in_background=true,
45793
+ prompt="Find similar code patterns to [TARGET] in the codebase.
45794
+ Report: analogous implementations, established conventions."
45795
+ )
45796
+
45797
+ // Agent 4: Find tests
45798
+ call_omo_agent(
45799
+ subagent_type="explore",
45800
+ run_in_background=true,
45801
+ prompt="Find all test files related to [TARGET].
45802
+ Report: test file paths, test case names, coverage indicators."
45803
+ )
45804
+
45805
+ // Agent 5: Architecture context
45806
+ call_omo_agent(
45807
+ subagent_type="explore",
45808
+ run_in_background=true,
45809
+ prompt="Find architectural patterns and module organization around [TARGET].
45810
+ Report: module boundaries, layer structure, design patterns in use."
45811
+ )
45812
+ \`\`\`
45813
+
45814
+ ## 1.2: Direct Tool Exploration (WHILE AGENTS RUN)
45815
+
45816
+ While background agents are running, use direct tools:
45817
+
45818
+ ### LSP Tools for Precise Analysis:
45819
+
45820
+ \`\`\`typescript
45821
+ // Get symbol information at target location
45822
+ lsp_hover(filePath, line, character) // Type info, docs, signatures
45823
+
45824
+ // Find definition(s)
45825
+ lsp_goto_definition(filePath, line, character) // Where is it defined?
45826
+
45827
+ // Find ALL usages across workspace
45828
+ lsp_find_references(filePath, line, character, includeDeclaration=true)
45829
+
45830
+ // Get file structure
45831
+ lsp_document_symbols(filePath) // Hierarchical outline
45832
+
45833
+ // Search symbols by name
45834
+ lsp_workspace_symbols(filePath, query="[target_symbol]")
45835
+
45836
+ // Get current diagnostics
45837
+ lsp_diagnostics(filePath) // Errors, warnings before we start
45838
+ \`\`\`
45839
+
45840
+ ### AST-Grep for Pattern Analysis:
45841
+
45842
+ \`\`\`typescript
45843
+ // Find structural patterns
45844
+ ast_grep_search(
45845
+ pattern="function $NAME($$$) { $$$ }", // or relevant pattern
45846
+ lang="typescript", // or relevant language
45847
+ paths=["src/"]
45848
+ )
45849
+
45850
+ // Preview refactoring (DRY RUN)
45851
+ ast_grep_replace(
45852
+ pattern="[old_pattern]",
45853
+ rewrite="[new_pattern]",
45854
+ lang="[language]",
45855
+ dryRun=true // ALWAYS preview first
45856
+ )
45857
+ \`\`\`
45858
+
45859
+ ### Grep for Text Patterns:
45860
+
45861
+ \`\`\`
45862
+ grep(pattern="[search_term]", path="src/", include="*.ts")
45863
+ \`\`\`
45864
+
45865
+ ## 1.3: Collect Background Results
45866
+
45867
+ \`\`\`
45868
+ background_output(task_id="[agent_1_id]")
45869
+ background_output(task_id="[agent_2_id]")
45870
+ ...
45871
+ \`\`\`
45872
+
45873
+ **Mark phase-1 as completed after all results collected.**
45874
+
45875
+ ---
45876
+
45877
+ # PHASE 2: BUILD CODEMAP (DEPENDENCY MAPPING)
45878
+
45879
+ **Mark phase-2 as in_progress.**
45880
+
45881
+ ## 2.1: Construct Definitive Codemap
45882
+
45883
+ Based on Phase 1 results, build:
45884
+
45885
+ \`\`\`
45886
+ ## CODEMAP: [TARGET]
45887
+
45888
+ ### Core Files (Direct Impact)
45889
+ - \`path/to/file.ts:L10-L50\` - Primary definition
45890
+ - \`path/to/file2.ts:L25\` - Key usage
45891
+
45892
+ ### Dependency Graph
45893
+ \`\`\`
45894
+ [TARGET]
45895
+ \u251C\u2500\u2500 imports from:
45896
+ \u2502 \u251C\u2500\u2500 module-a (types)
45897
+ \u2502 \u2514\u2500\u2500 module-b (utils)
45898
+ \u251C\u2500\u2500 imported by:
45899
+ \u2502 \u251C\u2500\u2500 consumer-1.ts
45900
+ \u2502 \u251C\u2500\u2500 consumer-2.ts
45901
+ \u2502 \u2514\u2500\u2500 consumer-3.ts
45902
+ \u2514\u2500\u2500 used by:
45903
+ \u251C\u2500\u2500 handler.ts (direct call)
45904
+ \u2514\u2500\u2500 service.ts (dependency injection)
45905
+ \`\`\`
45906
+
45907
+ ### Impact Zones
45908
+ | Zone | Risk Level | Files Affected | Test Coverage |
45909
+ |------|------------|----------------|---------------|
45910
+ | Core | HIGH | 3 files | 85% covered |
45911
+ | Consumers | MEDIUM | 8 files | 70% covered |
45912
+ | Edge | LOW | 2 files | 50% covered |
45913
+
45914
+ ### Established Patterns
45915
+ - Pattern A: [description] - used in N places
45916
+ - Pattern B: [description] - established convention
45917
+ \`\`\`
45918
+
45919
+ ## 2.2: Identify Refactoring Constraints
45920
+
45921
+ Based on codemap:
45922
+ - **MUST follow**: [existing patterns identified]
45923
+ - **MUST NOT break**: [critical dependencies]
45924
+ - **Safe to change**: [isolated code zones]
45925
+ - **Requires migration**: [breaking changes impact]
45926
+
45927
+ **Mark phase-2 as completed.**
45928
+
45929
+ ---
45930
+
45931
+ # PHASE 3: TEST ASSESSMENT (VERIFICATION STRATEGY)
45932
+
45933
+ **Mark phase-3 as in_progress.**
45934
+
45935
+ ## 3.1: Detect Test Infrastructure
45936
+
45937
+ \`\`\`bash
45938
+ # Check for test commands
45939
+ cat package.json | jq '.scripts | keys[] | select(test("test"))'
45940
+
45941
+ # Or for Python
45942
+ ls -la pytest.ini pyproject.toml setup.cfg
45943
+
45944
+ # Or for Go
45945
+ ls -la *_test.go
45946
+ \`\`\`
45947
+
45948
+ ## 3.2: Analyze Test Coverage
45949
+
45950
+ \`\`\`
45951
+ // Find all tests related to target
45952
+ call_omo_agent(
45953
+ subagent_type="explore",
45954
+ run_in_background=false, // Need this synchronously
45955
+ prompt="Analyze test coverage for [TARGET]:
45956
+ 1. Which test files cover this code?
45957
+ 2. What test cases exist?
45958
+ 3. Are there integration tests?
45959
+ 4. What edge cases are tested?
45960
+ 5. Estimated coverage percentage?"
45961
+ )
45962
+ \`\`\`
45963
+
45964
+ ## 3.3: Determine Verification Strategy
45965
+
45966
+ Based on test analysis:
45967
+
45968
+ | Coverage Level | Strategy |
45969
+ |----------------|----------|
45970
+ | HIGH (>80%) | Run existing tests after each step |
45971
+ | MEDIUM (50-80%) | Run tests + add safety assertions |
45972
+ | LOW (<50%) | **PAUSE**: Propose adding tests first |
45973
+ | NONE | **BLOCK**: Refuse aggressive refactoring |
45974
+
45975
+ **If coverage is LOW or NONE, ask user:**
45976
+
45977
+ \`\`\`
45978
+ Test coverage for [TARGET] is [LEVEL].
45979
+
45980
+ **Risk Assessment**: Refactoring without adequate tests is dangerous.
45981
+
45982
+ Options:
45983
+ 1. Add tests first, then refactor (RECOMMENDED)
45984
+ 2. Proceed with extra caution, manual verification required
45985
+ 3. Abort refactoring
45986
+
45987
+ Which approach do you prefer?
45988
+ \`\`\`
45989
+
45990
+ ## 3.4: Document Verification Plan
45991
+
45992
+ \`\`\`
45993
+ ## VERIFICATION PLAN
45994
+
45995
+ ### Test Commands
45996
+ - Unit: \`bun test\` / \`npm test\` / \`pytest\` / etc.
45997
+ - Integration: [command if exists]
45998
+ - Type check: \`tsc --noEmit\` / \`pyright\` / etc.
45999
+
46000
+ ### Verification Checkpoints
46001
+ After each refactoring step:
46002
+ 1. lsp_diagnostics \u2192 zero new errors
46003
+ 2. Run test command \u2192 all pass
46004
+ 3. Type check \u2192 clean
46005
+
46006
+ ### Regression Indicators
46007
+ - [Specific test that must pass]
46008
+ - [Behavior that must be preserved]
46009
+ - [API contract that must not change]
46010
+ \`\`\`
46011
+
46012
+ **Mark phase-3 as completed.**
46013
+
46014
+ ---
46015
+
46016
+ # PHASE 4: PLAN GENERATION (PLAN AGENT)
46017
+
46018
+ **Mark phase-4 as in_progress.**
46019
+
46020
+ ## 4.1: Invoke Plan Agent
46021
+
46022
+ \`\`\`
46023
+ Task(
46024
+ subagent_type="plan",
46025
+ prompt="Create a detailed refactoring plan:
46026
+
46027
+ ## Refactoring Goal
46028
+ [User's original request]
46029
+
46030
+ ## Codemap (from Phase 2)
46031
+ [Insert codemap here]
46032
+
46033
+ ## Test Coverage (from Phase 3)
46034
+ [Insert verification plan here]
46035
+
46036
+ ## Constraints
46037
+ - MUST follow existing patterns: [list]
46038
+ - MUST NOT break: [critical paths]
46039
+ - MUST run tests after each step
46040
+
46041
+ ## Requirements
46042
+ 1. Break down into atomic refactoring steps
46043
+ 2. Each step must be independently verifiable
46044
+ 3. Order steps by dependency (what must happen first)
46045
+ 4. Specify exact files and line ranges for each step
46046
+ 5. Include rollback strategy for each step
46047
+ 6. Define commit checkpoints"
46048
+ )
46049
+ \`\`\`
46050
+
46051
+ ## 4.2: Review and Validate Plan
46052
+
46053
+ After receiving plan from Plan agent:
46054
+
46055
+ 1. **Verify completeness**: All identified files addressed?
46056
+ 2. **Verify safety**: Each step reversible?
46057
+ 3. **Verify order**: Dependencies respected?
46058
+ 4. **Verify verification**: Test commands specified?
46059
+
46060
+ ## 4.3: Register Detailed Todos
46061
+
46062
+ Convert Plan agent output into granular todos:
46063
+
46064
+ \`\`\`
46065
+ TodoWrite([
46066
+ // Each step from the plan becomes a todo
46067
+ {"id": "refactor-1", "content": "Step 1: [description]", "status": "pending", "priority": "high"},
46068
+ {"id": "verify-1", "content": "Verify Step 1: run tests", "status": "pending", "priority": "high"},
46069
+ {"id": "refactor-2", "content": "Step 2: [description]", "status": "pending", "priority": "medium"},
46070
+ {"id": "verify-2", "content": "Verify Step 2: run tests", "status": "pending", "priority": "medium"},
46071
+ // ... continue for all steps
46072
+ ])
46073
+ \`\`\`
46074
+
46075
+ **Mark phase-4 as completed.**
46076
+
46077
+ ---
46078
+
46079
+ # PHASE 5: EXECUTE REFACTORING (DETERMINISTIC EXECUTION)
46080
+
46081
+ **Mark phase-5 as in_progress.**
46082
+
46083
+ ## 5.1: Execution Protocol
46084
+
46085
+ For EACH refactoring step:
46086
+
46087
+ ### Pre-Step
46088
+ 1. Mark step todo as \`in_progress\`
46089
+ 2. Read current file state
46090
+ 3. Verify lsp_diagnostics is baseline
46091
+
46092
+ ### Execute Step
46093
+ Use appropriate tool:
46094
+
46095
+ **For Symbol Renames:**
46096
+ \`\`\`typescript
46097
+ lsp_prepare_rename(filePath, line, character) // Validate rename is possible
46098
+ lsp_rename(filePath, line, character, newName) // Execute rename
46099
+ \`\`\`
46100
+
46101
+ **For Pattern Transformations:**
46102
+ \`\`\`typescript
46103
+ // Preview first
46104
+ ast_grep_replace(pattern, rewrite, lang, dryRun=true)
46105
+
46106
+ // If preview looks good, execute
46107
+ ast_grep_replace(pattern, rewrite, lang, dryRun=false)
46108
+ \`\`\`
46109
+
46110
+ **For Structural Changes:**
46111
+ \`\`\`typescript
46112
+ // Use Edit tool for precise changes
46113
+ edit(filePath, oldString, newString)
46114
+ \`\`\`
46115
+
46116
+ ### Post-Step Verification (MANDATORY)
46117
+
46118
+ \`\`\`typescript
46119
+ // 1. Check diagnostics
46120
+ lsp_diagnostics(filePath) // Must be clean or same as baseline
46121
+
46122
+ // 2. Run tests
46123
+ bash("bun test") // Or appropriate test command
46124
+
46125
+ // 3. Type check
46126
+ bash("tsc --noEmit") // Or appropriate type check
46127
+ \`\`\`
46128
+
46129
+ ### Step Completion
46130
+ 1. If verification passes \u2192 Mark step todo as \`completed\`
46131
+ 2. If verification fails \u2192 **STOP AND FIX**
46132
+
46133
+ ## 5.2: Failure Recovery Protocol
46134
+
46135
+ If ANY verification fails:
46136
+
46137
+ 1. **STOP** immediately
46138
+ 2. **REVERT** the failed change
46139
+ 3. **DIAGNOSE** what went wrong
46140
+ 4. **OPTIONS**:
46141
+ - Fix the issue and retry
46142
+ - Skip this step (if optional)
46143
+ - Consult oracle agent for help
46144
+ - Ask user for guidance
46145
+
46146
+ **NEVER proceed to next step with broken tests.**
46147
+
46148
+ ## 5.3: Commit Checkpoints
46149
+
46150
+ After each logical group of changes:
46151
+
46152
+ \`\`\`bash
46153
+ git add [changed-files]
46154
+ git commit -m "refactor(scope): description
46155
+
46156
+ [details of what was changed and why]"
46157
+ \`\`\`
46158
+
46159
+ **Mark phase-5 as completed when all refactoring steps done.**
46160
+
46161
+ ---
46162
+
46163
+ # PHASE 6: FINAL VERIFICATION (REGRESSION CHECK)
46164
+
46165
+ **Mark phase-6 as in_progress.**
46166
+
46167
+ ## 6.1: Full Test Suite
46168
+
46169
+ \`\`\`bash
46170
+ # Run complete test suite
46171
+ bun test # or npm test, pytest, go test, etc.
46172
+ \`\`\`
46173
+
46174
+ ## 6.2: Type Check
46175
+
46176
+ \`\`\`bash
46177
+ # Full type check
46178
+ tsc --noEmit # or equivalent
46179
+ \`\`\`
46180
+
46181
+ ## 6.3: Lint Check
46182
+
46183
+ \`\`\`bash
46184
+ # Run linter
46185
+ eslint . # or equivalent
46186
+ \`\`\`
46187
+
46188
+ ## 6.4: Build Verification (if applicable)
46189
+
46190
+ \`\`\`bash
46191
+ # Ensure build still works
46192
+ bun run build # or npm run build, etc.
46193
+ \`\`\`
46194
+
46195
+ ## 6.5: Final Diagnostics
46196
+
46197
+ \`\`\`typescript
46198
+ // Check all changed files
46199
+ for (const file of changedFiles) {
46200
+ lsp_diagnostics(file) // Must all be clean
46201
+ }
46202
+ \`\`\`
46203
+
46204
+ ## 6.6: Generate Summary
46205
+
46206
+ \`\`\`markdown
46207
+ ## Refactoring Complete
46208
+
46209
+ ### What Changed
46210
+ - [List of changes made]
46211
+
46212
+ ### Files Modified
46213
+ - \`path/to/file.ts\` - [what changed]
46214
+ - \`path/to/file2.ts\` - [what changed]
46215
+
46216
+ ### Verification Results
46217
+ - Tests: PASSED (X/Y passing)
46218
+ - Type Check: CLEAN
46219
+ - Lint: CLEAN
46220
+ - Build: SUCCESS
46221
+
46222
+ ### No Regressions Detected
46223
+ All existing tests pass. No new errors introduced.
46224
+ \`\`\`
46225
+
46226
+ **Mark phase-6 as completed.**
46227
+
46228
+ ---
46229
+
46230
+ # CRITICAL RULES
46231
+
46232
+ ## NEVER DO
46233
+ - Skip lsp_diagnostics check after changes
46234
+ - Proceed with failing tests
46235
+ - Make changes without understanding impact
46236
+ - Use \`as any\`, \`@ts-ignore\`, \`@ts-expect-error\`
46237
+ - Delete tests to make them pass
46238
+ - Commit broken code
46239
+ - Refactor without understanding existing patterns
46240
+
46241
+ ## ALWAYS DO
46242
+ - Understand before changing
46243
+ - Preview before applying (ast_grep dryRun=true)
46244
+ - Verify after every change
46245
+ - Follow existing codebase patterns
46246
+ - Keep todos updated in real-time
46247
+ - Commit at logical checkpoints
46248
+ - Report issues immediately
46249
+
46250
+ ## ABORT CONDITIONS
46251
+ If any of these occur, **STOP and consult user**:
46252
+ - Test coverage is zero for target code
46253
+ - Changes would break public API
46254
+ - Refactoring scope is unclear
46255
+ - 3 consecutive verification failures
46256
+ - User-defined constraints violated
46257
+
46258
+ ---
46259
+
46260
+ # Tool Usage Philosophy
46261
+
46262
+ You already know these tools. Use them intelligently:
46263
+
46264
+ ## LSP Tools
46265
+ Leverage the full LSP toolset (\`lsp_*\`) for precision analysis. Key patterns (sketched after this list):
46266
+ - **Understand before changing**: \`lsp_hover\`, \`lsp_goto_definition\` to grasp context
46267
+ - **Impact analysis**: \`lsp_find_references\` to map all usages before modification
46268
+ - **Safe refactoring**: \`lsp_prepare_rename\` \u2192 \`lsp_rename\` for symbol renames
46269
+ - **Continuous verification**: \`lsp_diagnostics\` after every change
46270
+
46271
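+ A minimal sketch of these patterns combined for a single symbol. Only the \`lsp_prepare_rename\`/\`lsp_rename\` signatures appear earlier in this command; the same \`(filePath, line, character)\` position arguments are assumed for \`lsp_hover\` and \`lsp_find_references\`:
+ 
+ \`\`\`typescript
+ // 1. Understand the symbol before touching it
+ lsp_hover(filePath, line, character)
+ 
+ // 2. Map every usage to gauge the blast radius
+ lsp_find_references(filePath, line, character)
+ 
+ // 3. Rename only after the rename site is validated
+ lsp_prepare_rename(filePath, line, character)
+ lsp_rename(filePath, line, character, newName)
+ 
+ // 4. Confirm nothing regressed
+ lsp_diagnostics(filePath)
+ \`\`\`
+ 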
+ ## AST-Grep
46272
+ Use \`ast_grep_search\` and \`ast_grep_replace\` for structural transformations.
46273
+ **Critical**: Always \`dryRun=true\` first, review, then execute.
46274
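+ 
+ For example, a hypothetical pass that swaps \`console.log\` calls for a project logger; the pattern/rewrite pair uses ast-grep's \`$VAR\` metavariables, and \`logger\` is a placeholder target:
+ 
+ \`\`\`typescript
+ // Preview the structural rewrite and review every reported match
+ ast_grep_replace("console.log($ARG)", "logger.debug($ARG)", "typescript", dryRun=true)
+ 
+ // Apply only after the preview looks right
+ ast_grep_replace("console.log($ARG)", "logger.debug($ARG)", "typescript", dryRun=false)
+ \`\`\`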
+
46275
+ ## Agents
46276
+ - \`explore\`: Parallel codebase pattern discovery
46277
+ - \`plan\`: Detailed refactoring plan generation
46278
+ - \`oracle\`: Consult for complex architectural decisions
46279
+ - \`librarian\`: **Use proactively** when encountering deprecated methods or library migration tasks. Query official docs and OSS examples for modern replacements.
46280
+
46281
+ ## Deprecated Code & Library Migration
46282
+ When you encounter deprecated methods/APIs during refactoring:
46283
+ 1. Fire \`librarian\` to find the recommended modern alternative
46284
+ 2. **DO NOT auto-upgrade to latest version** unless user explicitly requests migration
46285
+ 3. If user requests library migration, use \`librarian\` to fetch latest API docs before making changes
46286
+
46287
+ ---
46288
+
46289
+ **Remember: Refactoring without tests is reckless. Refactoring without understanding is destructive. This command ensures you do neither.**
46290
+
46291
+ <user-request>
46292
+ $ARGUMENTS
46293
+ </user-request>
46294
+ `;
46295
+
45617
46296
  // src/features/builtin-commands/commands.ts
45618
46297
  var BUILTIN_COMMAND_DEFINITIONS = {
45619
46298
  "init-deep": {
@@ -45643,6 +46322,13 @@ $ARGUMENTS
45643
46322
  template: `<command-instruction>
45644
46323
  ${CANCEL_RALPH_TEMPLATE}
45645
46324
  </command-instruction>`
46325
+ },
46326
+ refactor: {
46327
+ description: "(builtin) Intelligent refactoring command with LSP, AST-grep, architecture analysis, codemap, and TDD verification.",
46328
+ template: `<command-instruction>
46329
+ ${REFACTOR_TEMPLATE}
46330
+ </command-instruction>`,
46331
+ argumentHint: "<refactoring-target> [--scope=<file|module|project>] [--strategy=<safe|aggressive>]"
45646
46332
  }
45647
46333
  };
45648
46334
  function loadBuiltinCommands(disabledCommands) {
@@ -45657,7 +46343,7 @@ function loadBuiltinCommands(disabledCommands) {
45657
46343
  return commands;
45658
46344
  }
45659
46345
  // src/features/claude-code-agent-loader/loader.ts
45660
- import { existsSync as existsSync51, readdirSync as readdirSync19, readFileSync as readFileSync33 } from "fs";
46346
+ import { existsSync as existsSync49, readdirSync as readdirSync17, readFileSync as readFileSync31 } from "fs";
45661
46347
  import { join as join57, basename as basename6 } from "path";
45662
46348
  function parseToolsConfig(toolsStr) {
45663
46349
  if (!toolsStr)
@@ -45672,10 +46358,10 @@ function parseToolsConfig(toolsStr) {
45672
46358
  return result;
45673
46359
  }
45674
46360
  function loadAgentsFromDir(agentsDir, scope) {
45675
- if (!existsSync51(agentsDir)) {
46361
+ if (!existsSync49(agentsDir)) {
45676
46362
  return [];
45677
46363
  }
45678
- const entries = readdirSync19(agentsDir, { withFileTypes: true });
46364
+ const entries = readdirSync17(agentsDir, { withFileTypes: true });
45679
46365
  const agents = [];
45680
46366
  for (const entry of entries) {
45681
46367
  if (!isMarkdownFile(entry))
@@ -45683,7 +46369,7 @@ function loadAgentsFromDir(agentsDir, scope) {
45683
46369
  const agentPath = join57(agentsDir, entry.name);
45684
46370
  const agentName = basename6(entry.name, ".md");
45685
46371
  try {
45686
- const content = readFileSync33(agentPath, "utf-8");
46372
+ const content = readFileSync31(agentPath, "utf-8");
45687
46373
  const { data, body } = parseFrontmatter(content);
45688
46374
  const name = data.name || agentName;
45689
46375
  const originalDescription = data.description || "";
@@ -45728,15 +46414,15 @@ function loadProjectAgents() {
45728
46414
  return result;
45729
46415
  }
45730
46416
  // src/features/claude-code-plugin-loader/loader.ts
45731
- import { existsSync as existsSync52, readdirSync as readdirSync20, readFileSync as readFileSync34 } from "fs";
45732
- import { homedir as homedir16 } from "os";
46417
+ import { existsSync as existsSync50, readdirSync as readdirSync18, readFileSync as readFileSync32 } from "fs";
46418
+ import { homedir as homedir17 } from "os";
45733
46419
  import { join as join58, basename as basename7 } from "path";
45734
46420
  var CLAUDE_PLUGIN_ROOT_VAR = "${CLAUDE_PLUGIN_ROOT}";
45735
46421
  function getPluginsBaseDir() {
45736
46422
  if (process.env.CLAUDE_PLUGINS_HOME) {
45737
46423
  return process.env.CLAUDE_PLUGINS_HOME;
45738
46424
  }
45739
- return join58(homedir16(), ".claude", "plugins");
46425
+ return join58(homedir17(), ".claude", "plugins");
45740
46426
  }
45741
46427
  function getInstalledPluginsPath() {
45742
46428
  return join58(getPluginsBaseDir(), "installed_plugins.json");
@@ -45764,11 +46450,11 @@ function resolvePluginPaths(obj, pluginRoot) {
45764
46450
  }
45765
46451
  function loadInstalledPlugins() {
45766
46452
  const dbPath = getInstalledPluginsPath();
45767
- if (!existsSync52(dbPath)) {
46453
+ if (!existsSync50(dbPath)) {
45768
46454
  return null;
45769
46455
  }
45770
46456
  try {
45771
- const content = readFileSync34(dbPath, "utf-8");
46457
+ const content = readFileSync32(dbPath, "utf-8");
45772
46458
  return JSON.parse(content);
45773
46459
  } catch (error45) {
45774
46460
  log("Failed to load installed plugins database", error45);
@@ -45779,15 +46465,15 @@ function getClaudeSettingsPath() {
45779
46465
  if (process.env.CLAUDE_SETTINGS_PATH) {
45780
46466
  return process.env.CLAUDE_SETTINGS_PATH;
45781
46467
  }
45782
- return join58(homedir16(), ".claude", "settings.json");
46468
+ return join58(homedir17(), ".claude", "settings.json");
45783
46469
  }
45784
46470
  function loadClaudeSettings() {
45785
46471
  const settingsPath = getClaudeSettingsPath();
45786
- if (!existsSync52(settingsPath)) {
46472
+ if (!existsSync50(settingsPath)) {
45787
46473
  return null;
45788
46474
  }
45789
46475
  try {
45790
- const content = readFileSync34(settingsPath, "utf-8");
46476
+ const content = readFileSync32(settingsPath, "utf-8");
45791
46477
  return JSON.parse(content);
45792
46478
  } catch (error45) {
45793
46479
  log("Failed to load Claude settings", error45);
@@ -45796,11 +46482,11 @@ function loadClaudeSettings() {
45796
46482
  }
45797
46483
  function loadPluginManifest(installPath) {
45798
46484
  const manifestPath = join58(installPath, ".claude-plugin", "plugin.json");
45799
- if (!existsSync52(manifestPath)) {
46485
+ if (!existsSync50(manifestPath)) {
45800
46486
  return null;
45801
46487
  }
45802
46488
  try {
45803
- const content = readFileSync34(manifestPath, "utf-8");
46489
+ const content = readFileSync32(manifestPath, "utf-8");
45804
46490
  return JSON.parse(content);
45805
46491
  } catch (error45) {
45806
46492
  log(`Failed to load plugin manifest from ${manifestPath}`, error45);
@@ -45847,7 +46533,7 @@ function discoverInstalledPlugins(options) {
45847
46533
  continue;
45848
46534
  }
45849
46535
  const { installPath, scope, version: version2 } = installation;
45850
- if (!existsSync52(installPath)) {
46536
+ if (!existsSync50(installPath)) {
45851
46537
  errors3.push({
45852
46538
  pluginKey,
45853
46539
  installPath,
@@ -45865,21 +46551,21 @@ function discoverInstalledPlugins(options) {
45865
46551
  pluginKey,
45866
46552
  manifest: manifest ?? undefined
45867
46553
  };
45868
- if (existsSync52(join58(installPath, "commands"))) {
46554
+ if (existsSync50(join58(installPath, "commands"))) {
45869
46555
  loadedPlugin.commandsDir = join58(installPath, "commands");
45870
46556
  }
45871
- if (existsSync52(join58(installPath, "agents"))) {
46557
+ if (existsSync50(join58(installPath, "agents"))) {
45872
46558
  loadedPlugin.agentsDir = join58(installPath, "agents");
45873
46559
  }
45874
- if (existsSync52(join58(installPath, "skills"))) {
46560
+ if (existsSync50(join58(installPath, "skills"))) {
45875
46561
  loadedPlugin.skillsDir = join58(installPath, "skills");
45876
46562
  }
45877
46563
  const hooksPath = join58(installPath, "hooks", "hooks.json");
45878
- if (existsSync52(hooksPath)) {
46564
+ if (existsSync50(hooksPath)) {
45879
46565
  loadedPlugin.hooksPath = hooksPath;
45880
46566
  }
45881
46567
  const mcpPath = join58(installPath, ".mcp.json");
45882
- if (existsSync52(mcpPath)) {
46568
+ if (existsSync50(mcpPath)) {
45883
46569
  loadedPlugin.mcpPath = mcpPath;
45884
46570
  }
45885
46571
  plugins.push(loadedPlugin);
@@ -45890,9 +46576,9 @@ function discoverInstalledPlugins(options) {
45890
46576
  function loadPluginCommands(plugins) {
45891
46577
  const commands2 = {};
45892
46578
  for (const plugin2 of plugins) {
45893
- if (!plugin2.commandsDir || !existsSync52(plugin2.commandsDir))
46579
+ if (!plugin2.commandsDir || !existsSync50(plugin2.commandsDir))
45894
46580
  continue;
45895
- const entries = readdirSync20(plugin2.commandsDir, { withFileTypes: true });
46581
+ const entries = readdirSync18(plugin2.commandsDir, { withFileTypes: true });
45896
46582
  for (const entry of entries) {
45897
46583
  if (!isMarkdownFile(entry))
45898
46584
  continue;
@@ -45900,7 +46586,7 @@ function loadPluginCommands(plugins) {
45900
46586
  const commandName = basename7(entry.name, ".md");
45901
46587
  const namespacedName = `${plugin2.name}:${commandName}`;
45902
46588
  try {
45903
- const content = readFileSync34(commandPath, "utf-8");
46589
+ const content = readFileSync32(commandPath, "utf-8");
45904
46590
  const { data, body } = parseFrontmatter(content);
45905
46591
  const wrappedTemplate = `<command-instruction>
45906
46592
  ${body.trim()}
@@ -45932,9 +46618,9 @@ $ARGUMENTS
45932
46618
  function loadPluginSkillsAsCommands(plugins) {
45933
46619
  const skills = {};
45934
46620
  for (const plugin2 of plugins) {
45935
- if (!plugin2.skillsDir || !existsSync52(plugin2.skillsDir))
46621
+ if (!plugin2.skillsDir || !existsSync50(plugin2.skillsDir))
45936
46622
  continue;
45937
- const entries = readdirSync20(plugin2.skillsDir, { withFileTypes: true });
46623
+ const entries = readdirSync18(plugin2.skillsDir, { withFileTypes: true });
45938
46624
  for (const entry of entries) {
45939
46625
  if (entry.name.startsWith("."))
45940
46626
  continue;
@@ -45943,10 +46629,10 @@ function loadPluginSkillsAsCommands(plugins) {
45943
46629
  continue;
45944
46630
  const resolvedPath = resolveSymlink(skillPath);
45945
46631
  const skillMdPath = join58(resolvedPath, "SKILL.md");
45946
- if (!existsSync52(skillMdPath))
46632
+ if (!existsSync50(skillMdPath))
45947
46633
  continue;
45948
46634
  try {
45949
- const content = readFileSync34(skillMdPath, "utf-8");
46635
+ const content = readFileSync32(skillMdPath, "utf-8");
45950
46636
  const { data, body } = parseFrontmatter(content);
45951
46637
  const skillName = data.name || entry.name;
45952
46638
  const namespacedName = `${plugin2.name}:${skillName}`;
@@ -45993,9 +46679,9 @@ function parseToolsConfig2(toolsStr) {
45993
46679
  function loadPluginAgents(plugins) {
45994
46680
  const agents = {};
45995
46681
  for (const plugin2 of plugins) {
45996
- if (!plugin2.agentsDir || !existsSync52(plugin2.agentsDir))
46682
+ if (!plugin2.agentsDir || !existsSync50(plugin2.agentsDir))
45997
46683
  continue;
45998
- const entries = readdirSync20(plugin2.agentsDir, { withFileTypes: true });
46684
+ const entries = readdirSync18(plugin2.agentsDir, { withFileTypes: true });
45999
46685
  for (const entry of entries) {
46000
46686
  if (!isMarkdownFile(entry))
46001
46687
  continue;
@@ -46003,7 +46689,7 @@ function loadPluginAgents(plugins) {
46003
46689
  const agentName = basename7(entry.name, ".md");
46004
46690
  const namespacedName = `${plugin2.name}:${agentName}`;
46005
46691
  try {
46006
- const content = readFileSync34(agentPath, "utf-8");
46692
+ const content = readFileSync32(agentPath, "utf-8");
46007
46693
  const { data, body } = parseFrontmatter(content);
46008
46694
  const name = data.name || agentName;
46009
46695
  const originalDescription = data.description || "";
@@ -46029,7 +46715,7 @@ function loadPluginAgents(plugins) {
46029
46715
  async function loadPluginMcpServers(plugins) {
46030
46716
  const servers = {};
46031
46717
  for (const plugin2 of plugins) {
46032
- if (!plugin2.mcpPath || !existsSync52(plugin2.mcpPath))
46718
+ if (!plugin2.mcpPath || !existsSync50(plugin2.mcpPath))
46033
46719
  continue;
46034
46720
  try {
46035
46721
  const content = await Bun.file(plugin2.mcpPath).text();
@@ -46061,10 +46747,10 @@ async function loadPluginMcpServers(plugins) {
46061
46747
  function loadPluginHooksConfigs(plugins) {
46062
46748
  const configs = [];
46063
46749
  for (const plugin2 of plugins) {
46064
- if (!plugin2.hooksPath || !existsSync52(plugin2.hooksPath))
46750
+ if (!plugin2.hooksPath || !existsSync50(plugin2.hooksPath))
46065
46751
  continue;
46066
46752
  try {
46067
- const content = readFileSync34(plugin2.hooksPath, "utf-8");
46753
+ const content = readFileSync32(plugin2.hooksPath, "utf-8");
46068
46754
  let config3 = JSON.parse(content);
46069
46755
  config3 = resolvePluginPaths(config3, plugin2.installPath);
46070
46756
  configs.push(config3);
@@ -46095,13 +46781,6 @@ async function loadAllPluginComponents(options) {
46095
46781
  errors: errors3
46096
46782
  };
46097
46783
  }
46098
- // src/mcp/websearch-exa.ts
46099
- var websearch_exa = {
46100
- type: "remote",
46101
- url: "https://mcp.exa.ai/mcp?tools=web_search_exa",
46102
- enabled: true
46103
- };
46104
-
46105
46784
  // src/mcp/context7.ts
46106
46785
  var context7 = {
46107
46786
  type: "remote",
@@ -46118,7 +46797,6 @@ var grep_app = {
46118
46797
 
46119
46798
  // src/mcp/index.ts
46120
46799
  var allBuiltinMcps = {
46121
- websearch_exa,
46122
46800
  context7,
46123
46801
  grep_app
46124
46802
  };
@@ -46243,17 +46921,9 @@ function createConfigHandler(deps) {
46243
46921
  log(`Plugin load errors`, { errors: pluginComponents.errors });
46244
46922
  }
46245
46923
  const builtinAgents = createBuiltinAgents(pluginConfig.disabled_agents, pluginConfig.agents, ctx.directory, config3.model);
46246
- const rawUserAgents = pluginConfig.claude_code?.agents ?? true ? loadUserAgents() : {};
46247
- const rawProjectAgents = pluginConfig.claude_code?.agents ?? true ? loadProjectAgents() : {};
46924
+ const userAgents = pluginConfig.claude_code?.agents ?? true ? loadUserAgents() : {};
46925
+ const projectAgents = pluginConfig.claude_code?.agents ?? true ? loadProjectAgents() : {};
46248
46926
  const rawPluginAgents = pluginComponents.agents;
46249
- const userAgents = Object.fromEntries(Object.entries(rawUserAgents).map(([k, v]) => [
46250
- k,
46251
- v ? migrateAgentConfig(v) : v
46252
- ]));
46253
- const projectAgents = Object.fromEntries(Object.entries(rawProjectAgents).map(([k, v]) => [
46254
- k,
46255
- v ? migrateAgentConfig(v) : v
46256
- ]));
46257
46927
  const pluginAgents = Object.fromEntries(Object.entries(rawPluginAgents).map(([k, v]) => [
46258
46928
  k,
46259
46929
  v ? migrateAgentConfig(v) : v
@@ -46285,7 +46955,7 @@ function createConfigHandler(deps) {
46285
46955
  const defaultModel = config3.model;
46286
46956
  const plannerSisyphusBase = {
46287
46957
  model: migratedPlanConfig.model ?? defaultModel,
46288
- mode: "all",
46958
+ mode: "primary",
46289
46959
  prompt: PLAN_SYSTEM_PROMPT,
46290
46960
  permission: PLAN_PERMISSION,
46291
46961
  description: `${configAgent?.plan?.description ?? "Plan agent"} (OhMyOpenCode version)`,
@@ -46363,15 +47033,28 @@ function createConfigHandler(deps) {
46363
47033
  ...pluginComponents.mcpServers
46364
47034
  };
46365
47035
  const builtinCommands = loadBuiltinCommands(pluginConfig.disabled_commands);
46366
- const userCommands = pluginConfig.claude_code?.commands ?? true ? loadUserCommands() : {};
46367
- const opencodeGlobalCommands = loadOpencodeGlobalCommands();
46368
47036
  const systemCommands = config3.command ?? {};
46369
- const projectCommands = pluginConfig.claude_code?.commands ?? true ? loadProjectCommands() : {};
46370
- const opencodeProjectCommands = loadOpencodeProjectCommands();
46371
- const userSkills = pluginConfig.claude_code?.skills ?? true ? loadUserSkills() : {};
46372
- const projectSkills = pluginConfig.claude_code?.skills ?? true ? loadProjectSkills() : {};
46373
- const opencodeGlobalSkills = loadOpencodeGlobalSkills();
46374
- const opencodeProjectSkills = loadOpencodeProjectSkills();
47037
+ const includeClaudeCommands = pluginConfig.claude_code?.commands ?? true;
47038
+ const includeClaudeSkills = pluginConfig.claude_code?.skills ?? true;
47039
+ const [
47040
+ userCommands,
47041
+ projectCommands,
47042
+ opencodeGlobalCommands,
47043
+ opencodeProjectCommands,
47044
+ userSkills,
47045
+ projectSkills,
47046
+ opencodeGlobalSkills,
47047
+ opencodeProjectSkills
47048
+ ] = await Promise.all([
47049
+ includeClaudeCommands ? loadUserCommands() : Promise.resolve({}),
47050
+ includeClaudeCommands ? loadProjectCommands() : Promise.resolve({}),
47051
+ loadOpencodeGlobalCommands(),
47052
+ loadOpencodeProjectCommands(),
47053
+ includeClaudeSkills ? loadUserSkills() : Promise.resolve({}),
47054
+ includeClaudeSkills ? loadProjectSkills() : Promise.resolve({}),
47055
+ loadOpencodeGlobalSkills(),
47056
+ loadOpencodeProjectSkills()
47057
+ ]);
46375
47058
  config3.command = {
46376
47059
  ...builtinCommands,
46377
47060
  ...userCommands,
@@ -46464,10 +47147,10 @@ var OhMyOpenCodePlugin = async (ctx) => {
46464
47147
  });
46465
47148
  const includeClaudeSkills = pluginConfig.claude_code?.skills !== false;
46466
47149
  const [userSkills, globalSkills, projectSkills, opencodeProjectSkills] = await Promise.all([
46467
- includeClaudeSkills ? discoverUserClaudeSkillsAsync() : Promise.resolve([]),
46468
- discoverOpencodeGlobalSkillsAsync(),
46469
- includeClaudeSkills ? discoverProjectClaudeSkillsAsync() : Promise.resolve([]),
46470
- discoverOpencodeProjectSkillsAsync()
47150
+ includeClaudeSkills ? discoverUserClaudeSkills() : Promise.resolve([]),
47151
+ discoverOpencodeGlobalSkills(),
47152
+ includeClaudeSkills ? discoverProjectClaudeSkills() : Promise.resolve([]),
47153
+ discoverOpencodeProjectSkills()
46471
47154
  ]);
46472
47155
  const mergedSkills = mergeSkills(builtinSkills, pluginConfig.skills, userSkills, globalSkills, projectSkills, opencodeProjectSkills);
46473
47156
  const skillMcpManager = new SkillMcpManager;
@@ -46482,6 +47165,11 @@ var OhMyOpenCodePlugin = async (ctx) => {
46482
47165
  getLoadedSkills: () => mergedSkills,
46483
47166
  getSessionID: getSessionIDForMcp
46484
47167
  });
47168
+ const commands2 = discoverCommandsSync();
47169
+ const slashcommandTool = createSlashcommandTool({
47170
+ commands: commands2,
47171
+ skills: mergedSkills
47172
+ });
46485
47173
  const googleAuthHooks = pluginConfig.google_auth !== false ? await createGoogleAntigravityAuthPlugin(ctx) : null;
46486
47174
  const configHandler = createConfigHandler({
46487
47175
  ctx,
@@ -46497,6 +47185,7 @@ var OhMyOpenCodePlugin = async (ctx) => {
46497
47185
  look_at: lookAt,
46498
47186
  skill: skillTool,
46499
47187
  skill_mcp: skillMcpTool,
47188
+ slashcommand: slashcommandTool,
46500
47189
  interactive_bash
46501
47190
  },
46502
47191
  "chat.message": async (input, output) => {