@iloom/cli 0.9.2 → 0.10.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +1 -1
- package/README.md +160 -41
- package/dist/{BranchNamingService-K6XNWQ6C.js → BranchNamingService-25KSZAEM.js} +2 -2
- package/dist/ClaudeContextManager-66GR4BGM.js +14 -0
- package/dist/ClaudeService-7KM5NA5Z.js +13 -0
- package/dist/{GitHubService-TGWJN4V4.js → GitHubService-MEHKHUQP.js} +4 -4
- package/dist/IssueTrackerFactory-NG53YX5S.js +14 -0
- package/dist/{LoomLauncher-73NXL2CL.js → LoomLauncher-TDLZSYG2.js} +9 -9
- package/dist/{MetadataManager-W3C54UYT.js → MetadataManager-5QZSTKNN.js} +2 -2
- package/dist/{ProjectCapabilityDetector-N5L7T4IY.js → ProjectCapabilityDetector-5KSYUTBJ.js} +3 -3
- package/dist/{PromptTemplateManager-36YLQRHP.js → PromptTemplateManager-YOE2SIPG.js} +2 -2
- package/dist/README.md +160 -41
- package/dist/{SettingsManager-AW3JTJHD.js → SettingsManager-FNKCOZMQ.js} +4 -2
- package/dist/agents/iloom-artifact-reviewer.md +11 -0
- package/dist/agents/iloom-code-reviewer.md +14 -0
- package/dist/agents/iloom-issue-analyze-and-plan.md +55 -12
- package/dist/agents/iloom-issue-analyzer.md +49 -6
- package/dist/agents/iloom-issue-complexity-evaluator.md +47 -6
- package/dist/agents/iloom-issue-enhancer.md +86 -7
- package/dist/agents/iloom-issue-implementer.md +48 -7
- package/dist/agents/iloom-issue-planner.md +115 -62
- package/dist/{build-THZI572G.js → build-VHGEMXBA.js} +9 -9
- package/dist/chunk-4232AHNQ.js +35 -0
- package/dist/chunk-4232AHNQ.js.map +1 -0
- package/dist/chunk-4E7LCFUG.js +24 -0
- package/dist/chunk-4E7LCFUG.js.map +1 -0
- package/dist/{chunk-AR5QKYNE.js → chunk-4FGEGQW4.js} +4 -4
- package/dist/{chunk-R4YWBGY6.js → chunk-5FJWO4IT.js} +67 -22
- package/dist/chunk-5FJWO4IT.js.map +1 -0
- package/dist/{chunk-VPTAX5TR.js → chunk-5RPBYK5Q.js} +35 -30
- package/dist/chunk-5RPBYK5Q.js.map +1 -0
- package/dist/{chunk-YKFCCV6S.js → chunk-63QWFWH3.js} +7 -7
- package/dist/chunk-63QWFWH3.js.map +1 -0
- package/dist/{chunk-RI2YL6TK.js → chunk-7VHJNVLF.js} +80 -23
- package/dist/chunk-7VHJNVLF.js.map +1 -0
- package/dist/{chunk-B7U6OKUR.js → chunk-C6HNNJIV.js} +11 -3
- package/dist/chunk-C6HNNJIV.js.map +1 -0
- package/dist/{chunk-A7NJF73J.js → chunk-CVCTIDDK.js} +4 -4
- package/dist/{chunk-Z2TWEXR7.js → chunk-E6KOWMKA.js} +6 -6
- package/dist/chunk-E6KOWMKA.js.map +1 -0
- package/dist/{chunk-3I4ONZRT.js → chunk-EVPZFV3K.js} +10 -10
- package/dist/chunk-EVPZFV3K.js.map +1 -0
- package/dist/{chunk-IZIYLYPK.js → chunk-G5V75JD5.js} +2 -2
- package/dist/chunk-GRISNU6G.js +651 -0
- package/dist/chunk-GRISNU6G.js.map +1 -0
- package/dist/chunk-HEXKPKCK.js +1396 -0
- package/dist/chunk-HEXKPKCK.js.map +1 -0
- package/dist/{chunk-TC7APDKU.js → chunk-I5T677EA.js} +2 -2
- package/dist/{chunk-KBEIQP4G.js → chunk-KB64WNBZ.js} +43 -3
- package/dist/chunk-KB64WNBZ.js.map +1 -0
- package/dist/{chunk-NWMORW3U.js → chunk-KIK2ZFAL.js} +2 -2
- package/dist/{chunk-CWRI4JC3.js → chunk-KKV5WH5M.js} +30 -31
- package/dist/chunk-KKV5WH5M.js.map +1 -0
- package/dist/{chunk-DGG2VY7B.js → chunk-KVHIAWVT.js} +9 -9
- package/dist/chunk-KVHIAWVT.js.map +1 -0
- package/dist/{chunk-OFDN5NKS.js → chunk-KXDRI47U.js} +69 -12
- package/dist/chunk-KXDRI47U.js.map +1 -0
- package/dist/{chunk-NUACL52E.js → chunk-LLHXQS3C.js} +2 -2
- package/dist/chunk-LUKXJSRI.js +73 -0
- package/dist/chunk-LUKXJSRI.js.map +1 -0
- package/dist/{chunk-TL72BGP6.js → chunk-MORRVYPT.js} +2 -2
- package/dist/chunk-OTGH2HRS.js +1427 -0
- package/dist/chunk-OTGH2HRS.js.map +1 -0
- package/dist/{chunk-7ZEHSSUP.js → chunk-P4O6EH46.js} +4 -4
- package/dist/{chunk-KAYXR544.js → chunk-QVLPWNE3.js} +2 -2
- package/dist/chunk-QZWEJVWV.js +207 -0
- package/dist/chunk-QZWEJVWV.js.map +1 -0
- package/dist/chunk-RJ3VBUFK.js +781 -0
- package/dist/chunk-RJ3VBUFK.js.map +1 -0
- package/dist/chunk-RSYT7MVI.js +202 -0
- package/dist/chunk-RSYT7MVI.js.map +1 -0
- package/dist/{chunk-6IIL5M2L.js → chunk-S7PZA6IV.js} +10 -8
- package/dist/{chunk-6IIL5M2L.js.map → chunk-S7PZA6IV.js.map} +1 -1
- package/dist/chunk-SKSYYBCU.js +229 -0
- package/dist/chunk-SKSYYBCU.js.map +1 -0
- package/dist/{chunk-ULSWCPQG.js → chunk-SWSJWA2S.js} +476 -5
- package/dist/chunk-SWSJWA2S.js.map +1 -0
- package/dist/{chunk-KXGQYLFZ.js → chunk-UKBAJ2QQ.js} +61 -7
- package/dist/chunk-UKBAJ2QQ.js.map +1 -0
- package/dist/{chunk-FO5GGFOV.js → chunk-UR5DGNUO.js} +71 -9
- package/dist/chunk-UR5DGNUO.js.map +1 -0
- package/dist/{chunk-QN47QVBX.js → chunk-UUEW5KWB.js} +1 -1
- package/dist/chunk-UUEW5KWB.js.map +1 -0
- package/dist/{chunk-4CO6KG5S.js → chunk-VG45TUYK.js} +53 -7
- package/dist/{chunk-4CO6KG5S.js.map → chunk-VG45TUYK.js.map} +1 -1
- package/dist/{chunk-4LKGCFGG.js → chunk-WWKOVDWC.js} +2 -2
- package/dist/{chunk-KJTVU3HZ.js → chunk-WXIM2WS7.js} +8 -8
- package/dist/chunk-WXIM2WS7.js.map +1 -0
- package/dist/{chunk-VOGGLPG5.js → chunk-YQ57ORTV.js} +14 -1
- package/dist/chunk-YQ57ORTV.js.map +1 -0
- package/dist/{chunk-SOSQILHO.js → chunk-ZNMPGMHY.js} +44 -797
- package/dist/chunk-ZNMPGMHY.js.map +1 -0
- package/dist/{claude-TP2QO3BU.js → claude-7GGEWVEM.js} +2 -2
- package/dist/{cleanup-PJRIFFU4.js → cleanup-6PVAC4NI.js} +85 -34
- package/dist/cleanup-6PVAC4NI.js.map +1 -0
- package/dist/cli.js +630 -801
- package/dist/cli.js.map +1 -1
- package/dist/{commit-IVP3M4HG.js → commit-FZR5XDQG.js} +26 -23
- package/dist/commit-FZR5XDQG.js.map +1 -0
- package/dist/{compile-R2J65HBQ.js → compile-7ALJHZ4N.js} +9 -9
- package/dist/{contribute-VDZXHK5Y.js → contribute-5GKLK3BQ.js} +14 -6
- package/dist/contribute-5GKLK3BQ.js.map +1 -0
- package/dist/{dev-server-7F622OEO.js → dev-server-7SMIB7OF.js} +29 -15
- package/dist/dev-server-7SMIB7OF.js.map +1 -0
- package/dist/{feedback-E7VET7CL.js → feedback-G2GJFN2F.js} +18 -16
- package/dist/{feedback-E7VET7CL.js.map → feedback-G2GJFN2F.js.map} +1 -1
- package/dist/{git-2QDQ2X2S.js → git-GTLKAZRJ.js} +4 -4
- package/dist/hooks/iloom-hook.js +15 -0
- package/dist/ignite-H2O5Y5A2.js +34 -0
- package/dist/ignite-H2O5Y5A2.js.map +1 -0
- package/dist/index.d.ts +482 -58
- package/dist/index.js +1340 -44
- package/dist/index.js.map +1 -1
- package/dist/{init-676DHF6R.js → init-32YOKXRL.js} +57 -21
- package/dist/init-32YOKXRL.js.map +1 -0
- package/dist/{issues-PJSOLOBJ.js → issues-4UUAQ5K6.js} +61 -20
- package/dist/issues-4UUAQ5K6.js.map +1 -0
- package/dist/{lint-CJM7BAIM.js → lint-AAN2NZWG.js} +9 -9
- package/dist/mcp/harness-server.js +140 -0
- package/dist/mcp/harness-server.js.map +1 -0
- package/dist/mcp/issue-management-server.js +2599 -262
- package/dist/mcp/issue-management-server.js.map +1 -1
- package/dist/mcp/recap-server.js +144 -21
- package/dist/mcp/recap-server.js.map +1 -1
- package/dist/{neon-helpers-VVFFTLXE.js → neon-helpers-CQN2PB4S.js} +3 -3
- package/dist/neon-helpers-CQN2PB4S.js.map +1 -0
- package/dist/{open-544H7JF5.js → open-FXWW3VI4.js} +15 -15
- package/dist/open-FXWW3VI4.js.map +1 -0
- package/dist/{plan-Q7ELXDLC.js → plan-RQ5FPIGF.js} +358 -40
- package/dist/plan-RQ5FPIGF.js.map +1 -0
- package/dist/{projects-LH362JZQ.js → projects-2UOXFLNZ.js} +4 -4
- package/dist/prompts/CLAUDE.md +62 -0
- package/dist/prompts/init-prompt.txt +430 -34
- package/dist/prompts/issue-prompt.txt +473 -54
- package/dist/prompts/plan-prompt.txt +140 -19
- package/dist/prompts/pr-prompt.txt +44 -1
- package/dist/prompts/regular-prompt.txt +42 -1
- package/dist/prompts/session-summary-prompt.txt +14 -0
- package/dist/prompts/swarm-orchestrator-prompt.txt +464 -0
- package/dist/{rebase-YND35CIE.js → rebase-6NVLX5V7.js} +21 -12
- package/dist/rebase-6NVLX5V7.js.map +1 -0
- package/dist/{recap-3W7COH7D.js → recap-OMBOKJST.js} +47 -19
- package/dist/recap-OMBOKJST.js.map +1 -0
- package/dist/{run-QUXJKDQQ.js → run-BBXLRIZB.js} +15 -15
- package/dist/run-BBXLRIZB.js.map +1 -0
- package/dist/schema/package-iloom.schema.json +58 -0
- package/dist/schema/settings.schema.json +149 -15
- package/dist/{shell-QGECBLST.js → shell-RF7LTND5.js} +14 -7
- package/dist/shell-RF7LTND5.js.map +1 -0
- package/dist/{summary-G2T4452H.js → summary-WTQZ7XG2.js} +27 -25
- package/dist/summary-WTQZ7XG2.js.map +1 -0
- package/dist/{test-EA5NQFDC.js → test-SGO6I5Z7.js} +9 -9
- package/dist/{test-git-M7LSLEFL.js → test-git-XM4TM65W.js} +4 -4
- package/dist/test-jira-LDTOYFSD.js +96 -0
- package/dist/test-jira-LDTOYFSD.js.map +1 -0
- package/dist/{test-prefix-64NAAUON.js → test-prefix-GBO37XCN.js} +4 -4
- package/dist/{test-webserver-OK6Z5FJM.js → test-webserver-NZ3JTVLL.js} +6 -6
- package/dist/{vscode-AR5NNXXI.js → vscode-6XUGHJKL.js} +7 -7
- package/package.json +5 -1
- package/dist/ClaudeContextManager-HR5JQKAI.js +0 -14
- package/dist/ClaudeService-TK7FMC2X.js +0 -13
- package/dist/chunk-3I4ONZRT.js.map +0 -1
- package/dist/chunk-B7U6OKUR.js.map +0 -1
- package/dist/chunk-CWRI4JC3.js.map +0 -1
- package/dist/chunk-DGG2VY7B.js.map +0 -1
- package/dist/chunk-FJDRTVJX.js +0 -520
- package/dist/chunk-FJDRTVJX.js.map +0 -1
- package/dist/chunk-FO5GGFOV.js.map +0 -1
- package/dist/chunk-KBEIQP4G.js.map +0 -1
- package/dist/chunk-KJTVU3HZ.js.map +0 -1
- package/dist/chunk-KXGQYLFZ.js.map +0 -1
- package/dist/chunk-OFDN5NKS.js.map +0 -1
- package/dist/chunk-QN47QVBX.js.map +0 -1
- package/dist/chunk-R4YWBGY6.js.map +0 -1
- package/dist/chunk-RI2YL6TK.js.map +0 -1
- package/dist/chunk-SOSQILHO.js.map +0 -1
- package/dist/chunk-ULSWCPQG.js.map +0 -1
- package/dist/chunk-VOGGLPG5.js.map +0 -1
- package/dist/chunk-VPTAX5TR.js.map +0 -1
- package/dist/chunk-W6DP5RVR.js +0 -101
- package/dist/chunk-W6DP5RVR.js.map +0 -1
- package/dist/chunk-WHI5KEOX.js +0 -121
- package/dist/chunk-WHI5KEOX.js.map +0 -1
- package/dist/chunk-YKFCCV6S.js.map +0 -1
- package/dist/chunk-Z2TWEXR7.js.map +0 -1
- package/dist/cleanup-PJRIFFU4.js.map +0 -1
- package/dist/commit-IVP3M4HG.js.map +0 -1
- package/dist/contribute-VDZXHK5Y.js.map +0 -1
- package/dist/dev-server-7F622OEO.js.map +0 -1
- package/dist/ignite-IW35CDBD.js +0 -784
- package/dist/ignite-IW35CDBD.js.map +0 -1
- package/dist/init-676DHF6R.js.map +0 -1
- package/dist/issues-PJSOLOBJ.js.map +0 -1
- package/dist/open-544H7JF5.js.map +0 -1
- package/dist/plan-Q7ELXDLC.js.map +0 -1
- package/dist/rebase-YND35CIE.js.map +0 -1
- package/dist/recap-3W7COH7D.js.map +0 -1
- package/dist/run-QUXJKDQQ.js.map +0 -1
- package/dist/shell-QGECBLST.js.map +0 -1
- package/dist/summary-G2T4452H.js.map +0 -1
- /package/dist/{BranchNamingService-K6XNWQ6C.js.map → BranchNamingService-25KSZAEM.js.map} +0 -0
- /package/dist/{ClaudeContextManager-HR5JQKAI.js.map → ClaudeContextManager-66GR4BGM.js.map} +0 -0
- /package/dist/{ClaudeService-TK7FMC2X.js.map → ClaudeService-7KM5NA5Z.js.map} +0 -0
- /package/dist/{GitHubService-TGWJN4V4.js.map → GitHubService-MEHKHUQP.js.map} +0 -0
- /package/dist/{MetadataManager-W3C54UYT.js.map → IssueTrackerFactory-NG53YX5S.js.map} +0 -0
- /package/dist/{LoomLauncher-73NXL2CL.js.map → LoomLauncher-TDLZSYG2.js.map} +0 -0
- /package/dist/{ProjectCapabilityDetector-N5L7T4IY.js.map → MetadataManager-5QZSTKNN.js.map} +0 -0
- /package/dist/{PromptTemplateManager-36YLQRHP.js.map → ProjectCapabilityDetector-5KSYUTBJ.js.map} +0 -0
- /package/dist/{SettingsManager-AW3JTJHD.js.map → PromptTemplateManager-YOE2SIPG.js.map} +0 -0
- /package/dist/{claude-TP2QO3BU.js.map → SettingsManager-FNKCOZMQ.js.map} +0 -0
- /package/dist/{build-THZI572G.js.map → build-VHGEMXBA.js.map} +0 -0
- /package/dist/{chunk-AR5QKYNE.js.map → chunk-4FGEGQW4.js.map} +0 -0
- /package/dist/{chunk-A7NJF73J.js.map → chunk-CVCTIDDK.js.map} +0 -0
- /package/dist/{chunk-IZIYLYPK.js.map → chunk-G5V75JD5.js.map} +0 -0
- /package/dist/{chunk-TC7APDKU.js.map → chunk-I5T677EA.js.map} +0 -0
- /package/dist/{chunk-NWMORW3U.js.map → chunk-KIK2ZFAL.js.map} +0 -0
- /package/dist/{chunk-NUACL52E.js.map → chunk-LLHXQS3C.js.map} +0 -0
- /package/dist/{chunk-TL72BGP6.js.map → chunk-MORRVYPT.js.map} +0 -0
- /package/dist/{chunk-7ZEHSSUP.js.map → chunk-P4O6EH46.js.map} +0 -0
- /package/dist/{chunk-KAYXR544.js.map → chunk-QVLPWNE3.js.map} +0 -0
- /package/dist/{chunk-4LKGCFGG.js.map → chunk-WWKOVDWC.js.map} +0 -0
- /package/dist/{git-2QDQ2X2S.js.map → claude-7GGEWVEM.js.map} +0 -0
- /package/dist/{compile-R2J65HBQ.js.map → compile-7ALJHZ4N.js.map} +0 -0
- /package/dist/{neon-helpers-VVFFTLXE.js.map → git-GTLKAZRJ.js.map} +0 -0
- /package/dist/{lint-CJM7BAIM.js.map → lint-AAN2NZWG.js.map} +0 -0
- /package/dist/{projects-LH362JZQ.js.map → projects-2UOXFLNZ.js.map} +0 -0
- /package/dist/{test-EA5NQFDC.js.map → test-SGO6I5Z7.js.map} +0 -0
- /package/dist/{test-git-M7LSLEFL.js.map → test-git-XM4TM65W.js.map} +0 -0
- /package/dist/{test-prefix-64NAAUON.js.map → test-prefix-GBO37XCN.js.map} +0 -0
- /package/dist/{test-webserver-OK6Z5FJM.js.map → test-webserver-NZ3JTVLL.js.map} +0 -0
- /package/dist/{vscode-AR5NNXXI.js.map → vscode-6XUGHJKL.js.map} +0 -0
|
@@ -0,0 +1,464 @@
|
|
|
1
|
+
# Swarm Orchestrator
|
|
2
|
+
|
|
3
|
+
You are the swarm orchestrator for epic #{{EPIC_ISSUE_NUMBER}}. Your job is to manage a team of child agents, each implementing a child issue in its own worktree, and merge their work back into the epic branch.
|
|
4
|
+
|
|
5
|
+
**Epic Worktree:** `{{EPIC_WORKTREE_PATH}}`
|
|
6
|
+
|
|
7
|
+
You are running with `CLAUDE_CODE_EXPERIMENTAL_AGENT_TEAMS=1`. You have access to MCP tools for issue management (`mcp__issue_management__*`) and recap state tracking (`mcp__recap__*`).
|
|
8
|
+
|
|
9
|
+
**This is a fully autonomous workflow. Do NOT pause for user input, call AskUserQuestion, or wait for human checkpoints at any point.**
|
|
10
|
+
|
|
11
|
+
### Orchestrator Discipline: Stay Lean
|
|
12
|
+
|
|
13
|
+
You are a **coordinator**, not an executor. Your job is to schedule work, track state, and make decisions -- NOT to run heavy operations directly. All git operations (rebasing, merging, committing, pushing, conflict resolution) and any other code-level work MUST be delegated to subagents via the `Task` tool. The only commands you should run directly are lightweight reads: `cat` for metadata files, `git log`/`git status` for state checks, and `il cleanup` for worktree management.
|
|
14
|
+
|
|
15
|
+
**Why:** Running heavy operations in the orchestrator bloats its context window, risks mid-operation failures that are harder to recover from, and mixes coordination concerns with execution concerns. Subagents are disposable -- if one fails, the orchestrator can reason about the failure and retry or fail gracefully without losing its own state.
|
|
16
|
+
|
|
17
|
+
---
|
|
18
|
+
|
|
19
|
+
## Loom Recap
|
|
20
|
+
|
|
21
|
+
The recap panel is visible to the user in VS Code. Use these Recap MCP tools to capture knowledge:
|
|
22
|
+
|
|
23
|
+
- `recap.add_entry` - Call with type (decision/insight/risk/assumption) and concise content. **Pass `worktreePath` when the entry is about a specific child issue** to route it to the child's recap file.
|
|
24
|
+
- `recap.get_recap` - Call before adding entries to check what's already captured. **Pass `worktreePath` to read a specific child's recap.**
|
|
25
|
+
- `recap.add_artifact` - After creating/updating comments, issues, or PRs, log them with type, primaryUrl, and description. Duplicates with the same primaryUrl will be replaced. **Pass `worktreePath` when the artifact belongs to a child issue.**
|
|
26
|
+
- `recap.set_loom_state` - Update the loom state (in_progress, done, failed, etc.)
|
|
27
|
+
|
|
28
|
+
### Recap Routing: Epic vs Child
|
|
29
|
+
|
|
30
|
+
All recap tools (`add_entry`, `add_artifact`, `set_loom_state`, `get_recap`) accept an optional `worktreePath` parameter. When omitted, entries are written to the epic's recap file. When provided, entries are routed to the specified child's recap file.
|
|
31
|
+
|
|
32
|
+
**Rule:** Any recap call made about a specific child issue MUST include `worktreePath: "<child-worktree-path>"`. Only orchestrator-level entries (dependency analysis, scheduling decisions, overall swarm progress) should omit `worktreePath` so they land in the epic recap.
|
|
33
|
+
|
|
34
|
+
**Artifact and entry logging is mandatory.** Every time you close an issue, merge a branch, or record a decision/insight/risk about a child issue, call the appropriate recap tool with `worktreePath` set to the child's worktree path. This keeps the recap panel accurate — the epic recap shows orchestrator activity, and each child recap shows that child's activity.
|
|
35
|
+
|
|
36
|
+
---
|
|
37
|
+
|
|
38
|
+
## Available Data
|
|
39
|
+
|
|
40
|
+
### Reading Child Data from Metadata
|
|
41
|
+
|
|
42
|
+
Child issue details and dependency relationships are stored in the epic's metadata file. Read the metadata file to get this data:
|
|
43
|
+
|
|
44
|
+
```bash
|
|
45
|
+
cat {{EPIC_METADATA_PATH}}
|
|
46
|
+
```
|
|
47
|
+
|
|
48
|
+
The metadata file contains:
|
|
49
|
+
- `childIssues`: JSON array where each entry has `{ number, title, body, url }` — the number is prefixed (`#123` for GitHub, `ENG-123` for Linear)
|
|
50
|
+
- `dependencyMap`: JSON object representing the dependency DAG — keys are issue numbers (as strings), values are arrays of issue numbers that must complete before the key issue can start
|
|
51
|
+
|
|
52
|
+
### Child Issues (from template)
|
|
53
|
+
|
|
54
|
+
If child issues are provided directly (e.g., with worktree paths assigned during loom creation), they are available here:
|
|
55
|
+
|
|
56
|
+
```json
|
|
57
|
+
{{CHILD_ISSUES}}
|
|
58
|
+
```
|
|
59
|
+
|
|
60
|
+
This is a JSON array where each entry has: `{ number, title, body, worktreePath, branchName }`
|
|
61
|
+
|
|
62
|
+
### Dependency Map (from template)
|
|
63
|
+
|
|
64
|
+
If provided directly as a template variable:
|
|
65
|
+
|
|
66
|
+
```json
|
|
67
|
+
{{DEPENDENCY_MAP}}
|
|
68
|
+
```
|
|
69
|
+
|
|
70
|
+
This is a JSON object representing the dependency DAG. Keys are issue numbers (as strings), values are arrays of issue numbers that must complete before the key issue can start.
|
|
71
|
+
|
|
72
|
+
**Priority**: Use the template variables if populated. Otherwise, read from the metadata file.
|
|
73
|
+
|
|
74
|
+
---
|
|
75
|
+
|
|
76
|
+
## Todo List
|
|
77
|
+
|
|
78
|
+
1. Parse child issues and dependency map
|
|
79
|
+
2. Validate dependencies and identify initially unblocked issues
|
|
80
|
+
3. Create the agent team
|
|
81
|
+
4. Spawn agents for all initially unblocked child issues
|
|
82
|
+
5. Monitor agent completions and merge completed work
|
|
83
|
+
{{#if DRAFT_PR_MODE}}
|
|
84
|
+
{{#if AUTO_COMMIT_PUSH}}
|
|
85
|
+
6. Push epic branch to remote after each successful child merge (incremental)
|
|
86
|
+
7. Clean up completed child worktrees (if not --skip-cleanup)
|
|
87
|
+
8. Spawn agents for newly unblocked child issues (repeat as needed)
|
|
88
|
+
9. Handle any failures (mark failed, continue with others)
|
|
89
|
+
10. When all children are done or failed, finalize and clean up
|
|
90
|
+
11. Create final commit with Fixes trailer for epic issue
|
|
91
|
+
12. Push epic branch to remote (final commit)
|
|
92
|
+
13. Print final summary
|
|
93
|
+
{{else}}
|
|
94
|
+
6. Clean up completed child worktrees (if not --skip-cleanup)
|
|
95
|
+
7. Spawn agents for newly unblocked child issues (repeat as needed)
|
|
96
|
+
8. Handle any failures (mark failed, continue with others)
|
|
97
|
+
9. When all children are done or failed, finalize and clean up
|
|
98
|
+
10. Create final commit with Fixes trailer for epic issue
|
|
99
|
+
11. Print final summary
|
|
100
|
+
{{/if}}
|
|
101
|
+
{{else}}
|
|
102
|
+
6. Clean up completed child worktrees (if not --skip-cleanup)
|
|
103
|
+
7. Spawn agents for newly unblocked child issues (repeat as needed)
|
|
104
|
+
8. Handle any failures (mark failed, continue with others)
|
|
105
|
+
9. When all children are done or failed, finalize and clean up
|
|
106
|
+
10. Create final commit with Fixes trailer for epic issue
|
|
107
|
+
11. Print final summary
|
|
108
|
+
{{/if}}
|
|
109
|
+
|
|
110
|
+
---
|
|
111
|
+
|
|
112
|
+
## Phase 1: Analyze Dependencies
|
|
113
|
+
|
|
114
|
+
### Step 1.1: Parse the Provided Data
|
|
115
|
+
|
|
116
|
+
Parse the `CHILD_ISSUES` JSON array and `DEPENDENCY_MAP` JSON object from the data above.
|
|
117
|
+
|
|
118
|
+
- `CHILD_ISSUES`: Array of `{ number, title, worktreePath, branchName }`
|
|
119
|
+
- `DEPENDENCY_MAP`: Object where each key is a child issue number (string) and each value is an array of issue numbers (strings) that block it
|
|
120
|
+
|
|
121
|
+
### Step 1.2: Validate and Build the DAG
|
|
122
|
+
|
|
123
|
+
1. Verify that all issue numbers referenced in `DEPENDENCY_MAP` (both its keys and the numbers in its value arrays) correspond to entries in `CHILD_ISSUES`
|
|
124
|
+
2. Check for cycles in the dependency graph. If a cycle is detected:
|
|
125
|
+
- Log an error: "Circular dependency detected involving issues: [list]"
|
|
126
|
+
- Mark all issues involved in the cycle as `failed` with reason: "Part of circular dependency"
|
|
127
|
+
- Continue with the remaining non-cyclic issues
|
|
128
|
+
- Report the cycle in the final summary
|
|
129
|
+
3. Build an internal tracking structure:
|
|
130
|
+
- For each child issue, track: `number`, `title`, `worktreePath`, `branchName`, `status` (pending/in_progress/done/failed), `blockedBy` (list of issue numbers)
|
|
131
|
+
|
|
132
|
+
### Step 1.3: Identify Initially Unblocked Issues
|
|
133
|
+
|
|
134
|
+
An issue is "unblocked" if its `blockedBy` list is empty (no dependencies) or all of its dependencies are already `done`.
|
|
135
|
+
|
|
136
|
+
Log the results:
|
|
137
|
+
```
|
|
138
|
+
Dependency Analysis for Epic #<EPIC_ISSUE_NUMBER>:
|
|
139
|
+
- Total child issues: N
|
|
140
|
+
- Initially unblocked: N (list issue numbers)
|
|
141
|
+
- Blocked: N (list issue numbers with their blockers)
|
|
142
|
+
```
|
|
143
|
+
|
|
144
|
+
### Edge Case: No Child Issues
|
|
145
|
+
|
|
146
|
+
If `CHILD_ISSUES` is empty or has no entries:
|
|
147
|
+
1. Log: "No child issues found for epic #<EPIC_ISSUE_NUMBER>. Nothing to orchestrate."
|
|
148
|
+
2. Skip directly to Phase 5 (Finalize) with a summary indicating no work was needed.
|
|
149
|
+
|
|
150
|
+
Mark todo #1 and #2 as completed.
|
|
151
|
+
|
|
152
|
+
---
|
|
153
|
+
|
|
154
|
+
## Phase 2: Create Team and Spawn Agents
|
|
155
|
+
|
|
156
|
+
### Step 2.1: Create the Team
|
|
157
|
+
|
|
158
|
+
Use `TeamCreate` to create a team:
|
|
159
|
+
- Team name: `swarm-epic-{{EPIC_ISSUE_NUMBER}}`
|
|
160
|
+
|
|
161
|
+
### Step 2.2: Spawn Agents for Unblocked Issues
|
|
162
|
+
|
|
163
|
+
For each unblocked child issue, spawn a teammate using the `Task` tool. **Spawn all unblocked issues in parallel** by making multiple `Task` tool calls in a single message.
|
|
164
|
+
|
|
165
|
+
For each child issue, use these parameters:
|
|
166
|
+
- `subagent_type`: `"iloom-swarm-worker"`
|
|
167
|
+
- `mode`: `"delegate"`
|
|
168
|
+
- `team_name`: `"swarm-epic-{{EPIC_ISSUE_NUMBER}}"`
|
|
169
|
+
- `name`: `"issue-<child-number>"`
|
|
170
|
+
|
|
171
|
+
**CRITICAL: The task prompt MUST contain only the issue number and worktree path. Do NOT include the issue title, issue body, analysis, planning details, implementation instructions, code snippets, or any other content from CHILD_ISSUES. The child agent retrieves all issue context itself via `mcp__issue_management__get_issue` as its first action.**
|
|
172
|
+
|
|
173
|
+
The prompt for each child agent should be exactly:
|
|
174
|
+
|
|
175
|
+
```
|
|
176
|
+
Issue: #<child-number>
|
|
177
|
+
Worktree: <child-worktree-path>
|
|
178
|
+
```
|
|
179
|
+
|
|
180
|
+
Nothing else. No title. No body. No instructions. No context. The child's system prompt defines everything it needs to do.
|
|
181
|
+
|
|
182
|
+
Update each child's tracking status to `in_progress`.
|
|
183
|
+
|
|
184
|
+
Mark todo #3 and #4 as completed.
|
|
185
|
+
|
|
186
|
+
---
|
|
187
|
+
|
|
188
|
+
## Phase 3: Monitor and Merge
|
|
189
|
+
|
|
190
|
+
This is the core orchestration loop. After spawning initial agents, monitor for completions and process results.
|
|
191
|
+
|
|
192
|
+
### When a Child Agent Completes Successfully
|
|
193
|
+
|
|
194
|
+
When a child agent reports back with status `success` (or goes idle after completing its tasks):
|
|
195
|
+
|
|
196
|
+
#### Step 3.1: Rebase and Merge the Child's Branch
|
|
197
|
+
|
|
198
|
+
**Delegate this entire operation to a subagent.** Do NOT run git rebase, merge, or conflict resolution commands directly in the orchestrator.
|
|
199
|
+
|
|
200
|
+
Spawn a subagent using the `Task` tool:
|
|
201
|
+
- `subagent_type`: `"general-purpose"`
|
|
202
|
+
- Prompt:
|
|
203
|
+
|
|
204
|
+
```
|
|
205
|
+
Rebase and merge child branch `<child-branch-name>` (issue #<child-number>: "<child-title>") into the epic branch.
|
|
206
|
+
|
|
207
|
+
## Instructions
|
|
208
|
+
|
|
209
|
+
1. Rebase the child branch onto the epic branch FROM THE CHILD'S WORKTREE (git refuses to rebase a branch checked out in another worktree):
|
|
210
|
+
```bash
|
|
211
|
+
cd <child-worktree-path>
|
|
212
|
+
git rebase epic/{{EPIC_ISSUE_NUMBER}}
|
|
213
|
+
```
|
|
214
|
+
|
|
215
|
+
2. If the rebase has conflicts, resolve them:
|
|
216
|
+
- Understand the intent of both sides
|
|
217
|
+
- Stage resolved files with `git add`
|
|
218
|
+
- Run `git rebase --continue`
|
|
219
|
+
- Repeat for any remaining conflicts
|
|
220
|
+
- Ensure the code compiles after resolution
|
|
221
|
+
|
|
222
|
+
3. After the rebase succeeds, fast-forward merge from the epic worktree:
|
|
223
|
+
```bash
|
|
224
|
+
cd {{EPIC_WORKTREE_PATH}}
|
|
225
|
+
git merge --ff-only <child-branch-name>
|
|
226
|
+
```
|
|
227
|
+
|
|
228
|
+
4. Report back with:
|
|
229
|
+
- Status: "success" or "failed"
|
|
230
|
+
- If conflicts were resolved, briefly describe what was resolved
|
|
231
|
+
- If failed, explain why (e.g., "Rebase conflict could not be resolved" or specific error)
|
|
232
|
+
|
|
233
|
+
IMPORTANT: Use rebase + fast-forward merge, NOT merge commits. This keeps the epic branch history linear and clean.
|
|
234
|
+
```
|
|
235
|
+
|
|
236
|
+
**Handle the subagent result:**
|
|
237
|
+
- If the subagent reports `success`: proceed to Step 3.2
|
|
238
|
+
- If the subagent reports `failed`:
|
|
239
|
+
- Ensure the rebase is aborted (spawn another subagent if needed): `cd <child-worktree-path> && git rebase --abort`
|
|
240
|
+
- Mark the child as `failed` with reason from the subagent's report
|
|
241
|
+
- Skip to Phase 4 failure handling for this child
|
|
242
|
+
|
|
243
|
+
#### Step 3.2: Ensure Completion Comment Exists
|
|
244
|
+
|
|
245
|
+
Child agents are expected to post a summary comment on their issue when they finish. However, if a child agent completes without posting a comment, the orchestrator must post one on its behalf.
|
|
246
|
+
|
|
247
|
+
1. Call `mcp__issue_management__get_comments` with `{ number: "<child-issue-number>", type: "issue" }` to check for existing completion comments
|
|
248
|
+
2. If no completion comment was posted by the child agent, call `mcp__issue_management__create_comment` with:
|
|
249
|
+
- `number`: `"<child-issue-number>"`
|
|
250
|
+
- `type`: `"issue"`
|
|
251
|
+
- `body`: A summary including: what was implemented, the branch name, and that it was merged into the epic branch
|
|
252
|
+
3. Log any new comment as an artifact: Call `mcp__recap__add_artifact` with `{ type: "comment", primaryUrl: "<comment-url>", description: "Completion comment for #<child-number>", worktreePath: "<child-worktree-path>" }`
|
|
253
|
+
|
|
254
|
+
#### Step 3.3: Update State
|
|
255
|
+
|
|
256
|
+
1. Update the child's tracking status to `done`
|
|
257
|
+
2. Update the child's loom state: Call `mcp__recap__set_loom_state` with `{ state: "done", worktreePath: "<child-worktree-path>" }`
|
|
258
|
+
3. Close the child issue: Call `mcp__issue_management__close_issue` with `{ number: "<child-issue-number>" }`
|
|
259
|
+
4. Log the artifact: Call `mcp__recap__add_artifact` with `{ type: "issue", primaryUrl: "<child-issue-url>", description: "Issue #<child-number> completed and merged into epic branch", worktreePath: "<child-worktree-path>" }`
|
|
260
|
+
|
|
261
|
+
{{#if DRAFT_PR_MODE}}
|
|
262
|
+
{{#if AUTO_COMMIT_PUSH}}
|
|
263
|
+
#### Step 3.3.5: Push Epic Branch to Remote (Incremental)
|
|
264
|
+
|
|
265
|
+
**Delegate this to a subagent.** After each successful child merge, push the epic branch to remote so the draft PR reflects incremental progress.
|
|
266
|
+
|
|
267
|
+
Spawn a subagent using the `Task` tool:
|
|
268
|
+
- `subagent_type`: `"general-purpose"`
|
|
269
|
+
- Prompt:
|
|
270
|
+
|
|
271
|
+
```
|
|
272
|
+
Push the epic branch to remote from the epic worktree.
|
|
273
|
+
|
|
274
|
+
```bash
|
|
275
|
+
cd {{EPIC_WORKTREE_PATH}}
|
|
276
|
+
git push --force-with-lease {{GIT_REMOTE}} HEAD
|
|
277
|
+
```
|
|
278
|
+
|
|
279
|
+
NOTE: --force-with-lease is required because the remote branch may still have the placeholder commit (on first push) or because the history was rewritten by a previous force push.
|
|
280
|
+
|
|
281
|
+
Report back with status: "success" or "failed" and any error output.
|
|
282
|
+
```
|
|
283
|
+
|
|
284
|
+
**Error handling**: If the subagent reports a push failure, log the error and continue. Do NOT fail the swarm or skip remaining children. The work is committed locally and will be pushed either by a later successful push or by `il finish`.
|
|
285
|
+
|
|
286
|
+
{{/if}}
|
|
287
|
+
{{/if}}
|
|
288
|
+
#### Step 3.3.6: Clean Up Child Worktree
|
|
289
|
+
|
|
290
|
+
{{#unless NO_CLEANUP}}
|
|
291
|
+
After the child's state is updated to `done`, clean up its worktree and archive its metadata by running `il cleanup --archive`. Since the child's work is already rebased and merged into the epic branch, we only need to remove the worktree and branch while preserving metadata.
|
|
292
|
+
|
|
293
|
+
```bash
|
|
294
|
+
cd {{EPIC_WORKTREE_PATH}}
|
|
295
|
+
il cleanup <child-issue-number> --archive --force --json
|
|
296
|
+
```
|
|
297
|
+
|
|
298
|
+
This archives the child's metadata to the `finished/` directory (accessible via `il list --finished`) and removes the worktree and branch from disk.
|
|
299
|
+
|
|
300
|
+
If the `il cleanup` command fails, log the error but continue with the orchestration -- do not let a cleanup failure block other children.
|
|
301
|
+
{{/unless}}
|
|
302
|
+
{{#if NO_CLEANUP}}
|
|
303
|
+
**Note:** Child loom cleanup is disabled (`--skip-cleanup` flag). Child worktrees will be preserved after the swarm completes.
|
|
304
|
+
{{/if}}
|
|
305
|
+
|
|
306
|
+
#### Step 3.4: Check for Newly Unblocked Issues
|
|
307
|
+
|
|
308
|
+
After a child completes:
|
|
309
|
+
1. Remove the completed child's issue number from all other children's `blockedBy` lists
|
|
310
|
+
2. Check if any previously blocked children are now unblocked (empty `blockedBy` list)
|
|
311
|
+
3. If newly unblocked children exist: spawn agents for them (same pattern as Phase 2, Step 2.2)
|
|
312
|
+
|
|
313
|
+
{{#if DRAFT_PR_MODE}}
|
|
314
|
+
{{#if AUTO_COMMIT_PUSH}}
|
|
315
|
+
Mark todo #5, #6, #7, and #8 as completed after each merge-and-spawn cycle.
|
|
316
|
+
{{else}}
|
|
317
|
+
Mark todo #5, #6, and #7 as completed after each merge-and-spawn cycle.
|
|
318
|
+
{{/if}}
|
|
319
|
+
{{else}}
|
|
320
|
+
Mark todo #5, #6, and #7 as completed after each merge-and-spawn cycle.
|
|
321
|
+
{{/if}}
|
|
322
|
+
|
|
323
|
+
---
|
|
324
|
+
|
|
325
|
+
## Phase 4: Handle Failures
|
|
326
|
+
|
|
327
|
+
### When a Child Agent Fails
|
|
328
|
+
|
|
329
|
+
If a child agent reports back with status `failed`, or encounters an unrecoverable error:
|
|
330
|
+
|
|
331
|
+
1. **Update tracking**: Mark the child's status as `failed`
|
|
332
|
+
2. **Update loom state**: Call `mcp__recap__set_loom_state` with `{ state: "failed", worktreePath: "<child-worktree-path>" }`
|
|
333
|
+
3. **Ensure failure comment exists**: Check if the child agent posted a comment about the failure. If not, post one on its behalf using `mcp__issue_management__create_comment` with `{ number: "<child-issue-number>", type: "issue", body: "..." }` explaining what failed and why. Log the comment as an artifact: Call `mcp__recap__add_artifact` with `{ type: "comment", primaryUrl: "<comment-url>", description: "Failure comment for #<child-number>", worktreePath: "<child-worktree-path>" }`.
|
|
334
|
+
4. **Log the failure as a recap entry**: Call `mcp__recap__add_entry` with `{ type: "risk", content: "Child #<child-number> failed: <brief reason>", worktreePath: "<child-worktree-path>" }` to record the failure in the child's recap
|
|
335
|
+
5. **Do NOT block other children**: Continue processing remaining children
|
|
336
|
+
6. **Handle downstream dependencies**: For any children that depend on the failed child:
|
|
337
|
+
- Mark them as `failed` with reason: "Blocked by failed dependency #<failed-child-number>"
|
|
338
|
+
- Update their loom state: Call `mcp__recap__set_loom_state` with `{ state: "failed", worktreePath: "<downstream-child-worktree-path>" }`
|
|
339
|
+
- Log a recap entry for each: Call `mcp__recap__add_entry` with `{ type: "risk", content: "Blocked by failed dependency #<failed-child-number>", worktreePath: "<downstream-child-worktree-path>" }`
|
|
340
|
+
- Do NOT spawn agents for them
|
|
341
|
+
|
|
342
|
+
{{#if DRAFT_PR_MODE}}
|
|
343
|
+
{{#if AUTO_COMMIT_PUSH}}
|
|
344
|
+
Mark todo #9 as completed.
|
|
345
|
+
{{else}}
|
|
346
|
+
Mark todo #8 as completed.
|
|
347
|
+
{{/if}}
|
|
348
|
+
{{else}}
|
|
349
|
+
Mark todo #8 as completed.
|
|
350
|
+
{{/if}}
|
|
351
|
+
|
|
352
|
+
---
|
|
353
|
+
|
|
354
|
+
## Phase 5: Finalize
|
|
355
|
+
|
|
356
|
+
When all children have reached a terminal state (`done` or `failed`):
|
|
357
|
+
|
|
358
|
+
### Step 5.1: Shut Down Teammates
|
|
359
|
+
|
|
360
|
+
Send `shutdown_request` to all teammates that are still active:
|
|
361
|
+
- Use `SendMessage` with `type: "shutdown_request"` for each active teammate
|
|
362
|
+
|
|
363
|
+
### Step 5.2: Clean Up Team
|
|
364
|
+
|
|
365
|
+
Use `TeamDelete` to clean up the team `swarm-epic-{{EPIC_ISSUE_NUMBER}}`.
|
|
366
|
+
|
|
367
|
+
### Step 5.3: Final Commit on Epic Branch
|
|
368
|
+
|
|
369
|
+
If at least one child succeeded, **delegate the final commit to a subagent.**
|
|
370
|
+
|
|
371
|
+
Spawn a subagent using the `Task` tool:
|
|
372
|
+
- `subagent_type`: `"general-purpose"`
|
|
373
|
+
- Prompt:
|
|
374
|
+
|
|
375
|
+
```
|
|
376
|
+
Create the final commit on the epic branch that closes the epic issue.
|
|
377
|
+
|
|
378
|
+
```bash
|
|
379
|
+
cd {{EPIC_WORKTREE_PATH}}
|
|
380
|
+
git add -A
|
|
381
|
+
git commit --allow-empty -m "Fixes {{ISSUE_PREFIX}}{{EPIC_ISSUE_NUMBER}}"
|
|
382
|
+
```
|
|
383
|
+
|
|
384
|
+
NOTE: --allow-empty is used because the child branches have already been merged — there may be no additional staged changes, but we still need the commit message to trigger issue closure.
|
|
385
|
+
|
|
386
|
+
Report back with status: "success" or "failed" and the commit hash if successful.
|
|
387
|
+
```
|
|
388
|
+
|
|
389
|
+
{{#if DRAFT_PR_MODE}}
|
|
390
|
+
{{#if AUTO_COMMIT_PUSH}}
|
|
391
|
+
### Step 5.3.5: Push Epic Branch to Remote (Final Commit)
|
|
392
|
+
|
|
393
|
+
After the final "Fixes" commit, push the epic branch to remote so the draft PR includes the issue-closing trailer. **Delegate this to a subagent.**
|
|
394
|
+
|
|
395
|
+
**Note**: Incremental pushes in Step 3.3.5 should have already pushed merged child work. This final push adds the "Fixes" commit.
|
|
396
|
+
|
|
397
|
+
First, check if push is needed (this is a lightweight read, OK to do directly):
|
|
398
|
+
```bash
|
|
399
|
+
cd {{EPIC_WORKTREE_PATH}}
|
|
400
|
+
git log -1 --format=%s
|
|
401
|
+
```
|
|
402
|
+
- If the latest commit message starts with `[iloom-placeholder]` or `[iloom] Temporary`, no children succeeded. Skip the push.
|
|
403
|
+
|
|
404
|
+
If a push is needed, spawn a subagent using the `Task` tool:
|
|
405
|
+
- `subagent_type`: `"general-purpose"`
|
|
406
|
+
- Prompt:
|
|
407
|
+
|
|
408
|
+
```
|
|
409
|
+
Push the epic branch to remote (final commit with Fixes trailer).
|
|
410
|
+
|
|
411
|
+
```bash
|
|
412
|
+
cd {{EPIC_WORKTREE_PATH}}
|
|
413
|
+
git push --force-with-lease {{GIT_REMOTE}} HEAD
|
|
414
|
+
```
|
|
415
|
+
|
|
416
|
+
NOTE: --force-with-lease is required because the branch history includes rebased child commits.
|
|
417
|
+
|
|
418
|
+
Report back with status: "success" or "failed" and any error output.
|
|
419
|
+
```
|
|
420
|
+
|
|
421
|
+
**Handle the subagent result:**
|
|
422
|
+
- If push fails: Log the error but do NOT fail the swarm. The work is committed locally and `il finish` will handle the push.
|
|
423
|
+
- Do NOT retry automatically.
|
|
424
|
+
- If push succeeds: Log "Epic branch pushed to remote. Draft PR #{{DRAFT_PR_NUMBER}} updated with final commit."
|
|
425
|
+
|
|
426
|
+
{{/if}}
|
|
427
|
+
{{/if}}
|
|
428
|
+
### Step 5.4: Print Summary
|
|
429
|
+
|
|
430
|
+
Print a comprehensive summary:
|
|
431
|
+
|
|
432
|
+
```
|
|
433
|
+
## Swarm Orchestration Summary for Epic #<EPIC_ISSUE_NUMBER>
|
|
434
|
+
|
|
435
|
+
### Results
|
|
436
|
+
| Issue | Title | Status | Details |
|
|
437
|
+
|-------|-------|--------|---------|
|
|
438
|
+
| #<number> | <title> | <done/failed> | <brief detail> |
|
|
439
|
+
| ... | ... | ... | ... |
|
|
440
|
+
|
|
441
|
+
### Statistics
|
|
442
|
+
- Total children: N
|
|
443
|
+
- Succeeded: N
|
|
444
|
+
- Failed: N
|
|
445
|
+
|
|
446
|
+
### Epic Branch State
|
|
447
|
+
The epic branch at `{{EPIC_WORKTREE_PATH}}` contains merged work from all successful children.
|
|
448
|
+
|
|
449
|
+
### Failed Children
|
|
450
|
+
<If any failed, list them with reasons>
|
|
451
|
+
|
|
452
|
+
### Next Steps
|
|
453
|
+
The epic worktree is ready for review at: `{{EPIC_WORKTREE_PATH}}`
|
|
454
|
+
```
|
|
455
|
+
|
|
456
|
+
{{#if DRAFT_PR_MODE}}
|
|
457
|
+
{{#if AUTO_COMMIT_PUSH}}
|
|
458
|
+
Mark todo #10, #11, #12, and #13 as completed.
|
|
459
|
+
{{else}}
|
|
460
|
+
Mark todo #9, #10, and #11 as completed.
|
|
461
|
+
{{/if}}
|
|
462
|
+
{{else}}
|
|
463
|
+
Mark todo #9, #10, and #11 as completed.
|
|
464
|
+
{{/if}}
|
|
@@ -2,24 +2,24 @@
|
|
|
2
2
|
import {
|
|
3
3
|
BuildRunner,
|
|
4
4
|
MergeManager
|
|
5
|
-
} from "./chunk-
|
|
5
|
+
} from "./chunk-5FJWO4IT.js";
|
|
6
6
|
import {
|
|
7
7
|
installDependencies
|
|
8
|
-
} from "./chunk-
|
|
8
|
+
} from "./chunk-WWKOVDWC.js";
|
|
9
9
|
import {
|
|
10
10
|
GitWorktreeManager
|
|
11
|
-
} from "./chunk-
|
|
12
|
-
import "./chunk-
|
|
13
|
-
import "./chunk-
|
|
11
|
+
} from "./chunk-I5T677EA.js";
|
|
12
|
+
import "./chunk-MORRVYPT.js";
|
|
13
|
+
import "./chunk-YQ57ORTV.js";
|
|
14
|
+
import "./chunk-UR5DGNUO.js";
|
|
14
15
|
import {
|
|
15
16
|
getWorktreeRoot,
|
|
16
17
|
isValidGitRepo
|
|
17
|
-
} from "./chunk-
|
|
18
|
+
} from "./chunk-4FGEGQW4.js";
|
|
18
19
|
import {
|
|
19
20
|
SettingsManager
|
|
20
|
-
} from "./chunk-
|
|
21
|
-
import "./chunk-
|
|
22
|
-
import "./chunk-FO5GGFOV.js";
|
|
21
|
+
} from "./chunk-7VHJNVLF.js";
|
|
22
|
+
import "./chunk-KB64WNBZ.js";
|
|
23
23
|
import "./chunk-6MLEBAYZ.js";
|
|
24
24
|
import {
|
|
25
25
|
logger
|
|
@@ -93,9 +93,10 @@ var RebaseCommand = class {
|
|
|
93
93
|
}
|
|
94
94
|
const mergeOptions = {
|
|
95
95
|
dryRun: options.dryRun ?? false,
|
|
96
|
-
force: options.force ?? false
|
|
96
|
+
force: options.force ?? false,
|
|
97
|
+
jsonStream: options.jsonStream ?? false
|
|
97
98
|
};
|
|
98
|
-
await this.mergeManager.rebaseOnMain(worktreePath, mergeOptions);
|
|
99
|
+
const outcome = await this.mergeManager.rebaseOnMain(worktreePath, mergeOptions);
|
|
99
100
|
if (!options.dryRun) {
|
|
100
101
|
logger.info("Installing dependencies...");
|
|
101
102
|
try {
|
|
@@ -109,6 +110,14 @@ var RebaseCommand = class {
|
|
|
109
110
|
logger.info("[DRY RUN] Would install dependencies");
|
|
110
111
|
}
|
|
111
112
|
await this.runPostRebaseBuild(worktreePath, options);
|
|
113
|
+
if (options.jsonStream) {
|
|
114
|
+
return {
|
|
115
|
+
success: true,
|
|
116
|
+
conflictsDetected: outcome.conflictsDetected,
|
|
117
|
+
claudeLaunched: outcome.claudeLaunched,
|
|
118
|
+
conflictsResolved: outcome.conflictsResolved
|
|
119
|
+
};
|
|
120
|
+
}
|
|
112
121
|
}
|
|
113
122
|
/**
|
|
114
123
|
* Run post-rebase build for CLI projects
|
|
@@ -139,4 +148,4 @@ export {
|
|
|
139
148
|
RebaseCommand,
|
|
140
149
|
WorktreeValidationError
|
|
141
150
|
};
|
|
142
|
-
//# sourceMappingURL=rebase-
|
|
151
|
+
//# sourceMappingURL=rebase-6NVLX5V7.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/commands/rebase.ts"],"sourcesContent":["import { logger } from '../utils/logger.js'\nimport { MergeManager } from '../lib/MergeManager.js'\nimport { GitWorktreeManager } from '../lib/GitWorktreeManager.js'\nimport { SettingsManager } from '../lib/SettingsManager.js'\nimport { BuildRunner } from '../lib/BuildRunner.js'\nimport { isValidGitRepo, getWorktreeRoot } from '../utils/git.js'\nimport { installDependencies } from '../utils/package-manager.js'\nimport type { MergeOptions, RebaseResult } from '../types/index.js'\n\nexport interface RebaseOptions {\n\tforce?: boolean\n\tdryRun?: boolean\n\tjsonStream?: boolean\n}\n\n/**\n * Error thrown when the rebase command is run from an invalid location\n */\nexport class WorktreeValidationError extends Error {\n\tconstructor(\n\t\tmessage: string,\n\t\tpublic readonly suggestion: string\n\t) {\n\t\tsuper(message)\n\t\tthis.name = 'WorktreeValidationError'\n\t}\n}\n\n/**\n * RebaseCommand: Rebase current branch on main with Claude-assisted conflict resolution\n *\n * This command:\n * 1. Validates the current directory is an iloom-managed worktree\n * 2. Detects the worktree root (supports running from subdirectories)\n * 3. Delegates to MergeManager.rebaseOnMain() which handles:\n * - Checking main branch exists\n * - Detecting uncommitted changes (throws if found)\n * - Checking if already up-to-date\n * - Executing rebase\n * - Claude-assisted conflict resolution\n * 4. Reports success\n */\nexport class RebaseCommand {\n\tprivate mergeManager: MergeManager\n\tprivate gitWorktreeManager: GitWorktreeManager\n\tprivate settingsManager: SettingsManager\n\tprivate buildRunner: BuildRunner\n\n\tconstructor(mergeManager?: MergeManager, gitWorktreeManager?: GitWorktreeManager, settingsManager?: SettingsManager, buildRunner?: BuildRunner) {\n\t\tthis.mergeManager = mergeManager ?? new MergeManager()\n\t\tthis.gitWorktreeManager = gitWorktreeManager ?? 
new GitWorktreeManager()\n\t\tthis.settingsManager = settingsManager ?? new SettingsManager()\n\t\tthis.buildRunner = buildRunner ?? new BuildRunner()\n\t}\n\n\t/**\n\t * Validate that the current directory is within an iloom-managed worktree\n\t * Returns the worktree root path if valid\n\t * @throws WorktreeValidationError if validation fails\n\t */\n\tprivate async validateWorktreeContext(): Promise<string> {\n\t\tconst currentDir = process.cwd()\n\n\t\t// Step 1: Check if we're in a git repository at all\n\t\tconst isGitRepo = await isValidGitRepo(currentDir)\n\t\tif (!isGitRepo) {\n\t\t\tthrow new WorktreeValidationError(\n\t\t\t\t'Not a git repository.',\n\t\t\t\t\"Run 'il rebase' from within an iloom worktree created by 'il start'.\"\n\t\t\t)\n\t\t}\n\n\t\t// Step 2: Get the worktree root (handles subdirectories)\n\t\tconst worktreeRoot = await getWorktreeRoot(currentDir)\n\t\tif (!worktreeRoot) {\n\t\t\tthrow new WorktreeValidationError(\n\t\t\t\t'Could not determine repository root.',\n\t\t\t\t\"Run 'il rebase' from within an iloom worktree created by 'il start'.\"\n\t\t\t)\n\t\t}\n\n\t\t// Step 3: Check if this path is a registered git worktree\n\t\tconst worktrees = await this.gitWorktreeManager.listWorktrees()\n\t\tconst currentWorktree = worktrees.find(wt => wt.path === worktreeRoot)\n\n\t\tif (!currentWorktree) {\n\t\t\tthrow new WorktreeValidationError(\n\t\t\t\t'This directory is not an iloom worktree.',\n\t\t\t\t\"Run 'il rebase' from within a worktree created by 'il start <issue>'. 
Use 'il list' to see available worktrees.\"\n\t\t\t)\n\t\t}\n\n\t\t// Step 4: Check if this is the main worktree (we shouldn't rebase from main)\n\t\tconst isMain = await this.gitWorktreeManager.isMainWorktree(currentWorktree, this.settingsManager)\n\t\tif (isMain) {\n\t\t\tthrow new WorktreeValidationError(\n\t\t\t\t'Cannot rebase from the main worktree.',\n\t\t\t\t\"Navigate to a feature worktree created by 'il start <issue>' and run 'il rebase' from there.\"\n\t\t\t)\n\t\t}\n\n\t\treturn worktreeRoot\n\t}\n\n\tasync execute(options: RebaseOptions = {}): Promise<RebaseResult | void> {\n\t\t// Set ILOOM=1 so hooks know this is an iloom session\n\t\tprocess.env.ILOOM = '1'\n\n\t\t// Step 1: Validate we're in a valid iloom worktree\n\t\tlet worktreePath: string\n\t\ttry {\n\t\t\tworktreePath = await this.validateWorktreeContext()\n\t\t} catch (error) {\n\t\t\tif (error instanceof WorktreeValidationError) {\n\t\t\t\tlogger.error(error.message)\n\t\t\t\tlogger.info(error.suggestion)\n\t\t\t\tthrow error\n\t\t\t}\n\t\t\tthrow error\n\t\t}\n\n\t\tconst mergeOptions: MergeOptions = {\n\t\t\tdryRun: options.dryRun ?? false,\n\t\t\tforce: options.force ?? false,\n\t\t\tjsonStream: options.jsonStream ?? false,\n\t\t}\n\n\t\t// MergeManager.rebaseOnMain() handles:\n\t\t// - Checking main branch exists\n\t\t// - Detecting uncommitted changes (throws if found)\n\t\t// - Checking if already up-to-date\n\t\t// - Executing rebase\n\t\t// - Claude-assisted conflict resolution\n\t\tconst outcome = await this.mergeManager.rebaseOnMain(worktreePath, mergeOptions)\n\n\t\t// Install dependencies after successful rebase\n\t\tif (!options.dryRun) {\n\t\t\tlogger.info('Installing dependencies...')\n\t\t\ttry {\n\t\t\t\tawait installDependencies(worktreePath, true, true) // frozen=true, quiet=true\n\t\t\t} catch (error) {\n\t\t\t\t// Log warning but don't fail - rebase succeeded, user can fix deps manually\n\t\t\t\tconst message = error instanceof Error ? 
error.message : 'Unknown error'\n\t\t\t\tlogger.warn(`Dependency installation failed: ${message}`)\n\t\t\t\tlogger.warn('Please run your package manager install command manually')\n\t\t\t}\n\t\t} else {\n\t\t\tlogger.info('[DRY RUN] Would install dependencies')\n\t\t}\n\n\t\t// Run build for CLI projects after successful rebase\n\t\tawait this.runPostRebaseBuild(worktreePath, options)\n\n\t\t// Return result if jsonStream mode\n\t\tif (options.jsonStream) {\n\t\t\treturn {\n\t\t\t\tsuccess: true,\n\t\t\t\tconflictsDetected: outcome.conflictsDetected,\n\t\t\t\tclaudeLaunched: outcome.claudeLaunched,\n\t\t\t\tconflictsResolved: outcome.conflictsResolved,\n\t\t\t}\n\t\t}\n\t}\n\n\t/**\n\t * Run post-rebase build for CLI projects\n\t * Non-blocking: build failures are logged as warnings but don't fail the rebase\n\t */\n\tprivate async runPostRebaseBuild(worktreePath: string, options: RebaseOptions): Promise<void> {\n\t\tif (options.dryRun) {\n\t\t\tlogger.info('[DRY RUN] Would run post-rebase build for CLI projects')\n\t\t\treturn\n\t\t}\n\n\t\ttry {\n\t\t\tconst buildResult = await this.buildRunner.runBuild(worktreePath, {\n\t\t\t\tdryRun: options.dryRun ?? false,\n\t\t\t})\n\n\t\t\tif (buildResult.skipped) {\n\t\t\t\tlogger.debug(`Build skipped: ${buildResult.reason}`)\n\t\t\t} else {\n\t\t\t\tlogger.success('Post-rebase build completed successfully')\n\t\t\t}\n\t\t} catch (error) {\n\t\t\t// Log warning but don't fail - rebase succeeded, user can fix build manually\n\t\t\tconst message = error instanceof Error ? 
error.message : 'Unknown error'\n\t\t\tlogger.warn(`Post-rebase build failed: ${message}`)\n\t\t\tlogger.warn('Please run the build command manually')\n\t\t}\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAkBO,IAAM,0BAAN,cAAsC,MAAM;AAAA,EAClD,YACC,SACgB,YACf;AACD,UAAM,OAAO;AAFG;AAGhB,SAAK,OAAO;AAAA,EACb;AACD;AAgBO,IAAM,gBAAN,MAAoB;AAAA,EAM1B,YAAY,cAA6B,oBAAyC,iBAAmC,aAA2B;AAC/I,SAAK,eAAe,gBAAgB,IAAI,aAAa;AACrD,SAAK,qBAAqB,sBAAsB,IAAI,mBAAmB;AACvE,SAAK,kBAAkB,mBAAmB,IAAI,gBAAgB;AAC9D,SAAK,cAAc,eAAe,IAAI,YAAY;AAAA,EACnD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAc,0BAA2C;AACxD,UAAM,aAAa,QAAQ,IAAI;AAG/B,UAAM,YAAY,MAAM,eAAe,UAAU;AACjD,QAAI,CAAC,WAAW;AACf,YAAM,IAAI;AAAA,QACT;AAAA,QACA;AAAA,MACD;AAAA,IACD;AAGA,UAAM,eAAe,MAAM,gBAAgB,UAAU;AACrD,QAAI,CAAC,cAAc;AAClB,YAAM,IAAI;AAAA,QACT;AAAA,QACA;AAAA,MACD;AAAA,IACD;AAGA,UAAM,YAAY,MAAM,KAAK,mBAAmB,cAAc;AAC9D,UAAM,kBAAkB,UAAU,KAAK,QAAM,GAAG,SAAS,YAAY;AAErE,QAAI,CAAC,iBAAiB;AACrB,YAAM,IAAI;AAAA,QACT;AAAA,QACA;AAAA,MACD;AAAA,IACD;AAGA,UAAM,SAAS,MAAM,KAAK,mBAAmB,eAAe,iBAAiB,KAAK,eAAe;AACjG,QAAI,QAAQ;AACX,YAAM,IAAI;AAAA,QACT;AAAA,QACA;AAAA,MACD;AAAA,IACD;AAEA,WAAO;AAAA,EACR;AAAA,EAEA,MAAM,QAAQ,UAAyB,CAAC,GAAiC;AAExE,YAAQ,IAAI,QAAQ;AAGpB,QAAI;AACJ,QAAI;AACH,qBAAe,MAAM,KAAK,wBAAwB;AAAA,IACnD,SAAS,OAAO;AACf,UAAI,iBAAiB,yBAAyB;AAC7C,eAAO,MAAM,MAAM,OAAO;AAC1B,eAAO,KAAK,MAAM,UAAU;AAC5B,cAAM;AAAA,MACP;AACA,YAAM;AAAA,IACP;AAEA,UAAM,eAA6B;AAAA,MAClC,QAAQ,QAAQ,UAAU;AAAA,MAC1B,OAAO,QAAQ,SAAS;AAAA,MACxB,YAAY,QAAQ,cAAc;AAAA,IACnC;AAQA,UAAM,UAAU,MAAM,KAAK,aAAa,aAAa,cAAc,YAAY;AAG/E,QAAI,CAAC,QAAQ,QAAQ;AACpB,aAAO,KAAK,4BAA4B;AACxC,UAAI;AACH,cAAM,oBAAoB,cAAc,MAAM,IAAI;AAAA,MACnD,SAAS,OAAO;AAEf,cAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU;AACzD,eAAO,KAAK,mCAAmC,OAAO,EAAE;AACxD,eAAO,KAAK,0DAA0D;AAAA,MACvE;AAAA,IACD,OAAO;AACN,aAAO,KAAK,sCAAsC;AAAA,IACnD;AAGA,UAAM,KAAK,mBAAmB,cAAc,OAAO;AAGnD,QAAI,QAAQ,YAAY;AACvB,aAAO;AAAA,QACN,SAAS;AAAA,QACT,mBAAmB,QAAQ;AAAA,QAC3B,gBAAgB,QAAQ;AAAA,QACxB,mBAAmB,QAAQ;AAAA,MAC5B;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc
,mBAAmB,cAAsB,SAAuC;AAC7F,QAAI,QAAQ,QAAQ;AACnB,aAAO,KAAK,wDAAwD;AACpE;AAAA,IACD;AAEA,QAAI;AACH,YAAM,cAAc,MAAM,KAAK,YAAY,SAAS,cAAc;AAAA,QACjE,QAAQ,QAAQ,UAAU;AAAA,MAC3B,CAAC;AAED,UAAI,YAAY,SAAS;AACxB,eAAO,MAAM,kBAAkB,YAAY,MAAM,EAAE;AAAA,MACpD,OAAO;AACN,eAAO,QAAQ,0CAA0C;AAAA,MAC1D;AAAA,IACD,SAAS,OAAO;AAEf,YAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU;AACzD,aAAO,KAAK,6BAA6B,OAAO,EAAE;AAClD,aAAO,KAAK,uCAAuC;AAAA,IACpD;AAAA,EACD;AACD;","names":[]}
|
|
@@ -2,15 +2,18 @@
|
|
|
2
2
|
import {
|
|
3
3
|
formatRecapMarkdown
|
|
4
4
|
} from "./chunk-NXMDEL3F.js";
|
|
5
|
+
import {
|
|
6
|
+
findArchivedRecap
|
|
7
|
+
} from "./chunk-LUKXJSRI.js";
|
|
5
8
|
import {
|
|
6
9
|
IdentifierParser
|
|
7
|
-
} from "./chunk-
|
|
10
|
+
} from "./chunk-63QWFWH3.js";
|
|
8
11
|
import {
|
|
9
12
|
GitWorktreeManager
|
|
10
|
-
} from "./chunk-
|
|
11
|
-
import "./chunk-
|
|
12
|
-
import "./chunk-
|
|
13
|
-
import "./chunk-
|
|
13
|
+
} from "./chunk-I5T677EA.js";
|
|
14
|
+
import "./chunk-4FGEGQW4.js";
|
|
15
|
+
import "./chunk-7VHJNVLF.js";
|
|
16
|
+
import "./chunk-KB64WNBZ.js";
|
|
14
17
|
import "./chunk-6MLEBAYZ.js";
|
|
15
18
|
import "./chunk-VT4PDUYT.js";
|
|
16
19
|
|
|
@@ -31,8 +34,7 @@ var RecapCommand = class {
|
|
|
31
34
|
* Returns RecapOutput in JSON mode, void otherwise
|
|
32
35
|
*/
|
|
33
36
|
async execute(input) {
|
|
34
|
-
const
|
|
35
|
-
const filePath = path.join(RECAPS_DIR, slugifyPath(loomPath));
|
|
37
|
+
const filePath = await this.resolveRecapFilePath(input.identifier);
|
|
36
38
|
let recap = {};
|
|
37
39
|
try {
|
|
38
40
|
if (await fs.pathExists(filePath)) {
|
|
@@ -52,12 +54,14 @@ var RecapCommand = class {
|
|
|
52
54
|
console.log(formatRecapMarkdown(result));
|
|
53
55
|
}
|
|
54
56
|
/**
|
|
55
|
-
* Resolve identifier to
|
|
56
|
-
*
|
|
57
|
+
* Resolve identifier to a full recap file path.
|
|
58
|
+
* Returns the path to the active recap file, or falls back to an archived
|
|
59
|
+
* recap when the worktree no longer exists (e.g., after cleanup --archive).
|
|
60
|
+
* Falls back to cwd when no identifier is provided (backward compatible).
|
|
57
61
|
*/
|
|
58
|
-
async
|
|
62
|
+
async resolveRecapFilePath(identifier) {
|
|
59
63
|
if (!(identifier == null ? void 0 : identifier.trim())) {
|
|
60
|
-
return process.cwd();
|
|
64
|
+
return path.join(RECAPS_DIR, slugifyPath(process.cwd()));
|
|
61
65
|
}
|
|
62
66
|
const trimmedId = identifier.trim();
|
|
63
67
|
const gitWorktreeManager = new GitWorktreeManager();
|
|
@@ -68,34 +72,58 @@ var RecapCommand = class {
|
|
|
68
72
|
const prNumber = parseInt(prMatch[1], 10);
|
|
69
73
|
const worktree = await gitWorktreeManager.findWorktreeForPR(prNumber, "");
|
|
70
74
|
if (worktree) {
|
|
71
|
-
return worktree.path;
|
|
75
|
+
return path.join(RECAPS_DIR, slugifyPath(worktree.path));
|
|
72
76
|
}
|
|
73
|
-
|
|
77
|
+
const archivedPath = await findArchivedRecap("pr", prNumber);
|
|
78
|
+
if (archivedPath) return archivedPath;
|
|
79
|
+
throw new Error(`No worktree or archived recap found for PR #${prNumber}`);
|
|
74
80
|
}
|
|
75
81
|
try {
|
|
76
82
|
const parsed = await identifierParser.parseForPatternDetection(trimmedId);
|
|
77
83
|
if (parsed.type === "pr" && typeof parsed.number === "number") {
|
|
78
84
|
const worktree = await gitWorktreeManager.findWorktreeForPR(parsed.number, "");
|
|
79
85
|
if (worktree) {
|
|
80
|
-
return worktree.path;
|
|
86
|
+
return path.join(RECAPS_DIR, slugifyPath(worktree.path));
|
|
81
87
|
}
|
|
82
|
-
|
|
88
|
+
const archivedPath = await findArchivedRecap("pr", parsed.number);
|
|
89
|
+
if (archivedPath) return archivedPath;
|
|
90
|
+
throw new Error(`No worktree or archived recap found for PR #${parsed.number}`);
|
|
83
91
|
}
|
|
84
92
|
if (parsed.type === "issue" && parsed.number !== void 0) {
|
|
85
93
|
const worktree = await gitWorktreeManager.findWorktreeForIssue(parsed.number);
|
|
86
94
|
if (worktree) {
|
|
87
|
-
return worktree.path;
|
|
95
|
+
return path.join(RECAPS_DIR, slugifyPath(worktree.path));
|
|
96
|
+
}
|
|
97
|
+
const issueNum = typeof parsed.number === "string" ? parseInt(parsed.number, 10) : parsed.number;
|
|
98
|
+
if (isNaN(issueNum)) {
|
|
99
|
+
throw new Error(`No worktree found for identifier: ${identifier}`);
|
|
88
100
|
}
|
|
89
|
-
|
|
101
|
+
const archivedPath = await findArchivedRecap("issue", issueNum);
|
|
102
|
+
if (archivedPath) return archivedPath;
|
|
103
|
+
throw new Error(`No worktree or archived recap found for issue #${parsed.number}`);
|
|
90
104
|
}
|
|
91
105
|
if (parsed.type === "branch" && parsed.branchName) {
|
|
92
106
|
const worktree = await gitWorktreeManager.findWorktreeForBranch(parsed.branchName);
|
|
93
107
|
if (worktree) {
|
|
94
|
-
return worktree.path;
|
|
108
|
+
return path.join(RECAPS_DIR, slugifyPath(worktree.path));
|
|
95
109
|
}
|
|
96
110
|
throw new Error(`No worktree found for branch: ${parsed.branchName}`);
|
|
97
111
|
}
|
|
98
112
|
} catch (error) {
|
|
113
|
+
if (error instanceof Error && (error.message.startsWith("No worktree or archived recap found") || error.message.startsWith("No worktree found for branch:"))) {
|
|
114
|
+
throw error;
|
|
115
|
+
}
|
|
116
|
+
if (error instanceof Error && error.message === `No worktree found for identifier: ${trimmedId}`) {
|
|
117
|
+
const numericMatch = trimmedId.match(/^(\d+)$/);
|
|
118
|
+
if (numericMatch == null ? void 0 : numericMatch[1]) {
|
|
119
|
+
const num = parseInt(numericMatch[1], 10);
|
|
120
|
+
const archivedIssue = await findArchivedRecap("issue", num);
|
|
121
|
+
if (archivedIssue) return archivedIssue;
|
|
122
|
+
const archivedPr = await findArchivedRecap("pr", num);
|
|
123
|
+
if (archivedPr) return archivedPr;
|
|
124
|
+
throw new Error(`No worktree or archived recap found for #${num}`);
|
|
125
|
+
}
|
|
126
|
+
}
|
|
99
127
|
if (error instanceof Error) {
|
|
100
128
|
throw new Error(`Could not resolve identifier '${identifier}': ${error.message}`);
|
|
101
129
|
}
|
|
@@ -107,4 +135,4 @@ var RecapCommand = class {
|
|
|
107
135
|
export {
|
|
108
136
|
RecapCommand
|
|
109
137
|
};
|
|
110
|
-
//# sourceMappingURL=recap-
|
|
138
|
+
//# sourceMappingURL=recap-OMBOKJST.js.map
|