@kodrunhq/opencode-autopilot 1.1.2 → 1.1.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -5,40 +5,20 @@ agent: autopilot
5
5
  Help the user configure opencode-autopilot by walking through the model
6
6
  assignment process interactively.
7
7
 
8
- ## Step 1: Discover available models
8
+ ## Step 1: Discover models and show the list
9
9
 
10
- Call the oc_configure tool with subcommand "start". This returns:
11
- - `availableModels`: a map of provider -> list of "provider/model" strings
10
+ Call the oc_configure tool with subcommand "start". The response contains:
11
+ - `displayText`: a pre-formatted numbered list of ALL available models.
12
+ **Show this to the user VERBATIM. Do not summarize, truncate, or reformat it.**
13
+ - `modelIndex`: a map of number -> model ID (e.g. {"1": "anthropic/claude-opus-4-6"})
12
14
  - `groups`: the 8 agent groups with descriptions and recommendations
13
15
  - `currentConfig`: existing assignments if reconfiguring
14
16
  - `diversityRules`: adversarial diversity constraints
15
17
 
16
- If `availableModels` is empty or has no entries, tell the user:
17
- "No models were discovered from your providers. Run `opencode models`
18
- in your terminal to see available models, then type them manually below."
18
+ Print `displayText` exactly as returned. This is the complete model list
19
+ with instructions. Do not add, remove, or reorder entries.
19
20
 
20
- ## Step 2: Build the complete model list
21
-
22
- Combine ALL models from ALL providers into a single numbered list.
23
- Every model the user has access to must appear. Do NOT filter, summarize,
24
- or show only "recommended" models. The user decides — you present options.
25
-
26
- Example (show ALL of them, not a subset). The IDs must match exactly
27
- what `availableModels` returns (provider prefix comes from the provider):
28
- ```
29
- Available models:
30
- 1. anthropic/claude-opus-4-6
31
- 2. anthropic/claude-sonnet-4-6
32
- 3. anthropic/claude-haiku-4-5
33
- 4. openai/gpt-5.4
34
- 5. openai/gpt-5.4-mini
35
- 6. openai/gpt-5.4-codex
36
- 7. google/gemini-3.1-pro
37
- 8. google/gemini-3-flash
38
- ...
39
- ```
40
-
41
- ## Step 3: Walk through each group
21
+ ## Step 2: Walk through each group
42
22
 
43
23
  For each of the 8 groups (architects first, utilities last):
44
24
 
@@ -46,31 +26,23 @@ For each of the 8 groups (architects first, utilities last):
46
26
  2. Show the tier recommendation
47
27
  3. For adversarial groups (challengers, reviewers, red-team): explain WHY
48
28
  model diversity matters and which group they are adversarial to
49
- 4. Show the full numbered model list again whenever asking for selections
50
-
51
- ### Collecting models for each group
29
+ 4. Re-print `displayText` so the user can see the numbered list
52
30
 
53
- For each group, collect an ORDERED LIST of models (not just one):
31
+ Then ask:
54
32
 
55
33
  ```
56
- Group: Architects
57
- Pick models in priority order. The first is the primary; the rest are
58
- fallbacks tried in sequence when the primary is rate-limited or fails.
59
-
60
- Enter model numbers separated by commas (e.g. 1,4,7):
34
+ Enter model numbers for [Group Name], separated by commas (e.g. 1,4,7):
35
+ First = primary, rest = fallbacks in order.
61
36
  ```
62
37
 
63
- - The FIRST number is the primary model
64
- - All subsequent numbers are fallbacks, tried in the order given
65
- - Minimum 1 model (the primary), recommend 2-3 total
66
- - Emphasize that fallbacks are the core feature: "When your primary model
67
- hits a rate limit, the plugin automatically retries with the next model
68
- in your fallback chain. More fallbacks = more resilience."
38
+ ### Parsing the user's response
39
+
40
+ - Numbers like "1,4,7": look up each in `modelIndex` to get model IDs
41
+ - Model IDs typed directly (e.g. "anthropic/claude-opus-4-6"): use as-is
42
+ - Single number (e.g. "1"): primary only, no fallbacks
69
43
 
70
- Parse the user's response:
71
- - If they send numbers like "1,4,7": map to model IDs
72
- - If they send model IDs directly: use as-is
73
- - If they send a single number: that's the primary with no fallbacks
44
+ The FIRST model is the primary. All subsequent models are fallbacks,
45
+ tried in sequence when the primary is rate-limited or fails.
74
46
 
75
47
  Call oc_configure with subcommand "assign":
76
48
  - `group`: the group ID
@@ -88,7 +60,7 @@ Challengers are supposed to critique Architect decisions — using the same
88
60
  model family means you get confirmation bias instead of genuine challenge.
89
61
  Consider picking a different family for one of them. Continue anyway?"
90
62
 
91
- ## Step 4: Commit and verify
63
+ ## Step 3: Commit and verify
92
64
 
93
65
  After all 8 groups are assigned, call oc_configure with subcommand "commit".
94
66
 
@@ -106,9 +78,11 @@ Challengers | openai/gpt-5.4 | google/gemini-3.1-pro
106
78
 
107
79
  ## Rules
108
80
 
109
- - NEVER pre-select models for the user. Always present the full list.
110
- - NEVER skip fallback collection. Always ask for ordered model lists.
111
- - NEVER filter the model list to "recommended" models. Show everything.
81
+ - ALWAYS show `displayText` VERBATIM — never summarize or truncate the model list.
82
+ - ALWAYS re-print `displayText` before asking for each group's selection.
83
+ - ALWAYS ask for comma-separated numbers (ordered list, not just one pick).
84
+ - NEVER pre-select models for the user. They choose from the full list.
85
+ - NEVER skip fallback collection. Emphasize: more fallbacks = more resilience.
112
86
  - If the user says "pick for me" or "use defaults", THEN you may suggest
113
87
  assignments based on the tier recommendations and diversity rules, but
114
88
  still show what you picked and ask for confirmation.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@kodrunhq/opencode-autopilot",
3
- "version": "1.1.2",
3
+ "version": "1.1.3",
4
4
  "description": "Curated agents, skills, and commands for the OpenCode AI coding CLI — autonomous orchestrator, multi-agent code review, model fallback, and in-session asset creation tools.",
5
5
  "main": "src/index.ts",
6
6
  "keywords": [
@@ -117,6 +117,37 @@ function serializeDiversityWarnings(warnings: readonly DiversityWarning[]): read
117
117
  }));
118
118
  }
119
119
 
120
+ /**
121
+ * Build a flat numbered list of all available models and an index map.
122
+ * Returns { numberedList: "1. provider/model\n2. ...", indexMap: { "1": "provider/model", ... } }
123
+ */
124
+ function buildNumberedModelList(modelsByProvider: Map<string, string[]>): {
125
+ numberedList: string;
126
+ indexMap: Record<string, string>;
127
+ totalCount: number;
128
+ } {
129
+ const allModels: string[] = [];
130
+ for (const models of modelsByProvider.values()) {
131
+ allModels.push(...models);
132
+ }
133
+ // Sort alphabetically for stable ordering
134
+ allModels.sort();
135
+
136
+ const indexMap: Record<string, string> = {};
137
+ const lines: string[] = [];
138
+ for (let i = 0; i < allModels.length; i++) {
139
+ const num = String(i + 1);
140
+ indexMap[num] = allModels[i];
141
+ lines.push(` ${num}. ${allModels[i]}`);
142
+ }
143
+
144
+ return {
145
+ numberedList: lines.join("\n"),
146
+ indexMap,
147
+ totalCount: allModels.length,
148
+ };
149
+ }
150
+
120
151
  async function handleStart(configPath?: string): Promise<string> {
121
152
  // Wait for background provider discovery (up to 5s) before building model list
122
153
  await Promise.race([
@@ -125,6 +156,7 @@ async function handleStart(configPath?: string): Promise<string> {
125
156
  ]);
126
157
 
127
158
  const modelsByProvider = discoverAvailableModels();
159
+ const { numberedList, indexMap, totalCount } = buildNumberedModelList(modelsByProvider);
128
160
 
129
161
  // Load current plugin config to show existing assignments
130
162
  const currentConfig = await loadConfig(configPath);
@@ -148,10 +180,29 @@ async function handleStart(configPath?: string): Promise<string> {
148
180
  };
149
181
  });
150
182
 
183
+ // Pre-formatted text the LLM should show verbatim — avoids summarization
184
+ const displayText =
185
+ totalCount > 0
186
+ ? [
187
+ `Available models (${totalCount} total):`,
188
+ numberedList,
189
+ "",
190
+ "For each group below, enter model numbers separated by commas (e.g. 1,4,7).",
191
+ "First number = primary model. Remaining = fallbacks tried in order.",
192
+ "More fallbacks = more resilience when a model is rate-limited.",
193
+ ].join("\n")
194
+ : [
195
+ "No models were discovered from your providers.",
196
+ "Run `opencode models` in your terminal to see available models,",
197
+ "then type model IDs manually (e.g. anthropic/claude-opus-4-6).",
198
+ ].join("\n");
199
+
151
200
  return JSON.stringify({
152
201
  action: "configure",
153
202
  stage: "start",
154
203
  availableModels: Object.fromEntries(modelsByProvider),
204
+ modelIndex: indexMap,
205
+ displayText,
155
206
  groups,
156
207
  currentConfig: currentConfig
157
208
  ? { configured: currentConfig.configured, groups: currentConfig.groups }