@oh-my-pi/pi-coding-agent 4.3.0 → 4.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +11 -0
- package/package.json +5 -5
- package/src/cli/update-cli.ts +2 -2
- package/src/config.ts +5 -5
- package/src/core/auth-storage.ts +6 -1
- package/src/core/custom-commands/loader.ts +3 -1
- package/src/core/custom-tools/loader.ts +1 -18
- package/src/core/extensions/loader.ts +5 -21
- package/src/core/hooks/loader.ts +1 -18
- package/src/core/keybindings.ts +3 -1
- package/src/core/logger.ts +1 -2
- package/src/core/prompt-templates.ts +5 -4
- package/src/core/sdk.ts +5 -3
- package/src/core/skills.ts +5 -4
- package/src/core/tools/exa/mcp-client.ts +2 -2
- package/src/core/tools/task/agents.ts +5 -64
- package/src/core/tools/task/commands.ts +7 -33
- package/src/core/tools/task/discovery.ts +4 -66
- package/src/core/tools/task/executor.ts +32 -3
- package/src/core/tools/task/index.ts +11 -2
- package/src/core/tools/task/render.ts +25 -15
- package/src/core/tools/task/types.ts +3 -0
- package/src/core/tools/task/worker-protocol.ts +2 -1
- package/src/core/tools/task/worker.ts +2 -1
- package/src/core/tools/web-scrapers/huggingface.ts +1 -1
- package/src/core/tools/web-scrapers/readthedocs.ts +1 -1
- package/src/core/tools/web-scrapers/types.ts +1 -1
- package/src/core/tools/web-search/auth.ts +5 -3
- package/src/discovery/codex.ts +3 -1
- package/src/discovery/helpers.ts +124 -3
- package/src/migrations.ts +11 -9
- package/src/modes/interactive/components/extensions/state-manager.ts +19 -18
- package/src/prompts/agents/frontmatter.md +1 -0
- package/src/prompts/agents/reviewer.md +32 -4
- package/src/prompts/tools/task.md +3 -1
@@ -4,7 +4,7 @@
  * Runs each subagent in a Bun Worker and forwards AgentEvents for progress tracking.
  */
 
-import type { AgentEvent } from "@oh-my-pi/pi-agent-core";
+import type { AgentEvent, ThinkingLevel } from "@oh-my-pi/pi-agent-core";
 import type { AuthStorage } from "../../auth-storage";
 import type { EventBus } from "../../event-bus";
 import { callTool } from "../../mcp/client";
@@ -18,6 +18,7 @@ import {
   type AgentProgress,
   MAX_OUTPUT_BYTES,
   MAX_OUTPUT_LINES,
+  type ReviewFinding,
   type SingleResult,
   TASK_SUBAGENT_EVENT_CHANNEL,
   TASK_SUBAGENT_PROGRESS_CHANNEL,
@@ -39,6 +40,7 @@ export interface ExecutorOptions {
   taskId: string;
   context?: string;
   modelOverride?: string;
+  thinkingLevel?: ThinkingLevel;
   outputSchema?: unknown;
   enableLsp?: boolean;
   signal?: AbortSignal;
@@ -183,8 +185,20 @@ function extractMCPToolMetadata(mcpManager: MCPManager): MCPToolMetadata[] {
  * Run a single agent in a worker.
  */
 export async function runSubprocess(options: ExecutorOptions): Promise<SingleResult> {
-  const {
-
+  const {
+    cwd,
+    agent,
+    task,
+    index,
+    taskId,
+    context,
+    modelOverride,
+    thinkingLevel,
+    outputSchema,
+    enableLsp,
+    signal,
+    onProgress,
+  } = options;
   const startTime = Date.now();
 
   // Initialize progress
@@ -578,6 +592,7 @@ export async function runSubprocess(options: ExecutorOptions): Promise<SingleRes
     task: fullTask,
     systemPrompt: agent.systemPrompt,
     model: resolvedModel,
+    thinkingLevel,
     toolNames,
     outputSchema,
     sessionFile,
@@ -751,6 +766,20 @@ export async function runSubprocess(options: ExecutorOptions): Promise<SingleRes
       // Not valid JSON, keep as string
     }
   }
+  // Special case: merge report_finding data into review output for parent visibility
+  const reportFindings = progress.extractedToolData?.report_finding as ReviewFinding[] | undefined;
+  if (
+    Array.isArray(reportFindings) &&
+    reportFindings.length > 0 &&
+    completeData &&
+    typeof completeData === "object" &&
+    !Array.isArray(completeData)
+  ) {
+    const record = completeData as Record<string, unknown>;
+    if (!("findings" in record)) {
+      completeData = { ...record, findings: reportFindings };
+    }
+  }
   try {
     rawOutput = JSON.stringify(completeData, null, 2) ?? "null";
   } catch (err) {
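The merge above only attaches `findings` when the structured output does not already define that key. A standalone sketch of the same behavior; the `ReviewFinding` shape and the `mergeFindings` helper are illustrative, the package inlines this logic:

```ts
// Hypothetical ReviewFinding shape, assumed for illustration only.
interface ReviewFinding {
  title: string;
  priority: number; // lower = more severe
  body: string;
}

// Mirrors the merge in the hunk above: findings reported via the report_finding
// tool are folded into the review's structured output unless it already has them.
function mergeFindings(completeData: unknown, reportFindings: ReviewFinding[] | undefined): unknown {
  if (
    Array.isArray(reportFindings) &&
    reportFindings.length > 0 &&
    completeData &&
    typeof completeData === "object" &&
    !Array.isArray(completeData)
  ) {
    const record = completeData as Record<string, unknown>;
    if (!("findings" in record)) {
      return { ...record, findings: reportFindings };
    }
  }
  return completeData;
}

// The verdict object gains a findings array; an existing findings key would be left alone.
console.log(mergeFindings({ verdict: "needs_changes" }, [{ title: "Unchecked null", priority: 1, body: "possible null deref" }]));
```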
@@ -156,6 +156,11 @@ export async function createTaskTool(
   const shouldInheritSessionModel = model === undefined && isDefaultModelAlias(agent.model);
   const sessionModel = shouldInheritSessionModel ? session.getActiveModelString?.() : undefined;
   const modelOverride = model ?? sessionModel ?? session.getModelString?.();
+  const thinkingLevelOverride = agent.thinkingLevel;
+
+  // Output schema priority: agent frontmatter > params > inherited from parent session
+  const schemaOverridden = outputSchema !== undefined && agent.output !== undefined;
+  const effectiveOutputSchema = agent.output ?? outputSchema ?? session.outputSchema;
 
   // Handle empty or missing tasks
   if (!params.tasks || params.tasks.length === 0) {
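The precedence comment above boils down to a nullish-coalescing chain, plus a flag for the case where the caller's `output` parameter loses to the agent's own schema. A minimal sketch with illustrative parameter names (not the package's identifiers):

```ts
// Stand-ins for agent.output, the task tool's outputSchema param, and session.outputSchema.
function resolveOutputSchema(
  agentOutput: unknown,
  paramOutput: unknown,
  sessionOutput: unknown,
): { effective: unknown; overridden: boolean } {
  // The caller's parameter counts as overridden only when the agent pins its own schema.
  const overridden = paramOutput !== undefined && agentOutput !== undefined;
  return { effective: agentOutput ?? paramOutput ?? sessionOutput, overridden };
}

console.log(resolveOutputSchema({ type: "object" }, { type: "string" }, undefined));
// → { effective: { type: "object" }, overridden: true }
```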
@@ -345,7 +350,8 @@ export async function createTaskTool(
       taskId: task.taskId,
       context: undefined, // Already prepended above
       modelOverride,
-
+      thinkingLevel: thinkingLevelOverride,
+      outputSchema: effectiveOutputSchema,
       sessionFile,
       persistArtifacts: !!artifactsDir,
       artifactsDir: effectiveArtifactsDir,
@@ -399,9 +405,12 @@ export async function createTaskTool(
   const outputIds = results.map((r) => r.taskId);
   const outputHint =
     outputIds.length > 0 ? `\n\nUse output tool for full logs: output ids ${outputIds.join(", ")}` : "";
+  const schemaNote = schemaOverridden
+    ? `\n\nNote: Agent '${agentName}' has a fixed output schema; your 'output' parameter was ignored.\nRequired schema: ${JSON.stringify(agent.output)}`
+    : "";
   const summary = `${successCount}/${results.length} succeeded [${formatDuration(
     totalDuration,
-  )}]\n\n${summaries.join("\n\n---\n\n")}${outputHint}`;
+  )}]\n\n${summaries.join("\n\n---\n\n")}${outputHint}${schemaNote}`;
 
   // Cleanup temp directory if used
   if (tempArtifactsDir) {
@@ -369,18 +369,28 @@ function renderAgentProgress(
   }
 
   for (const [toolName, dataArray] of Object.entries(progress.extractedToolData)) {
+    // Handle report_finding with tree formatting
+    if (toolName === "report_finding" && (dataArray as ReportFindingDetails[]).length > 0) {
+      const findings = dataArray as ReportFindingDetails[];
+      lines.push(`${continuePrefix}${formatFindingSummary(findings, theme)}`);
+      lines.push(...renderFindings(findings, continuePrefix, expanded, theme));
+      continue;
+    }
+
     const handler = subprocessToolRegistry.getHandler(toolName);
     if (handler?.renderInline) {
-
-      const recentData = (dataArray as unknown[]).slice(-
+      const displayCount = expanded ? (dataArray as unknown[]).length : 3;
+      const recentData = (dataArray as unknown[]).slice(-displayCount);
       for (const data of recentData) {
         const component = handler.renderInline(data, theme);
         if (component instanceof Text) {
           lines.push(`${continuePrefix}${component.getText()}`);
         }
       }
-      if (dataArray.length >
-      lines.push(
+      if ((dataArray as unknown[]).length > displayCount) {
+        lines.push(
+          `${continuePrefix}${theme.fg("dim", formatMoreItems((dataArray as unknown[]).length - displayCount, "item", theme))}`,
+        );
       }
     }
   }
@@ -436,7 +446,6 @@ function renderReviewResult(
   lines.push(`${continuePrefix}${formatFindingSummary(findings, theme)}`);
 
   if (findings.length > 0) {
-    lines.push(`${continuePrefix}`); // Spacing
     lines.push(...renderFindings(findings, continuePrefix, expanded, theme));
   }
 
@@ -453,11 +462,14 @@ function renderFindings(
   theme: Theme,
 ): string[] {
   const lines: string[] = [];
-
+
+  // Sort by priority (lower = more severe) when collapsed to show most important first
+  const sortedFindings = expanded ? findings : [...findings].sort((a, b) => a.priority - b.priority);
+  const displayCount = expanded ? sortedFindings.length : Math.min(3, sortedFindings.length);
 
   for (let i = 0; i < displayCount; i++) {
-    const finding =
-    const isLastFinding = i === displayCount - 1 && (expanded ||
+    const finding = sortedFindings[i];
+    const isLastFinding = i === displayCount - 1 && (expanded || sortedFindings.length <= 3);
     const findingPrefix = isLastFinding ? theme.tree.last : theme.tree.branch;
     const findingContinue = isLastFinding ? " " : `${theme.tree.vertical} `;
 
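When the view is collapsed, the renderer now shows at most three findings and orders them by severity (lower `priority` first); expanded views keep the original order and show everything. A small sketch of that selection, assuming only the `priority` field:

```ts
interface Finding {
  title: string;
  priority: number; // lower = more severe
}

// Same selection rule as the hunk above: severity sort when collapsed, cap at three.
function selectFindings(findings: Finding[], expanded: boolean): Finding[] {
  const sorted = expanded ? findings : [...findings].sort((a, b) => a.priority - b.priority);
  const displayCount = expanded ? sorted.length : Math.min(3, sorted.length);
  return sorted.slice(0, displayCount);
}

const findings: Finding[] = [
  { title: "style nit", priority: 3 },
  { title: "data loss on retry", priority: 0 },
  { title: "missing test", priority: 2 },
  { title: "typo in comment", priority: 3 },
];
console.log(selectFindings(findings, false).map((f) => f.title));
// → ["data loss on retry", "missing test", "style nit"]
```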
@@ -538,14 +550,12 @@ function renderAgentResult(result: SingleResult, isLast: boolean, expanded: bool
     return lines;
   }
   if (reportFindingData && reportFindingData.length > 0) {
-
-
-
-
-
-      );
+    const hasCompleteData = completeData && completeData.length > 0;
+    const message = hasCompleteData
+      ? "Review verdict missing expected fields"
+      : "Review incomplete (complete not called)";
+    lines.push(`${continuePrefix}${theme.fg("warning", theme.status.warning)} ${theme.fg("dim", message)}`);
     lines.push(`${continuePrefix}${formatFindingSummary(reportFindingData, theme)}`);
-    lines.push(`${continuePrefix}`); // Spacing
     lines.push(...renderFindings(reportFindingData, continuePrefix, expanded, theme));
     return lines;
   }
@@ -1,3 +1,4 @@
+import type { ThinkingLevel } from "@oh-my-pi/pi-agent-core";
 import type { Usage } from "@oh-my-pi/pi-ai";
 import { type Static, Type } from "@sinclair/typebox";
 
@@ -106,6 +107,8 @@ export interface AgentDefinition {
   tools?: string[];
   spawns?: string[] | "*";
   model?: string;
+  thinkingLevel?: ThinkingLevel;
+  output?: unknown;
   source: AgentSource;
   filePath?: string;
 }
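With the two new optional fields, an agent definition can pin both a reasoning effort and a fixed output schema. A hedged example value; the field contents and the `source` string are illustrative, not taken from the package:

```ts
// Illustrative AgentDefinition-shaped value using the new optional fields.
const reviewerAgent = {
  name: "reviewer",
  description: "Reviews a diff and reports findings",
  tools: ["read", "grep", "report_finding"],
  model: "default",
  thinkingLevel: "high", // new in 4.3.1
  output: {
    // new in 4.3.1: schema the subagent's structured output must satisfy
    type: "object",
    properties: { verdict: { type: "string" } },
    required: ["verdict"],
  },
  source: "builtin", // assumed AgentSource value, for illustration only
};
console.log(reviewerAgent.thinkingLevel);
```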
@@ -1,4 +1,4 @@
-import type { AgentEvent } from "@oh-my-pi/pi-agent-core";
+import type { AgentEvent, ThinkingLevel } from "@oh-my-pi/pi-agent-core";
 import type { SerializedAuthStorage } from "../../auth-storage";
 import type { SerializedModelRegistry } from "../../model-registry";
 
@@ -43,6 +43,7 @@ export interface SubagentWorkerStartPayload {
   task: string;
   systemPrompt: string;
   model?: string;
+  thinkingLevel?: ThinkingLevel;
   toolNames?: string[];
   outputSchema?: unknown;
   enableLsp?: boolean;
@@ -287,7 +287,8 @@ async function runTask(runState: RunState, payload: SubagentWorkerStartPayload):
   const mcpProxyTools = payload.mcpTools?.map(createMCPProxyTool) ?? [];
 
   // Resolve model override (equivalent to CLI's parseModelPattern with --model)
-  const { model, thinkingLevel } = resolveModelOverride(payload.model, modelRegistry);
+  const { model, thinkingLevel: modelThinkingLevel } = resolveModelOverride(payload.model, modelRegistry);
+  const thinkingLevel = modelThinkingLevel ?? payload.thinkingLevel;
 
   // Create session manager (equivalent to CLI's --session or --no-session)
   const sessionManager = payload.sessionFile
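The order matters here: a thinking level derived from the model override string still wins, and the agent's own `thinkingLevel` from the start payload is only the fallback. A minimal sketch of that precedence; the `model:level` suffix syntax in `parseModelPattern` below is an assumption for illustration, not the package's parser:

```ts
type ThinkingLevel = "off" | "minimal" | "low" | "medium" | "high" | "xhigh";

// Illustrative stand-in for resolveModelOverride: split an optional ":level" suffix.
function parseModelPattern(pattern?: string): { model?: string; thinkingLevel?: ThinkingLevel } {
  if (!pattern) return {};
  const [model, level] = pattern.split(":");
  return { model, thinkingLevel: level as ThinkingLevel | undefined };
}

// Same precedence as the hunk above: model-derived level first, then the payload's.
function effectiveThinkingLevel(modelPattern?: string, payloadLevel?: ThinkingLevel): ThinkingLevel | undefined {
  const { thinkingLevel: modelThinkingLevel } = parseModelPattern(modelPattern);
  return modelThinkingLevel ?? payloadLevel;
}

console.log(effectiveThinkingLevel("some-model:low", "high")); // "low"
console.log(effectiveThinkingLevel("some-model", "high"));     // "high"
```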
@@ -170,7 +170,7 @@ export async function loadPage(url: string, options: LoadPageOptions = {}): Prom
     }
 
     return { content, contentType, finalUrl, ok: true, status: response.status };
-  } catch
+  } catch {
     if (signal?.aborted) {
       return { content: "", contentType: "", finalUrl: url, ok: false };
     }
@@ -14,6 +14,7 @@ import { buildBetaHeader, claudeCodeHeaders, claudeCodeVersion } from "@oh-my-pi
 import { getAgentDbPath, getConfigDirPaths } from "../../../config";
 import { AgentStorage } from "../../agent-storage";
 import type { AuthCredential, AuthCredentialEntry, AuthStorageData } from "../../auth-storage";
+import { logger } from "../../logger";
 import { migrateJsonStorage } from "../../storage-migration";
 import type { AnthropicAuthConfig, AnthropicOAuthCredential, ModelsJson } from "./types";
 
@@ -48,8 +49,8 @@ async function parseEnvFile(filePath: string): Promise<Record<string, string>> {
 
       result[key] = value;
     }
-  } catch {
-
+  } catch (error) {
+    logger.warn("Failed to read .env file", { path: filePath, error: String(error) });
   }
   return result;
 }
@@ -82,7 +83,8 @@ async function readJson<T>(filePath: string): Promise<T | null> {
     if (!(await file.exists())) return null;
     const content = await file.text();
     return JSON.parse(content) as T;
-  } catch {
+  } catch (error) {
+    logger.warn("Failed to parse JSON file", { path: filePath, error: String(error) });
     return null;
   }
 }
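Several hunks in this release repeat one pattern: a previously silent `catch {}` now binds the error and emits `logger.warn` with a small context object, while keeping the same fallback behavior. A generic before/after sketch; the `logger` here is a local stand-in, not the package's logger module:

```ts
// Stand-in logger with the same call shape used in the hunks above.
const logger = {
  warn(message: string, context?: Record<string, unknown>): void {
    console.warn(message, context ?? {});
  },
};

// Before: parse failures vanish without a trace.
function readJsonBefore(text: string): unknown {
  try {
    return JSON.parse(text);
  } catch {
    return null;
  }
}

// After: same null fallback, but the failure and its context are recorded.
function readJsonAfter(text: string, path: string): unknown {
  try {
    return JSON.parse(text);
  } catch (error) {
    logger.warn("Failed to parse JSON file", { path, error: String(error) });
    return null;
  }
}

console.log(readJsonBefore("{ not json"), readJsonAfter("{ not json", "settings.json"));
```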
package/src/discovery/codex.ts CHANGED

@@ -29,6 +29,7 @@ import { slashCommandCapability } from "../capability/slash-command";
 import type { CustomTool } from "../capability/tool";
 import { toolCapability } from "../capability/tool";
 import type { LoadContext, LoadResult } from "../capability/types";
+import { logger } from "../core/logger";
 import {
   createSourceMeta,
   discoverExtensionModulePaths,
@@ -117,7 +118,8 @@ async function loadTomlConfig(_ctx: LoadContext, path: string): Promise<Record<s
 
   try {
     return parseToml(content) as Record<string, unknown>;
-  } catch (
+  } catch (error) {
+    logger.warn("Failed to parse TOML config", { path, error: String(error) });
     return null;
   }
 }
package/src/discovery/helpers.ts CHANGED

@@ -2,11 +2,38 @@
  * Shared helpers for discovery providers.
  */
 
+import { homedir } from "node:os";
 import { join, resolve } from "node:path";
+import type { ThinkingLevel } from "@oh-my-pi/pi-agent-core";
 import { parse as parseYAML } from "yaml";
 import { readDirEntries, readFile } from "../capability/fs";
 import type { Skill, SkillFrontmatter } from "../capability/skill";
 import type { LoadContext, LoadResult, SourceMeta } from "../capability/types";
+import { logger } from "../core/logger";
+
+const VALID_THINKING_LEVELS: readonly string[] = ["off", "minimal", "low", "medium", "high", "xhigh"];
+const UNICODE_SPACES = /[\u00A0\u2000-\u200A\u202F\u205F\u3000]/g;
+
+/**
+ * Normalize unicode spaces to regular spaces.
+ */
+export function normalizeUnicodeSpaces(str: string): string {
+  return str.replace(UNICODE_SPACES, " ");
+}
+
+/**
+ * Expand ~ to home directory and normalize unicode spaces.
+ */
+export function expandPath(p: string): string {
+  const normalized = normalizeUnicodeSpaces(p);
+  if (normalized.startsWith("~/")) {
+    return join(homedir(), normalized.slice(2));
+  }
+  if (normalized.startsWith("~")) {
+    return join(homedir(), normalized.slice(1));
+  }
+  return normalized;
+}
 
 /**
  * Standard paths for each config source.
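Expected behavior of the two new helpers, as a usage sketch. The import path is an assumption for illustration; the helpers are the ones exported from the discovery helpers module above:

```ts
// Hypothetical import path, for illustration only.
import { expandPath, normalizeUnicodeSpaces } from "./discovery/helpers";

console.log(expandPath("~/agents"));             // joins homedir() with "agents"
console.log(expandPath("~"));                    // resolves to homedir()
console.log(expandPath("./agents"));             // non-tilde paths pass through unchanged
console.log(normalizeUnicodeSpaces("a\u00A0b")); // "a b" (NBSP becomes a plain space)
```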
@@ -117,14 +144,108 @@ export function parseFrontmatter(content: string): {
   const body = normalized.slice(endIndex + 4).trim();
 
   try {
-
+    // Replace tabs with spaces for YAML compatibility, use failsafe mode for robustness
+    const frontmatter = parseYAML(raw.replaceAll("\t", " "), { compat: "failsafe" }) as Record<
+      string,
+      unknown
+    > | null;
     return { frontmatter: frontmatter ?? {}, body, raw };
-  } catch {
-
+  } catch (error) {
+    logger.warn("Failed to parse YAML frontmatter", { error: String(error) });
     return { frontmatter: {}, body, raw };
   }
 }
 
+/**
+ * Parse thinking level from frontmatter.
+ * Supports keys: thinkingLevel, thinking-level, thinking
+ */
+export function parseThinkingLevel(frontmatter: Record<string, unknown>): ThinkingLevel | undefined {
+  const raw = frontmatter.thinkingLevel ?? frontmatter["thinking-level"] ?? frontmatter.thinking;
+  if (typeof raw === "string" && VALID_THINKING_LEVELS.includes(raw)) {
+    return raw as ThinkingLevel;
+  }
+  return undefined;
+}
+
+/**
+ * Parse a comma-separated string into an array of trimmed, non-empty strings.
+ */
+export function parseCSV(value: string): string[] {
+  return value
+    .split(",")
+    .map((s) => s.trim())
+    .filter(Boolean);
+}
+
+/**
+ * Parse a value that may be an array of strings or a comma-separated string.
+ * Returns undefined if the result would be empty.
+ */
+export function parseArrayOrCSV(value: unknown): string[] | undefined {
+  if (Array.isArray(value)) {
+    const filtered = value.filter((item): item is string => typeof item === "string");
+    return filtered.length > 0 ? filtered : undefined;
+  }
+  if (typeof value === "string") {
+    const parsed = parseCSV(value);
+    return parsed.length > 0 ? parsed : undefined;
+  }
+  return undefined;
+}
+
+/** Parsed agent fields from frontmatter (excludes source/filePath/systemPrompt) */
+export interface ParsedAgentFields {
+  name: string;
+  description: string;
+  tools?: string[];
+  spawns?: string[] | "*";
+  model?: string;
+  output?: unknown;
+  thinkingLevel?: ThinkingLevel;
+}
+
+/**
+ * Parse agent fields from frontmatter.
+ * Returns null if required fields (name, description) are missing.
+ */
+export function parseAgentFields(frontmatter: Record<string, unknown>): ParsedAgentFields | null {
+  const name = typeof frontmatter.name === "string" ? frontmatter.name : undefined;
+  const description = typeof frontmatter.description === "string" ? frontmatter.description : undefined;
+
+  if (!name || !description) {
+    return null;
+  }
+
+  const tools = parseArrayOrCSV(frontmatter.tools);
+
+  // Parse spawns field (array, "*", or CSV)
+  let spawns: string[] | "*" | undefined;
+  if (frontmatter.spawns === "*") {
+    spawns = "*";
+  } else if (typeof frontmatter.spawns === "string") {
+    const trimmed = frontmatter.spawns.trim();
+    if (trimmed === "*") {
+      spawns = "*";
+    } else {
+      spawns = parseArrayOrCSV(trimmed);
+    }
+  } else {
+    spawns = parseArrayOrCSV(frontmatter.spawns);
+  }
+
+  // Backward compat: infer spawns: "*" when tools includes "task"
+  if (spawns === undefined && tools?.includes("task")) {
+    spawns = "*";
+  }
+
+  const output = frontmatter.output !== undefined ? frontmatter.output : undefined;
+  const model = typeof frontmatter.model === "string" ? frontmatter.model : undefined;
+  const thinkingLevel = parseThinkingLevel(frontmatter);
+
+  return { name, description, tools, spawns, model, output, thinkingLevel };
+}
+
 export async function loadSkillsFromDir(
   _ctx: LoadContext,
   options: {
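Taken together, `parseFrontmatter` and `parseAgentFields` turn an agent file's frontmatter into a typed field bundle: `tools` and `spawns` accept either YAML lists or comma-separated strings, `thinking-level`/`thinkingLevel`/`thinking` all map to the same field, and `tools: task` still implies `spawns: "*"`. A hedged usage sketch; the import path is assumed, and the expected output reflects the parsing rules in the hunk above:

```ts
// Hypothetical import path, for illustration only.
import { parseAgentFields, parseFrontmatter } from "./discovery/helpers";

const source = [
  "---",
  "name: reviewer",
  "description: Reviews diffs and reports findings",
  "tools: read, grep, report_finding",
  "thinking-level: high",
  "---",
  "You are a careful code reviewer.",
].join("\n");

const { frontmatter, body } = parseFrontmatter(source);
const fields = parseAgentFields(frontmatter);
// Expected: name "reviewer", tools ["read", "grep", "report_finding"],
// thinkingLevel "high", spawns/model/output undefined; body keeps the prompt text.
console.log(fields, body);
```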
package/src/migrations.ts CHANGED

@@ -8,6 +8,7 @@ import chalk from "chalk";
 import { getAgentDbPath, getAgentDir, getBinDir } from "./config";
 import { AgentStorage } from "./core/agent-storage";
 import type { AuthCredential } from "./core/auth-storage";
+import { logger } from "./core/logger";
 
 /**
  * Migrate PI_* environment variables to OMP_* equivalents.
@@ -55,8 +56,8 @@ export function migrateAuthToAgentDb(): string[] {
         providers.push(provider);
       }
       renameSync(oauthPath, `${oauthPath}.migrated`);
-    } catch {
-
+    } catch (error) {
+      logger.warn("Failed to migrate oauth.json", { path: oauthPath, error: String(error) });
     }
   }
 
@@ -75,8 +76,8 @@ export function migrateAuthToAgentDb(): string[] {
         delete settings.apiKeys;
         writeFileSync(settingsPath, JSON.stringify(settings, null, 2));
       }
-    } catch {
-
+    } catch (error) {
+      logger.warn("Failed to migrate settings.json apiKeys", { path: settingsPath, error: String(error) });
     }
   }
 
@@ -105,7 +106,8 @@ export function migrateSessionsFromAgentRoot(): void {
     files = readdirSync(agentDir)
       .filter((f) => f.endsWith(".jsonl"))
       .map((f) => join(agentDir, f));
-  } catch {
+  } catch (error) {
+    logger.warn("Failed to read agent directory for session migration", { path: agentDir, error: String(error) });
     return;
   }
 
@@ -137,8 +139,8 @@ export function migrateSessionsFromAgentRoot(): void {
       if (existsSync(newPath)) continue; // Skip if target exists
 
       renameSync(file, newPath);
-    } catch {
-
+    } catch (error) {
+      logger.warn("Failed to migrate session file", { path: file, error: String(error) });
     }
   }
 }
@@ -168,8 +170,8 @@ function migrateToolsToBin(): void {
       try {
         renameSync(oldPath, newPath);
         movedAny = true;
-      } catch {
-
+      } catch (error) {
+        logger.warn("Failed to migrate binary", { from: oldPath, to: newPath, error: String(error) });
      }
     } else {
       // Target exists, just delete the old one
@@ -13,6 +13,7 @@ import type { Skill } from "../../../../capability/skill";
 import type { SlashCommand } from "../../../../capability/slash-command";
 import type { CustomTool } from "../../../../capability/tool";
 import type { SourceMeta } from "../../../../capability/types";
+import { logger } from "../../../../core/logger";
 import {
   disableProvider,
   enableProvider,
@@ -105,8 +106,8 @@ export async function loadAllExtensions(cwd?: string, disabledIds?: string[]): P
       getDescription: (s) => s.frontmatter?.description,
       getTrigger: (s) => s.frontmatter?.globs?.join(", "),
     });
-  } catch {
-
+  } catch (error) {
+    logger.warn("Failed to load skills capability", { error: String(error) });
   }
 
   // Load rules
@@ -116,8 +117,8 @@ export async function loadAllExtensions(cwd?: string, disabledIds?: string[]): P
       getDescription: (r) => r.description,
       getTrigger: (r) => r.globs?.join(", ") || (r.alwaysApply ? "always" : undefined),
     });
-  } catch {
-
+  } catch (error) {
+    logger.warn("Failed to load rules capability", { error: String(error) });
   }
 
   // Load custom tools
@@ -126,8 +127,8 @@ export async function loadAllExtensions(cwd?: string, disabledIds?: string[]): P
     addItems(tools.all, "tool", {
       getDescription: (t) => t.description,
     });
-  } catch {
-
+  } catch (error) {
+    logger.warn("Failed to load tools capability", { error: String(error) });
   }
 
   // Load extension modules
@@ -135,8 +136,8 @@ export async function loadAllExtensions(cwd?: string, disabledIds?: string[]): P
     const modules = await loadCapability<ExtensionModule>("extension-modules", loadOpts);
     const nativeModules = modules.all.filter((module) => module._source.provider === "native");
     addItems(nativeModules, "extension-module");
-  } catch {
-
+  } catch (error) {
+    logger.warn("Failed to load extension-modules capability", { error: String(error) });
   }
 
   // Load MCP servers
@@ -178,8 +179,8 @@ export async function loadAllExtensions(cwd?: string, disabledIds?: string[]): P
         raw: server,
       });
     }
-  } catch {
-
+  } catch (error) {
+    logger.warn("Failed to load mcps capability", { error: String(error) });
   }
 
   // Load prompts
@@ -189,8 +190,8 @@ export async function loadAllExtensions(cwd?: string, disabledIds?: string[]): P
       getDescription: () => undefined,
       getTrigger: (p) => `/prompts:${p.name}`,
     });
-  } catch {
-
+  } catch (error) {
+    logger.warn("Failed to load prompts capability", { error: String(error) });
   }
 
   // Load slash commands
@@ -200,8 +201,8 @@ export async function loadAllExtensions(cwd?: string, disabledIds?: string[]): P
      getDescription: () => undefined,
       getTrigger: (c) => `/${c.name}`,
     });
-  } catch {
-
+  } catch (error) {
+    logger.warn("Failed to load slash-commands capability", { error: String(error) });
   }
 
   // Load hooks
@@ -243,8 +244,8 @@ export async function loadAllExtensions(cwd?: string, disabledIds?: string[]): P
         raw: hook,
       });
     }
-  } catch {
-
+  } catch (error) {
+    logger.warn("Failed to load hooks capability", { error: String(error) });
   }
 
   // Load context files
@@ -288,8 +289,8 @@ export async function loadAllExtensions(cwd?: string, disabledIds?: string[]): P
         raw: file,
       });
     }
-  } catch {
-
+  } catch (error) {
+    logger.warn("Failed to load context-files capability", { error: String(error) });
   }
 
   return extensions;