@mariozechner/pi-coding-agent 0.12.14 → 0.13.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +10 -0
- package/dist/compaction.d.ts +1 -0
- package/dist/compaction.d.ts.map +1 -1
- package/dist/compaction.js +2 -1
- package/dist/compaction.js.map +1 -1
- package/dist/config.d.ts +1 -1
- package/dist/config.d.ts.map +1 -1
- package/dist/config.js +2 -2
- package/dist/config.js.map +1 -1
- package/dist/tools/index.d.ts +11 -11
- package/package.json +5 -5
package/CHANGELOG.md
CHANGED
@@ -2,6 +2,16 @@
 
 ## [Unreleased]
 
+### Fixed
+
+- **Windows binary detection**: Fixed Bun compiled binary detection on Windows by checking for URL-encoded `%7EBUN` in addition to `$bunfs` and `~BUN` in `import.meta.url`. This ensures the binary correctly locates supporting files (package.json, themes, etc.) next to the executable.
+
+## [0.12.15] - 2025-12-06
+
+### Fixed
+
+- **Editor crash with emojis/CJK characters**: Fixed crash when pasting or typing text containing wide characters (emojis like ✅, CJK characters) that caused line width to exceed terminal width. The editor now uses grapheme-aware text wrapping with proper visible width calculation.
+
 ## [0.12.14] - 2025-12-06
 
 ### Added
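The grapheme-aware wrapping mentioned in the editor-crash fix is not visible in any hunk of this diff. As a rough, hypothetical sketch of the idea (not the package's actual editor code), the following segments text into grapheme clusters with `Intl.Segmenter` and counts emoji/CJK clusters as two terminal columns before wrapping:

```typescript
// Hypothetical sketch of grapheme-aware wrapping; names and the width
// heuristic are illustrative, not taken from pi-coding-agent.
const graphemes = new Intl.Segmenter(undefined, { granularity: "grapheme" });

// Count a grapheme cluster as 2 columns if its first code point falls in a
// common wide range (CJK, Hangul, fullwidth forms, emoji; ✅ is U+2705).
function visibleWidth(cluster: string): number {
	const cp = cluster.codePointAt(0) ?? 0;
	const wide =
		(cp >= 0x1100 && cp <= 0x115f) ||
		(cp >= 0x2600 && cp <= 0x27bf) ||
		(cp >= 0x2e80 && cp <= 0x9fff) ||
		(cp >= 0xac00 && cp <= 0xd7a3) ||
		(cp >= 0xf900 && cp <= 0xfaff) ||
		(cp >= 0xff00 && cp <= 0xff60) ||
		(cp >= 0x1f300 && cp <= 0x1faff);
	return wide ? 2 : 1;
}

// Wrap text so no line exceeds maxWidth visible columns, never splitting a
// grapheme cluster (so an emoji or CJK character cannot straddle two lines).
function wrapVisible(text: string, maxWidth: number): string[] {
	const lines: string[] = [];
	let line = "";
	let width = 0;
	for (const { segment } of graphemes.segment(text)) {
		const w = visibleWidth(segment);
		if (width + w > maxWidth && line.length > 0) {
			lines.push(line);
			line = "";
			width = 0;
		}
		line += segment;
		width += w;
	}
	if (line.length > 0) lines.push(line);
	return lines;
}

// Example: "a✅b" in a 3-column terminal wraps after the emoji (1 + 2 = 3 columns).
console.log(wrapVisible("a✅b", 3)); // [ "a✅", "b" ]
```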
package/dist/compaction.d.ts
CHANGED
@@ -15,6 +15,7 @@ export interface CompactionSettings {
 export declare const DEFAULT_COMPACTION_SETTINGS: CompactionSettings;
 /**
  * Calculate total context tokens from usage.
+ * Uses the native totalTokens field when available, falls back to computing from components.
  */
 export declare function calculateContextTokens(usage: Usage): number;
 /**
package/dist/compaction.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"compaction.d.ts","sourceRoot":"","sources":["../src/compaction.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,6BAA6B,CAAC;AAC9D,OAAO,KAAK,EAAoB,KAAK,EAAE,KAAK,EAAE,MAAM,qBAAqB,CAAC;AAE1E,OAAO,KAAK,EAAE,eAAe,EAAE,YAAY,EAAE,MAAM,sBAAsB,CAAC;AAM1E,MAAM,WAAW,kBAAkB;IAClC,OAAO,EAAE,OAAO,CAAC;IACjB,aAAa,EAAE,MAAM,CAAC;IACtB,gBAAgB,EAAE,MAAM,CAAC;CACzB;AAED,eAAO,MAAM,2BAA2B,EAAE,kBAIzC,CAAC;AAMF;;GAEG;AACH,wBAAgB,sBAAsB,CAAC,KAAK,EAAE,KAAK,GAAG,MAAM,CAE3D;AAeD;;GAEG;AACH,wBAAgB,qBAAqB,CAAC,OAAO,EAAE,YAAY,EAAE,GAAG,KAAK,GAAG,IAAI,CAS3E;AAED;;GAEG;AACH,wBAAgB,aAAa,CAAC,aAAa,EAAE,MAAM,EAAE,aAAa,EAAE,MAAM,EAAE,QAAQ,EAAE,kBAAkB,GAAG,OAAO,CAGjH;AAoBD;;;;;GAKG;AACH,wBAAgB,YAAY,CAC3B,OAAO,EAAE,YAAY,EAAE,EACvB,UAAU,EAAE,MAAM,EAClB,QAAQ,EAAE,MAAM,EAChB,gBAAgB,EAAE,MAAM,GACtB,MAAM,CAgDR;AAiBD;;GAEG;AACH,wBAAsB,eAAe,CACpC,eAAe,EAAE,UAAU,EAAE,EAC7B,KAAK,EAAE,KAAK,CAAC,GAAG,CAAC,EACjB,aAAa,EAAE,MAAM,EACrB,MAAM,EAAE,MAAM,EACd,MAAM,CAAC,EAAE,WAAW,EACpB,kBAAkB,CAAC,EAAE,MAAM,GACzB,OAAO,CAAC,MAAM,CAAC,CAwBjB;AAMD;;;;;;;;;;GAUG;AACH,wBAAsB,OAAO,CAC5B,OAAO,EAAE,YAAY,EAAE,EACvB,KAAK,EAAE,KAAK,CAAC,GAAG,CAAC,EACjB,QAAQ,EAAE,kBAAkB,EAC5B,MAAM,EAAE,MAAM,EACd,MAAM,CAAC,EAAE,WAAW,EACpB,kBAAkB,CAAC,EAAE,MAAM,GACzB,OAAO,CAAC,eAAe,CAAC,CA6D1B","sourcesContent":["/**\n * Context compaction for long sessions.\n *\n * Pure functions for compaction logic. The session manager handles I/O,\n * and after compaction the session is reloaded.\n */\n\nimport type { AppMessage } from \"@mariozechner/pi-agent-core\";\nimport type { AssistantMessage, Model, Usage } from \"@mariozechner/pi-ai\";\nimport { complete } from \"@mariozechner/pi-ai\";\nimport type { CompactionEntry, SessionEntry } from \"./session-manager.js\";\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport interface CompactionSettings {\n\tenabled: boolean;\n\treserveTokens: number;\n\tkeepRecentTokens: number;\n}\n\nexport const DEFAULT_COMPACTION_SETTINGS: CompactionSettings = {\n\tenabled: true,\n\treserveTokens: 16384,\n\tkeepRecentTokens: 20000,\n};\n\n// ============================================================================\n// Token calculation\n// ============================================================================\n\n/**\n * Calculate total context tokens from usage.\n */\nexport function calculateContextTokens(usage: Usage): number {\n\treturn usage.input + usage.output + usage.cacheRead + usage.cacheWrite;\n}\n\n/**\n * Get usage from an assistant message if available.\n */\nfunction getAssistantUsage(msg: AppMessage): Usage | null {\n\tif (msg.role === \"assistant\" && \"usage\" in msg) {\n\t\tconst assistantMsg = msg as AssistantMessage;\n\t\tif (assistantMsg.stopReason !== \"aborted\" && assistantMsg.usage) {\n\t\t\treturn assistantMsg.usage;\n\t\t}\n\t}\n\treturn null;\n}\n\n/**\n * Find the last non-aborted assistant message usage from session entries.\n */\nexport function getLastAssistantUsage(entries: SessionEntry[]): Usage | null {\n\tfor (let i = entries.length - 1; i >= 0; i--) {\n\t\tconst entry = entries[i];\n\t\tif (entry.type === \"message\") {\n\t\t\tconst usage = getAssistantUsage(entry.message);\n\t\t\tif (usage) return usage;\n\t\t}\n\t}\n\treturn null;\n}\n\n/**\n * Check if compaction should trigger based on context usage.\n */\nexport function shouldCompact(contextTokens: number, contextWindow: number, settings: CompactionSettings): boolean 
{\n\tif (!settings.enabled) return false;\n\treturn contextTokens > contextWindow - settings.reserveTokens;\n}\n\n// ============================================================================\n// Cut point detection\n// ============================================================================\n\n/**\n * Find indices of message entries that are user messages (turn boundaries).\n */\nfunction findTurnBoundaries(entries: SessionEntry[], startIndex: number, endIndex: number): number[] {\n\tconst boundaries: number[] = [];\n\tfor (let i = startIndex; i < endIndex; i++) {\n\t\tconst entry = entries[i];\n\t\tif (entry.type === \"message\" && entry.message.role === \"user\") {\n\t\t\tboundaries.push(i);\n\t\t}\n\t}\n\treturn boundaries;\n}\n\n/**\n * Find the cut point in session entries that keeps approximately `keepRecentTokens`.\n * Returns the entry index of the first message to keep (a user message for turn integrity).\n *\n * Only considers entries between `startIndex` and `endIndex` (exclusive).\n */\nexport function findCutPoint(\n\tentries: SessionEntry[],\n\tstartIndex: number,\n\tendIndex: number,\n\tkeepRecentTokens: number,\n): number {\n\tconst boundaries = findTurnBoundaries(entries, startIndex, endIndex);\n\n\tif (boundaries.length === 0) {\n\t\treturn startIndex; // No user messages, keep everything in range\n\t}\n\n\t// Collect assistant usages walking backwards from endIndex\n\tconst assistantUsages: Array<{ index: number; tokens: number }> = [];\n\tfor (let i = endIndex - 1; i >= startIndex; i--) {\n\t\tconst entry = entries[i];\n\t\tif (entry.type === \"message\") {\n\t\t\tconst usage = getAssistantUsage(entry.message);\n\t\t\tif (usage) {\n\t\t\t\tassistantUsages.push({\n\t\t\t\t\tindex: i,\n\t\t\t\t\ttokens: calculateContextTokens(usage),\n\t\t\t\t});\n\t\t\t}\n\t\t}\n\t}\n\n\tif (assistantUsages.length === 0) {\n\t\t// No usage info, keep last turn only\n\t\treturn boundaries[boundaries.length - 1];\n\t}\n\n\t// Walk through and find where cumulative token difference exceeds keepRecentTokens\n\tconst newestTokens = assistantUsages[0].tokens;\n\tlet cutIndex = startIndex; // Default: keep everything in range\n\n\tfor (let i = 1; i < assistantUsages.length; i++) {\n\t\tconst tokenDiff = newestTokens - assistantUsages[i].tokens;\n\t\tif (tokenDiff >= keepRecentTokens) {\n\t\t\t// Find the turn boundary at or before the assistant we want to keep\n\t\t\tconst lastKeptAssistantIndex = assistantUsages[i - 1].index;\n\n\t\t\tfor (let b = boundaries.length - 1; b >= 0; b--) {\n\t\t\t\tif (boundaries[b] <= lastKeptAssistantIndex) {\n\t\t\t\t\tcutIndex = boundaries[b];\n\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t}\n\t\t\tbreak;\n\t\t}\n\t}\n\n\treturn cutIndex;\n}\n\n// ============================================================================\n// Summarization\n// ============================================================================\n\nconst SUMMARIZATION_PROMPT = `You are performing a CONTEXT CHECKPOINT COMPACTION. 
Create a handoff summary for another LLM that will resume the task.\n\nInclude:\n- Current progress and key decisions made\n- Important context, constraints, or user preferences\n- Absolute file paths of any relevant files that were read or modified\n- What remains to be done (clear next steps)\n- Any critical data, examples, or references needed to continue\n\nBe concise, structured, and focused on helping the next LLM seamlessly continue the work.`;\n\n/**\n * Generate a summary of the conversation using the LLM.\n */\nexport async function generateSummary(\n\tcurrentMessages: AppMessage[],\n\tmodel: Model<any>,\n\treserveTokens: number,\n\tapiKey: string,\n\tsignal?: AbortSignal,\n\tcustomInstructions?: string,\n): Promise<string> {\n\tconst maxTokens = Math.floor(0.8 * reserveTokens);\n\n\tconst prompt = customInstructions\n\t\t? `${SUMMARIZATION_PROMPT}\\n\\nAdditional focus: ${customInstructions}`\n\t\t: SUMMARIZATION_PROMPT;\n\n\tconst summarizationMessages = [\n\t\t...currentMessages,\n\t\t{\n\t\t\trole: \"user\" as const,\n\t\t\tcontent: prompt,\n\t\t\ttimestamp: Date.now(),\n\t\t},\n\t];\n\n\tconst response = await complete(model, { messages: summarizationMessages }, { maxTokens, signal, apiKey });\n\n\tconst textContent = response.content\n\t\t.filter((c): c is { type: \"text\"; text: string } => c.type === \"text\")\n\t\t.map((c) => c.text)\n\t\t.join(\"\\n\");\n\n\treturn textContent;\n}\n\n// ============================================================================\n// Main compaction function\n// ============================================================================\n\n/**\n * Calculate compaction and generate summary.\n * Returns the CompactionEntry to append to the session file.\n *\n * @param entries - All session entries\n * @param model - Model to use for summarization\n * @param settings - Compaction settings\n * @param apiKey - API key for LLM\n * @param signal - Optional abort signal\n * @param customInstructions - Optional custom focus for the summary\n */\nexport async function compact(\n\tentries: SessionEntry[],\n\tmodel: Model<any>,\n\tsettings: CompactionSettings,\n\tapiKey: string,\n\tsignal?: AbortSignal,\n\tcustomInstructions?: string,\n): Promise<CompactionEntry> {\n\t// Don't compact if the last entry is already a compaction\n\tif (entries.length > 0 && entries[entries.length - 1].type === \"compaction\") {\n\t\tthrow new Error(\"Already compacted\");\n\t}\n\n\t// Find previous compaction boundary\n\tlet prevCompactionIndex = -1;\n\tfor (let i = entries.length - 1; i >= 0; i--) {\n\t\tif (entries[i].type === \"compaction\") {\n\t\t\tprevCompactionIndex = i;\n\t\t\tbreak;\n\t\t}\n\t}\n\tconst boundaryStart = prevCompactionIndex + 1;\n\tconst boundaryEnd = entries.length;\n\n\t// Get token count before compaction\n\tconst lastUsage = getLastAssistantUsage(entries);\n\tconst tokensBefore = lastUsage ? 
calculateContextTokens(lastUsage) : 0;\n\n\t// Find cut point (entry index) within the valid range\n\tconst firstKeptEntryIndex = findCutPoint(entries, boundaryStart, boundaryEnd, settings.keepRecentTokens);\n\n\t// Extract messages to summarize (before the cut point)\n\tconst messagesToSummarize: AppMessage[] = [];\n\tfor (let i = boundaryStart; i < firstKeptEntryIndex; i++) {\n\t\tconst entry = entries[i];\n\t\tif (entry.type === \"message\") {\n\t\t\tmessagesToSummarize.push(entry.message);\n\t\t}\n\t}\n\n\t// Also include the previous summary if there was a compaction\n\tif (prevCompactionIndex >= 0) {\n\t\tconst prevCompaction = entries[prevCompactionIndex] as CompactionEntry;\n\t\t// Prepend the previous summary as context\n\t\tmessagesToSummarize.unshift({\n\t\t\trole: \"user\",\n\t\t\tcontent: `Previous session summary:\\n${prevCompaction.summary}`,\n\t\t\ttimestamp: Date.now(),\n\t\t});\n\t}\n\n\t// Generate summary from messages before the cut point\n\tconst summary = await generateSummary(\n\t\tmessagesToSummarize,\n\t\tmodel,\n\t\tsettings.reserveTokens,\n\t\tapiKey,\n\t\tsignal,\n\t\tcustomInstructions,\n\t);\n\n\treturn {\n\t\ttype: \"compaction\",\n\t\ttimestamp: new Date().toISOString(),\n\t\tsummary,\n\t\tfirstKeptEntryIndex,\n\t\ttokensBefore,\n\t};\n}\n"]}
+
{"version":3,"file":"compaction.d.ts","sourceRoot":"","sources":["../src/compaction.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,6BAA6B,CAAC;AAC9D,OAAO,KAAK,EAAoB,KAAK,EAAE,KAAK,EAAE,MAAM,qBAAqB,CAAC;AAE1E,OAAO,KAAK,EAAE,eAAe,EAAE,YAAY,EAAE,MAAM,sBAAsB,CAAC;AAM1E,MAAM,WAAW,kBAAkB;IAClC,OAAO,EAAE,OAAO,CAAC;IACjB,aAAa,EAAE,MAAM,CAAC;IACtB,gBAAgB,EAAE,MAAM,CAAC;CACzB;AAED,eAAO,MAAM,2BAA2B,EAAE,kBAIzC,CAAC;AAMF;;;GAGG;AACH,wBAAgB,sBAAsB,CAAC,KAAK,EAAE,KAAK,GAAG,MAAM,CAE3D;AAeD;;GAEG;AACH,wBAAgB,qBAAqB,CAAC,OAAO,EAAE,YAAY,EAAE,GAAG,KAAK,GAAG,IAAI,CAS3E;AAED;;GAEG;AACH,wBAAgB,aAAa,CAAC,aAAa,EAAE,MAAM,EAAE,aAAa,EAAE,MAAM,EAAE,QAAQ,EAAE,kBAAkB,GAAG,OAAO,CAGjH;AAoBD;;;;;GAKG;AACH,wBAAgB,YAAY,CAC3B,OAAO,EAAE,YAAY,EAAE,EACvB,UAAU,EAAE,MAAM,EAClB,QAAQ,EAAE,MAAM,EAChB,gBAAgB,EAAE,MAAM,GACtB,MAAM,CAgDR;AAiBD;;GAEG;AACH,wBAAsB,eAAe,CACpC,eAAe,EAAE,UAAU,EAAE,EAC7B,KAAK,EAAE,KAAK,CAAC,GAAG,CAAC,EACjB,aAAa,EAAE,MAAM,EACrB,MAAM,EAAE,MAAM,EACd,MAAM,CAAC,EAAE,WAAW,EACpB,kBAAkB,CAAC,EAAE,MAAM,GACzB,OAAO,CAAC,MAAM,CAAC,CAwBjB;AAMD;;;;;;;;;;GAUG;AACH,wBAAsB,OAAO,CAC5B,OAAO,EAAE,YAAY,EAAE,EACvB,KAAK,EAAE,KAAK,CAAC,GAAG,CAAC,EACjB,QAAQ,EAAE,kBAAkB,EAC5B,MAAM,EAAE,MAAM,EACd,MAAM,CAAC,EAAE,WAAW,EACpB,kBAAkB,CAAC,EAAE,MAAM,GACzB,OAAO,CAAC,eAAe,CAAC,CA6D1B","sourcesContent":["/**\n * Context compaction for long sessions.\n *\n * Pure functions for compaction logic. The session manager handles I/O,\n * and after compaction the session is reloaded.\n */\n\nimport type { AppMessage } from \"@mariozechner/pi-agent-core\";\nimport type { AssistantMessage, Model, Usage } from \"@mariozechner/pi-ai\";\nimport { complete } from \"@mariozechner/pi-ai\";\nimport type { CompactionEntry, SessionEntry } from \"./session-manager.js\";\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport interface CompactionSettings {\n\tenabled: boolean;\n\treserveTokens: number;\n\tkeepRecentTokens: number;\n}\n\nexport const DEFAULT_COMPACTION_SETTINGS: CompactionSettings = {\n\tenabled: true,\n\treserveTokens: 16384,\n\tkeepRecentTokens: 20000,\n};\n\n// ============================================================================\n// Token calculation\n// ============================================================================\n\n/**\n * Calculate total context tokens from usage.\n * Uses the native totalTokens field when available, falls back to computing from components.\n */\nexport function calculateContextTokens(usage: Usage): number {\n\treturn usage.totalTokens || usage.input + usage.output + usage.cacheRead + usage.cacheWrite;\n}\n\n/**\n * Get usage from an assistant message if available.\n */\nfunction getAssistantUsage(msg: AppMessage): Usage | null {\n\tif (msg.role === \"assistant\" && \"usage\" in msg) {\n\t\tconst assistantMsg = msg as AssistantMessage;\n\t\tif (assistantMsg.stopReason !== \"aborted\" && assistantMsg.usage) {\n\t\t\treturn assistantMsg.usage;\n\t\t}\n\t}\n\treturn null;\n}\n\n/**\n * Find the last non-aborted assistant message usage from session entries.\n */\nexport function getLastAssistantUsage(entries: SessionEntry[]): Usage | null {\n\tfor (let i = entries.length - 1; i >= 0; i--) {\n\t\tconst entry = entries[i];\n\t\tif (entry.type === \"message\") {\n\t\t\tconst usage = getAssistantUsage(entry.message);\n\t\t\tif (usage) return usage;\n\t\t}\n\t}\n\treturn null;\n}\n\n/**\n * Check if compaction should trigger based on context usage.\n 
*/\nexport function shouldCompact(contextTokens: number, contextWindow: number, settings: CompactionSettings): boolean {\n\tif (!settings.enabled) return false;\n\treturn contextTokens > contextWindow - settings.reserveTokens;\n}\n\n// ============================================================================\n// Cut point detection\n// ============================================================================\n\n/**\n * Find indices of message entries that are user messages (turn boundaries).\n */\nfunction findTurnBoundaries(entries: SessionEntry[], startIndex: number, endIndex: number): number[] {\n\tconst boundaries: number[] = [];\n\tfor (let i = startIndex; i < endIndex; i++) {\n\t\tconst entry = entries[i];\n\t\tif (entry.type === \"message\" && entry.message.role === \"user\") {\n\t\t\tboundaries.push(i);\n\t\t}\n\t}\n\treturn boundaries;\n}\n\n/**\n * Find the cut point in session entries that keeps approximately `keepRecentTokens`.\n * Returns the entry index of the first message to keep (a user message for turn integrity).\n *\n * Only considers entries between `startIndex` and `endIndex` (exclusive).\n */\nexport function findCutPoint(\n\tentries: SessionEntry[],\n\tstartIndex: number,\n\tendIndex: number,\n\tkeepRecentTokens: number,\n): number {\n\tconst boundaries = findTurnBoundaries(entries, startIndex, endIndex);\n\n\tif (boundaries.length === 0) {\n\t\treturn startIndex; // No user messages, keep everything in range\n\t}\n\n\t// Collect assistant usages walking backwards from endIndex\n\tconst assistantUsages: Array<{ index: number; tokens: number }> = [];\n\tfor (let i = endIndex - 1; i >= startIndex; i--) {\n\t\tconst entry = entries[i];\n\t\tif (entry.type === \"message\") {\n\t\t\tconst usage = getAssistantUsage(entry.message);\n\t\t\tif (usage) {\n\t\t\t\tassistantUsages.push({\n\t\t\t\t\tindex: i,\n\t\t\t\t\ttokens: calculateContextTokens(usage),\n\t\t\t\t});\n\t\t\t}\n\t\t}\n\t}\n\n\tif (assistantUsages.length === 0) {\n\t\t// No usage info, keep last turn only\n\t\treturn boundaries[boundaries.length - 1];\n\t}\n\n\t// Walk through and find where cumulative token difference exceeds keepRecentTokens\n\tconst newestTokens = assistantUsages[0].tokens;\n\tlet cutIndex = startIndex; // Default: keep everything in range\n\n\tfor (let i = 1; i < assistantUsages.length; i++) {\n\t\tconst tokenDiff = newestTokens - assistantUsages[i].tokens;\n\t\tif (tokenDiff >= keepRecentTokens) {\n\t\t\t// Find the turn boundary at or before the assistant we want to keep\n\t\t\tconst lastKeptAssistantIndex = assistantUsages[i - 1].index;\n\n\t\t\tfor (let b = boundaries.length - 1; b >= 0; b--) {\n\t\t\t\tif (boundaries[b] <= lastKeptAssistantIndex) {\n\t\t\t\t\tcutIndex = boundaries[b];\n\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t}\n\t\t\tbreak;\n\t\t}\n\t}\n\n\treturn cutIndex;\n}\n\n// ============================================================================\n// Summarization\n// ============================================================================\n\nconst SUMMARIZATION_PROMPT = `You are performing a CONTEXT CHECKPOINT COMPACTION. 
Create a handoff summary for another LLM that will resume the task.\n\nInclude:\n- Current progress and key decisions made\n- Important context, constraints, or user preferences\n- Absolute file paths of any relevant files that were read or modified\n- What remains to be done (clear next steps)\n- Any critical data, examples, or references needed to continue\n\nBe concise, structured, and focused on helping the next LLM seamlessly continue the work.`;\n\n/**\n * Generate a summary of the conversation using the LLM.\n */\nexport async function generateSummary(\n\tcurrentMessages: AppMessage[],\n\tmodel: Model<any>,\n\treserveTokens: number,\n\tapiKey: string,\n\tsignal?: AbortSignal,\n\tcustomInstructions?: string,\n): Promise<string> {\n\tconst maxTokens = Math.floor(0.8 * reserveTokens);\n\n\tconst prompt = customInstructions\n\t\t? `${SUMMARIZATION_PROMPT}\\n\\nAdditional focus: ${customInstructions}`\n\t\t: SUMMARIZATION_PROMPT;\n\n\tconst summarizationMessages = [\n\t\t...currentMessages,\n\t\t{\n\t\t\trole: \"user\" as const,\n\t\t\tcontent: prompt,\n\t\t\ttimestamp: Date.now(),\n\t\t},\n\t];\n\n\tconst response = await complete(model, { messages: summarizationMessages }, { maxTokens, signal, apiKey });\n\n\tconst textContent = response.content\n\t\t.filter((c): c is { type: \"text\"; text: string } => c.type === \"text\")\n\t\t.map((c) => c.text)\n\t\t.join(\"\\n\");\n\n\treturn textContent;\n}\n\n// ============================================================================\n// Main compaction function\n// ============================================================================\n\n/**\n * Calculate compaction and generate summary.\n * Returns the CompactionEntry to append to the session file.\n *\n * @param entries - All session entries\n * @param model - Model to use for summarization\n * @param settings - Compaction settings\n * @param apiKey - API key for LLM\n * @param signal - Optional abort signal\n * @param customInstructions - Optional custom focus for the summary\n */\nexport async function compact(\n\tentries: SessionEntry[],\n\tmodel: Model<any>,\n\tsettings: CompactionSettings,\n\tapiKey: string,\n\tsignal?: AbortSignal,\n\tcustomInstructions?: string,\n): Promise<CompactionEntry> {\n\t// Don't compact if the last entry is already a compaction\n\tif (entries.length > 0 && entries[entries.length - 1].type === \"compaction\") {\n\t\tthrow new Error(\"Already compacted\");\n\t}\n\n\t// Find previous compaction boundary\n\tlet prevCompactionIndex = -1;\n\tfor (let i = entries.length - 1; i >= 0; i--) {\n\t\tif (entries[i].type === \"compaction\") {\n\t\t\tprevCompactionIndex = i;\n\t\t\tbreak;\n\t\t}\n\t}\n\tconst boundaryStart = prevCompactionIndex + 1;\n\tconst boundaryEnd = entries.length;\n\n\t// Get token count before compaction\n\tconst lastUsage = getLastAssistantUsage(entries);\n\tconst tokensBefore = lastUsage ? 
calculateContextTokens(lastUsage) : 0;\n\n\t// Find cut point (entry index) within the valid range\n\tconst firstKeptEntryIndex = findCutPoint(entries, boundaryStart, boundaryEnd, settings.keepRecentTokens);\n\n\t// Extract messages to summarize (before the cut point)\n\tconst messagesToSummarize: AppMessage[] = [];\n\tfor (let i = boundaryStart; i < firstKeptEntryIndex; i++) {\n\t\tconst entry = entries[i];\n\t\tif (entry.type === \"message\") {\n\t\t\tmessagesToSummarize.push(entry.message);\n\t\t}\n\t}\n\n\t// Also include the previous summary if there was a compaction\n\tif (prevCompactionIndex >= 0) {\n\t\tconst prevCompaction = entries[prevCompactionIndex] as CompactionEntry;\n\t\t// Prepend the previous summary as context\n\t\tmessagesToSummarize.unshift({\n\t\t\trole: \"user\",\n\t\t\tcontent: `Previous session summary:\\n${prevCompaction.summary}`,\n\t\t\ttimestamp: Date.now(),\n\t\t});\n\t}\n\n\t// Generate summary from messages before the cut point\n\tconst summary = await generateSummary(\n\t\tmessagesToSummarize,\n\t\tmodel,\n\t\tsettings.reserveTokens,\n\t\tapiKey,\n\t\tsignal,\n\t\tcustomInstructions,\n\t);\n\n\treturn {\n\t\ttype: \"compaction\",\n\t\ttimestamp: new Date().toISOString(),\n\t\tsummary,\n\t\tfirstKeptEntryIndex,\n\t\ttokensBefore,\n\t};\n}\n"]}
package/dist/compaction.js
CHANGED
@@ -15,9 +15,10 @@ export const DEFAULT_COMPACTION_SETTINGS = {
 // ============================================================================
 /**
  * Calculate total context tokens from usage.
+ * Uses the native totalTokens field when available, falls back to computing from components.
  */
 export function calculateContextTokens(usage) {
-    return usage.input + usage.output + usage.cacheRead + usage.cacheWrite;
+    return usage.totalTokens || usage.input + usage.output + usage.cacheRead + usage.cacheWrite;
 }
 /**
  * Get usage from an assistant message if available.
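To make the changed return statement concrete: because it uses `||`, a missing (or zero) `totalTokens` falls back to summing the four component fields. A minimal self-contained sketch, with the `Usage` shape assumed from the fields used in this hunk rather than copied from pi-ai's typings:

```typescript
// Assumed shape; the real Usage type lives in @mariozechner/pi-ai.
interface Usage {
	input: number;
	output: number;
	cacheRead: number;
	cacheWrite: number;
	totalTokens?: number;
}

function calculateContextTokens(usage: Usage): number {
	// A falsy totalTokens (undefined or 0) falls through to the component sum.
	return usage.totalTokens || usage.input + usage.output + usage.cacheRead + usage.cacheWrite;
}

// Provider reports a native total: that value is used directly.
console.log(calculateContextTokens({ input: 100, output: 50, cacheRead: 10, cacheWrite: 5, totalTokens: 180 })); // 180
// No native total: the components are summed instead.
console.log(calculateContextTokens({ input: 100, output: 50, cacheRead: 10, cacheWrite: 5 })); // 165
```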
package/dist/compaction.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"compaction.js","sourceRoot":"","sources":["../src/compaction.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAIH,OAAO,EAAE,QAAQ,EAAE,MAAM,qBAAqB,CAAC;AAa/C,MAAM,CAAC,MAAM,2BAA2B,GAAuB;IAC9D,OAAO,EAAE,IAAI;IACb,aAAa,EAAE,KAAK;IACpB,gBAAgB,EAAE,KAAK;CACvB,CAAC;AAEF,+EAA+E;AAC/E,oBAAoB;AACpB,+EAA+E;AAE/E;;GAEG;AACH,MAAM,UAAU,sBAAsB,CAAC,KAAY,EAAU;IAC5D,OAAO,KAAK,CAAC,KAAK,GAAG,KAAK,CAAC,MAAM,GAAG,KAAK,CAAC,SAAS,GAAG,KAAK,CAAC,UAAU,CAAC;AAAA,CACvE;AAED;;GAEG;AACH,SAAS,iBAAiB,CAAC,GAAe,EAAgB;IACzD,IAAI,GAAG,CAAC,IAAI,KAAK,WAAW,IAAI,OAAO,IAAI,GAAG,EAAE,CAAC;QAChD,MAAM,YAAY,GAAG,GAAuB,CAAC;QAC7C,IAAI,YAAY,CAAC,UAAU,KAAK,SAAS,IAAI,YAAY,CAAC,KAAK,EAAE,CAAC;YACjE,OAAO,YAAY,CAAC,KAAK,CAAC;QAC3B,CAAC;IACF,CAAC;IACD,OAAO,IAAI,CAAC;AAAA,CACZ;AAED;;GAEG;AACH,MAAM,UAAU,qBAAqB,CAAC,OAAuB,EAAgB;IAC5E,KAAK,IAAI,CAAC,GAAG,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;QAC9C,MAAM,KAAK,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;QACzB,IAAI,KAAK,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;YAC9B,MAAM,KAAK,GAAG,iBAAiB,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;YAC/C,IAAI,KAAK;gBAAE,OAAO,KAAK,CAAC;QACzB,CAAC;IACF,CAAC;IACD,OAAO,IAAI,CAAC;AAAA,CACZ;AAED;;GAEG;AACH,MAAM,UAAU,aAAa,CAAC,aAAqB,EAAE,aAAqB,EAAE,QAA4B,EAAW;IAClH,IAAI,CAAC,QAAQ,CAAC,OAAO;QAAE,OAAO,KAAK,CAAC;IACpC,OAAO,aAAa,GAAG,aAAa,GAAG,QAAQ,CAAC,aAAa,CAAC;AAAA,CAC9D;AAED,+EAA+E;AAC/E,sBAAsB;AACtB,+EAA+E;AAE/E;;GAEG;AACH,SAAS,kBAAkB,CAAC,OAAuB,EAAE,UAAkB,EAAE,QAAgB,EAAY;IACpG,MAAM,UAAU,GAAa,EAAE,CAAC;IAChC,KAAK,IAAI,CAAC,GAAG,UAAU,EAAE,CAAC,GAAG,QAAQ,EAAE,CAAC,EAAE,EAAE,CAAC;QAC5C,MAAM,KAAK,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;QACzB,IAAI,KAAK,CAAC,IAAI,KAAK,SAAS,IAAI,KAAK,CAAC,OAAO,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;YAC/D,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;QACpB,CAAC;IACF,CAAC;IACD,OAAO,UAAU,CAAC;AAAA,CAClB;AAED;;;;;GAKG;AACH,MAAM,UAAU,YAAY,CAC3B,OAAuB,EACvB,UAAkB,EAClB,QAAgB,EAChB,gBAAwB,EACf;IACT,MAAM,UAAU,GAAG,kBAAkB,CAAC,OAAO,EAAE,UAAU,EAAE,QAAQ,CAAC,CAAC;IAErE,IAAI,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QAC7B,OAAO,UAAU,CAAC,CAAC,6CAA6C;IACjE,CAAC;IAED,2DAA2D;IAC3D,MAAM,eAAe,GAA6C,EAAE,CAAC;IACrE,KAAK,IAAI,CAAC,GAAG,QAAQ,GAAG,CAAC,EAAE,CAAC,IAAI,UAAU,EAAE,CAAC,EAAE,EAAE,CAAC;QACjD,MAAM,KAAK,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;QACzB,IAAI,KAAK,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;YAC9B,MAAM,KAAK,GAAG,iBAAiB,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;YAC/C,IAAI,KAAK,EAAE,CAAC;gBACX,eAAe,CAAC,IAAI,CAAC;oBACpB,KAAK,EAAE,CAAC;oBACR,MAAM,EAAE,sBAAsB,CAAC,KAAK,CAAC;iBACrC,CAAC,CAAC;YACJ,CAAC;QACF,CAAC;IACF,CAAC;IAED,IAAI,eAAe,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QAClC,qCAAqC;QACrC,OAAO,UAAU,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;IAC1C,CAAC;IAED,mFAAmF;IACnF,MAAM,YAAY,GAAG,eAAe,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC;IAC/C,IAAI,QAAQ,GAAG,UAAU,CAAC,CAAC,oCAAoC;IAE/D,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,eAAe,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;QACjD,MAAM,SAAS,GAAG,YAAY,GAAG,eAAe,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC;QAC3D,IAAI,SAAS,IAAI,gBAAgB,EAAE,CAAC;YACnC,oEAAoE;YACpE,MAAM,sBAAsB,GAAG,eAAe,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC;YAE5D,KAAK,IAAI,CAAC,GAAG,UAAU,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;gBACjD,IAAI,UAAU,CAAC,CAAC,CAAC,IAAI,sBAAsB,EAAE,CAAC;oBAC7C,QAAQ,GAAG,UAAU,CAAC,CAAC,CAAC,CAAC;oBACzB,MAAM;gBACP,CAAC;YACF,CAAC;YACD,MAAM;QACP,CAAC;IACF,CAAC;IAED,OAAO,QAAQ,CAAC;AAAA,CAChB;AAED,+EAA+E;AAC/E,gBAAgB;AAChB,+EAA+E;AAE/E,MAAM,oBAAoB,GAAG;;;;;;;;;0FAS6D,CAAC;AAE3F;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,eAAe,CACpC,eAA6B,EAC7B,KAAiB,EACjB,aAAqB,EACrB,MAAc,EACd,MAAoB,EACpB,kBAA2B,EACT;IAClB,MAAM,SAAS,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,GAAG,aAAa,CAAC,CAAC;IAElD,MAAM,MAAM,GAAG,kBAAkB;QAChC,CAAC,CAAC,GAAG,oBAAoB,
yBAAyB,kBAAkB,EAAE;QACtE,CAAC,CAAC,oBAAoB,CAAC;IAExB,MAAM,qBAAqB,GAAG;QAC7B,GAAG,eAAe;QAClB;YACC,IAAI,EAAE,MAAe;YACrB,OAAO,EAAE,MAAM;YACf,SAAS,EAAE,IAAI,CAAC,GAAG,EAAE;SACrB;KACD,CAAC;IAEF,MAAM,QAAQ,GAAG,MAAM,QAAQ,CAAC,KAAK,EAAE,EAAE,QAAQ,EAAE,qBAAqB,EAAE,EAAE,EAAE,SAAS,EAAE,MAAM,EAAE,MAAM,EAAE,CAAC,CAAC;IAE3G,MAAM,WAAW,GAAG,QAAQ,CAAC,OAAO;SAClC,MAAM,CAAC,CAAC,CAAC,EAAuC,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,MAAM,CAAC;SACrE,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC;SAClB,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,OAAO,WAAW,CAAC;AAAA,CACnB;AAED,+EAA+E;AAC/E,2BAA2B;AAC3B,+EAA+E;AAE/E;;;;;;;;;;GAUG;AACH,MAAM,CAAC,KAAK,UAAU,OAAO,CAC5B,OAAuB,EACvB,KAAiB,EACjB,QAA4B,EAC5B,MAAc,EACd,MAAoB,EACpB,kBAA2B,EACA;IAC3B,0DAA0D;IAC1D,IAAI,OAAO,CAAC,MAAM,GAAG,CAAC,IAAI,OAAO,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,IAAI,KAAK,YAAY,EAAE,CAAC;QAC7E,MAAM,IAAI,KAAK,CAAC,mBAAmB,CAAC,CAAC;IACtC,CAAC;IAED,oCAAoC;IACpC,IAAI,mBAAmB,GAAG,CAAC,CAAC,CAAC;IAC7B,KAAK,IAAI,CAAC,GAAG,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;QAC9C,IAAI,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,KAAK,YAAY,EAAE,CAAC;YACtC,mBAAmB,GAAG,CAAC,CAAC;YACxB,MAAM;QACP,CAAC;IACF,CAAC;IACD,MAAM,aAAa,GAAG,mBAAmB,GAAG,CAAC,CAAC;IAC9C,MAAM,WAAW,GAAG,OAAO,CAAC,MAAM,CAAC;IAEnC,oCAAoC;IACpC,MAAM,SAAS,GAAG,qBAAqB,CAAC,OAAO,CAAC,CAAC;IACjD,MAAM,YAAY,GAAG,SAAS,CAAC,CAAC,CAAC,sBAAsB,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;IAEvE,sDAAsD;IACtD,MAAM,mBAAmB,GAAG,YAAY,CAAC,OAAO,EAAE,aAAa,EAAE,WAAW,EAAE,QAAQ,CAAC,gBAAgB,CAAC,CAAC;IAEzG,uDAAuD;IACvD,MAAM,mBAAmB,GAAiB,EAAE,CAAC;IAC7C,KAAK,IAAI,CAAC,GAAG,aAAa,EAAE,CAAC,GAAG,mBAAmB,EAAE,CAAC,EAAE,EAAE,CAAC;QAC1D,MAAM,KAAK,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;QACzB,IAAI,KAAK,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;YAC9B,mBAAmB,CAAC,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACzC,CAAC;IACF,CAAC;IAED,8DAA8D;IAC9D,IAAI,mBAAmB,IAAI,CAAC,EAAE,CAAC;QAC9B,MAAM,cAAc,GAAG,OAAO,CAAC,mBAAmB,CAAoB,CAAC;QACvE,0CAA0C;QAC1C,mBAAmB,CAAC,OAAO,CAAC;YAC3B,IAAI,EAAE,MAAM;YACZ,OAAO,EAAE,8BAA8B,cAAc,CAAC,OAAO,EAAE;YAC/D,SAAS,EAAE,IAAI,CAAC,GAAG,EAAE;SACrB,CAAC,CAAC;IACJ,CAAC;IAED,sDAAsD;IACtD,MAAM,OAAO,GAAG,MAAM,eAAe,CACpC,mBAAmB,EACnB,KAAK,EACL,QAAQ,CAAC,aAAa,EACtB,MAAM,EACN,MAAM,EACN,kBAAkB,CAClB,CAAC;IAEF,OAAO;QACN,IAAI,EAAE,YAAY;QAClB,SAAS,EAAE,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE;QACnC,OAAO;QACP,mBAAmB;QACnB,YAAY;KACZ,CAAC;AAAA,CACF","sourcesContent":["/**\n * Context compaction for long sessions.\n *\n * Pure functions for compaction logic. 
The session manager handles I/O,\n * and after compaction the session is reloaded.\n */\n\nimport type { AppMessage } from \"@mariozechner/pi-agent-core\";\nimport type { AssistantMessage, Model, Usage } from \"@mariozechner/pi-ai\";\nimport { complete } from \"@mariozechner/pi-ai\";\nimport type { CompactionEntry, SessionEntry } from \"./session-manager.js\";\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport interface CompactionSettings {\n\tenabled: boolean;\n\treserveTokens: number;\n\tkeepRecentTokens: number;\n}\n\nexport const DEFAULT_COMPACTION_SETTINGS: CompactionSettings = {\n\tenabled: true,\n\treserveTokens: 16384,\n\tkeepRecentTokens: 20000,\n};\n\n// ============================================================================\n// Token calculation\n// ============================================================================\n\n/**\n * Calculate total context tokens from usage.\n */\nexport function calculateContextTokens(usage: Usage): number {\n\treturn usage.input + usage.output + usage.cacheRead + usage.cacheWrite;\n}\n\n/**\n * Get usage from an assistant message if available.\n */\nfunction getAssistantUsage(msg: AppMessage): Usage | null {\n\tif (msg.role === \"assistant\" && \"usage\" in msg) {\n\t\tconst assistantMsg = msg as AssistantMessage;\n\t\tif (assistantMsg.stopReason !== \"aborted\" && assistantMsg.usage) {\n\t\t\treturn assistantMsg.usage;\n\t\t}\n\t}\n\treturn null;\n}\n\n/**\n * Find the last non-aborted assistant message usage from session entries.\n */\nexport function getLastAssistantUsage(entries: SessionEntry[]): Usage | null {\n\tfor (let i = entries.length - 1; i >= 0; i--) {\n\t\tconst entry = entries[i];\n\t\tif (entry.type === \"message\") {\n\t\t\tconst usage = getAssistantUsage(entry.message);\n\t\t\tif (usage) return usage;\n\t\t}\n\t}\n\treturn null;\n}\n\n/**\n * Check if compaction should trigger based on context usage.\n */\nexport function shouldCompact(contextTokens: number, contextWindow: number, settings: CompactionSettings): boolean {\n\tif (!settings.enabled) return false;\n\treturn contextTokens > contextWindow - settings.reserveTokens;\n}\n\n// ============================================================================\n// Cut point detection\n// ============================================================================\n\n/**\n * Find indices of message entries that are user messages (turn boundaries).\n */\nfunction findTurnBoundaries(entries: SessionEntry[], startIndex: number, endIndex: number): number[] {\n\tconst boundaries: number[] = [];\n\tfor (let i = startIndex; i < endIndex; i++) {\n\t\tconst entry = entries[i];\n\t\tif (entry.type === \"message\" && entry.message.role === \"user\") {\n\t\t\tboundaries.push(i);\n\t\t}\n\t}\n\treturn boundaries;\n}\n\n/**\n * Find the cut point in session entries that keeps approximately `keepRecentTokens`.\n * Returns the entry index of the first message to keep (a user message for turn integrity).\n *\n * Only considers entries between `startIndex` and `endIndex` (exclusive).\n */\nexport function findCutPoint(\n\tentries: SessionEntry[],\n\tstartIndex: number,\n\tendIndex: number,\n\tkeepRecentTokens: number,\n): number {\n\tconst boundaries = findTurnBoundaries(entries, startIndex, endIndex);\n\n\tif (boundaries.length === 0) {\n\t\treturn startIndex; // No user messages, keep everything in range\n\t}\n\n\t// Collect assistant usages walking 
backwards from endIndex\n\tconst assistantUsages: Array<{ index: number; tokens: number }> = [];\n\tfor (let i = endIndex - 1; i >= startIndex; i--) {\n\t\tconst entry = entries[i];\n\t\tif (entry.type === \"message\") {\n\t\t\tconst usage = getAssistantUsage(entry.message);\n\t\t\tif (usage) {\n\t\t\t\tassistantUsages.push({\n\t\t\t\t\tindex: i,\n\t\t\t\t\ttokens: calculateContextTokens(usage),\n\t\t\t\t});\n\t\t\t}\n\t\t}\n\t}\n\n\tif (assistantUsages.length === 0) {\n\t\t// No usage info, keep last turn only\n\t\treturn boundaries[boundaries.length - 1];\n\t}\n\n\t// Walk through and find where cumulative token difference exceeds keepRecentTokens\n\tconst newestTokens = assistantUsages[0].tokens;\n\tlet cutIndex = startIndex; // Default: keep everything in range\n\n\tfor (let i = 1; i < assistantUsages.length; i++) {\n\t\tconst tokenDiff = newestTokens - assistantUsages[i].tokens;\n\t\tif (tokenDiff >= keepRecentTokens) {\n\t\t\t// Find the turn boundary at or before the assistant we want to keep\n\t\t\tconst lastKeptAssistantIndex = assistantUsages[i - 1].index;\n\n\t\t\tfor (let b = boundaries.length - 1; b >= 0; b--) {\n\t\t\t\tif (boundaries[b] <= lastKeptAssistantIndex) {\n\t\t\t\t\tcutIndex = boundaries[b];\n\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t}\n\t\t\tbreak;\n\t\t}\n\t}\n\n\treturn cutIndex;\n}\n\n// ============================================================================\n// Summarization\n// ============================================================================\n\nconst SUMMARIZATION_PROMPT = `You are performing a CONTEXT CHECKPOINT COMPACTION. Create a handoff summary for another LLM that will resume the task.\n\nInclude:\n- Current progress and key decisions made\n- Important context, constraints, or user preferences\n- Absolute file paths of any relevant files that were read or modified\n- What remains to be done (clear next steps)\n- Any critical data, examples, or references needed to continue\n\nBe concise, structured, and focused on helping the next LLM seamlessly continue the work.`;\n\n/**\n * Generate a summary of the conversation using the LLM.\n */\nexport async function generateSummary(\n\tcurrentMessages: AppMessage[],\n\tmodel: Model<any>,\n\treserveTokens: number,\n\tapiKey: string,\n\tsignal?: AbortSignal,\n\tcustomInstructions?: string,\n): Promise<string> {\n\tconst maxTokens = Math.floor(0.8 * reserveTokens);\n\n\tconst prompt = customInstructions\n\t\t? 
`${SUMMARIZATION_PROMPT}\\n\\nAdditional focus: ${customInstructions}`\n\t\t: SUMMARIZATION_PROMPT;\n\n\tconst summarizationMessages = [\n\t\t...currentMessages,\n\t\t{\n\t\t\trole: \"user\" as const,\n\t\t\tcontent: prompt,\n\t\t\ttimestamp: Date.now(),\n\t\t},\n\t];\n\n\tconst response = await complete(model, { messages: summarizationMessages }, { maxTokens, signal, apiKey });\n\n\tconst textContent = response.content\n\t\t.filter((c): c is { type: \"text\"; text: string } => c.type === \"text\")\n\t\t.map((c) => c.text)\n\t\t.join(\"\\n\");\n\n\treturn textContent;\n}\n\n// ============================================================================\n// Main compaction function\n// ============================================================================\n\n/**\n * Calculate compaction and generate summary.\n * Returns the CompactionEntry to append to the session file.\n *\n * @param entries - All session entries\n * @param model - Model to use for summarization\n * @param settings - Compaction settings\n * @param apiKey - API key for LLM\n * @param signal - Optional abort signal\n * @param customInstructions - Optional custom focus for the summary\n */\nexport async function compact(\n\tentries: SessionEntry[],\n\tmodel: Model<any>,\n\tsettings: CompactionSettings,\n\tapiKey: string,\n\tsignal?: AbortSignal,\n\tcustomInstructions?: string,\n): Promise<CompactionEntry> {\n\t// Don't compact if the last entry is already a compaction\n\tif (entries.length > 0 && entries[entries.length - 1].type === \"compaction\") {\n\t\tthrow new Error(\"Already compacted\");\n\t}\n\n\t// Find previous compaction boundary\n\tlet prevCompactionIndex = -1;\n\tfor (let i = entries.length - 1; i >= 0; i--) {\n\t\tif (entries[i].type === \"compaction\") {\n\t\t\tprevCompactionIndex = i;\n\t\t\tbreak;\n\t\t}\n\t}\n\tconst boundaryStart = prevCompactionIndex + 1;\n\tconst boundaryEnd = entries.length;\n\n\t// Get token count before compaction\n\tconst lastUsage = getLastAssistantUsage(entries);\n\tconst tokensBefore = lastUsage ? calculateContextTokens(lastUsage) : 0;\n\n\t// Find cut point (entry index) within the valid range\n\tconst firstKeptEntryIndex = findCutPoint(entries, boundaryStart, boundaryEnd, settings.keepRecentTokens);\n\n\t// Extract messages to summarize (before the cut point)\n\tconst messagesToSummarize: AppMessage[] = [];\n\tfor (let i = boundaryStart; i < firstKeptEntryIndex; i++) {\n\t\tconst entry = entries[i];\n\t\tif (entry.type === \"message\") {\n\t\t\tmessagesToSummarize.push(entry.message);\n\t\t}\n\t}\n\n\t// Also include the previous summary if there was a compaction\n\tif (prevCompactionIndex >= 0) {\n\t\tconst prevCompaction = entries[prevCompactionIndex] as CompactionEntry;\n\t\t// Prepend the previous summary as context\n\t\tmessagesToSummarize.unshift({\n\t\t\trole: \"user\",\n\t\t\tcontent: `Previous session summary:\\n${prevCompaction.summary}`,\n\t\t\ttimestamp: Date.now(),\n\t\t});\n\t}\n\n\t// Generate summary from messages before the cut point\n\tconst summary = await generateSummary(\n\t\tmessagesToSummarize,\n\t\tmodel,\n\t\tsettings.reserveTokens,\n\t\tapiKey,\n\t\tsignal,\n\t\tcustomInstructions,\n\t);\n\n\treturn {\n\t\ttype: \"compaction\",\n\t\ttimestamp: new Date().toISOString(),\n\t\tsummary,\n\t\tfirstKeptEntryIndex,\n\t\ttokensBefore,\n\t};\n}\n"]}
+
{"version":3,"file":"compaction.js","sourceRoot":"","sources":["../src/compaction.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAIH,OAAO,EAAE,QAAQ,EAAE,MAAM,qBAAqB,CAAC;AAa/C,MAAM,CAAC,MAAM,2BAA2B,GAAuB;IAC9D,OAAO,EAAE,IAAI;IACb,aAAa,EAAE,KAAK;IACpB,gBAAgB,EAAE,KAAK;CACvB,CAAC;AAEF,+EAA+E;AAC/E,oBAAoB;AACpB,+EAA+E;AAE/E;;;GAGG;AACH,MAAM,UAAU,sBAAsB,CAAC,KAAY,EAAU;IAC5D,OAAO,KAAK,CAAC,WAAW,IAAI,KAAK,CAAC,KAAK,GAAG,KAAK,CAAC,MAAM,GAAG,KAAK,CAAC,SAAS,GAAG,KAAK,CAAC,UAAU,CAAC;AAAA,CAC5F;AAED;;GAEG;AACH,SAAS,iBAAiB,CAAC,GAAe,EAAgB;IACzD,IAAI,GAAG,CAAC,IAAI,KAAK,WAAW,IAAI,OAAO,IAAI,GAAG,EAAE,CAAC;QAChD,MAAM,YAAY,GAAG,GAAuB,CAAC;QAC7C,IAAI,YAAY,CAAC,UAAU,KAAK,SAAS,IAAI,YAAY,CAAC,KAAK,EAAE,CAAC;YACjE,OAAO,YAAY,CAAC,KAAK,CAAC;QAC3B,CAAC;IACF,CAAC;IACD,OAAO,IAAI,CAAC;AAAA,CACZ;AAED;;GAEG;AACH,MAAM,UAAU,qBAAqB,CAAC,OAAuB,EAAgB;IAC5E,KAAK,IAAI,CAAC,GAAG,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;QAC9C,MAAM,KAAK,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;QACzB,IAAI,KAAK,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;YAC9B,MAAM,KAAK,GAAG,iBAAiB,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;YAC/C,IAAI,KAAK;gBAAE,OAAO,KAAK,CAAC;QACzB,CAAC;IACF,CAAC;IACD,OAAO,IAAI,CAAC;AAAA,CACZ;AAED;;GAEG;AACH,MAAM,UAAU,aAAa,CAAC,aAAqB,EAAE,aAAqB,EAAE,QAA4B,EAAW;IAClH,IAAI,CAAC,QAAQ,CAAC,OAAO;QAAE,OAAO,KAAK,CAAC;IACpC,OAAO,aAAa,GAAG,aAAa,GAAG,QAAQ,CAAC,aAAa,CAAC;AAAA,CAC9D;AAED,+EAA+E;AAC/E,sBAAsB;AACtB,+EAA+E;AAE/E;;GAEG;AACH,SAAS,kBAAkB,CAAC,OAAuB,EAAE,UAAkB,EAAE,QAAgB,EAAY;IACpG,MAAM,UAAU,GAAa,EAAE,CAAC;IAChC,KAAK,IAAI,CAAC,GAAG,UAAU,EAAE,CAAC,GAAG,QAAQ,EAAE,CAAC,EAAE,EAAE,CAAC;QAC5C,MAAM,KAAK,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;QACzB,IAAI,KAAK,CAAC,IAAI,KAAK,SAAS,IAAI,KAAK,CAAC,OAAO,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;YAC/D,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;QACpB,CAAC;IACF,CAAC;IACD,OAAO,UAAU,CAAC;AAAA,CAClB;AAED;;;;;GAKG;AACH,MAAM,UAAU,YAAY,CAC3B,OAAuB,EACvB,UAAkB,EAClB,QAAgB,EAChB,gBAAwB,EACf;IACT,MAAM,UAAU,GAAG,kBAAkB,CAAC,OAAO,EAAE,UAAU,EAAE,QAAQ,CAAC,CAAC;IAErE,IAAI,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QAC7B,OAAO,UAAU,CAAC,CAAC,6CAA6C;IACjE,CAAC;IAED,2DAA2D;IAC3D,MAAM,eAAe,GAA6C,EAAE,CAAC;IACrE,KAAK,IAAI,CAAC,GAAG,QAAQ,GAAG,CAAC,EAAE,CAAC,IAAI,UAAU,EAAE,CAAC,EAAE,EAAE,CAAC;QACjD,MAAM,KAAK,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;QACzB,IAAI,KAAK,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;YAC9B,MAAM,KAAK,GAAG,iBAAiB,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;YAC/C,IAAI,KAAK,EAAE,CAAC;gBACX,eAAe,CAAC,IAAI,CAAC;oBACpB,KAAK,EAAE,CAAC;oBACR,MAAM,EAAE,sBAAsB,CAAC,KAAK,CAAC;iBACrC,CAAC,CAAC;YACJ,CAAC;QACF,CAAC;IACF,CAAC;IAED,IAAI,eAAe,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QAClC,qCAAqC;QACrC,OAAO,UAAU,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;IAC1C,CAAC;IAED,mFAAmF;IACnF,MAAM,YAAY,GAAG,eAAe,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC;IAC/C,IAAI,QAAQ,GAAG,UAAU,CAAC,CAAC,oCAAoC;IAE/D,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,eAAe,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;QACjD,MAAM,SAAS,GAAG,YAAY,GAAG,eAAe,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC;QAC3D,IAAI,SAAS,IAAI,gBAAgB,EAAE,CAAC;YACnC,oEAAoE;YACpE,MAAM,sBAAsB,GAAG,eAAe,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC;YAE5D,KAAK,IAAI,CAAC,GAAG,UAAU,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;gBACjD,IAAI,UAAU,CAAC,CAAC,CAAC,IAAI,sBAAsB,EAAE,CAAC;oBAC7C,QAAQ,GAAG,UAAU,CAAC,CAAC,CAAC,CAAC;oBACzB,MAAM;gBACP,CAAC;YACF,CAAC;YACD,MAAM;QACP,CAAC;IACF,CAAC;IAED,OAAO,QAAQ,CAAC;AAAA,CAChB;AAED,+EAA+E;AAC/E,gBAAgB;AAChB,+EAA+E;AAE/E,MAAM,oBAAoB,GAAG;;;;;;;;;0FAS6D,CAAC;AAE3F;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,eAAe,CACpC,eAA6B,EAC7B,KAAiB,EACjB,aAAqB,EACrB,MAAc,EACd,MAAoB,EACpB,kBAA2B,EACT;IAClB,MAAM,SAAS,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,GAAG,aAAa,CAAC,CAAC;IAElD,MAAM,MAAM,GAAG,kBAAkB;QAChC,C
AAC,CAAC,GAAG,oBAAoB,yBAAyB,kBAAkB,EAAE;QACtE,CAAC,CAAC,oBAAoB,CAAC;IAExB,MAAM,qBAAqB,GAAG;QAC7B,GAAG,eAAe;QAClB;YACC,IAAI,EAAE,MAAe;YACrB,OAAO,EAAE,MAAM;YACf,SAAS,EAAE,IAAI,CAAC,GAAG,EAAE;SACrB;KACD,CAAC;IAEF,MAAM,QAAQ,GAAG,MAAM,QAAQ,CAAC,KAAK,EAAE,EAAE,QAAQ,EAAE,qBAAqB,EAAE,EAAE,EAAE,SAAS,EAAE,MAAM,EAAE,MAAM,EAAE,CAAC,CAAC;IAE3G,MAAM,WAAW,GAAG,QAAQ,CAAC,OAAO;SAClC,MAAM,CAAC,CAAC,CAAC,EAAuC,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,MAAM,CAAC;SACrE,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC;SAClB,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,OAAO,WAAW,CAAC;AAAA,CACnB;AAED,+EAA+E;AAC/E,2BAA2B;AAC3B,+EAA+E;AAE/E;;;;;;;;;;GAUG;AACH,MAAM,CAAC,KAAK,UAAU,OAAO,CAC5B,OAAuB,EACvB,KAAiB,EACjB,QAA4B,EAC5B,MAAc,EACd,MAAoB,EACpB,kBAA2B,EACA;IAC3B,0DAA0D;IAC1D,IAAI,OAAO,CAAC,MAAM,GAAG,CAAC,IAAI,OAAO,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,IAAI,KAAK,YAAY,EAAE,CAAC;QAC7E,MAAM,IAAI,KAAK,CAAC,mBAAmB,CAAC,CAAC;IACtC,CAAC;IAED,oCAAoC;IACpC,IAAI,mBAAmB,GAAG,CAAC,CAAC,CAAC;IAC7B,KAAK,IAAI,CAAC,GAAG,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;QAC9C,IAAI,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,KAAK,YAAY,EAAE,CAAC;YACtC,mBAAmB,GAAG,CAAC,CAAC;YACxB,MAAM;QACP,CAAC;IACF,CAAC;IACD,MAAM,aAAa,GAAG,mBAAmB,GAAG,CAAC,CAAC;IAC9C,MAAM,WAAW,GAAG,OAAO,CAAC,MAAM,CAAC;IAEnC,oCAAoC;IACpC,MAAM,SAAS,GAAG,qBAAqB,CAAC,OAAO,CAAC,CAAC;IACjD,MAAM,YAAY,GAAG,SAAS,CAAC,CAAC,CAAC,sBAAsB,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;IAEvE,sDAAsD;IACtD,MAAM,mBAAmB,GAAG,YAAY,CAAC,OAAO,EAAE,aAAa,EAAE,WAAW,EAAE,QAAQ,CAAC,gBAAgB,CAAC,CAAC;IAEzG,uDAAuD;IACvD,MAAM,mBAAmB,GAAiB,EAAE,CAAC;IAC7C,KAAK,IAAI,CAAC,GAAG,aAAa,EAAE,CAAC,GAAG,mBAAmB,EAAE,CAAC,EAAE,EAAE,CAAC;QAC1D,MAAM,KAAK,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;QACzB,IAAI,KAAK,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;YAC9B,mBAAmB,CAAC,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACzC,CAAC;IACF,CAAC;IAED,8DAA8D;IAC9D,IAAI,mBAAmB,IAAI,CAAC,EAAE,CAAC;QAC9B,MAAM,cAAc,GAAG,OAAO,CAAC,mBAAmB,CAAoB,CAAC;QACvE,0CAA0C;QAC1C,mBAAmB,CAAC,OAAO,CAAC;YAC3B,IAAI,EAAE,MAAM;YACZ,OAAO,EAAE,8BAA8B,cAAc,CAAC,OAAO,EAAE;YAC/D,SAAS,EAAE,IAAI,CAAC,GAAG,EAAE;SACrB,CAAC,CAAC;IACJ,CAAC;IAED,sDAAsD;IACtD,MAAM,OAAO,GAAG,MAAM,eAAe,CACpC,mBAAmB,EACnB,KAAK,EACL,QAAQ,CAAC,aAAa,EACtB,MAAM,EACN,MAAM,EACN,kBAAkB,CAClB,CAAC;IAEF,OAAO;QACN,IAAI,EAAE,YAAY;QAClB,SAAS,EAAE,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE;QACnC,OAAO;QACP,mBAAmB;QACnB,YAAY;KACZ,CAAC;AAAA,CACF","sourcesContent":["/**\n * Context compaction for long sessions.\n *\n * Pure functions for compaction logic. 
The session manager handles I/O,\n * and after compaction the session is reloaded.\n */\n\nimport type { AppMessage } from \"@mariozechner/pi-agent-core\";\nimport type { AssistantMessage, Model, Usage } from \"@mariozechner/pi-ai\";\nimport { complete } from \"@mariozechner/pi-ai\";\nimport type { CompactionEntry, SessionEntry } from \"./session-manager.js\";\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport interface CompactionSettings {\n\tenabled: boolean;\n\treserveTokens: number;\n\tkeepRecentTokens: number;\n}\n\nexport const DEFAULT_COMPACTION_SETTINGS: CompactionSettings = {\n\tenabled: true,\n\treserveTokens: 16384,\n\tkeepRecentTokens: 20000,\n};\n\n// ============================================================================\n// Token calculation\n// ============================================================================\n\n/**\n * Calculate total context tokens from usage.\n * Uses the native totalTokens field when available, falls back to computing from components.\n */\nexport function calculateContextTokens(usage: Usage): number {\n\treturn usage.totalTokens || usage.input + usage.output + usage.cacheRead + usage.cacheWrite;\n}\n\n/**\n * Get usage from an assistant message if available.\n */\nfunction getAssistantUsage(msg: AppMessage): Usage | null {\n\tif (msg.role === \"assistant\" && \"usage\" in msg) {\n\t\tconst assistantMsg = msg as AssistantMessage;\n\t\tif (assistantMsg.stopReason !== \"aborted\" && assistantMsg.usage) {\n\t\t\treturn assistantMsg.usage;\n\t\t}\n\t}\n\treturn null;\n}\n\n/**\n * Find the last non-aborted assistant message usage from session entries.\n */\nexport function getLastAssistantUsage(entries: SessionEntry[]): Usage | null {\n\tfor (let i = entries.length - 1; i >= 0; i--) {\n\t\tconst entry = entries[i];\n\t\tif (entry.type === \"message\") {\n\t\t\tconst usage = getAssistantUsage(entry.message);\n\t\t\tif (usage) return usage;\n\t\t}\n\t}\n\treturn null;\n}\n\n/**\n * Check if compaction should trigger based on context usage.\n */\nexport function shouldCompact(contextTokens: number, contextWindow: number, settings: CompactionSettings): boolean {\n\tif (!settings.enabled) return false;\n\treturn contextTokens > contextWindow - settings.reserveTokens;\n}\n\n// ============================================================================\n// Cut point detection\n// ============================================================================\n\n/**\n * Find indices of message entries that are user messages (turn boundaries).\n */\nfunction findTurnBoundaries(entries: SessionEntry[], startIndex: number, endIndex: number): number[] {\n\tconst boundaries: number[] = [];\n\tfor (let i = startIndex; i < endIndex; i++) {\n\t\tconst entry = entries[i];\n\t\tif (entry.type === \"message\" && entry.message.role === \"user\") {\n\t\t\tboundaries.push(i);\n\t\t}\n\t}\n\treturn boundaries;\n}\n\n/**\n * Find the cut point in session entries that keeps approximately `keepRecentTokens`.\n * Returns the entry index of the first message to keep (a user message for turn integrity).\n *\n * Only considers entries between `startIndex` and `endIndex` (exclusive).\n */\nexport function findCutPoint(\n\tentries: SessionEntry[],\n\tstartIndex: number,\n\tendIndex: number,\n\tkeepRecentTokens: number,\n): number {\n\tconst boundaries = findTurnBoundaries(entries, startIndex, endIndex);\n\n\tif (boundaries.length === 0) 
{\n\t\treturn startIndex; // No user messages, keep everything in range\n\t}\n\n\t// Collect assistant usages walking backwards from endIndex\n\tconst assistantUsages: Array<{ index: number; tokens: number }> = [];\n\tfor (let i = endIndex - 1; i >= startIndex; i--) {\n\t\tconst entry = entries[i];\n\t\tif (entry.type === \"message\") {\n\t\t\tconst usage = getAssistantUsage(entry.message);\n\t\t\tif (usage) {\n\t\t\t\tassistantUsages.push({\n\t\t\t\t\tindex: i,\n\t\t\t\t\ttokens: calculateContextTokens(usage),\n\t\t\t\t});\n\t\t\t}\n\t\t}\n\t}\n\n\tif (assistantUsages.length === 0) {\n\t\t// No usage info, keep last turn only\n\t\treturn boundaries[boundaries.length - 1];\n\t}\n\n\t// Walk through and find where cumulative token difference exceeds keepRecentTokens\n\tconst newestTokens = assistantUsages[0].tokens;\n\tlet cutIndex = startIndex; // Default: keep everything in range\n\n\tfor (let i = 1; i < assistantUsages.length; i++) {\n\t\tconst tokenDiff = newestTokens - assistantUsages[i].tokens;\n\t\tif (tokenDiff >= keepRecentTokens) {\n\t\t\t// Find the turn boundary at or before the assistant we want to keep\n\t\t\tconst lastKeptAssistantIndex = assistantUsages[i - 1].index;\n\n\t\t\tfor (let b = boundaries.length - 1; b >= 0; b--) {\n\t\t\t\tif (boundaries[b] <= lastKeptAssistantIndex) {\n\t\t\t\t\tcutIndex = boundaries[b];\n\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t}\n\t\t\tbreak;\n\t\t}\n\t}\n\n\treturn cutIndex;\n}\n\n// ============================================================================\n// Summarization\n// ============================================================================\n\nconst SUMMARIZATION_PROMPT = `You are performing a CONTEXT CHECKPOINT COMPACTION. Create a handoff summary for another LLM that will resume the task.\n\nInclude:\n- Current progress and key decisions made\n- Important context, constraints, or user preferences\n- Absolute file paths of any relevant files that were read or modified\n- What remains to be done (clear next steps)\n- Any critical data, examples, or references needed to continue\n\nBe concise, structured, and focused on helping the next LLM seamlessly continue the work.`;\n\n/**\n * Generate a summary of the conversation using the LLM.\n */\nexport async function generateSummary(\n\tcurrentMessages: AppMessage[],\n\tmodel: Model<any>,\n\treserveTokens: number,\n\tapiKey: string,\n\tsignal?: AbortSignal,\n\tcustomInstructions?: string,\n): Promise<string> {\n\tconst maxTokens = Math.floor(0.8 * reserveTokens);\n\n\tconst prompt = customInstructions\n\t\t? 
`${SUMMARIZATION_PROMPT}\\n\\nAdditional focus: ${customInstructions}`\n\t\t: SUMMARIZATION_PROMPT;\n\n\tconst summarizationMessages = [\n\t\t...currentMessages,\n\t\t{\n\t\t\trole: \"user\" as const,\n\t\t\tcontent: prompt,\n\t\t\ttimestamp: Date.now(),\n\t\t},\n\t];\n\n\tconst response = await complete(model, { messages: summarizationMessages }, { maxTokens, signal, apiKey });\n\n\tconst textContent = response.content\n\t\t.filter((c): c is { type: \"text\"; text: string } => c.type === \"text\")\n\t\t.map((c) => c.text)\n\t\t.join(\"\\n\");\n\n\treturn textContent;\n}\n\n// ============================================================================\n// Main compaction function\n// ============================================================================\n\n/**\n * Calculate compaction and generate summary.\n * Returns the CompactionEntry to append to the session file.\n *\n * @param entries - All session entries\n * @param model - Model to use for summarization\n * @param settings - Compaction settings\n * @param apiKey - API key for LLM\n * @param signal - Optional abort signal\n * @param customInstructions - Optional custom focus for the summary\n */\nexport async function compact(\n\tentries: SessionEntry[],\n\tmodel: Model<any>,\n\tsettings: CompactionSettings,\n\tapiKey: string,\n\tsignal?: AbortSignal,\n\tcustomInstructions?: string,\n): Promise<CompactionEntry> {\n\t// Don't compact if the last entry is already a compaction\n\tif (entries.length > 0 && entries[entries.length - 1].type === \"compaction\") {\n\t\tthrow new Error(\"Already compacted\");\n\t}\n\n\t// Find previous compaction boundary\n\tlet prevCompactionIndex = -1;\n\tfor (let i = entries.length - 1; i >= 0; i--) {\n\t\tif (entries[i].type === \"compaction\") {\n\t\t\tprevCompactionIndex = i;\n\t\t\tbreak;\n\t\t}\n\t}\n\tconst boundaryStart = prevCompactionIndex + 1;\n\tconst boundaryEnd = entries.length;\n\n\t// Get token count before compaction\n\tconst lastUsage = getLastAssistantUsage(entries);\n\tconst tokensBefore = lastUsage ? calculateContextTokens(lastUsage) : 0;\n\n\t// Find cut point (entry index) within the valid range\n\tconst firstKeptEntryIndex = findCutPoint(entries, boundaryStart, boundaryEnd, settings.keepRecentTokens);\n\n\t// Extract messages to summarize (before the cut point)\n\tconst messagesToSummarize: AppMessage[] = [];\n\tfor (let i = boundaryStart; i < firstKeptEntryIndex; i++) {\n\t\tconst entry = entries[i];\n\t\tif (entry.type === \"message\") {\n\t\t\tmessagesToSummarize.push(entry.message);\n\t\t}\n\t}\n\n\t// Also include the previous summary if there was a compaction\n\tif (prevCompactionIndex >= 0) {\n\t\tconst prevCompaction = entries[prevCompactionIndex] as CompactionEntry;\n\t\t// Prepend the previous summary as context\n\t\tmessagesToSummarize.unshift({\n\t\t\trole: \"user\",\n\t\t\tcontent: `Previous session summary:\\n${prevCompaction.summary}`,\n\t\t\ttimestamp: Date.now(),\n\t\t});\n\t}\n\n\t// Generate summary from messages before the cut point\n\tconst summary = await generateSummary(\n\t\tmessagesToSummarize,\n\t\tmodel,\n\t\tsettings.reserveTokens,\n\t\tapiKey,\n\t\tsignal,\n\t\tcustomInstructions,\n\t);\n\n\treturn {\n\t\ttype: \"compaction\",\n\t\ttimestamp: new Date().toISOString(),\n\t\tsummary,\n\t\tfirstKeptEntryIndex,\n\t\ttokensBefore,\n\t};\n}\n"]}
package/dist/config.d.ts
CHANGED
@@ -1,6 +1,6 @@
 /**
  * Detect if we're running as a Bun compiled binary.
- * Bun binaries have import.meta.url containing "$bunfs" (Bun's virtual filesystem path)
+ * Bun binaries have import.meta.url containing "$bunfs", "~BUN", or "%7EBUN" (Bun's virtual filesystem path)
  */
 export declare const isBunBinary: boolean;
 /**
package/dist/config.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../src/config.ts"],"names":[],"mappings":"AAYA;;;GAGG;AACH,eAAO,MAAM,WAAW,
+
{"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../src/config.ts"],"names":[],"mappings":"AAYA;;;GAGG;AACH,eAAO,MAAM,WAAW,SACqF,CAAC;AAM9G;;;;;GAKG;AACH,wBAAgB,aAAa,IAAI,MAAM,CAWtC;AAED;;;;;GAKG;AACH,wBAAgB,YAAY,IAAI,MAAM,CAMrC;AAED,+BAA+B;AAC/B,wBAAgB,kBAAkB,IAAI,MAAM,CAE3C;AAED,4BAA4B;AAC5B,wBAAgB,aAAa,IAAI,MAAM,CAEtC;AAED,+BAA+B;AAC/B,wBAAgB,gBAAgB,IAAI,MAAM,CAEzC;AAQD,eAAO,MAAM,QAAQ,EAAE,MAAmC,CAAC;AAC3D,eAAO,MAAM,eAAe,EAAE,MAAyC,CAAC;AACxE,eAAO,MAAM,OAAO,EAAE,MAAoB,CAAC;AAG3C,eAAO,MAAM,aAAa,QAA+C,CAAC;AAM1E,0DAA0D;AAC1D,wBAAgB,WAAW,IAAI,MAAM,CAEpC;AAED,iDAAiD;AACjD,wBAAgB,kBAAkB,IAAI,MAAM,CAE3C;AAED,8BAA8B;AAC9B,wBAAgB,aAAa,IAAI,MAAM,CAEtC;AAED,6BAA6B;AAC7B,wBAAgB,YAAY,IAAI,MAAM,CAErC;AAED,gCAAgC;AAChC,wBAAgB,eAAe,IAAI,MAAM,CAExC;AAED,kCAAkC;AAClC,wBAAgB,WAAW,IAAI,MAAM,CAEpC;AAED,2CAA2C;AAC3C,wBAAgB,cAAc,IAAI,MAAM,CAEvC;AAED,qCAAqC;AACrC,wBAAgB,cAAc,IAAI,MAAM,CAEvC;AAED,iCAAiC;AACjC,wBAAgB,eAAe,IAAI,MAAM,CAExC","sourcesContent":["import { existsSync, readFileSync } from \"fs\";\nimport { homedir } from \"os\";\nimport { dirname, join, resolve } from \"path\";\nimport { fileURLToPath } from \"url\";\n\n// =============================================================================\n// Package Detection\n// =============================================================================\n\nconst __filename = fileURLToPath(import.meta.url);\nconst __dirname = dirname(__filename);\n\n/**\n * Detect if we're running as a Bun compiled binary.\n * Bun binaries have import.meta.url containing \"$bunfs\", \"~BUN\", or \"%7EBUN\" (Bun's virtual filesystem path)\n */\nexport const isBunBinary =\n\timport.meta.url.includes(\"$bunfs\") || import.meta.url.includes(\"~BUN\") || import.meta.url.includes(\"%7EBUN\");\n\n// =============================================================================\n// Package Asset Paths (shipped with executable)\n// =============================================================================\n\n/**\n * Get the base directory for resolving package assets (themes, package.json, README.md, CHANGELOG.md).\n * - For Bun binary: returns the directory containing the executable\n * - For Node.js (dist/): returns __dirname (the dist/ directory)\n * - For tsx (src/): returns parent directory (the package root)\n */\nexport function getPackageDir(): string {\n\tif (isBunBinary) {\n\t\t// Bun binary: process.execPath points to the compiled executable\n\t\treturn dirname(process.execPath);\n\t}\n\t// Node.js: check if package.json exists in __dirname (dist/) or parent (src/ case)\n\tif (existsSync(join(__dirname, \"package.json\"))) {\n\t\treturn __dirname;\n\t}\n\t// Running from src/ via tsx - go up one level to package root\n\treturn dirname(__dirname);\n}\n\n/**\n * Get path to built-in themes directory (shipped with package)\n * - For Bun binary: theme/ next to executable\n * - For Node.js (dist/): dist/theme/\n * - For tsx (src/): src/theme/\n */\nexport function getThemesDir(): string {\n\tif (isBunBinary) {\n\t\treturn join(dirname(process.execPath), \"theme\");\n\t}\n\t// __dirname is either dist/ or src/ - theme is always a subdirectory\n\treturn join(__dirname, \"theme\");\n}\n\n/** Get path to package.json */\nexport function getPackageJsonPath(): string {\n\treturn join(getPackageDir(), \"package.json\");\n}\n\n/** Get path to README.md */\nexport function getReadmePath(): string {\n\treturn resolve(join(getPackageDir(), \"README.md\"));\n}\n\n/** Get path to CHANGELOG.md */\nexport function getChangelogPath(): string {\n\treturn 
resolve(join(getPackageDir(), \"CHANGELOG.md\"));\n}\n\n// =============================================================================\n// App Config (from package.json piConfig)\n// =============================================================================\n\nconst pkg = JSON.parse(readFileSync(getPackageJsonPath(), \"utf-8\"));\n\nexport const APP_NAME: string = pkg.piConfig?.name || \"pi\";\nexport const CONFIG_DIR_NAME: string = pkg.piConfig?.configDir || \".pi\";\nexport const VERSION: string = pkg.version;\n\n// e.g., PI_CODING_AGENT_DIR or TAU_CODING_AGENT_DIR\nexport const ENV_AGENT_DIR = `${APP_NAME.toUpperCase()}_CODING_AGENT_DIR`;\n\n// =============================================================================\n// User Config Paths (~/.pi/agent/*)\n// =============================================================================\n\n/** Get the agent config directory (e.g., ~/.pi/agent/) */\nexport function getAgentDir(): string {\n\treturn process.env[ENV_AGENT_DIR] || join(homedir(), CONFIG_DIR_NAME, \"agent\");\n}\n\n/** Get path to user's custom themes directory */\nexport function getCustomThemesDir(): string {\n\treturn join(getAgentDir(), \"themes\");\n}\n\n/** Get path to models.json */\nexport function getModelsPath(): string {\n\treturn join(getAgentDir(), \"models.json\");\n}\n\n/** Get path to oauth.json */\nexport function getOAuthPath(): string {\n\treturn join(getAgentDir(), \"oauth.json\");\n}\n\n/** Get path to settings.json */\nexport function getSettingsPath(): string {\n\treturn join(getAgentDir(), \"settings.json\");\n}\n\n/** Get path to tools directory */\nexport function getToolsDir(): string {\n\treturn join(getAgentDir(), \"tools\");\n}\n\n/** Get path to slash commands directory */\nexport function getCommandsDir(): string {\n\treturn join(getAgentDir(), \"commands\");\n}\n\n/** Get path to sessions directory */\nexport function getSessionsDir(): string {\n\treturn join(getAgentDir(), \"sessions\");\n}\n\n/** Get path to debug log file */\nexport function getDebugLogPath(): string {\n\treturn join(getAgentDir(), `${APP_NAME}-debug.log`);\n}\n"]}
package/dist/config.js
CHANGED
@@ -9,9 +9,9 @@ const __filename = fileURLToPath(import.meta.url);
 const __dirname = dirname(__filename);
 /**
 * Detect if we're running as a Bun compiled binary.
-* Bun binaries have import.meta.url containing "$bunfs" (Bun's virtual filesystem path)
+* Bun binaries have import.meta.url containing "$bunfs", "~BUN", or "%7EBUN" (Bun's virtual filesystem path)
 */
-export const isBunBinary = import.meta.url.includes("$bunfs");
+export const isBunBinary = import.meta.url.includes("$bunfs") || import.meta.url.includes("~BUN") || import.meta.url.includes("%7EBUN");
 // =============================================================================
 // Package Asset Paths (shipped with executable)
 // =============================================================================
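
The hunk above widens the marker check: besides "$bunfs" and "~BUN", it also matches "%7EBUN", which is simply the percent-encoded form of "~BUN" ("%7E" is the URL encoding of "~"), presumably to catch cases where that path segment arrives URL-encoded in import.meta.url. A minimal sketch of the same three-marker check, in TypeScript; the sample URLs are hypothetical, only the markers come from the diff:

// Sketch of the widened detection above. The example URLs are illustrative,
// not actual values emitted by Bun.
const BUN_MARKERS = ["$bunfs", "~BUN", "%7EBUN"]; // "%7E" is the URL-encoded "~"

function looksLikeBunBinary(metaUrl: string): boolean {
	return BUN_MARKERS.some((marker) => metaUrl.includes(marker));
}

// Hypothetical examples:
// looksLikeBunBinary("file:///C:/snapshot/%7EBUN/root/cli.js") -> true  (encoded "~BUN" segment)
// looksLikeBunBinary("file:///home/user/project/dist/cli.js")  -> false (regular Node.js run)
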
package/dist/config.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"config.js","sourceRoot":"","sources":["../src/config.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,YAAY,EAAE,MAAM,IAAI,CAAC;AAC9C,OAAO,EAAE,OAAO,EAAE,MAAM,IAAI,CAAC;AAC7B,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,MAAM,CAAC;AAC9C,OAAO,EAAE,aAAa,EAAE,MAAM,KAAK,CAAC;AAEpC,gFAAgF;AAChF,oBAAoB;AACpB,gFAAgF;AAEhF,MAAM,UAAU,GAAG,aAAa,CAAC,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC;AAClD,MAAM,SAAS,GAAG,OAAO,CAAC,UAAU,CAAC,CAAC;AAEtC;;;GAGG;AACH,MAAM,CAAC,MAAM,WAAW,GAAG,OAAO,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;
+
{"version":3,"file":"config.js","sourceRoot":"","sources":["../src/config.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,YAAY,EAAE,MAAM,IAAI,CAAC;AAC9C,OAAO,EAAE,OAAO,EAAE,MAAM,IAAI,CAAC;AAC7B,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,MAAM,CAAC;AAC9C,OAAO,EAAE,aAAa,EAAE,MAAM,KAAK,CAAC;AAEpC,gFAAgF;AAChF,oBAAoB;AACpB,gFAAgF;AAEhF,MAAM,UAAU,GAAG,aAAa,CAAC,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC;AAClD,MAAM,SAAS,GAAG,OAAO,CAAC,UAAU,CAAC,CAAC;AAEtC;;;GAGG;AACH,MAAM,CAAC,MAAM,WAAW,GACvB,OAAO,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,QAAQ,CAAC,IAAI,OAAO,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,MAAM,CAAC,IAAI,OAAO,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AAE9G,gFAAgF;AAChF,gDAAgD;AAChD,gFAAgF;AAEhF;;;;;GAKG;AACH,MAAM,UAAU,aAAa,GAAW;IACvC,IAAI,WAAW,EAAE,CAAC;QACjB,iEAAiE;QACjE,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;IAClC,CAAC;IACD,mFAAmF;IACnF,IAAI,UAAU,CAAC,IAAI,CAAC,SAAS,EAAE,cAAc,CAAC,CAAC,EAAE,CAAC;QACjD,OAAO,SAAS,CAAC;IAClB,CAAC;IACD,8DAA8D;IAC9D,OAAO,OAAO,CAAC,SAAS,CAAC,CAAC;AAAA,CAC1B;AAED;;;;;GAKG;AACH,MAAM,UAAU,YAAY,GAAW;IACtC,IAAI,WAAW,EAAE,CAAC;QACjB,OAAO,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAE,OAAO,CAAC,CAAC;IACjD,CAAC;IACD,qEAAqE;IACrE,OAAO,IAAI,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;AAAA,CAChC;AAED,+BAA+B;AAC/B,MAAM,UAAU,kBAAkB,GAAW;IAC5C,OAAO,IAAI,CAAC,aAAa,EAAE,EAAE,cAAc,CAAC,CAAC;AAAA,CAC7C;AAED,4BAA4B;AAC5B,MAAM,UAAU,aAAa,GAAW;IACvC,OAAO,OAAO,CAAC,IAAI,CAAC,aAAa,EAAE,EAAE,WAAW,CAAC,CAAC,CAAC;AAAA,CACnD;AAED,+BAA+B;AAC/B,MAAM,UAAU,gBAAgB,GAAW;IAC1C,OAAO,OAAO,CAAC,IAAI,CAAC,aAAa,EAAE,EAAE,cAAc,CAAC,CAAC,CAAC;AAAA,CACtD;AAED,gFAAgF;AAChF,0CAA0C;AAC1C,gFAAgF;AAEhF,MAAM,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,YAAY,CAAC,kBAAkB,EAAE,EAAE,OAAO,CAAC,CAAC,CAAC;AAEpE,MAAM,CAAC,MAAM,QAAQ,GAAW,GAAG,CAAC,QAAQ,EAAE,IAAI,IAAI,IAAI,CAAC;AAC3D,MAAM,CAAC,MAAM,eAAe,GAAW,GAAG,CAAC,QAAQ,EAAE,SAAS,IAAI,KAAK,CAAC;AACxE,MAAM,CAAC,MAAM,OAAO,GAAW,GAAG,CAAC,OAAO,CAAC;AAE3C,oDAAoD;AACpD,MAAM,CAAC,MAAM,aAAa,GAAG,GAAG,QAAQ,CAAC,WAAW,EAAE,mBAAmB,CAAC;AAE1E,gFAAgF;AAChF,oCAAoC;AACpC,gFAAgF;AAEhF,0DAA0D;AAC1D,MAAM,UAAU,WAAW,GAAW;IACrC,OAAO,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,IAAI,CAAC,OAAO,EAAE,EAAE,eAAe,EAAE,OAAO,CAAC,CAAC;AAAA,CAC/E;AAED,iDAAiD;AACjD,MAAM,UAAU,kBAAkB,GAAW;IAC5C,OAAO,IAAI,CAAC,WAAW,EAAE,EAAE,QAAQ,CAAC,CAAC;AAAA,CACrC;AAED,8BAA8B;AAC9B,MAAM,UAAU,aAAa,GAAW;IACvC,OAAO,IAAI,CAAC,WAAW,EAAE,EAAE,aAAa,CAAC,CAAC;AAAA,CAC1C;AAED,6BAA6B;AAC7B,MAAM,UAAU,YAAY,GAAW;IACtC,OAAO,IAAI,CAAC,WAAW,EAAE,EAAE,YAAY,CAAC,CAAC;AAAA,CACzC;AAED,gCAAgC;AAChC,MAAM,UAAU,eAAe,GAAW;IACzC,OAAO,IAAI,CAAC,WAAW,EAAE,EAAE,eAAe,CAAC,CAAC;AAAA,CAC5C;AAED,kCAAkC;AAClC,MAAM,UAAU,WAAW,GAAW;IACrC,OAAO,IAAI,CAAC,WAAW,EAAE,EAAE,OAAO,CAAC,CAAC;AAAA,CACpC;AAED,2CAA2C;AAC3C,MAAM,UAAU,cAAc,GAAW;IACxC,OAAO,IAAI,CAAC,WAAW,EAAE,EAAE,UAAU,CAAC,CAAC;AAAA,CACvC;AAED,qCAAqC;AACrC,MAAM,UAAU,cAAc,GAAW;IACxC,OAAO,IAAI,CAAC,WAAW,EAAE,EAAE,UAAU,CAAC,CAAC;AAAA,CACvC;AAED,iCAAiC;AACjC,MAAM,UAAU,eAAe,GAAW;IACzC,OAAO,IAAI,CAAC,WAAW,EAAE,EAAE,GAAG,QAAQ,YAAY,CAAC,CAAC;AAAA,CACpD","sourcesContent":["import { existsSync, readFileSync } from \"fs\";\nimport { homedir } from \"os\";\nimport { dirname, join, resolve } from \"path\";\nimport { fileURLToPath } from \"url\";\n\n// =============================================================================\n// Package Detection\n// =============================================================================\n\nconst __filename = fileURLToPath(import.meta.url);\nconst __dirname = dirname(__filename);\n\n/**\n * Detect if we're running as a Bun compiled binary.\n * Bun binaries have import.meta.url containing \"$bunfs\", \"~BUN\", or \"%7EBUN\" (Bun's 
virtual filesystem path)\n */\nexport const isBunBinary =\n\timport.meta.url.includes(\"$bunfs\") || import.meta.url.includes(\"~BUN\") || import.meta.url.includes(\"%7EBUN\");\n\n// =============================================================================\n// Package Asset Paths (shipped with executable)\n// =============================================================================\n\n/**\n * Get the base directory for resolving package assets (themes, package.json, README.md, CHANGELOG.md).\n * - For Bun binary: returns the directory containing the executable\n * - For Node.js (dist/): returns __dirname (the dist/ directory)\n * - For tsx (src/): returns parent directory (the package root)\n */\nexport function getPackageDir(): string {\n\tif (isBunBinary) {\n\t\t// Bun binary: process.execPath points to the compiled executable\n\t\treturn dirname(process.execPath);\n\t}\n\t// Node.js: check if package.json exists in __dirname (dist/) or parent (src/ case)\n\tif (existsSync(join(__dirname, \"package.json\"))) {\n\t\treturn __dirname;\n\t}\n\t// Running from src/ via tsx - go up one level to package root\n\treturn dirname(__dirname);\n}\n\n/**\n * Get path to built-in themes directory (shipped with package)\n * - For Bun binary: theme/ next to executable\n * - For Node.js (dist/): dist/theme/\n * - For tsx (src/): src/theme/\n */\nexport function getThemesDir(): string {\n\tif (isBunBinary) {\n\t\treturn join(dirname(process.execPath), \"theme\");\n\t}\n\t// __dirname is either dist/ or src/ - theme is always a subdirectory\n\treturn join(__dirname, \"theme\");\n}\n\n/** Get path to package.json */\nexport function getPackageJsonPath(): string {\n\treturn join(getPackageDir(), \"package.json\");\n}\n\n/** Get path to README.md */\nexport function getReadmePath(): string {\n\treturn resolve(join(getPackageDir(), \"README.md\"));\n}\n\n/** Get path to CHANGELOG.md */\nexport function getChangelogPath(): string {\n\treturn resolve(join(getPackageDir(), \"CHANGELOG.md\"));\n}\n\n// =============================================================================\n// App Config (from package.json piConfig)\n// =============================================================================\n\nconst pkg = JSON.parse(readFileSync(getPackageJsonPath(), \"utf-8\"));\n\nexport const APP_NAME: string = pkg.piConfig?.name || \"pi\";\nexport const CONFIG_DIR_NAME: string = pkg.piConfig?.configDir || \".pi\";\nexport const VERSION: string = pkg.version;\n\n// e.g., PI_CODING_AGENT_DIR or TAU_CODING_AGENT_DIR\nexport const ENV_AGENT_DIR = `${APP_NAME.toUpperCase()}_CODING_AGENT_DIR`;\n\n// =============================================================================\n// User Config Paths (~/.pi/agent/*)\n// =============================================================================\n\n/** Get the agent config directory (e.g., ~/.pi/agent/) */\nexport function getAgentDir(): string {\n\treturn process.env[ENV_AGENT_DIR] || join(homedir(), CONFIG_DIR_NAME, \"agent\");\n}\n\n/** Get path to user's custom themes directory */\nexport function getCustomThemesDir(): string {\n\treturn join(getAgentDir(), \"themes\");\n}\n\n/** Get path to models.json */\nexport function getModelsPath(): string {\n\treturn join(getAgentDir(), \"models.json\");\n}\n\n/** Get path to oauth.json */\nexport function getOAuthPath(): string {\n\treturn join(getAgentDir(), \"oauth.json\");\n}\n\n/** Get path to settings.json */\nexport function getSettingsPath(): string {\n\treturn join(getAgentDir(), \"settings.json\");\n}\n\n/** 
Get path to tools directory */\nexport function getToolsDir(): string {\n\treturn join(getAgentDir(), \"tools\");\n}\n\n/** Get path to slash commands directory */\nexport function getCommandsDir(): string {\n\treturn join(getAgentDir(), \"commands\");\n}\n\n/** Get path to sessions directory */\nexport function getSessionsDir(): string {\n\treturn join(getAgentDir(), \"sessions\");\n}\n\n/** Get path to debug log file */\nexport function getDebugLogPath(): string {\n\treturn join(getAgentDir(), `${APP_NAME}-debug.log`);\n}\n"]}
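
The sourcesContent embedded in this map shows how the user-config paths all hang off a single agent directory that can be overridden through an environment variable derived from piConfig.name. A minimal sketch of that resolution, assuming the default piConfig values shown in the embedded source ("pi" and ".pi"); a real install may override them via package.json:

// Sketch of the agent-directory resolution embedded in the map above.
import { homedir } from "os";
import { join } from "path";

const APP_NAME = "pi"; // default from piConfig.name
const CONFIG_DIR_NAME = ".pi"; // default from piConfig.configDir
const ENV_AGENT_DIR = `${APP_NAME.toUpperCase()}_CODING_AGENT_DIR`; // "PI_CODING_AGENT_DIR"

function agentDir(): string {
	// An explicit env override wins; otherwise everything lives under ~/.pi/agent.
	return process.env[ENV_AGENT_DIR] || join(homedir(), CONFIG_DIR_NAME, "agent");
}

// e.g. sessions land in join(agentDir(), "sessions"), settings in join(agentDir(), "settings.json")
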
package/dist/tools/index.d.ts
CHANGED
@@ -5,41 +5,41 @@ export { grepTool } from "./grep.js";
 export { lsTool } from "./ls.js";
 export { readTool } from "./read.js";
 export { writeTool } from "./write.js";
-export declare const codingTools: (import("
+export declare const codingTools: (import("@mariozechner/pi-ai").AgentTool<import("@sinclair/typebox").TObject<{
 command: import("@sinclair/typebox").TString;
 timeout: import("@sinclair/typebox").TOptional<import("@sinclair/typebox").TNumber>;
-}>, any> | import("
+}>, any> | import("@mariozechner/pi-ai").AgentTool<import("@sinclair/typebox").TObject<{
 path: import("@sinclair/typebox").TString;
 oldText: import("@sinclair/typebox").TString;
 newText: import("@sinclair/typebox").TString;
-}>, any> | import("
+}>, any> | import("@mariozechner/pi-ai").AgentTool<import("@sinclair/typebox").TObject<{
 path: import("@sinclair/typebox").TString;
 offset: import("@sinclair/typebox").TOptional<import("@sinclair/typebox").TNumber>;
 limit: import("@sinclair/typebox").TOptional<import("@sinclair/typebox").TNumber>;
-}>, any> | import("
+}>, any> | import("@mariozechner/pi-ai").AgentTool<import("@sinclair/typebox").TObject<{
 path: import("@sinclair/typebox").TString;
 content: import("@sinclair/typebox").TString;
 }>, any>)[];
 export declare const allTools: {
-read: import("
+read: import("@mariozechner/pi-ai").AgentTool<import("@sinclair/typebox").TObject<{
 path: import("@sinclair/typebox").TString;
 offset: import("@sinclair/typebox").TOptional<import("@sinclair/typebox").TNumber>;
 limit: import("@sinclair/typebox").TOptional<import("@sinclair/typebox").TNumber>;
 }>, any>;
-bash: import("
+bash: import("@mariozechner/pi-ai").AgentTool<import("@sinclair/typebox").TObject<{
 command: import("@sinclair/typebox").TString;
 timeout: import("@sinclair/typebox").TOptional<import("@sinclair/typebox").TNumber>;
 }>, any>;
-edit: import("
+edit: import("@mariozechner/pi-ai").AgentTool<import("@sinclair/typebox").TObject<{
 path: import("@sinclair/typebox").TString;
 oldText: import("@sinclair/typebox").TString;
 newText: import("@sinclair/typebox").TString;
 }>, any>;
-write: import("
+write: import("@mariozechner/pi-ai").AgentTool<import("@sinclair/typebox").TObject<{
 path: import("@sinclair/typebox").TString;
 content: import("@sinclair/typebox").TString;
 }>, any>;
-grep: import("
+grep: import("@mariozechner/pi-ai").AgentTool<import("@sinclair/typebox").TObject<{
 pattern: import("@sinclair/typebox").TString;
 path: import("@sinclair/typebox").TOptional<import("@sinclair/typebox").TString>;
 glob: import("@sinclair/typebox").TOptional<import("@sinclair/typebox").TString>;
@@ -48,12 +48,12 @@ export declare const allTools: {
 context: import("@sinclair/typebox").TOptional<import("@sinclair/typebox").TNumber>;
 limit: import("@sinclair/typebox").TOptional<import("@sinclair/typebox").TNumber>;
 }>, any>;
-find: import("
+find: import("@mariozechner/pi-ai").AgentTool<import("@sinclair/typebox").TObject<{
 pattern: import("@sinclair/typebox").TString;
 path: import("@sinclair/typebox").TOptional<import("@sinclair/typebox").TString>;
 limit: import("@sinclair/typebox").TOptional<import("@sinclair/typebox").TNumber>;
 }>, any>;
-ls: import("
+ls: import("@mariozechner/pi-ai").AgentTool<import("@sinclair/typebox").TObject<{
 path: import("@sinclair/typebox").TOptional<import("@sinclair/typebox").TString>;
 limit: import("@sinclair/typebox").TOptional<import("@sinclair/typebox").TNumber>;
 }>, any>;
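
These declarations changed only in their import specifier (the old one is truncated in this diff; the new one is "@mariozechner/pi-ai"), but they also document each tool's parameter shape as a @sinclair/typebox object. A hedged sketch of a schema with the same shape as the read tool's declaration above; this is illustrative only, the actual tool definitions live in the package source:

// Illustrative: a typebox schema matching the shape the read tool declaration
// describes (path plus optional offset/limit).
import { Type, type Static } from "@sinclair/typebox";

const readParams = Type.Object({
	path: Type.String(),
	offset: Type.Optional(Type.Number()),
	limit: Type.Optional(Type.Number()),
});

type ReadParams = Static<typeof readParams>;
// Equivalent to: { path: string; offset?: number; limit?: number }
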
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "@mariozechner/pi-coding-agent",
-"version": "0.
+"version": "0.13.0",
 "description": "Coding agent CLI with read, bash, edit, write tools and session management",
 "type": "module",
 "piConfig": {
@@ -19,7 +19,7 @@
 "scripts": {
 "clean": "rm -rf dist",
 "build": "tsgo -p tsconfig.build.json && chmod +x dist/cli.js && npm run copy-assets",
-"build:binary": "
+"build:binary": "npm run build && bun build --compile ./dist/cli.js --outfile dist/pi && npm run copy-binary-assets",
 "copy-assets": "cp src/theme/*.json dist/theme/",
 "copy-binary-assets": "cp package.json dist/ && cp README.md dist/ && cp CHANGELOG.md dist/",
 "dev": "tsgo -p tsconfig.build.json --watch --preserveWatchOutput",
@@ -28,9 +28,9 @@
 "prepublishOnly": "npm run clean && npm run build"
 },
 "dependencies": {
-"@mariozechner/pi-agent-core": "^0.
-"@mariozechner/pi-ai": "^0.
-"@mariozechner/pi-tui": "^0.
+"@mariozechner/pi-agent-core": "^0.13.0",
+"@mariozechner/pi-ai": "^0.13.0",
+"@mariozechner/pi-tui": "^0.13.0",
 "chalk": "^5.5.0",
 "diff": "^8.0.2",
 "glob": "^11.0.3"
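
The new build:binary script compiles dist/cli.js into a standalone executable (dist/pi) and then runs copy-binary-assets, which places package.json, README.md, and CHANGELOG.md beside it. That pairing matters because, as the config source earlier in this diff shows, the compiled binary resolves its assets relative to the directory of process.execPath. A minimal sketch of that lookup, with a hypothetical executable path:

// Sketch of the asset lookup that copy-binary-assets satisfies; the path is hypothetical.
import { dirname, join } from "path";

function packageJsonNextToBinary(execPath: string): string {
	// For a binary shipped as dist/pi, this resolves to dist/package.json,
	// which is the file the copy-binary-assets script copies there.
	return join(dirname(execPath), "package.json");
}

// e.g. packageJsonNextToBinary("/opt/pi/dist/pi") -> "/opt/pi/dist/package.json"
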