@oh-my-pi/pi-coding-agent 13.9.2 → 13.9.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53) hide show
  1. package/CHANGELOG.md +64 -0
  2. package/examples/sdk/02-custom-model.ts +2 -1
  3. package/package.json +7 -7
  4. package/src/cli/args.ts +10 -6
  5. package/src/cli/list-models.ts +2 -2
  6. package/src/commands/launch.ts +3 -3
  7. package/src/config/model-registry.ts +136 -38
  8. package/src/config/model-resolver.ts +47 -21
  9. package/src/config/settings-schema.ts +56 -2
  10. package/src/discovery/helpers.ts +3 -3
  11. package/src/extensibility/custom-tools/types.ts +2 -0
  12. package/src/extensibility/extensions/loader.ts +3 -2
  13. package/src/extensibility/extensions/types.ts +10 -7
  14. package/src/extensibility/hooks/types.ts +2 -0
  15. package/src/main.ts +5 -22
  16. package/src/memories/index.ts +7 -3
  17. package/src/modes/components/footer.ts +10 -8
  18. package/src/modes/components/model-selector.ts +33 -38
  19. package/src/modes/components/settings-defs.ts +32 -3
  20. package/src/modes/components/settings-selector.ts +16 -5
  21. package/src/modes/components/status-line/context-thresholds.ts +68 -0
  22. package/src/modes/components/status-line/segments.ts +11 -12
  23. package/src/modes/components/status-line.ts +2 -6
  24. package/src/modes/components/thinking-selector.ts +7 -7
  25. package/src/modes/components/tree-selector.ts +3 -2
  26. package/src/modes/controllers/command-controller.ts +11 -26
  27. package/src/modes/controllers/event-controller.ts +16 -3
  28. package/src/modes/controllers/input-controller.ts +4 -2
  29. package/src/modes/controllers/selector-controller.ts +5 -4
  30. package/src/modes/interactive-mode.ts +2 -2
  31. package/src/modes/rpc/rpc-client.ts +5 -10
  32. package/src/modes/rpc/rpc-types.ts +5 -5
  33. package/src/modes/theme/theme.ts +8 -3
  34. package/src/priority.json +1 -0
  35. package/src/prompts/system/auto-handoff-threshold-focus.md +1 -0
  36. package/src/prompts/system/system-prompt.md +18 -2
  37. package/src/prompts/tools/hashline.md +139 -83
  38. package/src/sdk.ts +24 -16
  39. package/src/session/agent-session.ts +261 -118
  40. package/src/session/agent-storage.ts +14 -14
  41. package/src/session/compaction/compaction.ts +500 -13
  42. package/src/session/messages.ts +12 -1
  43. package/src/session/session-manager.ts +77 -19
  44. package/src/slash-commands/builtin-registry.ts +48 -0
  45. package/src/task/agents.ts +3 -2
  46. package/src/task/executor.ts +2 -2
  47. package/src/task/types.ts +2 -1
  48. package/src/thinking.ts +87 -0
  49. package/src/tools/browser.ts +15 -6
  50. package/src/tools/fetch.ts +118 -100
  51. package/src/tools/index.ts +2 -1
  52. package/src/web/kagi.ts +62 -7
  53. package/src/web/search/providers/exa.ts +74 -3
@@ -276,22 +276,20 @@ CREATE TABLE settings (
276
276
  * @returns Array of stored credentials with their database IDs
277
277
  */
278
278
  listAuthCredentials(provider?: string, includeDisabled = false): StoredAuthCredential[] {
279
- // AuthCredentialStore doesn't expose includeDisabled yet, so we filter if needed
280
279
  const credentials = this.#authStore.listAuthCredentials(provider);
281
280
  if (!includeDisabled) return credentials;
282
281
 
283
- // For now, includeDisabled requires direct DB access
284
- // This is only used internally, so it's acceptable
285
282
  const stmt = this.#db.prepare(
286
283
  provider
287
- ? "SELECT id, provider, credential_type, data FROM auth_credentials WHERE provider = ? ORDER BY id ASC"
288
- : "SELECT id, provider, credential_type, data FROM auth_credentials ORDER BY id ASC",
284
+ ? "SELECT id, provider, credential_type, data, disabled_cause FROM auth_credentials WHERE provider = ? ORDER BY id ASC"
285
+ : "SELECT id, provider, credential_type, data, disabled_cause FROM auth_credentials ORDER BY id ASC",
289
286
  );
290
287
  const rows = (provider ? stmt.all(provider) : stmt.all()) as Array<{
291
288
  id: number;
292
289
  provider: string;
293
290
  credential_type: string;
294
291
  data: string;
292
+ disabled_cause: string | null;
295
293
  }>;
296
294
 
297
295
  const results: StoredAuthCredential[] = [];
@@ -309,7 +307,7 @@ CREATE TABLE settings (
309
307
  continue;
310
308
  }
311
309
 
312
- results.push({ id: row.id, provider: row.provider, credential });
310
+ results.push({ id: row.id, provider: row.provider, credential, disabledCause: row.disabled_cause });
313
311
  } catch {}
314
312
  }
315
313
  return results;
@@ -336,19 +334,21 @@ CREATE TABLE settings (
336
334
  }
337
335
 
338
336
  /**
339
- * Deletes an auth credential by ID.
340
- * @param id - Database row ID of the credential to delete
337
+ * Disables an auth credential by ID with a persisted cause.
338
+ * @param id - Database row ID of the credential to disable
339
+ * @param disabledCause - Human-readable cause stored with the disabled row
341
340
  */
342
- deleteAuthCredential(id: number): void {
343
- this.#authStore.deleteAuthCredential(id);
341
+ deleteAuthCredential(id: number, disabledCause: string): void {
342
+ this.#authStore.deleteAuthCredential(id, disabledCause);
344
343
  }
345
344
 
346
345
  /**
347
- * Deletes all auth credentials for a provider.
348
- * @param provider - Provider name whose credentials should be deleted
346
+ * Disables all auth credentials for a provider with a persisted cause.
347
+ * @param provider - Provider name whose credentials should be disabled
348
+ * @param disabledCause - Human-readable cause stored with the disabled rows
349
349
  */
350
- deleteAuthCredentialsForProvider(provider: string): void {
351
- this.#authStore.deleteAuthCredentialsForProvider(provider);
350
+ deleteAuthCredentialsForProvider(provider: string, disabledCause: string): void {
351
+ this.#authStore.deleteAuthCredentialsForProvider(provider, disabledCause);
352
352
  }
353
353
 
354
354
  /**
@@ -5,8 +5,15 @@
5
5
  * and after compaction the session is reloaded.
6
6
  */
7
7
  import type { AgentMessage } from "@oh-my-pi/pi-agent-core";
8
- import type { AssistantMessage, Model, Usage } from "@oh-my-pi/pi-ai";
9
- import { completeSimple } from "@oh-my-pi/pi-ai";
8
+ import { type AssistantMessage, completeSimple, Effort, type Model, type Usage } from "@oh-my-pi/pi-ai";
9
+ import {
10
+ CODEX_BASE_URL,
11
+ getCodexAccountId,
12
+ OPENAI_HEADER_VALUES,
13
+ OPENAI_HEADERS,
14
+ } from "@oh-my-pi/pi-ai/providers/openai-codex/constants";
15
+ import { transformMessages } from "@oh-my-pi/pi-ai/providers/transform-messages";
16
+ import { normalizeResponsesToolCallId } from "@oh-my-pi/pi-ai/utils";
10
17
  import { logger } from "@oh-my-pi/pi-utils";
11
18
  import { renderPromptTemplate } from "../../config/prompt-templates";
12
19
  import compactionShortSummaryPrompt from "../../prompts/compaction/compaction-short-summary.md" with { type: "text" };
@@ -115,17 +122,23 @@ export interface CompactionResult<T = unknown> {
115
122
 
116
123
  export interface CompactionSettings {
117
124
  enabled: boolean;
125
+ strategy?: "context-full" | "handoff" | "off";
126
+ thresholdPercent?: number;
118
127
  reserveTokens: number;
119
128
  keepRecentTokens: number;
120
129
  autoContinue?: boolean;
130
+ remoteEnabled?: boolean;
121
131
  remoteEndpoint?: string;
122
132
  }
123
133
 
124
134
  export const DEFAULT_COMPACTION_SETTINGS: CompactionSettings = {
125
135
  enabled: true,
136
+ strategy: "context-full",
137
+ thresholdPercent: -1,
126
138
  reserveTokens: 16384,
127
139
  keepRecentTokens: 20000,
128
140
  autoContinue: true,
141
+ remoteEnabled: true,
129
142
  };
130
143
 
131
144
  // ============================================================================
@@ -187,8 +200,18 @@ export function effectiveReserveTokens(contextWindow: number, settings: Compacti
187
200
  * Check if compaction should trigger based on context usage.
188
201
  */
189
202
  export function shouldCompact(contextTokens: number, contextWindow: number, settings: CompactionSettings): boolean {
190
- if (!settings.enabled) return false;
191
- return contextTokens > contextWindow - effectiveReserveTokens(contextWindow, settings);
203
+ if (!settings.enabled || settings.strategy === "off" || contextWindow <= 0) return false;
204
+ const thresholdTokens = resolveThresholdTokens(contextWindow, settings);
205
+ return contextTokens > thresholdTokens;
206
+ }
207
+
208
+ function resolveThresholdTokens(contextWindow: number, settings: CompactionSettings): number {
209
+ const thresholdPercent = settings.thresholdPercent;
210
+ if (typeof thresholdPercent !== "number" || !Number.isFinite(thresholdPercent) || thresholdPercent <= 0) {
211
+ return contextWindow - effectiveReserveTokens(contextWindow, settings);
212
+ }
213
+ const clampedThresholdPercent = Math.min(99, Math.max(1, thresholdPercent));
214
+ return Math.floor(contextWindow * (clampedThresholdPercent / 100));
192
215
  }
193
216
 
194
217
  // ============================================================================
@@ -441,16 +464,439 @@ function formatAdditionalContext(context: string[] | undefined): string {
441
464
  return `<additional-context>\n${lines}\n</additional-context>\n\n`;
442
465
  }
443
466
 
444
- interface RemoteCompactionRequest {
445
- systemPrompt: string;
446
- prompt: string;
467
+ const OPENAI_REMOTE_COMPACTION_PRESERVE_KEY = "openaiRemoteCompaction";
468
+
469
+ type OpenAiRemoteCompactionItem = {
470
+ type: "compaction" | "compaction_summary";
471
+ encrypted_content?: string;
472
+ summary?: string;
473
+ };
474
+
475
+ interface OpenAiRemoteCompactionPreserveData {
476
+ replacementHistory: Array<Record<string, unknown>>;
477
+ compactionItem: OpenAiRemoteCompactionItem;
447
478
  }
448
479
 
480
+ interface OpenAiRemoteCompactionRequest {
481
+ model: string;
482
+ input: Array<Record<string, unknown>>;
483
+ instructions: string;
484
+ }
485
+
486
+ interface OpenAiRemoteCompactionResponse extends OpenAiRemoteCompactionPreserveData {}
487
+
449
488
  interface RemoteCompactionResponse {
450
489
  summary: string;
451
490
  shortSummary?: string;
452
491
  }
453
492
 
493
+ function shouldUseOpenAiRemoteCompaction(model: Model): boolean {
494
+ return model.provider === "openai" || model.provider === "openai-codex";
495
+ }
496
+
497
+ function resolveOpenAiCompactEndpoint(model: Model): string {
498
+ if (model.provider === "openai-codex") {
499
+ return resolveOpenAiCodexCompactEndpoint(model.baseUrl);
500
+ }
501
+
502
+ const defaultBase = "https://api.openai.com/v1";
503
+ const rawBase = model.baseUrl && model.baseUrl.length > 0 ? model.baseUrl : defaultBase;
504
+ const normalizedBase = rawBase.endsWith("/") ? rawBase.slice(0, -1) : rawBase;
505
+ if (normalizedBase.endsWith("/v1")) return `${normalizedBase}/responses/compact`;
506
+ return `${normalizedBase}/v1/responses/compact`;
507
+ }
508
+
509
+ function resolveOpenAiCodexCompactEndpoint(baseUrl: string | undefined): string {
510
+ const rawBase = baseUrl && baseUrl.length > 0 ? baseUrl : CODEX_BASE_URL;
511
+ const normalizedBase = rawBase.endsWith("/") ? rawBase.slice(0, -1) : rawBase;
512
+ if (/\/codex(?:\/v\d+)?$/.test(normalizedBase)) return `${normalizedBase}/responses/compact`;
513
+ return `${normalizedBase}/codex/responses/compact`;
514
+ }
515
+
516
+ function normalizeOpenAiCompactionToolCallId(id: string): string {
517
+ const normalized = normalizeResponsesToolCallId(id);
518
+ return `${normalized.callId}|${normalized.itemId ?? normalized.callId}`;
519
+ }
520
+
521
+ function getPreservedOpenAiRemoteCompactionData(
522
+ preserveData: Record<string, unknown> | undefined,
523
+ ): OpenAiRemoteCompactionPreserveData | undefined {
524
+ const candidate = preserveData?.[OPENAI_REMOTE_COMPACTION_PRESERVE_KEY];
525
+ if (!candidate || typeof candidate !== "object") return undefined;
526
+ const maybeData = candidate as { replacementHistory?: unknown; compactionItem?: unknown };
527
+ if (!Array.isArray(maybeData.replacementHistory)) return undefined;
528
+ const maybeItem = maybeData.compactionItem;
529
+ if (!maybeItem || typeof maybeItem !== "object") return undefined;
530
+ const compactionItem = maybeItem as { type?: unknown; encrypted_content?: unknown; summary?: unknown };
531
+ const isClassicCompaction =
532
+ compactionItem.type === "compaction" && typeof compactionItem.encrypted_content === "string";
533
+ const isSummaryCompaction = compactionItem.type === "compaction_summary";
534
+ if (!isClassicCompaction && !isSummaryCompaction) {
535
+ return undefined;
536
+ }
537
+ return {
538
+ replacementHistory: maybeData.replacementHistory as Array<Record<string, unknown>>,
539
+ compactionItem: compactionItem as unknown as OpenAiRemoteCompactionItem,
540
+ };
541
+ }
542
+
543
+ function withOpenAiRemoteCompactionPreserveData(
544
+ preserveData: Record<string, unknown> | undefined,
545
+ remoteCompaction: OpenAiRemoteCompactionPreserveData | undefined,
546
+ ): Record<string, unknown> | undefined {
547
+ if (remoteCompaction) {
548
+ return {
549
+ ...(preserveData ?? {}),
550
+ [OPENAI_REMOTE_COMPACTION_PRESERVE_KEY]: remoteCompaction,
551
+ };
552
+ }
553
+
554
+ if (!preserveData || !(OPENAI_REMOTE_COMPACTION_PRESERVE_KEY in preserveData)) {
555
+ return preserveData;
556
+ }
557
+
558
+ const { [OPENAI_REMOTE_COMPACTION_PRESERVE_KEY]: _removed, ...rest } = preserveData;
559
+ return Object.keys(rest).length > 0 ? rest : undefined;
560
+ }
561
+
562
+ function estimateOpenAiCompactInputTokens(input: Array<Record<string, unknown>>, instructions: string): number {
563
+ let chars = instructions.length;
564
+ for (const item of input) {
565
+ chars += JSON.stringify(item).length;
566
+ }
567
+ return Math.ceil(chars / 4);
568
+ }
569
+
570
+ function shouldTrimOpenAiCompactInputItem(item: Record<string, unknown>): boolean {
571
+ return item.type === "function_call_output" || (item.type === "message" && item.role === "developer");
572
+ }
573
+
574
+ function shouldKeepOpenAiCompactOutputUserMessage(item: Record<string, unknown>): boolean {
575
+ if (item.role !== "user") return false;
576
+ const content = item.content;
577
+ if (!Array.isArray(content) || content.length === 0) return false;
578
+ const contextualFragmentPatterns = [
579
+ [/^<system-reminder>[\s\S]*<\/system-reminder>$/i, /<system-reminder>/i],
580
+ [/^#\s*AGENTS\.md instructions for\b[\s\S]*<\/INSTRUCTIONS>$/i, /# AGENTS.md instructions/],
581
+ [/^<environment-context>[\s\S]*<\/environment-context>$/i, /<environment-context>/i],
582
+ [/^<skill>[\s\S]*<\/skill>$/i, /<skill>/i],
583
+ [/^<user-shell-command>[\s\S]*<\/user-shell-command>$/i, /<user-shell-command>/i],
584
+ [/^<turn-aborted>[\s\S]*<\/turn-aborted>$/i, /<turn-aborted>/i],
585
+ [/^<subagent-notification>[\s\S]*<\/subagent-notification>$/i, /<subagent-notification>/i],
586
+ ] as const;
587
+ return content.every(part => {
588
+ if (!part || typeof part !== "object") return false;
589
+ const candidate = part as { type?: unknown; text?: unknown };
590
+ if (candidate.type === "input_image") return true;
591
+ if (candidate.type !== "input_text" || typeof candidate.text !== "string") return false;
592
+ const trimmed = candidate.text.trim();
593
+ if (trimmed.length === 0) return false;
594
+ return !contextualFragmentPatterns.some(([strictPattern, markerPattern]) => {
595
+ return strictPattern.test(trimmed) || markerPattern.test(trimmed);
596
+ });
597
+ });
598
+ }
599
+
600
+ function shouldKeepOpenAiCompactOutputItem(item: Record<string, unknown>): boolean {
601
+ if (item.type === "compaction" || item.type === "compaction_summary") return true;
602
+ if (item.type !== "message") return false;
603
+ if (item.role === "developer") return false;
604
+ if (item.role === "assistant") return true;
605
+ return shouldKeepOpenAiCompactOutputUserMessage(item);
606
+ }
607
+
608
+ function trimOpenAiCompactInput(
609
+ input: Array<Record<string, unknown>>,
610
+ contextWindow: number,
611
+ instructions: string,
612
+ ): Array<Record<string, unknown>> {
613
+ const trimmed = [...input];
614
+ while (trimmed.length > 0 && estimateOpenAiCompactInputTokens(trimmed, instructions) > contextWindow) {
615
+ const last = trimmed[trimmed.length - 1];
616
+ if (last?.type === "function_call_output") {
617
+ const callId = typeof last.call_id === "string" ? last.call_id : undefined;
618
+ trimmed.pop();
619
+ if (callId) {
620
+ const matchingCallIndex = trimmed.findLastIndex(
621
+ item => item.type === "function_call" && item.call_id === callId,
622
+ );
623
+ if (matchingCallIndex >= 0) {
624
+ trimmed.splice(matchingCallIndex, 1);
625
+ }
626
+ }
627
+ continue;
628
+ }
629
+ if (!last || !shouldTrimOpenAiCompactInputItem(last)) {
630
+ break;
631
+ }
632
+ trimmed.pop();
633
+ }
634
+ return trimmed;
635
+ }
636
+
637
+ function getOpenAIResponsesHistoryItems(
638
+ providerPayload: { type?: string; items?: unknown } | undefined,
639
+ ): Array<Record<string, unknown>> | undefined {
640
+ if (providerPayload?.type !== "openaiResponsesHistory" || !Array.isArray(providerPayload.items)) {
641
+ return undefined;
642
+ }
643
+ return providerPayload.items as Array<Record<string, unknown>>;
644
+ }
645
+
646
+ function collectKnownOpenAiCallIds(items: Array<Record<string, unknown>>): Set<string> {
647
+ const knownCallIds = new Set<string>();
648
+ for (const item of items) {
649
+ if (item.type === "function_call" && typeof item.call_id === "string") {
650
+ knownCallIds.add(item.call_id);
651
+ }
652
+ }
653
+ return knownCallIds;
654
+ }
655
+
656
+ function buildOpenAiNativeHistory(
657
+ messages: AgentMessage[],
658
+ model: Model,
659
+ previousReplacementHistory?: Array<Record<string, unknown>>,
660
+ ): Array<Record<string, unknown>> {
661
+ const input: Array<Record<string, unknown>> = previousReplacementHistory ? [...previousReplacementHistory] : [];
662
+ const transformedMessages = transformMessages(convertToLlm(messages), model, id =>
663
+ normalizeOpenAiCompactionToolCallId(id),
664
+ );
665
+
666
+ let msgIndex = 0;
667
+ let knownCallIds = collectKnownOpenAiCallIds(input);
668
+ for (const message of transformedMessages) {
669
+ if (message.role === "user" || message.role === "developer") {
670
+ const providerPayload = (message as { providerPayload?: { type?: string; items?: unknown } }).providerPayload;
671
+ const historyItems = getOpenAIResponsesHistoryItems(providerPayload);
672
+ if (historyItems) {
673
+ input.push(...historyItems);
674
+ knownCallIds = collectKnownOpenAiCallIds(input);
675
+ msgIndex++;
676
+ continue;
677
+ }
678
+
679
+ const contentBlocks: Array<Record<string, unknown>> = [];
680
+ if (typeof message.content === "string") {
681
+ if (message.content.trim().length > 0) {
682
+ contentBlocks.push({ type: "input_text", text: message.content.toWellFormed() });
683
+ }
684
+ } else {
685
+ for (const block of message.content) {
686
+ if (block.type === "text") {
687
+ if (!block.text || block.text.trim().length === 0) continue;
688
+ contentBlocks.push({ type: "input_text", text: block.text.toWellFormed() });
689
+ continue;
690
+ }
691
+ if (block.type === "image") {
692
+ contentBlocks.push({
693
+ type: "input_image",
694
+ detail: "auto",
695
+ image_url: `data:${block.mimeType};base64,${block.data}`,
696
+ });
697
+ }
698
+ }
699
+ }
700
+ if (contentBlocks.length > 0) {
701
+ input.push({ type: "message", role: message.role, content: contentBlocks });
702
+ }
703
+ msgIndex++;
704
+ continue;
705
+ }
706
+
707
+ if (message.role === "assistant") {
708
+ const providerPayload = (
709
+ message as { providerPayload?: { type?: string; incremental?: boolean; items?: unknown } }
710
+ ).providerPayload;
711
+ const historyItems = getOpenAIResponsesHistoryItems(providerPayload);
712
+ if (historyItems) {
713
+ if (providerPayload?.incremental) {
714
+ input.push(...historyItems);
715
+ } else {
716
+ input.splice(0, input.length, ...historyItems);
717
+ }
718
+ knownCallIds = collectKnownOpenAiCallIds(input);
719
+ msgIndex++;
720
+ continue;
721
+ }
722
+
723
+ const assistant = message as AssistantMessage;
724
+ const isDifferentModel =
725
+ assistant.model !== model.id && assistant.provider === model.provider && assistant.api === model.api;
726
+
727
+ for (const block of assistant.content) {
728
+ if (block.type === "thinking" && assistant.stopReason !== "error" && block.thinkingSignature) {
729
+ try {
730
+ const reasoningItem = JSON.parse(block.thinkingSignature) as Record<string, unknown>;
731
+ if (reasoningItem && typeof reasoningItem === "object") {
732
+ input.push(reasoningItem);
733
+ }
734
+ } catch {
735
+ logger.warn("Failed to parse assistant reasoning for remote compaction", {
736
+ model: assistant.model,
737
+ provider: assistant.provider,
738
+ });
739
+ }
740
+ continue;
741
+ }
742
+
743
+ if (block.type === "text") {
744
+ if (!block.text || block.text.trim().length === 0) continue;
745
+ let msgId = block.textSignature;
746
+ if (!msgId) {
747
+ msgId = `msg_${msgIndex}`;
748
+ } else if (msgId.length > 64) {
749
+ msgId = `msg_${Bun.hash.xxHash64(msgId).toString(36)}`;
750
+ }
751
+ input.push({
752
+ type: "message",
753
+ role: "assistant",
754
+ content: [{ type: "output_text", text: block.text.toWellFormed(), annotations: [] }],
755
+ status: "completed",
756
+ id: msgId,
757
+ });
758
+ continue;
759
+ }
760
+
761
+ if (block.type === "toolCall" && assistant.stopReason !== "error") {
762
+ const normalized = normalizeResponsesToolCallId(block.id);
763
+ let itemId: string | undefined = normalized.itemId;
764
+ if (isDifferentModel && (itemId?.startsWith("fc_") || itemId?.startsWith("fcr_"))) {
765
+ itemId = undefined;
766
+ }
767
+ knownCallIds.add(normalized.callId);
768
+ input.push({
769
+ type: "function_call",
770
+ id: itemId,
771
+ call_id: normalized.callId,
772
+ name: block.name,
773
+ arguments: JSON.stringify(block.arguments),
774
+ });
775
+ }
776
+ }
777
+
778
+ msgIndex++;
779
+ continue;
780
+ }
781
+
782
+ if (message.role === "toolResult") {
783
+ const normalized = normalizeResponsesToolCallId(message.toolCallId);
784
+ if (!knownCallIds.has(normalized.callId)) {
785
+ msgIndex++;
786
+ continue;
787
+ }
788
+
789
+ const textOutput = message.content
790
+ .filter(block => block.type === "text")
791
+ .map(block => block.text)
792
+ .join("\n");
793
+ const hasImages = message.content.some(block => block.type === "image");
794
+ input.push({
795
+ type: "function_call_output",
796
+ call_id: normalized.callId,
797
+ output: (textOutput.length > 0 ? textOutput : "(see attached image)").toWellFormed(),
798
+ });
799
+
800
+ if (hasImages && model.input.includes("image")) {
801
+ const contentBlocks: Array<Record<string, unknown>> = [
802
+ { type: "input_text", text: "Attached image(s) from tool result:" },
803
+ ];
804
+ for (const block of message.content) {
805
+ if (block.type !== "image") continue;
806
+ contentBlocks.push({
807
+ type: "input_image",
808
+ detail: "auto",
809
+ image_url: `data:${block.mimeType};base64,${block.data}`,
810
+ });
811
+ }
812
+ input.push({ type: "message", role: "user", content: contentBlocks });
813
+ }
814
+ }
815
+
816
+ msgIndex++;
817
+ }
818
+
819
+ return input;
820
+ }
821
+
822
+ async function requestOpenAiRemoteCompaction(
823
+ model: Model,
824
+ apiKey: string,
825
+ compactInput: Array<Record<string, unknown>>,
826
+ instructions: string,
827
+ ): Promise<OpenAiRemoteCompactionResponse> {
828
+ const endpoint = resolveOpenAiCompactEndpoint(model);
829
+ const request: OpenAiRemoteCompactionRequest = {
830
+ model: model.id,
831
+ input: trimOpenAiCompactInput(compactInput, model.contextWindow, instructions),
832
+ instructions,
833
+ };
834
+ const headers: Record<string, string> = {
835
+ "content-type": "application/json",
836
+ Authorization: `Bearer ${apiKey}`,
837
+ ...(model.headers ?? {}),
838
+ };
839
+
840
+ // Codex endpoints require additional auth headers
841
+ if (model.provider === "openai-codex") {
842
+ const accountId = getCodexAccountId(apiKey);
843
+ if (accountId) {
844
+ headers[OPENAI_HEADERS.ACCOUNT_ID] = accountId;
845
+ }
846
+ headers[OPENAI_HEADERS.BETA] = OPENAI_HEADER_VALUES.BETA_RESPONSES;
847
+ headers[OPENAI_HEADERS.ORIGINATOR] = OPENAI_HEADER_VALUES.ORIGINATOR_CODEX;
848
+ }
849
+
850
+ const response = await fetch(endpoint, {
851
+ method: "POST",
852
+ headers,
853
+ body: JSON.stringify(request),
854
+ });
855
+
856
+ if (!response.ok) {
857
+ const errorText = await response.text().catch(() => "");
858
+ logger.warn("OpenAI remote compaction failed", {
859
+ endpoint,
860
+ status: response.status,
861
+ statusText: response.statusText,
862
+ errorText,
863
+ });
864
+ throw new Error(`Remote compaction failed (${response.status} ${response.statusText})`);
865
+ }
866
+
867
+ const data = (await response.json()) as { output?: unknown[] } | undefined;
868
+ const rawOutput = data?.output ?? [];
869
+ const replacementHistory = rawOutput.filter(
870
+ (item): item is Record<string, unknown> =>
871
+ !!item && typeof item === "object" && shouldKeepOpenAiCompactOutputItem(item as Record<string, unknown>),
872
+ );
873
+ const compactionItem = [...replacementHistory].reverse().find((item): item is OpenAiRemoteCompactionItem => {
874
+ if (item.type === "compaction" && typeof item.encrypted_content === "string") return true;
875
+ if (item.type === "compaction_summary") return true;
876
+ return false;
877
+ });
878
+ if (!compactionItem) {
879
+ const outputTypes = rawOutput.map(item =>
880
+ typeof item === "object" && item !== null ? (item as Record<string, unknown>).type : typeof item,
881
+ );
882
+ logger.warn("Remote compaction response missing compaction item", {
883
+ endpoint,
884
+ model: model.id,
885
+ provider: model.provider,
886
+ rawOutputLength: rawOutput.length,
887
+ outputTypes,
888
+ replacementHistoryLength: replacementHistory.length,
889
+ });
890
+ throw new Error("Remote compaction response missing compaction item");
891
+ }
892
+ return { replacementHistory, compactionItem };
893
+ }
894
+
895
+ interface RemoteCompactionRequest {
896
+ systemPrompt: string;
897
+ prompt: string;
898
+ }
899
+
454
900
  async function requestRemoteCompaction(
455
901
  endpoint: string,
456
902
  request: RemoteCompactionRequest,
@@ -488,6 +934,7 @@ export interface SummaryOptions {
488
934
  promptOverride?: string;
489
935
  extraContext?: string[];
490
936
  remoteEndpoint?: string;
937
+ remoteInstructions?: string;
491
938
  }
492
939
 
493
940
  export async function generateSummary(
@@ -543,7 +990,7 @@ export async function generateSummary(
543
990
  const response = await completeSimple(
544
991
  model,
545
992
  { systemPrompt: SUMMARIZATION_SYSTEM_PROMPT, messages: summarizationMessages },
546
- { maxTokens, signal, apiKey, reasoning: "high" },
993
+ { maxTokens, signal, apiKey, reasoning: Effort.High },
547
994
  );
548
995
 
549
996
  if (response.stopReason === "error") {
@@ -592,7 +1039,7 @@ async function generateShortSummary(
592
1039
  systemPrompt: SUMMARIZATION_SYSTEM_PROMPT,
593
1040
  messages: [{ role: "user", content: [{ type: "text", text: promptText }], timestamp: Date.now() }],
594
1041
  },
595
- { maxTokens, signal, apiKey, reasoning: "high" },
1042
+ { maxTokens, signal, apiKey, reasoning: Effort.High },
596
1043
  );
597
1044
 
598
1045
  if (response.stopReason === "error") {
@@ -623,6 +1070,8 @@ export interface CompactionPreparation {
623
1070
  tokensBefore: number;
624
1071
  /** Summary from previous compaction, for iterative update */
625
1072
  previousSummary?: string;
1073
+ /** Preserved opaque compaction payload from the previous compaction, if any. */
1074
+ previousPreserveData?: Record<string, unknown>;
626
1075
  /** File operations extracted from messagesToSummarize */
627
1076
  fileOps: FileOperations;
628
1077
  /** Compaction settings from settings.jsonl */
@@ -692,12 +1141,18 @@ export function prepareCompaction(
692
1141
  const msg = getMessageFromEntry(pathEntries[i]);
693
1142
  if (msg) recentMessages.push(msg);
694
1143
  }
1144
+ // Nothing to summarize means compaction would be a no-op.
1145
+ if (messagesToSummarize.length === 0 && turnPrefixMessages.length === 0) {
1146
+ return undefined;
1147
+ }
695
1148
 
696
- // Get previous summary for iterative update
1149
+ // Get previous summary and preserved data for iterative updates
697
1150
  let previousSummary: string | undefined;
1151
+ let previousPreserveData: Record<string, unknown> | undefined;
698
1152
  if (prevCompactionIndex >= 0) {
699
1153
  const prevCompaction = pathEntries[prevCompactionIndex] as CompactionEntry;
700
1154
  previousSummary = prevCompaction.summary;
1155
+ previousPreserveData = prevCompaction.preserveData;
701
1156
  }
702
1157
 
703
1158
  // Extract file operations from messages and previous compaction
@@ -718,6 +1173,7 @@ export function prepareCompaction(
718
1173
  isSplitTurn: cutPoint.isSplitTurn,
719
1174
  tokensBefore,
720
1175
  previousSummary,
1176
+ previousPreserveData,
721
1177
  fileOps,
722
1178
  settings,
723
1179
  };
@@ -752,6 +1208,7 @@ export async function compact(
752
1208
  isSplitTurn,
753
1209
  tokensBefore,
754
1210
  previousSummary,
1211
+ previousPreserveData,
755
1212
  fileOps,
756
1213
  settings,
757
1214
  } = preparation;
@@ -759,9 +1216,38 @@ export async function compact(
759
1216
  const summaryOptions: SummaryOptions = {
760
1217
  promptOverride: options?.promptOverride,
761
1218
  extraContext: options?.extraContext,
762
- remoteEndpoint: settings.remoteEndpoint,
1219
+ remoteEndpoint: settings.remoteEnabled === false ? undefined : settings.remoteEndpoint,
1220
+ remoteInstructions: options?.remoteInstructions,
763
1221
  };
764
1222
 
1223
+ let preserveData = withOpenAiRemoteCompactionPreserveData(previousPreserveData, undefined);
1224
+ if (settings.remoteEnabled !== false && shouldUseOpenAiRemoteCompaction(model)) {
1225
+ const previousRemoteCompaction = getPreservedOpenAiRemoteCompactionData(previousPreserveData);
1226
+ const remoteMessages = [...messagesToSummarize, ...turnPrefixMessages, ...recentMessages];
1227
+ const remoteHistory = buildOpenAiNativeHistory(
1228
+ remoteMessages,
1229
+ model,
1230
+ previousRemoteCompaction?.replacementHistory,
1231
+ );
1232
+ if (remoteHistory.length > 0) {
1233
+ try {
1234
+ const remote = await requestOpenAiRemoteCompaction(
1235
+ model,
1236
+ apiKey,
1237
+ remoteHistory,
1238
+ summaryOptions.remoteInstructions ?? SUMMARIZATION_SYSTEM_PROMPT,
1239
+ );
1240
+ preserveData = withOpenAiRemoteCompactionPreserveData(previousPreserveData, remote);
1241
+ } catch (err) {
1242
+ logger.warn("OpenAI remote compaction failed, falling back to local summarization", {
1243
+ error: err instanceof Error ? err.message : String(err),
1244
+ model: model.id,
1245
+ provider: model.provider,
1246
+ });
1247
+ }
1248
+ }
1249
+ }
1250
+
765
1251
  // Generate summaries (can be parallel if both needed) and merge into one
766
1252
  let summary: string;
767
1253
 
@@ -811,7 +1297,7 @@ export async function compact(
811
1297
  settings.reserveTokens,
812
1298
  apiKey,
813
1299
  signal,
814
- { extraContext: options?.extraContext, remoteEndpoint: settings.remoteEndpoint },
1300
+ { extraContext: options?.extraContext, remoteEndpoint: summaryOptions.remoteEndpoint },
815
1301
  );
816
1302
 
817
1303
  // Compute file lists and append to summary
@@ -828,6 +1314,7 @@ export async function compact(
828
1314
  firstKeptEntryId,
829
1315
  tokensBefore,
830
1316
  details: { readFiles, modifiedFiles } as CompactionDetails,
1317
+ preserveData,
831
1318
  };
832
1319
  }
833
1320
 
@@ -857,7 +1344,7 @@ async function generateTurnPrefixSummary(
857
1344
  const response = await completeSimple(
858
1345
  model,
859
1346
  { systemPrompt: SUMMARIZATION_SYSTEM_PROMPT, messages: summarizationMessages },
860
- { maxTokens, signal, apiKey, reasoning: "high" },
1347
+ { maxTokens, signal, apiKey, reasoning: Effort.High },
861
1348
  );
862
1349
 
863
1350
  if (response.stopReason === "error") {