opencode-openai-codex-auth-multi 4.3.0-multiaccount.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +37 -0
- package/README.md +107 -0
- package/assets/opencode-logo-ornate-dark.svg +18 -0
- package/assets/readme-hero.svg +31 -0
- package/config/README.md +110 -0
- package/config/minimal-opencode.json +13 -0
- package/config/opencode-legacy.json +572 -0
- package/config/opencode-modern.json +240 -0
- package/dist/index.d.ts +44 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +666 -0
- package/dist/index.js.map +1 -0
- package/dist/lib/accounts.d.ts +48 -0
- package/dist/lib/accounts.d.ts.map +1 -0
- package/dist/lib/accounts.js +282 -0
- package/dist/lib/accounts.js.map +1 -0
- package/dist/lib/auth/auth.d.ts +43 -0
- package/dist/lib/auth/auth.d.ts.map +1 -0
- package/dist/lib/auth/auth.js +163 -0
- package/dist/lib/auth/auth.js.map +1 -0
- package/dist/lib/auth/browser.d.ts +17 -0
- package/dist/lib/auth/browser.d.ts.map +1 -0
- package/dist/lib/auth/browser.js +76 -0
- package/dist/lib/auth/browser.js.map +1 -0
- package/dist/lib/auth/server.d.ts +10 -0
- package/dist/lib/auth/server.d.ts.map +1 -0
- package/dist/lib/auth/server.js +78 -0
- package/dist/lib/auth/server.js.map +1 -0
- package/dist/lib/cli.d.ts +8 -0
- package/dist/lib/cli.d.ts.map +1 -0
- package/dist/lib/cli.js +44 -0
- package/dist/lib/cli.js.map +1 -0
- package/dist/lib/config.d.ts +17 -0
- package/dist/lib/config.d.ts.map +1 -0
- package/dist/lib/config.js +51 -0
- package/dist/lib/config.js.map +1 -0
- package/dist/lib/constants.d.ts +67 -0
- package/dist/lib/constants.d.ts.map +1 -0
- package/dist/lib/constants.js +67 -0
- package/dist/lib/constants.js.map +1 -0
- package/dist/lib/logger.d.ts +26 -0
- package/dist/lib/logger.d.ts.map +1 -0
- package/dist/lib/logger.js +110 -0
- package/dist/lib/logger.js.map +1 -0
- package/dist/lib/oauth-success.html +712 -0
- package/dist/lib/prompts/codex-opencode-bridge.d.ts +19 -0
- package/dist/lib/prompts/codex-opencode-bridge.d.ts.map +1 -0
- package/dist/lib/prompts/codex-opencode-bridge.js +152 -0
- package/dist/lib/prompts/codex-opencode-bridge.js.map +1 -0
- package/dist/lib/prompts/codex.d.ts +27 -0
- package/dist/lib/prompts/codex.d.ts.map +1 -0
- package/dist/lib/prompts/codex.js +241 -0
- package/dist/lib/prompts/codex.js.map +1 -0
- package/dist/lib/prompts/opencode-codex.d.ts +21 -0
- package/dist/lib/prompts/opencode-codex.d.ts.map +1 -0
- package/dist/lib/prompts/opencode-codex.js +91 -0
- package/dist/lib/prompts/opencode-codex.js.map +1 -0
- package/dist/lib/request/fetch-helpers.d.ts +81 -0
- package/dist/lib/request/fetch-helpers.d.ts.map +1 -0
- package/dist/lib/request/fetch-helpers.js +321 -0
- package/dist/lib/request/fetch-helpers.js.map +1 -0
- package/dist/lib/request/helpers/input-utils.d.ts +6 -0
- package/dist/lib/request/helpers/input-utils.d.ts.map +1 -0
- package/dist/lib/request/helpers/input-utils.js +174 -0
- package/dist/lib/request/helpers/input-utils.js.map +1 -0
- package/dist/lib/request/helpers/model-map.d.ts +28 -0
- package/dist/lib/request/helpers/model-map.d.ts.map +1 -0
- package/dist/lib/request/helpers/model-map.js +109 -0
- package/dist/lib/request/helpers/model-map.js.map +1 -0
- package/dist/lib/request/request-transformer.d.ts +93 -0
- package/dist/lib/request/request-transformer.d.ts.map +1 -0
- package/dist/lib/request/request-transformer.js +403 -0
- package/dist/lib/request/request-transformer.js.map +1 -0
- package/dist/lib/request/response-handler.d.ts +14 -0
- package/dist/lib/request/response-handler.d.ts.map +1 -0
- package/dist/lib/request/response-handler.js +90 -0
- package/dist/lib/request/response-handler.js.map +1 -0
- package/dist/lib/storage.d.ts +23 -0
- package/dist/lib/storage.d.ts.map +1 -0
- package/dist/lib/storage.js +153 -0
- package/dist/lib/storage.js.map +1 -0
- package/dist/lib/types.d.ts +170 -0
- package/dist/lib/types.d.ts.map +1 -0
- package/dist/lib/types.js +2 -0
- package/dist/lib/types.js.map +1 -0
- package/package.json +71 -0
- package/scripts/copy-oauth-success.js +37 -0
- package/scripts/install-opencode-codex-auth.js +193 -0
- package/scripts/test-all-models.sh +260 -0
- package/scripts/validate-model-map.sh +97 -0
package/dist/lib/request/helpers/model-map.js
@@ -0,0 +1,109 @@
/**
 * Model Configuration Map
 *
 * Maps model config IDs to their normalized API model names.
 * Only includes exact config IDs that OpenCode will pass to the plugin.
 */
/**
 * Map of config model IDs to normalized API model names
 *
 * Key: The model ID as specified in opencode.json config
 * Value: The normalized model name to send to the API
 */
export const MODEL_MAP = {
    // ============================================================================
    // GPT-5.1 Codex Models
    // ============================================================================
    "gpt-5.1-codex": "gpt-5.1-codex",
    "gpt-5.1-codex-low": "gpt-5.1-codex",
    "gpt-5.1-codex-medium": "gpt-5.1-codex",
    "gpt-5.1-codex-high": "gpt-5.1-codex",
    // ============================================================================
    // GPT-5.1 Codex Max Models
    // ============================================================================
    "gpt-5.1-codex-max": "gpt-5.1-codex-max",
    "gpt-5.1-codex-max-low": "gpt-5.1-codex-max",
    "gpt-5.1-codex-max-medium": "gpt-5.1-codex-max",
    "gpt-5.1-codex-max-high": "gpt-5.1-codex-max",
    "gpt-5.1-codex-max-xhigh": "gpt-5.1-codex-max",
    // ============================================================================
    // GPT-5.2 Models (supports none/low/medium/high/xhigh per OpenAI API docs)
    // ============================================================================
    "gpt-5.2": "gpt-5.2",
    "gpt-5.2-none": "gpt-5.2",
    "gpt-5.2-low": "gpt-5.2",
    "gpt-5.2-medium": "gpt-5.2",
    "gpt-5.2-high": "gpt-5.2",
    "gpt-5.2-xhigh": "gpt-5.2",
    // ============================================================================
    // GPT-5.2 Codex Models (low/medium/high/xhigh)
    // ============================================================================
    "gpt-5.2-codex": "gpt-5.2-codex",
    "gpt-5.2-codex-low": "gpt-5.2-codex",
    "gpt-5.2-codex-medium": "gpt-5.2-codex",
    "gpt-5.2-codex-high": "gpt-5.2-codex",
    "gpt-5.2-codex-xhigh": "gpt-5.2-codex",
    // ============================================================================
    // GPT-5.1 Codex Mini Models
    // ============================================================================
    "gpt-5.1-codex-mini": "gpt-5.1-codex-mini",
    "gpt-5.1-codex-mini-medium": "gpt-5.1-codex-mini",
    "gpt-5.1-codex-mini-high": "gpt-5.1-codex-mini",
    // ============================================================================
    // GPT-5.1 General Purpose Models (supports none/low/medium/high per OpenAI API docs)
    // ============================================================================
    "gpt-5.1": "gpt-5.1",
    "gpt-5.1-none": "gpt-5.1",
    "gpt-5.1-low": "gpt-5.1",
    "gpt-5.1-medium": "gpt-5.1",
    "gpt-5.1-high": "gpt-5.1",
    "gpt-5.1-chat-latest": "gpt-5.1",
    // ============================================================================
    // GPT-5 Codex Models (LEGACY - maps to gpt-5.1-codex as gpt-5 is being phased out)
    // ============================================================================
    "gpt-5-codex": "gpt-5.1-codex",
    // ============================================================================
    // GPT-5 Codex Mini Models (LEGACY - maps to gpt-5.1-codex-mini)
    // ============================================================================
    "codex-mini-latest": "gpt-5.1-codex-mini",
    "gpt-5-codex-mini": "gpt-5.1-codex-mini",
    "gpt-5-codex-mini-medium": "gpt-5.1-codex-mini",
    "gpt-5-codex-mini-high": "gpt-5.1-codex-mini",
    // ============================================================================
    // GPT-5 General Purpose Models (LEGACY - maps to gpt-5.1 as gpt-5 is being phased out)
    // ============================================================================
    "gpt-5": "gpt-5.1",
    "gpt-5-mini": "gpt-5.1",
    "gpt-5-nano": "gpt-5.1",
};
/**
 * Get normalized model name from config ID
 *
 * @param modelId - Model ID from config (e.g., "gpt-5.1-codex-low")
 * @returns Normalized model name (e.g., "gpt-5.1-codex") or undefined if not found
 */
export function getNormalizedModel(modelId) {
    try {
        // Try direct lookup first
        if (MODEL_MAP[modelId]) {
            return MODEL_MAP[modelId];
        }
        // Try case-insensitive lookup
        const lowerModelId = modelId.toLowerCase();
        const match = Object.keys(MODEL_MAP).find((key) => key.toLowerCase() === lowerModelId);
        return match ? MODEL_MAP[match] : undefined;
    }
    catch {
        return undefined;
    }
}
/**
 * Check if a model ID is in the model map
 *
 * @param modelId - Model ID to check
 * @returns True if model is in the map
 */
export function isKnownModel(modelId) {
    return getNormalizedModel(modelId) !== undefined;
}
//# sourceMappingURL=model-map.js.map
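Not part of the published diff: a minimal usage sketch of the helpers above, assuming the compiled file is imported from the dist path listed in the file summary. The expected values in the comments follow the map and the case-insensitive fallback shown above.

```ts
// Hypothetical usage, not shipped with the package.
import { MODEL_MAP, getNormalizedModel, isKnownModel } from "./dist/lib/request/helpers/model-map.js";

// Exact config IDs resolve directly through MODEL_MAP.
getNormalizedModel("gpt-5.1-codex-low");   // "gpt-5.1-codex"

// A case-insensitive lookup covers differently cased config IDs.
getNormalizedModel("GPT-5.1-Codex-High");  // "gpt-5.1-codex"

// Unknown IDs return undefined, so isKnownModel() can gate custom fallback logic.
isKnownModel("my-custom-model");           // false
Object.keys(MODEL_MAP).length;             // number of supported config IDs
```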
package/dist/lib/request/helpers/model-map.js.map
@@ -0,0 +1 @@
{"version":3,"file":"model-map.js","sourceRoot":"","sources":["../../../../lib/request/helpers/model-map.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH;;;;;GAKG;AACH,MAAM,CAAC,MAAM,SAAS,GAA2B;IACjD,+EAA+E;IAC/E,uBAAuB;IACvB,+EAA+E;IAC9E,eAAe,EAAE,eAAe;IAChC,mBAAmB,EAAE,eAAe;IACpC,sBAAsB,EAAE,eAAe;IACvC,oBAAoB,EAAE,eAAe;IAErC,+EAA+E;IAC/E,2BAA2B;IAC3B,+EAA+E;IAC/E,mBAAmB,EAAE,mBAAmB;IACxC,uBAAuB,EAAE,mBAAmB;IAC5C,0BAA0B,EAAE,mBAAmB;IAC/C,wBAAwB,EAAE,mBAAmB;IAC7C,yBAAyB,EAAE,mBAAmB;IAE9C,+EAA+E;IAC/E,2EAA2E;IAC3E,+EAA+E;IAC/E,SAAS,EAAE,SAAS;IACpB,cAAc,EAAE,SAAS;IACzB,aAAa,EAAE,SAAS;IACxB,gBAAgB,EAAE,SAAS;IAC3B,cAAc,EAAE,SAAS;IACzB,eAAe,EAAE,SAAS;IAE1B,+EAA+E;IAC/E,+CAA+C;IAC/C,+EAA+E;IAC/E,eAAe,EAAE,eAAe;IAChC,mBAAmB,EAAE,eAAe;IACpC,sBAAsB,EAAE,eAAe;IACvC,oBAAoB,EAAE,eAAe;IACrC,qBAAqB,EAAE,eAAe;IAEtC,+EAA+E;IAC/E,4BAA4B;IAC5B,+EAA+E;IAC/E,oBAAoB,EAAE,oBAAoB;IAC1C,2BAA2B,EAAE,oBAAoB;IACjD,yBAAyB,EAAE,oBAAoB;IAE/C,+EAA+E;IAC/E,qFAAqF;IACrF,+EAA+E;IAC/E,SAAS,EAAE,SAAS;IACpB,cAAc,EAAE,SAAS;IACzB,aAAa,EAAE,SAAS;IACxB,gBAAgB,EAAE,SAAS;IAC3B,cAAc,EAAE,SAAS;IACzB,qBAAqB,EAAE,SAAS;IAEhC,+EAA+E;IAC/E,mFAAmF;IACnF,+EAA+E;IAC/E,aAAa,EAAE,eAAe;IAE9B,+EAA+E;IAC/E,gEAAgE;IAChE,+EAA+E;IAC/E,mBAAmB,EAAE,oBAAoB;IACzC,kBAAkB,EAAE,oBAAoB;IACxC,yBAAyB,EAAE,oBAAoB;IAC/C,uBAAuB,EAAE,oBAAoB;IAE7C,+EAA+E;IAC/E,uFAAuF;IACvF,+EAA+E;IAC/E,OAAO,EAAE,SAAS;IAClB,YAAY,EAAE,SAAS;IACvB,YAAY,EAAE,SAAS;CACvB,CAAC;AAEF;;;;;GAKG;AACH,MAAM,UAAU,kBAAkB,CAAC,OAAe;IACjD,IAAI,CAAC;QACJ,0BAA0B;QAC1B,IAAI,SAAS,CAAC,OAAO,CAAC,EAAE,CAAC;YACxB,OAAO,SAAS,CAAC,OAAO,CAAC,CAAC;QAC3B,CAAC;QAED,8BAA8B;QAC9B,MAAM,YAAY,GAAG,OAAO,CAAC,WAAW,EAAE,CAAC;QAC3C,MAAM,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,IAAI,CACxC,CAAC,GAAG,EAAE,EAAE,CAAC,GAAG,CAAC,WAAW,EAAE,KAAK,YAAY,CAC3C,CAAC;QAEF,OAAO,KAAK,CAAC,CAAC,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;IAC7C,CAAC;IAAC,MAAM,CAAC;QACR,OAAO,SAAS,CAAC;IAClB,CAAC;AACF,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,YAAY,CAAC,OAAe;IAC3C,OAAO,kBAAkB,CAAC,OAAO,CAAC,KAAK,SAAS,CAAC;AAClD,CAAC"}
package/dist/lib/request/request-transformer.d.ts
@@ -0,0 +1,93 @@
import type { ConfigOptions, InputItem, ReasoningConfig, RequestBody, UserConfig } from "../types.js";
export { isOpenCodeSystemPrompt, filterOpenCodeSystemPromptsWithCachedPrompt, } from "./helpers/input-utils.js";
/**
 * Normalize model name to Codex-supported variants
 *
 * Uses explicit model map for known models, with fallback pattern matching
 * for unknown/custom model names.
 *
 * @param model - Original model name (e.g., "gpt-5.1-codex-low", "openai/gpt-5-codex")
 * @returns Normalized model name (e.g., "gpt-5.1-codex", "gpt-5-codex")
 */
export declare function normalizeModel(model: string | undefined): string;
/**
 * Extract configuration for a specific model
 * Merges global options with model-specific options (model-specific takes precedence)
 * @param modelName - Model name (e.g., "gpt-5-codex")
 * @param userConfig - Full user configuration object
 * @returns Merged configuration for this model
 */
export declare function getModelConfig(modelName: string, userConfig?: UserConfig): ConfigOptions;
/**
 * Configure reasoning parameters based on model variant and user config
 *
 * NOTE: This plugin follows Codex CLI defaults instead of opencode defaults because:
 * - We're accessing the ChatGPT backend API (not OpenAI Platform API)
 * - opencode explicitly excludes gpt-5-codex from automatic reasoning configuration
 * - Codex CLI has been thoroughly tested against this backend
 *
 * @param originalModel - Original model name before normalization
 * @param userConfig - User configuration object
 * @returns Reasoning configuration
 */
export declare function getReasoningConfig(modelName: string | undefined, userConfig?: ConfigOptions): ReasoningConfig;
/**
 * Filter input array for stateless Codex API (store: false)
 *
 * Two transformations needed:
 * 1. Remove AI SDK-specific items (not supported by Codex API)
 * 2. Strip IDs from all remaining items (stateless mode)
 *
 * AI SDK constructs to REMOVE (not in OpenAI Responses API spec):
 * - type: "item_reference" - AI SDK uses this for server-side state lookup
 *
 * Items to KEEP (strip IDs):
 * - type: "message" - Conversation messages (provides context to LLM)
 * - type: "function_call" - Tool calls from conversation
 * - type: "function_call_output" - Tool results from conversation
 *
 * Context is maintained through:
 * - Full message history (without IDs)
 * - reasoning.encrypted_content (for reasoning continuity)
 *
 * @param input - Original input array from OpenCode/AI SDK
 * @returns Filtered input array compatible with Codex API
 */
export declare function filterInput(input: InputItem[] | undefined): InputItem[] | undefined;
/**
 * Filter out OpenCode system prompts from input
 * Used in CODEX_MODE to replace OpenCode prompts with Codex-OpenCode bridge
 * @param input - Input array
 * @returns Input array without OpenCode system prompts
 */
export declare function filterOpenCodeSystemPrompts(input: InputItem[] | undefined): Promise<InputItem[] | undefined>;
/**
 * Add Codex-OpenCode bridge message to input if tools are present
 * @param input - Input array
 * @param hasTools - Whether tools are present in request
 * @returns Input array with bridge message prepended if needed
 */
export declare function addCodexBridgeMessage(input: InputItem[] | undefined, hasTools: boolean): InputItem[] | undefined;
/**
 * Add tool remapping message to input if tools are present
 * @param input - Input array
 * @param hasTools - Whether tools are present in request
 * @returns Input array with tool remap message prepended if needed
 */
export declare function addToolRemapMessage(input: InputItem[] | undefined, hasTools: boolean): InputItem[] | undefined;
/**
 * Transform request body for Codex API
 *
 * NOTE: Configuration follows Codex CLI patterns instead of opencode defaults:
 * - opencode sets textVerbosity="low" for gpt-5, but Codex CLI uses "medium"
 * - opencode excludes gpt-5-codex from reasoning configuration
 * - This plugin uses store=false (stateless), requiring encrypted reasoning content
 *
 * @param body - Original request body
 * @param codexInstructions - Codex system instructions
 * @param userConfig - User configuration from loader
 * @param codexMode - Enable CODEX_MODE (bridge prompt instead of tool remap) - defaults to true
 * @returns Transformed request body
 */
export declare function transformRequestBody(body: RequestBody, codexInstructions: string, userConfig?: UserConfig, codexMode?: boolean): Promise<RequestBody>;
//# sourceMappingURL=request-transformer.d.ts.map
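Not part of the published diff: a short sketch of how the declared getModelConfig merge behaves, assuming the UserConfig shape ({ global, models[...].options }) used by the implementation further down in this diff. The option names are illustrative.

```ts
// Hypothetical usage, not shipped with the package.
import { getModelConfig } from "./dist/lib/request/request-transformer.js";

const userConfig = {
    global: { reasoningEffort: "medium", textVerbosity: "medium" },
    models: {
        "gpt-5.1-codex-high": { options: { reasoningEffort: "high" } },
    },
};

// Model-specific options override global options for that config key only.
getModelConfig("gpt-5.1-codex-high", userConfig);
// → { reasoningEffort: "high", textVerbosity: "medium" }
getModelConfig("gpt-5.2", userConfig);
// → { reasoningEffort: "medium", textVerbosity: "medium" }
```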
package/dist/lib/request/request-transformer.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"request-transformer.d.ts","sourceRoot":"","sources":["../../../lib/request/request-transformer.ts"],"names":[],"mappings":"AASA,OAAO,KAAK,EACX,aAAa,EACb,SAAS,EACT,eAAe,EACf,WAAW,EACX,UAAU,EACV,MAAM,aAAa,CAAC;AAErB,OAAO,EACN,sBAAsB,EACtB,2CAA2C,GAC3C,MAAM,0BAA0B,CAAC;AAElC;;;;;;;;GAQG;AACH,wBAAgB,cAAc,CAAC,KAAK,EAAE,MAAM,GAAG,SAAS,GAAG,MAAM,CAiFhE;AAED;;;;;;GAMG;AACH,wBAAgB,cAAc,CAC7B,SAAS,EAAE,MAAM,EACjB,UAAU,GAAE,UAAuC,GACjD,aAAa,CAMf;AAiDD;;;;;;;;;;;GAWG;AACH,wBAAgB,kBAAkB,CACjC,SAAS,EAAE,MAAM,GAAG,SAAS,EAC7B,UAAU,GAAE,aAAkB,GAC5B,eAAe,CA2FjB;AAED;;;;;;;;;;;;;;;;;;;;;GAqBG;AACH,wBAAgB,WAAW,CAC1B,KAAK,EAAE,SAAS,EAAE,GAAG,SAAS,GAC5B,SAAS,EAAE,GAAG,SAAS,CAmBzB;AAED;;;;;GAKG;AACH,wBAAsB,2BAA2B,CAChD,KAAK,EAAE,SAAS,EAAE,GAAG,SAAS,GAC5B,OAAO,CAAC,SAAS,EAAE,GAAG,SAAS,CAAC,CAalC;AAED;;;;;GAKG;AACH,wBAAgB,qBAAqB,CACpC,KAAK,EAAE,SAAS,EAAE,GAAG,SAAS,EAC9B,QAAQ,EAAE,OAAO,GACf,SAAS,EAAE,GAAG,SAAS,CAezB;AAED;;;;;GAKG;AACH,wBAAgB,mBAAmB,CAClC,KAAK,EAAE,SAAS,EAAE,GAAG,SAAS,EAC9B,QAAQ,EAAE,OAAO,GACf,SAAS,EAAE,GAAG,SAAS,CAezB;AAED;;;;;;;;;;;;;GAaG;AACH,wBAAsB,oBAAoB,CACzC,IAAI,EAAE,WAAW,EACjB,iBAAiB,EAAE,MAAM,EACzB,UAAU,GAAE,UAAuC,EACnD,SAAS,UAAO,GACd,OAAO,CAAC,WAAW,CAAC,CAwGtB"}
package/dist/lib/request/request-transformer.js
@@ -0,0 +1,403 @@
import { logDebug, logWarn } from "../logger.js";
import { TOOL_REMAP_MESSAGE } from "../prompts/codex.js";
import { CODEX_OPENCODE_BRIDGE } from "../prompts/codex-opencode-bridge.js";
import { getOpenCodeCodexPrompt } from "../prompts/opencode-codex.js";
import { getNormalizedModel } from "./helpers/model-map.js";
import { filterOpenCodeSystemPromptsWithCachedPrompt, normalizeOrphanedToolOutputs, } from "./helpers/input-utils.js";
export { isOpenCodeSystemPrompt, filterOpenCodeSystemPromptsWithCachedPrompt, } from "./helpers/input-utils.js";
/**
 * Normalize model name to Codex-supported variants
 *
 * Uses explicit model map for known models, with fallback pattern matching
 * for unknown/custom model names.
 *
 * @param model - Original model name (e.g., "gpt-5.1-codex-low", "openai/gpt-5-codex")
 * @returns Normalized model name (e.g., "gpt-5.1-codex", "gpt-5-codex")
 */
export function normalizeModel(model) {
    if (!model)
        return "gpt-5.1";
    // Strip provider prefix if present (e.g., "openai/gpt-5-codex" → "gpt-5-codex")
    const modelId = model.includes("/") ? model.split("/").pop() : model;
    // Try explicit model map first (handles all known model variants)
    const mappedModel = getNormalizedModel(modelId);
    if (mappedModel) {
        return mappedModel;
    }
    // Fallback: Pattern-based matching for unknown/custom model names
    // This preserves backwards compatibility with old verbose names
    // like "GPT 5 Codex Low (ChatGPT Subscription)"
    const normalized = modelId.toLowerCase();
    // Priority order for pattern matching (most specific first):
    // 1. GPT-5.2 Codex (newest codex model)
    if (normalized.includes("gpt-5.2-codex") ||
        normalized.includes("gpt 5.2 codex")) {
        return "gpt-5.2-codex";
    }
    // 2. GPT-5.2 (general purpose)
    if (normalized.includes("gpt-5.2") || normalized.includes("gpt 5.2")) {
        return "gpt-5.2";
    }
    // 3. GPT-5.1 Codex Max
    if (normalized.includes("gpt-5.1-codex-max") ||
        normalized.includes("gpt 5.1 codex max")) {
        return "gpt-5.1-codex-max";
    }
    // 4. GPT-5.1 Codex Mini
    if (normalized.includes("gpt-5.1-codex-mini") ||
        normalized.includes("gpt 5.1 codex mini")) {
        return "gpt-5.1-codex-mini";
    }
    // 5. Legacy Codex Mini
    if (normalized.includes("codex-mini-latest") ||
        normalized.includes("gpt-5-codex-mini") ||
        normalized.includes("gpt 5 codex mini")) {
        return "codex-mini-latest";
    }
    // 6. GPT-5.1 Codex
    if (normalized.includes("gpt-5.1-codex") ||
        normalized.includes("gpt 5.1 codex")) {
        return "gpt-5.1-codex";
    }
    // 7. GPT-5.1 (general-purpose)
    if (normalized.includes("gpt-5.1") || normalized.includes("gpt 5.1")) {
        return "gpt-5.1";
    }
    // 8. GPT-5 Codex family (any variant with "codex")
    if (normalized.includes("codex")) {
        return "gpt-5.1-codex";
    }
    // 9. GPT-5 family (any variant) - default to 5.1 as 5 is being phased out
    if (normalized.includes("gpt-5") || normalized.includes("gpt 5")) {
        return "gpt-5.1";
    }
    // Default fallback - use gpt-5.1 as gpt-5 is being phased out
    return "gpt-5.1";
}
/**
 * Extract configuration for a specific model
 * Merges global options with model-specific options (model-specific takes precedence)
 * @param modelName - Model name (e.g., "gpt-5-codex")
 * @param userConfig - Full user configuration object
 * @returns Merged configuration for this model
 */
export function getModelConfig(modelName, userConfig = { global: {}, models: {} }) {
    const globalOptions = userConfig.global || {};
    const modelOptions = userConfig.models?.[modelName]?.options || {};
    // Model-specific options override global options
    return { ...globalOptions, ...modelOptions };
}
function resolveReasoningConfig(modelName, modelConfig, body) {
    const providerOpenAI = body.providerOptions?.openai;
    const existingEffort = body.reasoning?.effort ?? providerOpenAI?.reasoningEffort;
    const existingSummary = body.reasoning?.summary ?? providerOpenAI?.reasoningSummary;
    const mergedConfig = {
        ...modelConfig,
        ...(existingEffort ? { reasoningEffort: existingEffort } : {}),
        ...(existingSummary ? { reasoningSummary: existingSummary } : {}),
    };
    return getReasoningConfig(modelName, mergedConfig);
}
function resolveTextVerbosity(modelConfig, body) {
    const providerOpenAI = body.providerOptions?.openai;
    return (body.text?.verbosity ??
        providerOpenAI?.textVerbosity ??
        modelConfig.textVerbosity ??
        "medium");
}
function resolveInclude(modelConfig, body) {
    const providerOpenAI = body.providerOptions?.openai;
    const base = body.include ??
        providerOpenAI?.include ??
        modelConfig.include ??
        ["reasoning.encrypted_content"];
    const include = Array.from(new Set(base.filter(Boolean)));
    if (!include.includes("reasoning.encrypted_content")) {
        include.push("reasoning.encrypted_content");
    }
    return include;
}
/**
 * Configure reasoning parameters based on model variant and user config
 *
 * NOTE: This plugin follows Codex CLI defaults instead of opencode defaults because:
 * - We're accessing the ChatGPT backend API (not OpenAI Platform API)
 * - opencode explicitly excludes gpt-5-codex from automatic reasoning configuration
 * - Codex CLI has been thoroughly tested against this backend
 *
 * @param originalModel - Original model name before normalization
 * @param userConfig - User configuration object
 * @returns Reasoning configuration
 */
export function getReasoningConfig(modelName, userConfig = {}) {
    const normalizedName = modelName?.toLowerCase() ?? "";
    // GPT-5.2 Codex is the newest codex model (supports xhigh, but not "none")
    const isGpt52Codex = normalizedName.includes("gpt-5.2-codex") ||
        normalizedName.includes("gpt 5.2 codex");
    // GPT-5.2 general purpose (not codex variant)
    const isGpt52General = (normalizedName.includes("gpt-5.2") || normalizedName.includes("gpt 5.2")) &&
        !isGpt52Codex;
    const isCodexMax = normalizedName.includes("codex-max") ||
        normalizedName.includes("codex max");
    const isCodexMini = normalizedName.includes("codex-mini") ||
        normalizedName.includes("codex mini") ||
        normalizedName.includes("codex_mini") ||
        normalizedName.includes("codex-mini-latest");
    const isCodex = normalizedName.includes("codex") && !isCodexMini;
    const isLightweight = !isCodexMini &&
        (normalizedName.includes("nano") ||
            normalizedName.includes("mini"));
    // GPT-5.1 general purpose (not codex variants) - supports "none" per OpenAI API docs
    const isGpt51General = (normalizedName.includes("gpt-5.1") || normalizedName.includes("gpt 5.1")) &&
        !isCodex &&
        !isCodexMax &&
        !isCodexMini;
    // GPT 5.2, GPT 5.2 Codex, and Codex Max support xhigh reasoning
    const supportsXhigh = isGpt52General || isGpt52Codex || isCodexMax;
    // GPT 5.1 general and GPT 5.2 general support "none" reasoning per:
    // - OpenAI API docs: "gpt-5.1 defaults to none, supports: none, low, medium, high"
    // - Codex CLI: ReasoningEffort enum includes None variant (codex-rs/protocol/src/openai_models.rs)
    // - Codex CLI: docs/config.md lists "none" as valid for model_reasoning_effort
    // - gpt-5.2 (being newer) also supports: none, low, medium, high, xhigh
    // - Codex models (including GPT-5.2 Codex) do NOT support "none"
    const supportsNone = isGpt52General || isGpt51General;
    // Default based on model type (Codex CLI defaults)
    // Note: OpenAI docs say gpt-5.1 defaults to "none", but we default to "medium"
    // for better coding assistance unless user explicitly requests "none"
    const defaultEffort = isCodexMini
        ? "medium"
        : supportsXhigh
            ? "high"
            : isLightweight
                ? "minimal"
                : "medium";
    // Get user-requested effort
    let effort = userConfig.reasoningEffort || defaultEffort;
    if (isCodexMini) {
        if (effort === "minimal" || effort === "low" || effort === "none") {
            effort = "medium";
        }
        if (effort === "xhigh") {
            effort = "high";
        }
        if (effort !== "high" && effort !== "medium") {
            effort = "medium";
        }
    }
    // For models that don't support xhigh, downgrade to high
    if (!supportsXhigh && effort === "xhigh") {
        effort = "high";
    }
    // For models that don't support "none", upgrade to "low"
    // (Codex models don't support "none" - only GPT-5.1 and GPT-5.2 general purpose do)
    if (!supportsNone && effort === "none") {
        effort = "low";
    }
    // Normalize "minimal" to "low" for Codex families
    // Codex CLI presets are low/medium/high (or xhigh for Codex Max / GPT-5.2 Codex)
    if (isCodex && effort === "minimal") {
        effort = "low";
    }
    return {
        effort,
        summary: userConfig.reasoningSummary || "auto", // Changed from "detailed" to match Codex CLI
    };
}
/**
 * Filter input array for stateless Codex API (store: false)
 *
 * Two transformations needed:
 * 1. Remove AI SDK-specific items (not supported by Codex API)
 * 2. Strip IDs from all remaining items (stateless mode)
 *
 * AI SDK constructs to REMOVE (not in OpenAI Responses API spec):
 * - type: "item_reference" - AI SDK uses this for server-side state lookup
 *
 * Items to KEEP (strip IDs):
 * - type: "message" - Conversation messages (provides context to LLM)
 * - type: "function_call" - Tool calls from conversation
 * - type: "function_call_output" - Tool results from conversation
 *
 * Context is maintained through:
 * - Full message history (without IDs)
 * - reasoning.encrypted_content (for reasoning continuity)
 *
 * @param input - Original input array from OpenCode/AI SDK
 * @returns Filtered input array compatible with Codex API
 */
export function filterInput(input) {
    if (!Array.isArray(input))
        return input;
    return input
        .filter((item) => {
        // Remove AI SDK constructs not supported by Codex API
        if (item.type === "item_reference") {
            return false; // AI SDK only - references server state
        }
        return true; // Keep all other items
    })
        .map((item) => {
        // Strip IDs from all items (Codex API stateless mode)
        if (item.id) {
            const { id, ...itemWithoutId } = item;
            return itemWithoutId;
        }
        return item;
    });
}
/**
 * Filter out OpenCode system prompts from input
 * Used in CODEX_MODE to replace OpenCode prompts with Codex-OpenCode bridge
 * @param input - Input array
 * @returns Input array without OpenCode system prompts
 */
export async function filterOpenCodeSystemPrompts(input) {
    if (!Array.isArray(input))
        return input;
    // Fetch cached OpenCode prompt for verification
    let cachedPrompt = null;
    try {
        cachedPrompt = await getOpenCodeCodexPrompt();
    }
    catch {
        // If fetch fails, fallback to text-based detection only
        // This is safe because we still have the "starts with" check
    }
    return filterOpenCodeSystemPromptsWithCachedPrompt(input, cachedPrompt);
}
/**
 * Add Codex-OpenCode bridge message to input if tools are present
 * @param input - Input array
 * @param hasTools - Whether tools are present in request
 * @returns Input array with bridge message prepended if needed
 */
export function addCodexBridgeMessage(input, hasTools) {
    if (!hasTools || !Array.isArray(input))
        return input;
    const bridgeMessage = {
        type: "message",
        role: "developer",
        content: [
            {
                type: "input_text",
                text: CODEX_OPENCODE_BRIDGE,
            },
        ],
    };
    return [bridgeMessage, ...input];
}
/**
 * Add tool remapping message to input if tools are present
 * @param input - Input array
 * @param hasTools - Whether tools are present in request
 * @returns Input array with tool remap message prepended if needed
 */
export function addToolRemapMessage(input, hasTools) {
    if (!hasTools || !Array.isArray(input))
        return input;
    const toolRemapMessage = {
        type: "message",
        role: "developer",
        content: [
            {
                type: "input_text",
                text: TOOL_REMAP_MESSAGE,
            },
        ],
    };
    return [toolRemapMessage, ...input];
}
/**
 * Transform request body for Codex API
 *
 * NOTE: Configuration follows Codex CLI patterns instead of opencode defaults:
 * - opencode sets textVerbosity="low" for gpt-5, but Codex CLI uses "medium"
 * - opencode excludes gpt-5-codex from reasoning configuration
 * - This plugin uses store=false (stateless), requiring encrypted reasoning content
 *
 * @param body - Original request body
 * @param codexInstructions - Codex system instructions
 * @param userConfig - User configuration from loader
 * @param codexMode - Enable CODEX_MODE (bridge prompt instead of tool remap) - defaults to true
 * @returns Transformed request body
 */
export async function transformRequestBody(body, codexInstructions, userConfig = { global: {}, models: {} }, codexMode = true) {
    const originalModel = body.model;
    const normalizedModel = normalizeModel(body.model);
    // Get model-specific configuration using ORIGINAL model name (config key)
    // This allows per-model options like "gpt-5-codex-low" to work correctly
    const lookupModel = originalModel || normalizedModel;
    const modelConfig = getModelConfig(lookupModel, userConfig);
    // Debug: Log which config was resolved
    logDebug(`Model config lookup: "${lookupModel}" → normalized to "${normalizedModel}" for API`, {
        hasModelSpecificConfig: !!userConfig.models?.[lookupModel],
        resolvedConfig: modelConfig,
    });
    // Normalize model name for API call
    body.model = normalizedModel;
    // Codex required fields
    // ChatGPT backend REQUIRES store=false (confirmed via testing)
    body.store = false;
    // Always set stream=true for API - response handling detects original intent
    body.stream = true;
    body.instructions = codexInstructions;
    // Prompt caching relies on the host providing a stable prompt_cache_key
    // (OpenCode passes its session identifier). We no longer synthesize one here.
    // Filter and transform input
    if (body.input && Array.isArray(body.input)) {
        // Debug: Log original input message IDs before filtering
        const originalIds = body.input
            .filter((item) => item.id)
            .map((item) => item.id);
        if (originalIds.length > 0) {
            logDebug(`Filtering ${originalIds.length} message IDs from input:`, originalIds);
        }
        body.input = filterInput(body.input);
        // Debug: Verify all IDs were removed
        const remainingIds = (body.input || [])
            .filter((item) => item.id)
            .map((item) => item.id);
        if (remainingIds.length > 0) {
            logWarn(`WARNING: ${remainingIds.length} IDs still present after filtering:`, remainingIds);
        }
        else if (originalIds.length > 0) {
            logDebug(`Successfully removed all ${originalIds.length} message IDs`);
        }
        if (codexMode) {
            // CODEX_MODE: Remove OpenCode system prompt, add bridge prompt
            body.input = await filterOpenCodeSystemPrompts(body.input);
            body.input = addCodexBridgeMessage(body.input, !!body.tools);
        }
        else {
            // DEFAULT MODE: Keep original behavior with tool remap message
            body.input = addToolRemapMessage(body.input, !!body.tools);
        }
        // Handle orphaned function_call_output items (where function_call was an item_reference that got filtered)
        // Instead of removing orphans (which causes infinite loops as LLM loses tool results),
        // convert them to messages to preserve context while avoiding API errors
        if (body.input) {
            body.input = normalizeOrphanedToolOutputs(body.input);
        }
    }
    // Configure reasoning (prefer existing body/provider options, then config defaults)
    const reasoningConfig = resolveReasoningConfig(normalizedModel, modelConfig, body);
    body.reasoning = {
        ...body.reasoning,
        ...reasoningConfig,
    };
    // Configure text verbosity (support user config)
    // Default: "medium" (matches Codex CLI default for all GPT-5 models)
    body.text = {
        ...body.text,
        verbosity: resolveTextVerbosity(modelConfig, body),
    };
    // Add include for encrypted reasoning content
    // Default: ["reasoning.encrypted_content"] (required for stateless operation with store=false)
    // This allows reasoning context to persist across turns without server-side storage
    body.include = resolveInclude(modelConfig, body);
    // Remove unsupported parameters
    body.max_output_tokens = undefined;
    body.max_completion_tokens = undefined;
    return body;
}
//# sourceMappingURL=request-transformer.js.map
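Not part of the published diff: a sketch of the transform end to end, assuming a minimal Responses-style body (the input and tools shapes here are illustrative; only their presence matters to the code above). The expected values in the comments follow the implementation: model normalization, store=false, stream=true, and effort clamping.

```ts
// Hypothetical usage, not shipped with the package.
import { transformRequestBody, getReasoningConfig } from "./dist/lib/request/request-transformer.js";

// Effort clamping per getReasoningConfig above:
getReasoningConfig("gpt-5.1-codex", { reasoningEffort: "xhigh" }); // → { effort: "high", summary: "auto" }
getReasoningConfig("gpt-5.2", { reasoningEffort: "none" });        // → { effort: "none", summary: "auto" }

const body = {
    model: "openai/gpt-5.1-codex-high",
    input: [{ type: "message", role: "user", content: [{ type: "input_text", text: "hello" }] }],
    tools: [{ type: "function", name: "read_file" }],
};
const transformed = await transformRequestBody(body, "<codex instructions>", { global: {}, models: {} });
// transformed.model === "gpt-5.1-codex"; store === false; stream === true;
// include contains "reasoning.encrypted_content"; a developer bridge message is
// prepended because tools are present and codexMode defaults to true.
```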
package/dist/lib/request/request-transformer.js.map
@@ -0,0 +1 @@
{"version":3,"file":"request-transformer.js","sourceRoot":"","sources":["../../../lib/request/request-transformer.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,cAAc,CAAC;AACjD,OAAO,EAAE,kBAAkB,EAAE,MAAM,qBAAqB,CAAC;AACzD,OAAO,EAAE,qBAAqB,EAAE,MAAM,qCAAqC,CAAC;AAC5E,OAAO,EAAE,sBAAsB,EAAE,MAAM,8BAA8B,CAAC;AACtE,OAAO,EAAE,kBAAkB,EAAE,MAAM,wBAAwB,CAAC;AAC5D,OAAO,EACN,2CAA2C,EAC3C,4BAA4B,GAC5B,MAAM,0BAA0B,CAAC;AASlC,OAAO,EACN,sBAAsB,EACtB,2CAA2C,GAC3C,MAAM,0BAA0B,CAAC;AAElC;;;;;;;;GAQG;AACH,MAAM,UAAU,cAAc,CAAC,KAAyB;IACvD,IAAI,CAAC,KAAK;QAAE,OAAO,SAAS,CAAC;IAE7B,gFAAgF;IAChF,MAAM,OAAO,GAAG,KAAK,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,EAAG,CAAC,CAAC,CAAC,KAAK,CAAC;IAEtE,kEAAkE;IAClE,MAAM,WAAW,GAAG,kBAAkB,CAAC,OAAO,CAAC,CAAC;IAChD,IAAI,WAAW,EAAE,CAAC;QACjB,OAAO,WAAW,CAAC;IACpB,CAAC;IAED,kEAAkE;IAClE,gEAAgE;IAChE,gDAAgD;IAChD,MAAM,UAAU,GAAG,OAAO,CAAC,WAAW,EAAE,CAAC;IAEzC,6DAA6D;IAC7D,wCAAwC;IACxC,IACC,UAAU,CAAC,QAAQ,CAAC,eAAe,CAAC;QACpC,UAAU,CAAC,QAAQ,CAAC,eAAe,CAAC,EACnC,CAAC;QACF,OAAO,eAAe,CAAC;IACxB,CAAC;IAED,+BAA+B;IAC/B,IAAI,UAAU,CAAC,QAAQ,CAAC,SAAS,CAAC,IAAI,UAAU,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE,CAAC;QACtE,OAAO,SAAS,CAAC;IAClB,CAAC;IAED,uBAAuB;IACvB,IACC,UAAU,CAAC,QAAQ,CAAC,mBAAmB,CAAC;QACxC,UAAU,CAAC,QAAQ,CAAC,mBAAmB,CAAC,EACvC,CAAC;QACF,OAAO,mBAAmB,CAAC;IAC5B,CAAC;IAED,wBAAwB;IACxB,IACC,UAAU,CAAC,QAAQ,CAAC,oBAAoB,CAAC;QACzC,UAAU,CAAC,QAAQ,CAAC,oBAAoB,CAAC,EACxC,CAAC;QACF,OAAO,oBAAoB,CAAC;IAC7B,CAAC;IAED,uBAAuB;IACvB,IACC,UAAU,CAAC,QAAQ,CAAC,mBAAmB,CAAC;QACxC,UAAU,CAAC,QAAQ,CAAC,kBAAkB,CAAC;QACvC,UAAU,CAAC,QAAQ,CAAC,kBAAkB,CAAC,EACtC,CAAC;QACF,OAAO,mBAAmB,CAAC;IAC5B,CAAC;IAED,mBAAmB;IACnB,IACC,UAAU,CAAC,QAAQ,CAAC,eAAe,CAAC;QACpC,UAAU,CAAC,QAAQ,CAAC,eAAe,CAAC,EACnC,CAAC;QACF,OAAO,eAAe,CAAC;IACxB,CAAC;IAED,+BAA+B;IAC/B,IAAI,UAAU,CAAC,QAAQ,CAAC,SAAS,CAAC,IAAI,UAAU,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE,CAAC;QACtE,OAAO,SAAS,CAAC;IAClB,CAAC;IAED,mDAAmD;IACnD,IAAI,UAAU,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE,CAAC;QAClC,OAAO,eAAe,CAAC;IACxB,CAAC;IAED,0EAA0E;IAC1E,IAAI,UAAU,CAAC,QAAQ,CAAC,OAAO,CAAC,IAAI,UAAU,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE,CAAC;QAClE,OAAO,SAAS,CAAC;IAClB,CAAC;IAED,8DAA8D;IAC9D,OAAO,SAAS,CAAC;AAClB,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,cAAc,CAC7B,SAAiB,EACjB,aAAyB,EAAE,MAAM,EAAE,EAAE,EAAE,MAAM,EAAE,EAAE,EAAE;IAEnD,MAAM,aAAa,GAAG,UAAU,CAAC,MAAM,IAAI,EAAE,CAAC;IAC9C,MAAM,YAAY,GAAG,UAAU,CAAC,MAAM,EAAE,CAAC,SAAS,CAAC,EAAE,OAAO,IAAI,EAAE,CAAC;IAEnE,iDAAiD;IACjD,OAAO,EAAE,GAAG,aAAa,EAAE,GAAG,YAAY,EAAE,CAAC;AAC9C,CAAC;AAED,SAAS,sBAAsB,CAC9B,SAAiB,EACjB,WAA0B,EAC1B,IAAiB;IAEjB,MAAM,cAAc,GAAG,IAAI,CAAC,eAAe,EAAE,MAAM,CAAC;IACpD,MAAM,cAAc,GACnB,IAAI,CAAC,SAAS,EAAE,MAAM,IAAI,cAAc,EAAE,eAAe,CAAC;IAC3D,MAAM,eAAe,GACpB,IAAI,CAAC,SAAS,EAAE,OAAO,IAAI,cAAc,EAAE,gBAAgB,CAAC;IAE7D,MAAM,YAAY,GAAkB;QACnC,GAAG,WAAW;QACd,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,eAAe,EAAE,cAAc,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;QAC9D,GAAG,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE,gBAAgB,EAAE,eAAe,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;KACjE,CAAC;IAEF,OAAO,kBAAkB,CAAC,SAAS,EAAE,YAAY,CAAC,CAAC;AACpD,CAAC;AAED,SAAS,oBAAoB,CAC5B,WAA0B,EAC1B,IAAiB;IAEjB,MAAM,cAAc,GAAG,IAAI,CAAC,eAAe,EAAE,MAAM,CAAC;IACpD,OAAO,CACN,IAAI,CAAC,IAAI,EAAE,SAAS;QACpB,cAAc,EAAE,aAAa;QAC7B,WAAW,CAAC,aAAa;QACzB,QAAQ,CACR,CAAC;AACH,CAAC;AAED,SAAS,cAAc,CAAC,WAA0B,EAAE,IAAiB;IACpE,MAAM,cAAc,GAAG,IAAI,CAAC,eAAe,EAAE,MAAM,CAAC;IACpD,MAAM,IAAI,GACT,IAAI,CAAC,OAAO;QACZ,cAAc,EAAE,OAAO;QACvB,WAAW,CAAC,OAAO;QACnB,CAAC,6BAA6B,CAAC,CAAC;IACjC,MAAM,OAAO,GAAG,KAAK,CAAC,IAAI,CAAC,IAAI,GAAG,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;IAC1D,IAAI,CAAC,OAAO,CAAC,QA
AQ,CAAC,6BAA6B,CAAC,EAAE,CAAC;QACtD,OAAO,CAAC,IAAI,CAAC,6BAA6B,CAAC,CAAC;IAC7C,CAAC;IACD,OAAO,OAAO,CAAC;AAChB,CAAC;AAED;;;;;;;;;;;GAWG;AACH,MAAM,UAAU,kBAAkB,CACjC,SAA6B,EAC7B,aAA4B,EAAE;IAE9B,MAAM,cAAc,GAAG,SAAS,EAAE,WAAW,EAAE,IAAI,EAAE,CAAC;IAEtD,2EAA2E;IAC3E,MAAM,YAAY,GACjB,cAAc,CAAC,QAAQ,CAAC,eAAe,CAAC;QACxC,cAAc,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC;IAE1C,8CAA8C;IAC9C,MAAM,cAAc,GACnB,CAAC,cAAc,CAAC,QAAQ,CAAC,SAAS,CAAC,IAAI,cAAc,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC;QAC1E,CAAC,YAAY,CAAC;IACf,MAAM,UAAU,GACf,cAAc,CAAC,QAAQ,CAAC,WAAW,CAAC;QACpC,cAAc,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC;IACtC,MAAM,WAAW,GAChB,cAAc,CAAC,QAAQ,CAAC,YAAY,CAAC;QACrC,cAAc,CAAC,QAAQ,CAAC,YAAY,CAAC;QACrC,cAAc,CAAC,QAAQ,CAAC,YAAY,CAAC;QACrC,cAAc,CAAC,QAAQ,CAAC,mBAAmB,CAAC,CAAC;IAC9C,MAAM,OAAO,GAAG,cAAc,CAAC,QAAQ,CAAC,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC;IACjE,MAAM,aAAa,GAClB,CAAC,WAAW;QACZ,CAAC,cAAc,CAAC,QAAQ,CAAC,MAAM,CAAC;YAC/B,cAAc,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;IAEnC,qFAAqF;IACrF,MAAM,cAAc,GACnB,CAAC,cAAc,CAAC,QAAQ,CAAC,SAAS,CAAC,IAAI,cAAc,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC;QAC1E,CAAC,OAAO;QACR,CAAC,UAAU;QACX,CAAC,WAAW,CAAC;IAEd,gEAAgE;IAChE,MAAM,aAAa,GAAG,cAAc,IAAI,YAAY,IAAI,UAAU,CAAC;IAEnE,oEAAoE;IACpE,mFAAmF;IACnF,mGAAmG;IACnG,+EAA+E;IAC/E,wEAAwE;IACxE,iEAAiE;IACjE,MAAM,YAAY,GAAG,cAAc,IAAI,cAAc,CAAC;IAEtD,mDAAmD;IACnD,+EAA+E;IAC/E,sEAAsE;IACtE,MAAM,aAAa,GAA8B,WAAW;QAC3D,CAAC,CAAC,QAAQ;QACV,CAAC,CAAC,aAAa;YACd,CAAC,CAAC,MAAM;YACR,CAAC,CAAC,aAAa;gBACd,CAAC,CAAC,SAAS;gBACX,CAAC,CAAC,QAAQ,CAAC;IAEd,4BAA4B;IAC5B,IAAI,MAAM,GAAG,UAAU,CAAC,eAAe,IAAI,aAAa,CAAC;IAEzD,IAAI,WAAW,EAAE,CAAC;QACjB,IAAI,MAAM,KAAK,SAAS,IAAI,MAAM,KAAK,KAAK,IAAI,MAAM,KAAK,MAAM,EAAE,CAAC;YACnE,MAAM,GAAG,QAAQ,CAAC;QACnB,CAAC;QACD,IAAI,MAAM,KAAK,OAAO,EAAE,CAAC;YACxB,MAAM,GAAG,MAAM,CAAC;QACjB,CAAC;QACD,IAAI,MAAM,KAAK,MAAM,IAAI,MAAM,KAAK,QAAQ,EAAE,CAAC;YAC9C,MAAM,GAAG,QAAQ,CAAC;QACnB,CAAC;IACF,CAAC;IAED,yDAAyD;IACzD,IAAI,CAAC,aAAa,IAAI,MAAM,KAAK,OAAO,EAAE,CAAC;QAC1C,MAAM,GAAG,MAAM,CAAC;IACjB,CAAC;IAED,yDAAyD;IACzD,oFAAoF;IACpF,IAAI,CAAC,YAAY,IAAI,MAAM,KAAK,MAAM,EAAE,CAAC;QACxC,MAAM,GAAG,KAAK,CAAC;IAChB,CAAC;IAED,kDAAkD;IAClD,iFAAiF;IACjF,IAAI,OAAO,IAAI,MAAM,KAAK,SAAS,EAAE,CAAC;QACrC,MAAM,GAAG,KAAK,CAAC;IAChB,CAAC;IAED,OAAO;QACN,MAAM;QACN,OAAO,EAAE,UAAU,CAAC,gBAAgB,IAAI,MAAM,EAAE,6CAA6C;KAC7F,CAAC;AACH,CAAC;AAED;;;;;;;;;;;;;;;;;;;;;GAqBG;AACH,MAAM,UAAU,WAAW,CAC1B,KAA8B;IAE9B,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC;QAAE,OAAO,KAAK,CAAC;IAExC,OAAO,KAAK;SACV,MAAM,CAAC,CAAC,IAAI,EAAE,EAAE;QAChB,sDAAsD;QACtD,IAAI,IAAI,CAAC,IAAI,KAAK,gBAAgB,EAAE,CAAC;YACpC,OAAO,KAAK,CAAC,CAAC,wCAAwC;QACvD,CAAC;QACD,OAAO,IAAI,CAAC,CAAC,uBAAuB;IACrC,CAAC,CAAC;SACD,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE;QACb,sDAAsD;QACtD,IAAI,IAAI,CAAC,EAAE,EAAE,CAAC;YACb,MAAM,EAAE,EAAE,EAAE,GAAG,aAAa,EAAE,GAAG,IAAI,CAAC;YACtC,OAAO,aAA0B,CAAC;QACnC,CAAC;QACD,OAAO,IAAI,CAAC;IACb,CAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;;GAKG;AACH,MAAM,CAAC,KAAK,UAAU,2BAA2B,CAChD,KAA8B;IAE9B,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC;QAAE,OAAO,KAAK,CAAC;IAExC,gDAAgD;IAChD,IAAI,YAAY,GAAkB,IAAI,CAAC;IACvC,IAAI,CAAC;QACJ,YAAY,GAAG,MAAM,sBAAsB,EAAE,CAAC;IAC/C,CAAC;IAAC,MAAM,CAAC;QACR,wDAAwD;QACxD,6DAA6D;IAC9D,CAAC;IAED,OAAO,2CAA2C,CAAC,KAAK,EAAE,YAAY,CAAC,CAAC;AACzE,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,qBAAqB,CACpC,KAA8B,EAC9B,QAAiB;IAEjB,IAAI,CAAC,QAAQ,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC;QAAE,OAAO,KAAK,CAAC;IAErD,MAAM,aAAa,GAAc;QAChC,IAAI,EAAE,SAAS;QACf,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE;YACR;gBACC,IAAI,EAAE,YAAY;gBAClB,IAAI,EAAE,qBAAqB;aAC3B;SACD;KACD,CAAC;IAEF,OAAO,CAAC,aAAa,EAAE,GAAG,KAAK,CAAC,CAAC;AAClC,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,mBAAmB,CAClC,KAA8B,EAC9B,QAAi
B;IAEjB,IAAI,CAAC,QAAQ,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC;QAAE,OAAO,KAAK,CAAC;IAErD,MAAM,gBAAgB,GAAc;QACnC,IAAI,EAAE,SAAS;QACf,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE;YACR;gBACC,IAAI,EAAE,YAAY;gBAClB,IAAI,EAAE,kBAAkB;aACxB;SACD;KACD,CAAC;IAEF,OAAO,CAAC,gBAAgB,EAAE,GAAG,KAAK,CAAC,CAAC;AACrC,CAAC;AAED;;;;;;;;;;;;;GAaG;AACH,MAAM,CAAC,KAAK,UAAU,oBAAoB,CACzC,IAAiB,EACjB,iBAAyB,EACzB,aAAyB,EAAE,MAAM,EAAE,EAAE,EAAE,MAAM,EAAE,EAAE,EAAE,EACnD,SAAS,GAAG,IAAI;IAEhB,MAAM,aAAa,GAAG,IAAI,CAAC,KAAK,CAAC;IACjC,MAAM,eAAe,GAAG,cAAc,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;IAEnD,0EAA0E;IAC1E,yEAAyE;IACzE,MAAM,WAAW,GAAG,aAAa,IAAI,eAAe,CAAC;IACrD,MAAM,WAAW,GAAG,cAAc,CAAC,WAAW,EAAE,UAAU,CAAC,CAAC;IAE5D,uCAAuC;IACvC,QAAQ,CACP,yBAAyB,WAAW,sBAAsB,eAAe,WAAW,EACpF;QACC,sBAAsB,EAAE,CAAC,CAAC,UAAU,CAAC,MAAM,EAAE,CAAC,WAAW,CAAC;QAC1D,cAAc,EAAE,WAAW;KAC3B,CACD,CAAC;IAEF,oCAAoC;IACpC,IAAI,CAAC,KAAK,GAAG,eAAe,CAAC;IAE7B,wBAAwB;IACxB,+DAA+D;IAC/D,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;IACnB,6EAA6E;IAC7E,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC;IACnB,IAAI,CAAC,YAAY,GAAG,iBAAiB,CAAC;IAEtC,wEAAwE;IACxE,8EAA8E;IAE9E,6BAA6B;IAC7B,IAAI,IAAI,CAAC,KAAK,IAAI,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC;QAC7C,yDAAyD;QACzD,MAAM,WAAW,GAAG,IAAI,CAAC,KAAK;aAC5B,MAAM,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC;aACzB,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;QACzB,IAAI,WAAW,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YAC5B,QAAQ,CACP,aAAa,WAAW,CAAC,MAAM,0BAA0B,EACzD,WAAW,CACX,CAAC;QACH,CAAC;QAED,IAAI,CAAC,KAAK,GAAG,WAAW,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QAErC,qCAAqC;QACrC,MAAM,YAAY,GAAG,CAAC,IAAI,CAAC,KAAK,IAAI,EAAE,CAAC;aACrC,MAAM,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC;aACzB,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;QACzB,IAAI,YAAY,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YAC7B,OAAO,CACN,YAAY,YAAY,CAAC,MAAM,qCAAqC,EACpE,YAAY,CACZ,CAAC;QACH,CAAC;aAAM,IAAI,WAAW,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YACnC,QAAQ,CAAC,4BAA4B,WAAW,CAAC,MAAM,cAAc,CAAC,CAAC;QACxE,CAAC;QAED,IAAI,SAAS,EAAE,CAAC;YACf,+DAA+D;YAC/D,IAAI,CAAC,KAAK,GAAG,MAAM,2BAA2B,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;YAC3D,IAAI,CAAC,KAAK,GAAG,qBAAqB,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QAC9D,CAAC;aAAM,CAAC;YACP,+DAA+D;YAC/D,IAAI,CAAC,KAAK,GAAG,mBAAmB,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QAC5D,CAAC;QAED,2GAA2G;QAC3G,uFAAuF;QACvF,yEAAyE;QACzE,IAAI,IAAI,CAAC,KAAK,EAAE,CAAC;YAChB,IAAI,CAAC,KAAK,GAAG,4BAA4B,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QACvD,CAAC;IACF,CAAC;IAED,oFAAoF;IACpF,MAAM,eAAe,GAAG,sBAAsB,CAC7C,eAAe,EACf,WAAW,EACX,IAAI,CACJ,CAAC;IACF,IAAI,CAAC,SAAS,GAAG;QAChB,GAAG,IAAI,CAAC,SAAS;QACjB,GAAG,eAAe;KAClB,CAAC;IAEF,iDAAiD;IACjD,qEAAqE;IACrE,IAAI,CAAC,IAAI,GAAG;QACX,GAAG,IAAI,CAAC,IAAI;QACZ,SAAS,EAAE,oBAAoB,CAAC,WAAW,EAAE,IAAI,CAAC;KAClD,CAAC;IAEF,8CAA8C;IAC9C,+FAA+F;IAC/F,oFAAoF;IACpF,IAAI,CAAC,OAAO,GAAG,cAAc,CAAC,WAAW,EAAE,IAAI,CAAC,CAAC;IAEjD,gCAAgC;IAChC,IAAI,CAAC,iBAAiB,GAAG,SAAS,CAAC;IACnC,IAAI,CAAC,qBAAqB,GAAG,SAAS,CAAC;IAEvC,OAAO,IAAI,CAAC;AACb,CAAC"}
package/dist/lib/request/response-handler.d.ts
@@ -0,0 +1,14 @@
/**
 * Convert SSE stream response to JSON for generateText()
 * @param response - Fetch response with SSE stream
 * @param headers - Response headers
 * @returns Response with JSON body
 */
export declare function convertSseToJson(response: Response, headers: Headers): Promise<Response>;
/**
 * Ensure response has content-type header
 * @param headers - Response headers
 * @returns Headers with content-type set
 */
export declare function ensureContentType(headers: Headers): Headers;
//# sourceMappingURL=response-handler.d.ts.map
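Not part of the published diff: a sketch of how a fetch wrapper could use the two declared helpers; the streaming/non-streaming switch is an assumption about the caller, not something this file mandates.

```ts
// Hypothetical usage, not shipped with the package.
import { convertSseToJson, ensureContentType } from "./dist/lib/request/response-handler.js";

async function handleUpstreamResponse(upstream: Response, wantsStream: boolean): Promise<Response> {
    const headers = ensureContentType(new Headers(upstream.headers));
    if (wantsStream) {
        // Caller asked for streaming: pass the SSE body through with fixed-up headers.
        return new Response(upstream.body, { status: upstream.status, headers });
    }
    // Caller used generateText(): collapse the SSE stream into a single JSON response.
    return convertSseToJson(upstream, headers);
}
```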
package/dist/lib/request/response-handler.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"response-handler.d.ts","sourceRoot":"","sources":["../../../lib/request/response-handler.ts"],"names":[],"mappings":"AAiCA;;;;;GAKG;AACH,wBAAsB,gBAAgB,CAAC,QAAQ,EAAE,QAAQ,EAAE,OAAO,EAAE,OAAO,GAAG,OAAO,CAAC,QAAQ,CAAC,CAoD9F;AAED;;;;GAIG;AACH,wBAAgB,iBAAiB,CAAC,OAAO,EAAE,OAAO,GAAG,OAAO,CAQ3D"}