@kjerneverk/execution-openai 1.0.13 → 1.0.14-dev.20260320165526.3c2edde

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -22,7 +22,7 @@ const provider = new OpenAIProvider();
22
22
  // Execute a request
23
23
  const response = await provider.execute(
24
24
  {
25
- model: 'gpt-4o',
25
+ model: 'gpt-5.4',
26
26
  messages: [
27
27
  { role: 'system', content: 'You are helpful.' },
28
28
  { role: 'user', content: 'Hello!' }
@@ -42,10 +42,13 @@ console.log(response.usage); // { inputTokens: X, outputTokens: Y }
42
42
 
43
43
  ## Supported Models
44
44
 
45
- The provider supports all OpenAI models:
46
- - GPT-4 family (gpt-4, gpt-4o, gpt-4-turbo, etc.)
47
- - O-series (o1, o1-preview, o1-mini, o3, etc.)
48
- - GPT-3.5 family
45
+ The provider supports Chat Completions for current OpenAI models, including:
46
+ - GPT family (`gpt-*`). If you omit the model on the request and in options, the provider defaults to **`gpt-5.4`** (current GPT‑5 flagship in the official SDK). You can still pass `gpt-5`, `gpt-5-mini`, `gpt-4o`, etc.
47
+ - Reasoning models (`o1`, `o3`, `o4`)
48
+ - Fine-tuned chat models (`ft:…`)
49
+ - Consumer-style IDs such as `chatgpt-4o-latest` where your key has access
50
+
51
+ It uses `max_completion_tokens` (not the deprecated `max_tokens`) so O-series models receive a valid cap, and it omits `temperature` for reasoning models where the API rejects custom values. `developer` system prompts are sent as the `developer` role per current API guidance.
49
52
 
50
53
  ## API Key
51
54
 
package/dist/index.js CHANGED
@@ -78,6 +78,56 @@ configureSecretGuard({
78
78
  { name: "openai-proj", pattern: /sk-proj-[a-zA-Z0-9_-]+/g, description: "OpenAI project key" }
79
79
  ]
80
80
  });
81
+ function isOpenAIReasoningModel(model) {
82
+ return /^o\d/i.test(model.trim());
83
+ }
84
+ function mapRequestMessagesToOpenAI(request) {
85
+ return request.messages.map((msg) => {
86
+ if (msg.role === "tool") {
87
+ return {
88
+ role: "tool",
89
+ content: typeof msg.content === "string" ? msg.content : JSON.stringify(msg.content),
90
+ tool_call_id: msg.tool_call_id || ""
91
+ };
92
+ }
93
+ if (msg.role === "assistant") {
94
+ const extra = msg;
95
+ if (extra.tool_calls) {
96
+ return {
97
+ role: "assistant",
98
+ content: msg.content,
99
+ tool_calls: extra.tool_calls
100
+ };
101
+ }
102
+ }
103
+ return {
104
+ role: msg.role,
105
+ content: typeof msg.content === "string" ? msg.content : JSON.stringify(msg.content),
106
+ ...msg.name ? { name: msg.name } : {}
107
+ };
108
+ });
109
+ }
110
+ function buildTools(request) {
111
+ if (!request.tools?.length) {
112
+ return void 0;
113
+ }
114
+ return request.tools.map((tool) => ({
115
+ type: "function",
116
+ function: {
117
+ name: tool.name,
118
+ description: tool.description,
119
+ parameters: tool.parameters
120
+ }
121
+ }));
122
+ }
123
+ function appendMaxTokensAndTemperature(model, options, params) {
124
+ if (options.maxTokens != null) {
125
+ params.max_completion_tokens = options.maxTokens;
126
+ }
127
+ if (options.temperature !== void 0 && options.temperature !== null && !isOpenAIReasoningModel(model)) {
128
+ params.temperature = options.temperature;
129
+ }
130
+ }
81
131
  class OpenAIProvider {
82
132
  name = "openai";
83
133
  /**
@@ -85,7 +135,8 @@ class OpenAIProvider {
85
135
  */
86
136
  supportsModel(model) {
87
137
  if (!model) return true;
88
- return model.startsWith("gpt") || model.startsWith("o1") || model.startsWith("o3") || model.startsWith("o4");
138
+ const m = model.toLowerCase();
139
+ return m.startsWith("gpt") || /^o\d/.test(m) || m.startsWith("ft:") || m.startsWith("chatgpt-4o");
89
140
  }
90
141
  /**
91
142
  * Execute a request against OpenAI
@@ -106,51 +157,21 @@ class OpenAIProvider {
106
157
  clientOptions.fetch = createProxyFetch(proxyUrl);
107
158
  }
108
159
  const client = new OpenAI(clientOptions);
109
- const model = options.model || request.model || "gpt-4";
110
- const messages = request.messages.map((msg) => {
111
- if (msg.role === "tool") {
112
- return {
113
- role: "tool",
114
- content: typeof msg.content === "string" ? msg.content : JSON.stringify(msg.content),
115
- tool_call_id: msg.tool_call_id || ""
116
- };
117
- } else if (msg.role === "assistant" && msg.tool_calls) {
118
- return {
119
- role: "assistant",
120
- content: msg.content,
121
- tool_calls: msg.tool_calls
122
- };
123
- } else {
124
- const role = msg.role === "developer" ? "system" : msg.role;
125
- return {
126
- role,
127
- content: typeof msg.content === "string" ? msg.content : JSON.stringify(msg.content),
128
- name: msg.name
129
- };
130
- }
131
- });
132
- let openaiTools;
133
- if (request.tools && request.tools.length > 0) {
134
- openaiTools = request.tools.map((tool) => ({
135
- type: "function",
136
- function: {
137
- name: tool.name,
138
- description: tool.description,
139
- parameters: tool.parameters
140
- }
141
- }));
142
- }
143
- const response = await client.chat.completions.create({
160
+ const model = options.model || request.model || "gpt-5.4";
161
+ const messages = mapRequestMessagesToOpenAI(request);
162
+ const openaiTools = buildTools(request);
163
+ const params = {
144
164
  model,
145
165
  messages,
146
- temperature: options.temperature,
147
- max_tokens: options.maxTokens,
148
- response_format: request.responseFormat,
166
+ ...request.responseFormat != null ? { response_format: request.responseFormat } : {},
149
167
  ...openaiTools ? { tools: openaiTools } : {}
150
- });
168
+ };
169
+ appendMaxTokensAndTemperature(model, options, params);
170
+ const response = await client.chat.completions.create(params);
151
171
  const choice = response.choices[0];
172
+ const assistantMessage = choice.message;
152
173
  return {
153
- content: choice.message.content || "",
174
+ content: assistantMessage.content ?? assistantMessage.refusal ?? "",
154
175
  model: response.model,
155
176
  usage: response.usage ? {
156
177
  inputTokens: response.usage.prompt_tokens,
@@ -188,55 +209,28 @@ class OpenAIProvider {
188
209
  clientOptions.fetch = createProxyFetch(proxyUrl);
189
210
  }
190
211
  const client = new OpenAI(clientOptions);
191
- const model = options.model || request.model || "gpt-4";
192
- const messages = request.messages.map((msg) => {
193
- if (msg.role === "tool") {
194
- return {
195
- role: "tool",
196
- content: typeof msg.content === "string" ? msg.content : JSON.stringify(msg.content),
197
- tool_call_id: msg.tool_call_id || ""
198
- };
199
- } else if (msg.role === "assistant" && msg.tool_calls) {
200
- return {
201
- role: "assistant",
202
- content: msg.content,
203
- tool_calls: msg.tool_calls
204
- };
205
- } else {
206
- const role = msg.role === "developer" ? "system" : msg.role;
207
- return {
208
- role,
209
- content: typeof msg.content === "string" ? msg.content : JSON.stringify(msg.content),
210
- name: msg.name
211
- };
212
- }
213
- });
214
- let openaiTools;
215
- if (request.tools && request.tools.length > 0) {
216
- openaiTools = request.tools.map((tool) => ({
217
- type: "function",
218
- function: {
219
- name: tool.name,
220
- description: tool.description,
221
- parameters: tool.parameters
222
- }
223
- }));
224
- }
225
- const stream = await client.chat.completions.create({
212
+ const model = options.model || request.model || "gpt-5.4";
213
+ const messages = mapRequestMessagesToOpenAI(request);
214
+ const openaiTools = buildTools(request);
215
+ const params = {
226
216
  model,
227
217
  messages,
228
- temperature: options.temperature,
229
- max_tokens: options.maxTokens,
230
218
  stream: true,
231
219
  stream_options: { include_usage: true },
220
+ ...request.responseFormat != null ? { response_format: request.responseFormat } : {},
232
221
  ...openaiTools ? { tools: openaiTools } : {}
233
- });
222
+ };
223
+ appendMaxTokensAndTemperature(model, options, params);
224
+ const stream = await client.chat.completions.create(params);
234
225
  const toolCallsInProgress = /* @__PURE__ */ new Map();
235
226
  for await (const chunk of stream) {
236
227
  const delta = chunk.choices[0]?.delta;
237
228
  if (delta?.content) {
238
229
  yield { type: "text", text: delta.content };
239
230
  }
231
+ if (delta?.refusal) {
232
+ yield { type: "text", text: delta.refusal };
233
+ }
240
234
  if (delta?.tool_calls) {
241
235
  for (const tc of delta.tool_calls) {
242
236
  const index = tc.index;
package/dist/index.js.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"file":"index.js","sources":["../src/proxy.ts","../src/index.ts"],"sourcesContent":["/**\n * Proxy support for OpenAI API requests.\n *\n * When proxy environment variables are set, routes requests through the proxy\n * using undici's ProxyAgent. Respects NO_PROXY/no_proxy for bypass lists and\n * NODE_TLS_REJECT_UNAUTHORIZED for TLS verification.\n */\n\nimport { ProxyAgent, fetch as undiciFetch } from 'undici';\n\n/**\n * Get the proxy URL from environment variables.\n * Checks HTTPS_PROXY, https_proxy, HTTP_PROXY, http_proxy.\n */\nexport function getProxyUrl(): string | undefined {\n return (\n process.env.HTTPS_PROXY ||\n process.env.https_proxy ||\n process.env.HTTP_PROXY ||\n process.env.http_proxy ||\n undefined\n );\n}\n\n/**\n * Read TLS strict mode. Returns false only when NODE_TLS_REJECT_UNAUTHORIZED\n * is explicitly set to '0'.\n */\nexport function getStrictSSL(): boolean {\n return process.env.NODE_TLS_REJECT_UNAUTHORIZED !== '0';\n}\n\n/**\n * Check whether a target URL should bypass the proxy based on NO_PROXY / no_proxy.\n */\nexport function isProxyBypassed(targetUrl: string): boolean {\n const noProxy = process.env.NO_PROXY || process.env.no_proxy;\n if (!noProxy) {\n return false;\n }\n\n let hostname: string;\n try {\n hostname = new URL(targetUrl).hostname.toLowerCase();\n } catch {\n return false;\n }\n\n const entries = noProxy.split(',').map((e) => e.trim().toLowerCase());\n for (const entry of entries) {\n if (!entry) {\n continue;\n }\n if (entry === '*') {\n return true;\n }\n if (hostname === entry) {\n return true;\n }\n const suffix = entry.startsWith('.') ? entry : `.${entry}`;\n if (hostname.endsWith(suffix)) {\n return true;\n }\n }\n return false;\n}\n\n/**\n * Create a fetch implementation that routes requests through an HTTP(S) proxy.\n * Respects TLS verification settings and NO_PROXY bypass lists.\n *\n * @param proxyUrl - The proxy URL (e.g. 
https://proxy.example.com:8080)\n * @returns A fetch function that uses ProxyAgent as the dispatcher\n */\nexport function createProxyFetch(proxyUrl: string): typeof fetch {\n const proxyAgent = new ProxyAgent({\n uri: proxyUrl,\n requestTls: { rejectUnauthorized: getStrictSSL() },\n });\n return ((input: any, init?: any) => {\n const targetUrl = typeof input === 'string'\n ? input\n : input instanceof URL\n ? input.toString()\n : input.url;\n if (isProxyBypassed(targetUrl)) {\n return undiciFetch(input, init);\n }\n return undiciFetch(input, { ...init, dispatcher: proxyAgent });\n }) as any;\n}\n","/**\n * Execution OpenAI Package\n *\n * OpenAI provider implementation for LLM execution.\n *\n * @packageDocumentation\n */\n\nimport OpenAI from 'openai';\nimport { getRedactor } from '@utilarium/offrecord';\nimport { getProxyUrl, createProxyFetch } from './proxy.js';\nimport { \n createSafeError, \n configureErrorSanitizer,\n configureSecretGuard,\n} from '@utilarium/spotclean';\n\n// Register OpenAI API key patterns on module load\nconst redactor = getRedactor();\nredactor.register({\n name: 'openai',\n patterns: [\n /sk-[a-zA-Z0-9]{20,}/g,\n /sk-proj-[a-zA-Z0-9_-]+/g,\n ],\n validator: (key: string) => /^sk-(proj-)?[a-zA-Z0-9_-]{20,}$/.test(key),\n envVar: 'OPENAI_API_KEY',\n description: 'OpenAI API keys',\n});\n\n// Configure spotclean for error sanitization\nconfigureErrorSanitizer({\n enabled: true,\n environment: process.env.NODE_ENV === 'production' ? 
'production' : 'development',\n includeCorrelationId: true,\n sanitizeStackTraces: process.env.NODE_ENV === 'production',\n maxMessageLength: 500,\n});\n\nconfigureSecretGuard({\n enabled: true,\n redactionText: '[REDACTED]',\n preservePartial: false,\n preserveLength: 0,\n customPatterns: [\n { name: 'openai', pattern: /sk-[a-zA-Z0-9]{20,}/g, description: 'OpenAI API key' },\n { name: 'openai-proj', pattern: /sk-proj-[a-zA-Z0-9_-]+/g, description: 'OpenAI project key' },\n ],\n});\n\n// ===== INLINE TYPES (from 'execution' package) =====\n// These types are duplicated here for build independence.\n// When 'execution' is published, these can be imported from there.\n\nexport type Model = string;\n\nexport interface Message {\n role: 'user' | 'assistant' | 'system' | 'developer' | 'tool';\n content: string | string[] | null;\n name?: string;\n}\n\nexport interface ToolParameterSchema {\n type: 'object';\n properties: Record<string, {\n type: string;\n description?: string;\n enum?: string[];\n items?: { type: string };\n default?: any;\n }>;\n required?: string[];\n additionalProperties?: boolean;\n}\n\nexport interface ToolDefinition {\n name: string;\n description: string;\n parameters: ToolParameterSchema;\n}\n\nexport type StreamChunkType = 'text' | 'tool_call_start' | 'tool_call_delta' | 'tool_call_end' | 'usage' | 'done';\n\nexport interface StreamChunk {\n type: StreamChunkType;\n text?: string;\n toolCall?: {\n id?: string;\n index?: number;\n name?: string;\n argumentsDelta?: string;\n };\n usage?: {\n inputTokens: number;\n outputTokens: number;\n };\n}\n\nexport interface Request {\n messages: Message[];\n model: Model;\n responseFormat?: any;\n validator?: any;\n tools?: ToolDefinition[];\n addMessage(message: Message): void;\n}\n\nexport interface ProviderResponse {\n content: string;\n model: string;\n usage?: {\n inputTokens: number;\n outputTokens: number;\n };\n toolCalls?: Array<{\n id: string;\n type: 'function';\n function: {\n name: string;\n 
arguments: string;\n };\n }>;\n}\n\nexport interface ExecutionOptions {\n apiKey?: string;\n model?: string;\n temperature?: number;\n maxTokens?: number;\n timeout?: number;\n retries?: number;\n}\n\nexport interface Provider {\n readonly name: string;\n execute(request: Request, options?: ExecutionOptions): Promise<ProviderResponse>;\n executeStream?(request: Request, options?: ExecutionOptions): AsyncIterable<StreamChunk>;\n supportsModel?(model: Model): boolean;\n}\n\n/**\n * OpenAI Provider implementation\n */\nexport class OpenAIProvider implements Provider {\n readonly name = 'openai';\n\n /**\n * Check if this provider supports a given model\n */\n supportsModel(model: Model): boolean {\n if (!model) return true; // Default to OpenAI\n return (\n model.startsWith('gpt') ||\n model.startsWith('o1') ||\n model.startsWith('o3') ||\n model.startsWith('o4')\n );\n }\n\n /**\n * Execute a request against OpenAI\n */\n async execute(\n request: Request,\n options: ExecutionOptions = {}\n ): Promise<ProviderResponse> {\n const apiKey = options.apiKey || process.env.OPENAI_API_KEY;\n \n if (!apiKey) {\n throw new Error('OpenAI API key is required. Set OPENAI_API_KEY environment variable.');\n }\n\n // Validate key format\n const validation = redactor.validateKey(apiKey, 'openai');\n if (!validation.valid) {\n throw new Error('Invalid OpenAI API key format');\n }\n\n try {\n const clientOptions: ConstructorParameters<typeof OpenAI>[0] = { apiKey };\n const proxyUrl = getProxyUrl();\n if (proxyUrl) {\n clientOptions.fetch = createProxyFetch(proxyUrl);\n }\n const client = new OpenAI(clientOptions);\n\n const model = options.model || request.model || 'gpt-4';\n\n // Convert messages to OpenAI format\n const messages = request.messages.map((msg) => {\n if (msg.role === 'tool') {\n // Tool result message\n return {\n role: 'tool',\n content: typeof msg.content === 'string'\n ? 
msg.content\n : JSON.stringify(msg.content),\n tool_call_id: (msg as any).tool_call_id || '',\n };\n } else if (msg.role === 'assistant' && (msg as any).tool_calls) {\n // Assistant message with tool calls\n return {\n role: 'assistant',\n content: msg.content,\n tool_calls: (msg as any).tool_calls,\n };\n } else {\n const role = msg.role === 'developer' ? 'system' : msg.role;\n return {\n role: role,\n content:\n typeof msg.content === 'string'\n ? msg.content\n : JSON.stringify(msg.content),\n name: msg.name,\n };\n }\n }) as any[];\n\n // Build tools array for OpenAI format\n let openaiTools: OpenAI.ChatCompletionTool[] | undefined;\n if (request.tools && request.tools.length > 0) {\n openaiTools = request.tools.map((tool) => ({\n type: 'function' as const,\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters as unknown as OpenAI.FunctionParameters,\n },\n }));\n }\n\n const response = await client.chat.completions.create({\n model: model,\n messages: messages,\n temperature: options.temperature,\n max_tokens: options.maxTokens,\n response_format: request.responseFormat,\n ...(openaiTools ? { tools: openaiTools } : {}),\n });\n\n const choice = response.choices[0];\n\n return {\n content: choice.message.content || '',\n model: response.model,\n usage: response.usage\n ? 
{\n inputTokens: response.usage.prompt_tokens,\n outputTokens: response.usage.completion_tokens,\n }\n : undefined,\n toolCalls: choice.message.tool_calls\n ?.filter((tc) => tc.type === 'function')\n .map((tc) => ({\n id: tc.id,\n type: 'function' as const,\n function: {\n name: (tc as any).function.name,\n arguments: (tc as any).function.arguments,\n },\n })),\n };\n } catch (error) {\n // Sanitize error to remove any API keys from error messages\n // Use spotclean for comprehensive error sanitization\n throw createSafeError(error as Error, { provider: 'openai' });\n }\n }\n\n /**\n * Execute a request with streaming response\n */\n async *executeStream(\n request: Request,\n options: ExecutionOptions = {}\n ): AsyncIterable<StreamChunk> {\n const apiKey = options.apiKey || process.env.OPENAI_API_KEY;\n \n if (!apiKey) {\n throw new Error('OpenAI API key is required. Set OPENAI_API_KEY environment variable.');\n }\n\n // Validate key format\n const validation = redactor.validateKey(apiKey, 'openai');\n if (!validation.valid) {\n throw new Error('Invalid OpenAI API key format');\n }\n\n try {\n const clientOptions: ConstructorParameters<typeof OpenAI>[0] = { apiKey };\n const proxyUrl = getProxyUrl();\n if (proxyUrl) {\n clientOptions.fetch = createProxyFetch(proxyUrl);\n }\n const client = new OpenAI(clientOptions);\n\n const model = options.model || request.model || 'gpt-4';\n\n // Convert messages to OpenAI format\n const messages = request.messages.map((msg) => {\n if (msg.role === 'tool') {\n return {\n role: 'tool',\n content: typeof msg.content === 'string'\n ? msg.content\n : JSON.stringify(msg.content),\n tool_call_id: (msg as any).tool_call_id || '',\n };\n } else if (msg.role === 'assistant' && (msg as any).tool_calls) {\n return {\n role: 'assistant',\n content: msg.content,\n tool_calls: (msg as any).tool_calls,\n };\n } else {\n const role = msg.role === 'developer' ? 
'system' : msg.role;\n return {\n role: role,\n content:\n typeof msg.content === 'string'\n ? msg.content\n : JSON.stringify(msg.content),\n name: msg.name,\n };\n }\n }) as any[];\n\n // Build tools array\n let openaiTools: OpenAI.ChatCompletionTool[] | undefined;\n if (request.tools && request.tools.length > 0) {\n openaiTools = request.tools.map((tool) => ({\n type: 'function' as const,\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters as unknown as OpenAI.FunctionParameters,\n },\n }));\n }\n\n const stream = await client.chat.completions.create({\n model: model,\n messages: messages,\n temperature: options.temperature,\n max_tokens: options.maxTokens,\n stream: true,\n stream_options: { include_usage: true },\n ...(openaiTools ? { tools: openaiTools } : {}),\n });\n\n // Track tool calls being built\n const toolCallsInProgress: Map<number, { id: string; name: string; arguments: string }> = new Map();\n\n for await (const chunk of stream) {\n const delta = chunk.choices[0]?.delta;\n \n if (delta?.content) {\n yield { type: 'text', text: delta.content };\n }\n\n if (delta?.tool_calls) {\n for (const tc of delta.tool_calls) {\n const index = tc.index;\n \n if (tc.id) {\n // New tool call starting\n toolCallsInProgress.set(index, {\n id: tc.id,\n name: tc.function?.name || '',\n arguments: '',\n });\n yield {\n type: 'tool_call_start',\n toolCall: {\n id: tc.id,\n index,\n name: tc.function?.name,\n },\n };\n }\n\n if (tc.function?.name) {\n const existing = toolCallsInProgress.get(index);\n if (existing) {\n existing.name = tc.function.name;\n } else {\n toolCallsInProgress.set(index, {\n id: tc.id || '',\n name: tc.function.name,\n arguments: '',\n });\n }\n }\n \n if (tc.function?.arguments) {\n const toolCall = toolCallsInProgress.get(index);\n if (toolCall) {\n toolCall.arguments += tc.function.arguments;\n yield {\n type: 'tool_call_delta',\n toolCall: {\n index,\n argumentsDelta: tc.function.arguments,\n },\n 
};\n }\n }\n }\n }\n\n // Check for finish reason to emit tool_call_end\n if (chunk.choices[0]?.finish_reason === 'tool_calls') {\n for (const [index, toolCall] of toolCallsInProgress) {\n yield {\n type: 'tool_call_end',\n toolCall: {\n id: toolCall.id,\n index,\n name: toolCall.name,\n },\n };\n }\n }\n\n // Usage comes at the end\n if (chunk.usage) {\n yield {\n type: 'usage',\n usage: {\n inputTokens: chunk.usage.prompt_tokens,\n outputTokens: chunk.usage.completion_tokens,\n },\n };\n }\n }\n\n yield { type: 'done' };\n } catch (error) {\n throw createSafeError(error as Error, { provider: 'openai' });\n }\n }\n}\n\n/**\n * Create a new OpenAI provider instance\n */\nexport function createOpenAIProvider(): OpenAIProvider {\n return new OpenAIProvider();\n}\n\n/**\n * Package version\n */\nexport const VERSION = '0.0.1';\n\nexport default OpenAIProvider;\n"],"names":["undiciFetch"],"mappings":";;;;AAcO,SAAS,cAAkC;AAC9C,SACI,QAAQ,IAAI,eACZ,QAAQ,IAAI,eACZ,QAAQ,IAAI,cACZ,QAAQ,IAAI,cACZ;AAER;AAMO,SAAS,eAAwB;AACpC,SAAO,QAAQ,IAAI,iCAAiC;AACxD;AAKO,SAAS,gBAAgB,WAA4B;AACxD,QAAM,UAAU,QAAQ,IAAI,YAAY,QAAQ,IAAI;AACpD,MAAI,CAAC,SAAS;AACV,WAAO;AAAA,EACX;AAEA,MAAI;AACJ,MAAI;AACA,eAAW,IAAI,IAAI,SAAS,EAAE,SAAS,YAAA;AAAA,EAC3C,QAAQ;AACJ,WAAO;AAAA,EACX;AAEA,QAAM,UAAU,QAAQ,MAAM,GAAG,EAAE,IAAI,CAAC,MAAM,EAAE,KAAA,EAAO,YAAA,CAAa;AACpE,aAAW,SAAS,SAAS;AACzB,QAAI,CAAC,OAAO;AACR;AAAA,IACJ;AACA,QAAI,UAAU,KAAK;AACf,aAAO;AAAA,IACX;AACA,QAAI,aAAa,OAAO;AACpB,aAAO;AAAA,IACX;AACA,UAAM,SAAS,MAAM,WAAW,GAAG,IAAI,QAAQ,IAAI,KAAK;AACxD,QAAI,SAAS,SAAS,MAAM,GAAG;AAC3B,aAAO;AAAA,IACX;AAAA,EACJ;AACA,SAAO;AACX;AASO,SAAS,iBAAiB,UAAgC;AAC7D,QAAM,aAAa,IAAI,WAAW;AAAA,IAC9B,KAAK;AAAA,IACL,YAAY,EAAE,oBAAoB,aAAA,EAAa;AAAA,EAAE,CACpD;AACD,UAAQ,CAAC,OAAY,SAAe;AAChC,UAAM,YAAY,OAAO,UAAU,WAC7B,QACA,iBAAiB,MACb,MAAM,SAAA,IACN,MAAM;AAChB,QAAI,gBAAgB,SAAS,GAAG;AAC5B,aAAOA,MAAY,OAAO,IAAI;AAAA,IAClC;AACA,WAAOA,MAAY,OAAO,EAAE,GAAG,MAAM,YAAY,YAAY;AAAA,EACjE;AACJ;ACxEA,MAAM,WAAW,YAAA;AACjB,SAAS,SAAS;AAAA,EACd,MAAM;AAAA,EACN,U
AAU;AAAA,IACN;AAAA,IACA;AAAA,EAAA;AAAA,EAEJ,WAAW,CAAC,QAAgB,kCAAkC,KAAK,GAAG;AAAA,EACtE,QAAQ;AAAA,EACR,aAAa;AACjB,CAAC;AAGD,wBAAwB;AAAA,EACpB,SAAS;AAAA,EACT,aAAa,QAAQ,IAAI,aAAa,eAAe,eAAe;AAAA,EACpE,sBAAsB;AAAA,EACtB,qBAAqB,QAAQ,IAAI,aAAa;AAAA,EAC9C,kBAAkB;AACtB,CAAC;AAED,qBAAqB;AAAA,EACjB,SAAS;AAAA,EACT,eAAe;AAAA,EACf,iBAAiB;AAAA,EACjB,gBAAgB;AAAA,EAChB,gBAAgB;AAAA,IACZ,EAAE,MAAM,UAAU,SAAS,wBAAwB,aAAa,iBAAA;AAAA,IAChE,EAAE,MAAM,eAAe,SAAS,2BAA2B,aAAa,qBAAA;AAAA,EAAqB;AAErG,CAAC;AA+FM,MAAM,eAAmC;AAAA,EACnC,OAAO;AAAA;AAAA;AAAA;AAAA,EAKhB,cAAc,OAAuB;AACjC,QAAI,CAAC,MAAO,QAAO;AACnB,WACI,MAAM,WAAW,KAAK,KACtB,MAAM,WAAW,IAAI,KACrB,MAAM,WAAW,IAAI,KACrB,MAAM,WAAW,IAAI;AAAA,EAE7B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,QACF,SACA,UAA4B,IACH;AACzB,UAAM,SAAS,QAAQ,UAAU,QAAQ,IAAI;AAE7C,QAAI,CAAC,QAAQ;AACT,YAAM,IAAI,MAAM,sEAAsE;AAAA,IAC1F;AAGA,UAAM,aAAa,SAAS,YAAY,QAAQ,QAAQ;AACxD,QAAI,CAAC,WAAW,OAAO;AACnB,YAAM,IAAI,MAAM,+BAA+B;AAAA,IACnD;AAEA,QAAI;AACA,YAAM,gBAAyD,EAAE,OAAA;AACjE,YAAM,WAAW,YAAA;AACjB,UAAI,UAAU;AACV,sBAAc,QAAQ,iBAAiB,QAAQ;AAAA,MACnD;AACA,YAAM,SAAS,IAAI,OAAO,aAAa;AAEvC,YAAM,QAAQ,QAAQ,SAAS,QAAQ,SAAS;AAGhD,YAAM,WAAW,QAAQ,SAAS,IAAI,CAAC,QAAQ;AAC3C,YAAI,IAAI,SAAS,QAAQ;AAErB,iBAAO;AAAA,YACH,MAAM;AAAA,YACN,SAAS,OAAO,IAAI,YAAY,WAC1B,IAAI,UACJ,KAAK,UAAU,IAAI,OAAO;AAAA,YAChC,cAAe,IAAY,gBAAgB;AAAA,UAAA;AAAA,QAEnD,WAAW,IAAI,SAAS,eAAgB,IAAY,YAAY;AAE5D,iBAAO;AAAA,YACH,MAAM;AAAA,YACN,SAAS,IAAI;AAAA,YACb,YAAa,IAAY;AAAA,UAAA;AAAA,QAEjC,OAAO;AACH,gBAAM,OAAO,IAAI,SAAS,cAAc,WAAW,IAAI;AACvD,iBAAO;AAAA,YACH;AAAA,YACA,SACI,OAAO,IAAI,YAAY,WACjB,IAAI,UACJ,KAAK,UAAU,IAAI,OAAO;AAAA,YACpC,MAAM,IAAI;AAAA,UAAA;AAAA,QAElB;AAAA,MACJ,CAAC;AAGD,UAAI;AACJ,UAAI,QAAQ,SAAS,QAAQ,MAAM,SAAS,GAAG;AAC3C,sBAAc,QAAQ,MAAM,IAAI,CAAC,UAAU;AAAA,UACvC,MAAM;AAAA,UACN,UAAU;AAAA,YACN,MAAM,KAAK;AAAA,YACX,aAAa,KAAK;AAAA,YAClB,YAAY,KAAK;AAAA,UAAA;AAAA,QACrB,EACF;AAAA,MACN;AAEA,YAAM,WAAW,MAAM,OAAO,KAAK,YAAY,OAAO;AAAA,QAClD;AAAA,QACA;AAAA,QACA,aAAa,QAAQ;AAAA,QACrB,YAAY,QAAQ;AAAA,QACpB,iBAAiB,QAAQ;AAAA,QACzB,GAAI,cAAc,EAAE,OAAO,gBAAgB,
CAAA;AAAA,MAAC,CAC/C;AAED,YAAM,SAAS,SAAS,QAAQ,CAAC;AAEjC,aAAO;AAAA,QACH,SAAS,OAAO,QAAQ,WAAW;AAAA,QACnC,OAAO,SAAS;AAAA,QAChB,OAAO,SAAS,QACV;AAAA,UACE,aAAa,SAAS,MAAM;AAAA,UAC5B,cAAc,SAAS,MAAM;AAAA,QAAA,IAE/B;AAAA,QACN,WAAW,OAAO,QAAQ,YACpB,OAAO,CAAC,OAAO,GAAG,SAAS,UAAU,EACtC,IAAI,CAAC,QAAQ;AAAA,UACV,IAAI,GAAG;AAAA,UACP,MAAM;AAAA,UACN,UAAU;AAAA,YACN,MAAO,GAAW,SAAS;AAAA,YAC3B,WAAY,GAAW,SAAS;AAAA,UAAA;AAAA,QACpC,EACF;AAAA,MAAA;AAAA,IAEd,SAAS,OAAO;AAGZ,YAAM,gBAAgB,OAAgB,EAAE,UAAU,UAAU;AAAA,IAChE;AAAA,EACJ;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,cACH,SACA,UAA4B,IACF;AAC1B,UAAM,SAAS,QAAQ,UAAU,QAAQ,IAAI;AAE7C,QAAI,CAAC,QAAQ;AACT,YAAM,IAAI,MAAM,sEAAsE;AAAA,IAC1F;AAGA,UAAM,aAAa,SAAS,YAAY,QAAQ,QAAQ;AACxD,QAAI,CAAC,WAAW,OAAO;AACnB,YAAM,IAAI,MAAM,+BAA+B;AAAA,IACnD;AAEA,QAAI;AACA,YAAM,gBAAyD,EAAE,OAAA;AACjE,YAAM,WAAW,YAAA;AACjB,UAAI,UAAU;AACV,sBAAc,QAAQ,iBAAiB,QAAQ;AAAA,MACnD;AACA,YAAM,SAAS,IAAI,OAAO,aAAa;AAEvC,YAAM,QAAQ,QAAQ,SAAS,QAAQ,SAAS;AAGhD,YAAM,WAAW,QAAQ,SAAS,IAAI,CAAC,QAAQ;AAC3C,YAAI,IAAI,SAAS,QAAQ;AACrB,iBAAO;AAAA,YACH,MAAM;AAAA,YACN,SAAS,OAAO,IAAI,YAAY,WAC1B,IAAI,UACJ,KAAK,UAAU,IAAI,OAAO;AAAA,YAChC,cAAe,IAAY,gBAAgB;AAAA,UAAA;AAAA,QAEnD,WAAW,IAAI,SAAS,eAAgB,IAAY,YAAY;AAC5D,iBAAO;AAAA,YACH,MAAM;AAAA,YACN,SAAS,IAAI;AAAA,YACb,YAAa,IAAY;AAAA,UAAA;AAAA,QAEjC,OAAO;AACH,gBAAM,OAAO,IAAI,SAAS,cAAc,WAAW,IAAI;AACvD,iBAAO;AAAA,YACH;AAAA,YACA,SACI,OAAO,IAAI,YAAY,WACjB,IAAI,UACJ,KAAK,UAAU,IAAI,OAAO;AAAA,YACpC,MAAM,IAAI;AAAA,UAAA;AAAA,QAElB;AAAA,MACJ,CAAC;AAGD,UAAI;AACJ,UAAI,QAAQ,SAAS,QAAQ,MAAM,SAAS,GAAG;AAC3C,sBAAc,QAAQ,MAAM,IAAI,CAAC,UAAU;AAAA,UACvC,MAAM;AAAA,UACN,UAAU;AAAA,YACN,MAAM,KAAK;AAAA,YACX,aAAa,KAAK;AAAA,YAClB,YAAY,KAAK;AAAA,UAAA;AAAA,QACrB,EACF;AAAA,MACN;AAEA,YAAM,SAAS,MAAM,OAAO,KAAK,YAAY,OAAO;AAAA,QAChD;AAAA,QACA;AAAA,QACA,aAAa,QAAQ;AAAA,QACrB,YAAY,QAAQ;AAAA,QACpB,QAAQ;AAAA,QACR,gBAAgB,EAAE,eAAe,KAAA;AAAA,QACjC,GAAI,cAAc,EAAE,OAAO,gBAAgB,CAAA;AAAA,MAAC,CAC/C;AAGD,YAAM,0CAAwF,IAAA;AAE9F,uBAAiB,SAAS,QAAQ;AAC9B,cAAM,QAAQ,MAAM,QAAQ,CAAC,GAAG;AAEhC,YAAI,OAAO,SAAS;AAChB,gBAAM,EAAE,MAAM,QAAQ
,MAAM,MAAM,QAAA;AAAA,QACtC;AAEA,YAAI,OAAO,YAAY;AACnB,qBAAW,MAAM,MAAM,YAAY;AAC/B,kBAAM,QAAQ,GAAG;AAEjB,gBAAI,GAAG,IAAI;AAEP,kCAAoB,IAAI,OAAO;AAAA,gBAC3B,IAAI,GAAG;AAAA,gBACP,MAAM,GAAG,UAAU,QAAQ;AAAA,gBAC3B,WAAW;AAAA,cAAA,CACd;AACD,oBAAM;AAAA,gBACF,MAAM;AAAA,gBACN,UAAU;AAAA,kBACN,IAAI,GAAG;AAAA,kBACP;AAAA,kBACA,MAAM,GAAG,UAAU;AAAA,gBAAA;AAAA,cACvB;AAAA,YAER;AAEA,gBAAI,GAAG,UAAU,MAAM;AACnB,oBAAM,WAAW,oBAAoB,IAAI,KAAK;AAC9C,kBAAI,UAAU;AACV,yBAAS,OAAO,GAAG,SAAS;AAAA,cAChC,OAAO;AACH,oCAAoB,IAAI,OAAO;AAAA,kBAC3B,IAAI,GAAG,MAAM;AAAA,kBACb,MAAM,GAAG,SAAS;AAAA,kBAClB,WAAW;AAAA,gBAAA,CACd;AAAA,cACL;AAAA,YACJ;AAEA,gBAAI,GAAG,UAAU,WAAW;AACxB,oBAAM,WAAW,oBAAoB,IAAI,KAAK;AAC9C,kBAAI,UAAU;AACV,yBAAS,aAAa,GAAG,SAAS;AAClC,sBAAM;AAAA,kBACF,MAAM;AAAA,kBACN,UAAU;AAAA,oBACN;AAAA,oBACA,gBAAgB,GAAG,SAAS;AAAA,kBAAA;AAAA,gBAChC;AAAA,cAER;AAAA,YACJ;AAAA,UACJ;AAAA,QACJ;AAGA,YAAI,MAAM,QAAQ,CAAC,GAAG,kBAAkB,cAAc;AAClD,qBAAW,CAAC,OAAO,QAAQ,KAAK,qBAAqB;AACjD,kBAAM;AAAA,cACF,MAAM;AAAA,cACN,UAAU;AAAA,gBACN,IAAI,SAAS;AAAA,gBACb;AAAA,gBACA,MAAM,SAAS;AAAA,cAAA;AAAA,YACnB;AAAA,UAER;AAAA,QACJ;AAGA,YAAI,MAAM,OAAO;AACb,gBAAM;AAAA,YACF,MAAM;AAAA,YACN,OAAO;AAAA,cACH,aAAa,MAAM,MAAM;AAAA,cACzB,cAAc,MAAM,MAAM;AAAA,YAAA;AAAA,UAC9B;AAAA,QAER;AAAA,MACJ;AAEA,YAAM,EAAE,MAAM,OAAA;AAAA,IAClB,SAAS,OAAO;AACZ,YAAM,gBAAgB,OAAgB,EAAE,UAAU,UAAU;AAAA,IAChE;AAAA,EACJ;AACJ;AAKO,SAAS,uBAAuC;AACnD,SAAO,IAAI,eAAA;AACf;AAKO,MAAM,UAAU;"}
1
+ {"version":3,"file":"index.js","sources":["../src/proxy.ts","../src/index.ts"],"sourcesContent":["/**\n * Proxy support for OpenAI API requests.\n *\n * When proxy environment variables are set, routes requests through the proxy\n * using undici's ProxyAgent. Respects NO_PROXY/no_proxy for bypass lists and\n * NODE_TLS_REJECT_UNAUTHORIZED for TLS verification.\n */\n\nimport { ProxyAgent, fetch as undiciFetch } from 'undici';\n\n/**\n * Get the proxy URL from environment variables.\n * Checks HTTPS_PROXY, https_proxy, HTTP_PROXY, http_proxy.\n */\nexport function getProxyUrl(): string | undefined {\n return (\n process.env.HTTPS_PROXY ||\n process.env.https_proxy ||\n process.env.HTTP_PROXY ||\n process.env.http_proxy ||\n undefined\n );\n}\n\n/**\n * Read TLS strict mode. Returns false only when NODE_TLS_REJECT_UNAUTHORIZED\n * is explicitly set to '0'.\n */\nexport function getStrictSSL(): boolean {\n return process.env.NODE_TLS_REJECT_UNAUTHORIZED !== '0';\n}\n\n/**\n * Check whether a target URL should bypass the proxy based on NO_PROXY / no_proxy.\n */\nexport function isProxyBypassed(targetUrl: string): boolean {\n const noProxy = process.env.NO_PROXY || process.env.no_proxy;\n if (!noProxy) {\n return false;\n }\n\n let hostname: string;\n try {\n hostname = new URL(targetUrl).hostname.toLowerCase();\n } catch {\n return false;\n }\n\n const entries = noProxy.split(',').map((e) => e.trim().toLowerCase());\n for (const entry of entries) {\n if (!entry) {\n continue;\n }\n if (entry === '*') {\n return true;\n }\n if (hostname === entry) {\n return true;\n }\n const suffix = entry.startsWith('.') ? entry : `.${entry}`;\n if (hostname.endsWith(suffix)) {\n return true;\n }\n }\n return false;\n}\n\n/**\n * Create a fetch implementation that routes requests through an HTTP(S) proxy.\n * Respects TLS verification settings and NO_PROXY bypass lists.\n *\n * @param proxyUrl - The proxy URL (e.g. 
https://proxy.example.com:8080)\n * @returns A fetch function that uses ProxyAgent as the dispatcher\n */\nexport function createProxyFetch(proxyUrl: string): typeof fetch {\n const proxyAgent = new ProxyAgent({\n uri: proxyUrl,\n requestTls: { rejectUnauthorized: getStrictSSL() },\n });\n return ((input: any, init?: any) => {\n const targetUrl = typeof input === 'string'\n ? input\n : input instanceof URL\n ? input.toString()\n : input.url;\n if (isProxyBypassed(targetUrl)) {\n return undiciFetch(input, init);\n }\n return undiciFetch(input, { ...init, dispatcher: proxyAgent });\n }) as any;\n}\n","/**\n * Execution OpenAI Package\n *\n * OpenAI provider implementation for LLM execution.\n *\n * @packageDocumentation\n */\n\nimport OpenAI from 'openai';\nimport { getRedactor } from '@utilarium/offrecord';\nimport { getProxyUrl, createProxyFetch } from './proxy.js';\nimport { \n createSafeError, \n configureErrorSanitizer,\n configureSecretGuard,\n} from '@utilarium/spotclean';\n\n// Register OpenAI API key patterns on module load\nconst redactor = getRedactor();\nredactor.register({\n name: 'openai',\n patterns: [\n /sk-[a-zA-Z0-9]{20,}/g,\n /sk-proj-[a-zA-Z0-9_-]+/g,\n ],\n validator: (key: string) => /^sk-(proj-)?[a-zA-Z0-9_-]{20,}$/.test(key),\n envVar: 'OPENAI_API_KEY',\n description: 'OpenAI API keys',\n});\n\n// Configure spotclean for error sanitization\nconfigureErrorSanitizer({\n enabled: true,\n environment: process.env.NODE_ENV === 'production' ? 
'production' : 'development',\n includeCorrelationId: true,\n sanitizeStackTraces: process.env.NODE_ENV === 'production',\n maxMessageLength: 500,\n});\n\nconfigureSecretGuard({\n enabled: true,\n redactionText: '[REDACTED]',\n preservePartial: false,\n preserveLength: 0,\n customPatterns: [\n { name: 'openai', pattern: /sk-[a-zA-Z0-9]{20,}/g, description: 'OpenAI API key' },\n { name: 'openai-proj', pattern: /sk-proj-[a-zA-Z0-9_-]+/g, description: 'OpenAI project key' },\n ],\n});\n\n// ===== INLINE TYPES (from 'execution' package) =====\n// These types are duplicated here for build independence.\n// When 'execution' is published, these can be imported from there.\n\nexport type Model = string;\n\nexport interface Message {\n role: 'user' | 'assistant' | 'system' | 'developer' | 'tool';\n content: string | string[] | null;\n name?: string;\n}\n\nexport interface ToolParameterSchema {\n type: 'object';\n properties: Record<string, {\n type: string;\n description?: string;\n enum?: string[];\n items?: { type: string };\n default?: any;\n }>;\n required?: string[];\n additionalProperties?: boolean;\n}\n\nexport interface ToolDefinition {\n name: string;\n description: string;\n parameters: ToolParameterSchema;\n}\n\nexport type StreamChunkType = 'text' | 'tool_call_start' | 'tool_call_delta' | 'tool_call_end' | 'usage' | 'done';\n\nexport interface StreamChunk {\n type: StreamChunkType;\n text?: string;\n toolCall?: {\n id?: string;\n index?: number;\n name?: string;\n argumentsDelta?: string;\n };\n usage?: {\n inputTokens: number;\n outputTokens: number;\n };\n}\n\nexport interface Request {\n messages: Message[];\n model: Model;\n responseFormat?: any;\n validator?: any;\n tools?: ToolDefinition[];\n addMessage(message: Message): void;\n}\n\nexport interface ProviderResponse {\n content: string;\n model: string;\n usage?: {\n inputTokens: number;\n outputTokens: number;\n };\n toolCalls?: Array<{\n id: string;\n type: 'function';\n function: {\n name: string;\n 
arguments: string;\n };\n }>;\n}\n\nexport interface ExecutionOptions {\n apiKey?: string;\n model?: string;\n temperature?: number;\n maxTokens?: number;\n timeout?: number;\n retries?: number;\n}\n\n/** O-series / reasoning models reject custom temperature on Chat Completions. */\nfunction isOpenAIReasoningModel(model: string): boolean {\n return /^o\\d/i.test(model.trim());\n}\n\nfunction mapRequestMessagesToOpenAI(request: Request): OpenAI.ChatCompletionMessageParam[] {\n return request.messages.map((msg) => {\n if (msg.role === 'tool') {\n return {\n role: 'tool',\n content: typeof msg.content === 'string'\n ? msg.content\n : JSON.stringify(msg.content),\n tool_call_id: (msg as { tool_call_id?: string }).tool_call_id || '',\n };\n }\n if (msg.role === 'assistant') {\n const extra = msg as unknown as { tool_calls?: unknown };\n if (extra.tool_calls) {\n return {\n role: 'assistant',\n content: msg.content,\n tool_calls: extra.tool_calls,\n } as OpenAI.ChatCompletionMessageParam;\n }\n }\n return {\n role: msg.role,\n content:\n typeof msg.content === 'string'\n ? msg.content\n : JSON.stringify(msg.content),\n ...(msg.name ? 
{ name: msg.name } : {}),\n } as OpenAI.ChatCompletionMessageParam;\n });\n}\n\nfunction buildTools(request: Request): OpenAI.ChatCompletionTool[] | undefined {\n if (!request.tools?.length) {\n return undefined;\n }\n return request.tools.map((tool) => ({\n type: 'function' as const,\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters as unknown as OpenAI.FunctionParameters,\n },\n }));\n}\n\nfunction appendMaxTokensAndTemperature(\n model: string,\n options: ExecutionOptions,\n params: OpenAI.ChatCompletionCreateParams\n): void {\n if (options.maxTokens != null) {\n params.max_completion_tokens = options.maxTokens;\n }\n if (\n options.temperature !== undefined &&\n options.temperature !== null &&\n !isOpenAIReasoningModel(model)\n ) {\n params.temperature = options.temperature;\n }\n}\n\nexport interface Provider {\n readonly name: string;\n execute(request: Request, options?: ExecutionOptions): Promise<ProviderResponse>;\n executeStream?(request: Request, options?: ExecutionOptions): AsyncIterable<StreamChunk>;\n supportsModel?(model: Model): boolean;\n}\n\n/**\n * OpenAI Provider implementation\n */\nexport class OpenAIProvider implements Provider {\n readonly name = 'openai';\n\n /**\n * Check if this provider supports a given model\n */\n supportsModel(model: Model): boolean {\n if (!model) return true; // Default to OpenAI\n const m = model.toLowerCase();\n return (\n m.startsWith('gpt') ||\n /^o\\d/.test(m) ||\n m.startsWith('ft:') ||\n m.startsWith('chatgpt-4o')\n );\n }\n\n /**\n * Execute a request against OpenAI\n */\n async execute(\n request: Request,\n options: ExecutionOptions = {}\n ): Promise<ProviderResponse> {\n const apiKey = options.apiKey || process.env.OPENAI_API_KEY;\n \n if (!apiKey) {\n throw new Error('OpenAI API key is required. 
Set OPENAI_API_KEY environment variable.');\n }\n\n // Validate key format\n const validation = redactor.validateKey(apiKey, 'openai');\n if (!validation.valid) {\n throw new Error('Invalid OpenAI API key format');\n }\n\n try {\n const clientOptions: ConstructorParameters<typeof OpenAI>[0] = { apiKey };\n const proxyUrl = getProxyUrl();\n if (proxyUrl) {\n clientOptions.fetch = createProxyFetch(proxyUrl);\n }\n const client = new OpenAI(clientOptions);\n\n const model = options.model || request.model || 'gpt-5.4';\n\n const messages = mapRequestMessagesToOpenAI(request);\n const openaiTools = buildTools(request);\n\n const params: OpenAI.ChatCompletionCreateParams = {\n model,\n messages,\n ...(request.responseFormat != null ? { response_format: request.responseFormat } : {}),\n ...(openaiTools ? { tools: openaiTools } : {}),\n };\n appendMaxTokensAndTemperature(model, options, params);\n\n const response = await client.chat.completions.create(params);\n\n const choice = response.choices[0];\n const assistantMessage = choice.message;\n\n return {\n content: assistantMessage.content ?? assistantMessage.refusal ?? '',\n model: response.model,\n usage: response.usage\n ? 
{\n inputTokens: response.usage.prompt_tokens,\n outputTokens: response.usage.completion_tokens,\n }\n : undefined,\n toolCalls: choice.message.tool_calls\n ?.filter((tc) => tc.type === 'function')\n .map((tc) => ({\n id: tc.id,\n type: 'function' as const,\n function: {\n name: (tc as any).function.name,\n arguments: (tc as any).function.arguments,\n },\n })),\n };\n } catch (error) {\n // Sanitize error to remove any API keys from error messages\n // Use spotclean for comprehensive error sanitization\n throw createSafeError(error as Error, { provider: 'openai' });\n }\n }\n\n /**\n * Execute a request with streaming response\n */\n async *executeStream(\n request: Request,\n options: ExecutionOptions = {}\n ): AsyncIterable<StreamChunk> {\n const apiKey = options.apiKey || process.env.OPENAI_API_KEY;\n \n if (!apiKey) {\n throw new Error('OpenAI API key is required. Set OPENAI_API_KEY environment variable.');\n }\n\n // Validate key format\n const validation = redactor.validateKey(apiKey, 'openai');\n if (!validation.valid) {\n throw new Error('Invalid OpenAI API key format');\n }\n\n try {\n const clientOptions: ConstructorParameters<typeof OpenAI>[0] = { apiKey };\n const proxyUrl = getProxyUrl();\n if (proxyUrl) {\n clientOptions.fetch = createProxyFetch(proxyUrl);\n }\n const client = new OpenAI(clientOptions);\n\n const model = options.model || request.model || 'gpt-5.4';\n\n const messages = mapRequestMessagesToOpenAI(request);\n const openaiTools = buildTools(request);\n\n const params: OpenAI.ChatCompletionCreateParamsStreaming = {\n model,\n messages,\n stream: true,\n stream_options: { include_usage: true },\n ...(request.responseFormat != null ? { response_format: request.responseFormat } : {}),\n ...(openaiTools ? 
{ tools: openaiTools } : {}),\n };\n appendMaxTokensAndTemperature(model, options, params);\n\n const stream = await client.chat.completions.create(params);\n\n // Track tool calls being built\n const toolCallsInProgress: Map<number, { id: string; name: string; arguments: string }> = new Map();\n\n for await (const chunk of stream) {\n const delta = chunk.choices[0]?.delta;\n \n if (delta?.content) {\n yield { type: 'text', text: delta.content };\n }\n\n if (delta?.refusal) {\n yield { type: 'text', text: delta.refusal };\n }\n\n if (delta?.tool_calls) {\n for (const tc of delta.tool_calls) {\n const index = tc.index;\n \n if (tc.id) {\n // New tool call starting\n toolCallsInProgress.set(index, {\n id: tc.id,\n name: tc.function?.name || '',\n arguments: '',\n });\n yield {\n type: 'tool_call_start',\n toolCall: {\n id: tc.id,\n index,\n name: tc.function?.name,\n },\n };\n }\n\n if (tc.function?.name) {\n const existing = toolCallsInProgress.get(index);\n if (existing) {\n existing.name = tc.function.name;\n } else {\n toolCallsInProgress.set(index, {\n id: tc.id || '',\n name: tc.function.name,\n arguments: '',\n });\n }\n }\n \n if (tc.function?.arguments) {\n const toolCall = toolCallsInProgress.get(index);\n if (toolCall) {\n toolCall.arguments += tc.function.arguments;\n yield {\n type: 'tool_call_delta',\n toolCall: {\n index,\n argumentsDelta: tc.function.arguments,\n },\n };\n }\n }\n }\n }\n\n // Check for finish reason to emit tool_call_end\n if (chunk.choices[0]?.finish_reason === 'tool_calls') {\n for (const [index, toolCall] of toolCallsInProgress) {\n yield {\n type: 'tool_call_end',\n toolCall: {\n id: toolCall.id,\n index,\n name: toolCall.name,\n },\n };\n }\n }\n\n // Usage comes at the end\n if (chunk.usage) {\n yield {\n type: 'usage',\n usage: {\n inputTokens: chunk.usage.prompt_tokens,\n outputTokens: chunk.usage.completion_tokens,\n },\n };\n }\n }\n\n yield { type: 'done' };\n } catch (error) {\n throw createSafeError(error as Error, { 
provider: 'openai' });\n }\n }\n}\n\n/**\n * Create a new OpenAI provider instance\n */\nexport function createOpenAIProvider(): OpenAIProvider {\n return new OpenAIProvider();\n}\n\n/**\n * Package version\n */\nexport const VERSION = '0.0.1';\n\nexport default OpenAIProvider;\n"],"names":["undiciFetch"],"mappings":";;;;AAcO,SAAS,cAAkC;AAC9C,SACI,QAAQ,IAAI,eACZ,QAAQ,IAAI,eACZ,QAAQ,IAAI,cACZ,QAAQ,IAAI,cACZ;AAER;AAMO,SAAS,eAAwB;AACpC,SAAO,QAAQ,IAAI,iCAAiC;AACxD;AAKO,SAAS,gBAAgB,WAA4B;AACxD,QAAM,UAAU,QAAQ,IAAI,YAAY,QAAQ,IAAI;AACpD,MAAI,CAAC,SAAS;AACV,WAAO;AAAA,EACX;AAEA,MAAI;AACJ,MAAI;AACA,eAAW,IAAI,IAAI,SAAS,EAAE,SAAS,YAAA;AAAA,EAC3C,QAAQ;AACJ,WAAO;AAAA,EACX;AAEA,QAAM,UAAU,QAAQ,MAAM,GAAG,EAAE,IAAI,CAAC,MAAM,EAAE,KAAA,EAAO,YAAA,CAAa;AACpE,aAAW,SAAS,SAAS;AACzB,QAAI,CAAC,OAAO;AACR;AAAA,IACJ;AACA,QAAI,UAAU,KAAK;AACf,aAAO;AAAA,IACX;AACA,QAAI,aAAa,OAAO;AACpB,aAAO;AAAA,IACX;AACA,UAAM,SAAS,MAAM,WAAW,GAAG,IAAI,QAAQ,IAAI,KAAK;AACxD,QAAI,SAAS,SAAS,MAAM,GAAG;AAC3B,aAAO;AAAA,IACX;AAAA,EACJ;AACA,SAAO;AACX;AASO,SAAS,iBAAiB,UAAgC;AAC7D,QAAM,aAAa,IAAI,WAAW;AAAA,IAC9B,KAAK;AAAA,IACL,YAAY,EAAE,oBAAoB,aAAA,EAAa;AAAA,EAAE,CACpD;AACD,UAAQ,CAAC,OAAY,SAAe;AAChC,UAAM,YAAY,OAAO,UAAU,WAC7B,QACA,iBAAiB,MACb,MAAM,SAAA,IACN,MAAM;AAChB,QAAI,gBAAgB,SAAS,GAAG;AAC5B,aAAOA,MAAY,OAAO,IAAI;AAAA,IAClC;AACA,WAAOA,MAAY,OAAO,EAAE,GAAG,MAAM,YAAY,YAAY;AAAA,EACjE;AACJ;ACxEA,MAAM,WAAW,YAAA;AACjB,SAAS,SAAS;AAAA,EACd,MAAM;AAAA,EACN,UAAU;AAAA,IACN;AAAA,IACA;AAAA,EAAA;AAAA,EAEJ,WAAW,CAAC,QAAgB,kCAAkC,KAAK,GAAG;AAAA,EACtE,QAAQ;AAAA,EACR,aAAa;AACjB,CAAC;AAGD,wBAAwB;AAAA,EACpB,SAAS;AAAA,EACT,aAAa,QAAQ,IAAI,aAAa,eAAe,eAAe;AAAA,EACpE,sBAAsB;AAAA,EACtB,qBAAqB,QAAQ,IAAI,aAAa;AAAA,EAC9C,kBAAkB;AACtB,CAAC;AAED,qBAAqB;AAAA,EACjB,SAAS;AAAA,EACT,eAAe;AAAA,EACf,iBAAiB;AAAA,EACjB,gBAAgB;AAAA,EAChB,gBAAgB;AAAA,IACZ,EAAE,MAAM,UAAU,SAAS,wBAAwB,aAAa,iBAAA;AAAA,IAChE,EAAE,MAAM,eAAe,SAAS,2BAA2B,aAAa,qBAAA;AAAA,EAAqB;AAErG,CAAC;AAsFD,SAAS,uBAAuB,OAAwB;AACpD,SAAO,QAAQ,KAAK,MAAM,KAAA,CAAM;AACpC;AAEA,SAAS,2BAA2B,SAAuD;AACvF,SAAO,QAAQ,SAAS
,IAAI,CAAC,QAAQ;AACjC,QAAI,IAAI,SAAS,QAAQ;AACrB,aAAO;AAAA,QACH,MAAM;AAAA,QACN,SAAS,OAAO,IAAI,YAAY,WAC1B,IAAI,UACJ,KAAK,UAAU,IAAI,OAAO;AAAA,QAChC,cAAe,IAAkC,gBAAgB;AAAA,MAAA;AAAA,IAEzE;AACA,QAAI,IAAI,SAAS,aAAa;AAC1B,YAAM,QAAQ;AACd,UAAI,MAAM,YAAY;AAClB,eAAO;AAAA,UACH,MAAM;AAAA,UACN,SAAS,IAAI;AAAA,UACb,YAAY,MAAM;AAAA,QAAA;AAAA,MAE1B;AAAA,IACJ;AACA,WAAO;AAAA,MACH,MAAM,IAAI;AAAA,MACV,SACI,OAAO,IAAI,YAAY,WACjB,IAAI,UACJ,KAAK,UAAU,IAAI,OAAO;AAAA,MACpC,GAAI,IAAI,OAAO,EAAE,MAAM,IAAI,KAAA,IAAS,CAAA;AAAA,IAAC;AAAA,EAE7C,CAAC;AACL;AAEA,SAAS,WAAW,SAA2D;AAC3E,MAAI,CAAC,QAAQ,OAAO,QAAQ;AACxB,WAAO;AAAA,EACX;AACA,SAAO,QAAQ,MAAM,IAAI,CAAC,UAAU;AAAA,IAChC,MAAM;AAAA,IACN,UAAU;AAAA,MACN,MAAM,KAAK;AAAA,MACX,aAAa,KAAK;AAAA,MAClB,YAAY,KAAK;AAAA,IAAA;AAAA,EACrB,EACF;AACN;AAEA,SAAS,8BACL,OACA,SACA,QACI;AACJ,MAAI,QAAQ,aAAa,MAAM;AAC3B,WAAO,wBAAwB,QAAQ;AAAA,EAC3C;AACA,MACI,QAAQ,gBAAgB,UACxB,QAAQ,gBAAgB,QACxB,CAAC,uBAAuB,KAAK,GAC/B;AACE,WAAO,cAAc,QAAQ;AAAA,EACjC;AACJ;AAYO,MAAM,eAAmC;AAAA,EACnC,OAAO;AAAA;AAAA;AAAA;AAAA,EAKhB,cAAc,OAAuB;AACjC,QAAI,CAAC,MAAO,QAAO;AACnB,UAAM,IAAI,MAAM,YAAA;AAChB,WACI,EAAE,WAAW,KAAK,KAClB,OAAO,KAAK,CAAC,KACb,EAAE,WAAW,KAAK,KAClB,EAAE,WAAW,YAAY;AAAA,EAEjC;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,QACF,SACA,UAA4B,IACH;AACzB,UAAM,SAAS,QAAQ,UAAU,QAAQ,IAAI;AAE7C,QAAI,CAAC,QAAQ;AACT,YAAM,IAAI,MAAM,sEAAsE;AAAA,IAC1F;AAGA,UAAM,aAAa,SAAS,YAAY,QAAQ,QAAQ;AACxD,QAAI,CAAC,WAAW,OAAO;AACnB,YAAM,IAAI,MAAM,+BAA+B;AAAA,IACnD;AAEA,QAAI;AACA,YAAM,gBAAyD,EAAE,OAAA;AACjE,YAAM,WAAW,YAAA;AACjB,UAAI,UAAU;AACV,sBAAc,QAAQ,iBAAiB,QAAQ;AAAA,MACnD;AACA,YAAM,SAAS,IAAI,OAAO,aAAa;AAEvC,YAAM,QAAQ,QAAQ,SAAS,QAAQ,SAAS;AAEhD,YAAM,WAAW,2BAA2B,OAAO;AACnD,YAAM,cAAc,WAAW,OAAO;AAEtC,YAAM,SAA4C;AAAA,QAC9C;AAAA,QACA;AAAA,QACA,GAAI,QAAQ,kBAAkB,OAAO,EAAE,iBAAiB,QAAQ,eAAA,IAAmB,CAAA;AAAA,QACnF,GAAI,cAAc,EAAE,OAAO,gBAAgB,CAAA;AAAA,MAAC;AAEhD,oCAA8B,OAAO,SAAS,MAAM;AAEpD,YAAM,WAAW,MAAM,OAAO,KAAK,YAAY,OAAO,MAAM;AAE5D,YAAM,SAAS,SAAS,QAAQ,CAAC;AACjC,YAAM,mBAAmB,OAAO;AAEhC,aAAO;AAAA,QACH,SAAS,iBAAiB,WAAW,iBAAiB,WAAW;AAAA,QACjE,OAAO,
SAAS;AAAA,QAChB,OAAO,SAAS,QACV;AAAA,UACE,aAAa,SAAS,MAAM;AAAA,UAC5B,cAAc,SAAS,MAAM;AAAA,QAAA,IAE/B;AAAA,QACN,WAAW,OAAO,QAAQ,YACpB,OAAO,CAAC,OAAO,GAAG,SAAS,UAAU,EACtC,IAAI,CAAC,QAAQ;AAAA,UACV,IAAI,GAAG;AAAA,UACP,MAAM;AAAA,UACN,UAAU;AAAA,YACN,MAAO,GAAW,SAAS;AAAA,YAC3B,WAAY,GAAW,SAAS;AAAA,UAAA;AAAA,QACpC,EACF;AAAA,MAAA;AAAA,IAEd,SAAS,OAAO;AAGZ,YAAM,gBAAgB,OAAgB,EAAE,UAAU,UAAU;AAAA,IAChE;AAAA,EACJ;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,cACH,SACA,UAA4B,IACF;AAC1B,UAAM,SAAS,QAAQ,UAAU,QAAQ,IAAI;AAE7C,QAAI,CAAC,QAAQ;AACT,YAAM,IAAI,MAAM,sEAAsE;AAAA,IAC1F;AAGA,UAAM,aAAa,SAAS,YAAY,QAAQ,QAAQ;AACxD,QAAI,CAAC,WAAW,OAAO;AACnB,YAAM,IAAI,MAAM,+BAA+B;AAAA,IACnD;AAEA,QAAI;AACA,YAAM,gBAAyD,EAAE,OAAA;AACjE,YAAM,WAAW,YAAA;AACjB,UAAI,UAAU;AACV,sBAAc,QAAQ,iBAAiB,QAAQ;AAAA,MACnD;AACA,YAAM,SAAS,IAAI,OAAO,aAAa;AAEvC,YAAM,QAAQ,QAAQ,SAAS,QAAQ,SAAS;AAEhD,YAAM,WAAW,2BAA2B,OAAO;AACnD,YAAM,cAAc,WAAW,OAAO;AAEtC,YAAM,SAAqD;AAAA,QACvD;AAAA,QACA;AAAA,QACA,QAAQ;AAAA,QACR,gBAAgB,EAAE,eAAe,KAAA;AAAA,QACjC,GAAI,QAAQ,kBAAkB,OAAO,EAAE,iBAAiB,QAAQ,eAAA,IAAmB,CAAA;AAAA,QACnF,GAAI,cAAc,EAAE,OAAO,gBAAgB,CAAA;AAAA,MAAC;AAEhD,oCAA8B,OAAO,SAAS,MAAM;AAEpD,YAAM,SAAS,MAAM,OAAO,KAAK,YAAY,OAAO,MAAM;AAG1D,YAAM,0CAAwF,IAAA;AAE9F,uBAAiB,SAAS,QAAQ;AAC9B,cAAM,QAAQ,MAAM,QAAQ,CAAC,GAAG;AAEhC,YAAI,OAAO,SAAS;AAChB,gBAAM,EAAE,MAAM,QAAQ,MAAM,MAAM,QAAA;AAAA,QACtC;AAEA,YAAI,OAAO,SAAS;AAChB,gBAAM,EAAE,MAAM,QAAQ,MAAM,MAAM,QAAA;AAAA,QACtC;AAEA,YAAI,OAAO,YAAY;AACnB,qBAAW,MAAM,MAAM,YAAY;AAC/B,kBAAM,QAAQ,GAAG;AAEjB,gBAAI,GAAG,IAAI;AAEP,kCAAoB,IAAI,OAAO;AAAA,gBAC3B,IAAI,GAAG;AAAA,gBACP,MAAM,GAAG,UAAU,QAAQ;AAAA,gBAC3B,WAAW;AAAA,cAAA,CACd;AACD,oBAAM;AAAA,gBACF,MAAM;AAAA,gBACN,UAAU;AAAA,kBACN,IAAI,GAAG;AAAA,kBACP;AAAA,kBACA,MAAM,GAAG,UAAU;AAAA,gBAAA;AAAA,cACvB;AAAA,YAER;AAEA,gBAAI,GAAG,UAAU,MAAM;AACnB,oBAAM,WAAW,oBAAoB,IAAI,KAAK;AAC9C,kBAAI,UAAU;AACV,yBAAS,OAAO,GAAG,SAAS;AAAA,cAChC,OAAO;AACH,oCAAoB,IAAI,OAAO;AAAA,kBAC3B,IAAI,GAAG,MAAM;AAAA,kBACb,MAAM,GAAG,SAAS;AAAA,kBAClB,WAAW;AAAA,gBAAA,CACd;AAAA,cACL;AAAA,YACJ;AAEA,gBAAI,GAAG,UAAU,WAAW;AACxB,oBAAM,WA
AW,oBAAoB,IAAI,KAAK;AAC9C,kBAAI,UAAU;AACV,yBAAS,aAAa,GAAG,SAAS;AAClC,sBAAM;AAAA,kBACF,MAAM;AAAA,kBACN,UAAU;AAAA,oBACN;AAAA,oBACA,gBAAgB,GAAG,SAAS;AAAA,kBAAA;AAAA,gBAChC;AAAA,cAER;AAAA,YACJ;AAAA,UACJ;AAAA,QACJ;AAGA,YAAI,MAAM,QAAQ,CAAC,GAAG,kBAAkB,cAAc;AAClD,qBAAW,CAAC,OAAO,QAAQ,KAAK,qBAAqB;AACjD,kBAAM;AAAA,cACF,MAAM;AAAA,cACN,UAAU;AAAA,gBACN,IAAI,SAAS;AAAA,gBACb;AAAA,gBACA,MAAM,SAAS;AAAA,cAAA;AAAA,YACnB;AAAA,UAER;AAAA,QACJ;AAGA,YAAI,MAAM,OAAO;AACb,gBAAM;AAAA,YACF,MAAM;AAAA,YACN,OAAO;AAAA,cACH,aAAa,MAAM,MAAM;AAAA,cACzB,cAAc,MAAM,MAAM;AAAA,YAAA;AAAA,UAC9B;AAAA,QAER;AAAA,MACJ;AAEA,YAAM,EAAE,MAAM,OAAA;AAAA,IAClB,SAAS,OAAO;AACZ,YAAM,gBAAgB,OAAgB,EAAE,UAAU,UAAU;AAAA,IAChE;AAAA,EACJ;AACJ;AAKO,SAAS,uBAAuC;AACnD,SAAO,IAAI,eAAA;AACf;AAKO,MAAM,UAAU;"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@kjerneverk/execution-openai",
3
- "version": "1.0.13",
3
+ "version": "1.0.14-dev.20260320165526.3c2edde",
4
4
  "description": "OpenAI provider for execution - implements Provider interface",
5
5
  "type": "module",
6
6
  "main": "./dist/index.js",
@@ -39,7 +39,7 @@
39
39
  "dependencies": {
40
40
  "@utilarium/offrecord": "^0.0.3",
41
41
  "@utilarium/spotclean": "^0.0.4",
42
- "openai": "^6.22.0",
42
+ "openai": "^6.32.0",
43
43
  "tiktoken": "^1.0.21",
44
44
  "undici": "^7.21.0"
45
45
  },