@glueco/plugin-llm-openai 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,189 @@
+ # @glueco/plugin-llm-openai
+
+ OpenAI LLM plugin for Personal Resource Gateway. Provides typed access to OpenAI's GPT models including GPT-4o, GPT-4 Turbo, GPT-3.5 Turbo, and o1 reasoning models.
+
+ ## Features
+
+ - **Full OpenAI API Support**: Chat completions with all parameters (tools, streaming, response formats, etc.)
+ - **Schema-First Validation**: Request validation using Zod schemas with enforcement field extraction
+ - **Typed Client**: Full TypeScript support with autocomplete for target apps
+ - **Dual-Entrypoint**: Clean separation between proxy and client code
+
+ ## Installation
+
+ ```bash
+ npm install @glueco/plugin-llm-openai
+ ```
+
+ ## Usage
+
+ ### For Target Apps (Client-Side)
+
+ Use the typed client wrapper for full TypeScript support:
+
+ ```typescript
+ import { openai } from "@glueco/plugin-llm-openai/client";
+ import { GatewayClient, FileKeyStorage, FileConfigStorage } from "@glueco/sdk";
+
+ // Setup gateway client
+ const gatewayClient = new GatewayClient({
+   keyStorage: new FileKeyStorage("./.gateway/keys.json"),
+   configStorage: new FileConfigStorage("./.gateway/config.json"),
+ });
+
+ // Get transport and create typed client
+ const transport = await gatewayClient.getTransport();
+ const openaiClient = openai(transport);
+
+ // Chat completion (non-streaming)
+ const response = await openaiClient.chatCompletions({
+   model: "gpt-4o",
+   messages: [
+     { role: "system", content: "You are a helpful assistant." },
+     { role: "user", content: "What is the capital of France?" },
+   ],
+   temperature: 0.7,
+   max_tokens: 1000,
+ });
+
+ console.log(response.data.choices[0].message.content);
+ ```
+
+ ### Streaming Chat Completion
+
+ ```typescript
+ const stream = await openaiClient.chatCompletionsStream({
+   model: "gpt-4o",
+   messages: [{ role: "user", content: "Tell me a story" }],
+ });
+
+ const reader = stream.stream.getReader();
+ const decoder = new TextDecoder();
+
+ while (true) {
+   const { done, value } = await reader.read();
+   if (done) break;
+   const chunk = decoder.decode(value);
+   // Process SSE chunk
+   console.log(chunk);
+ }
+ ```
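+
+ The raw stream yields OpenAI-style server-sent events. As a minimal sketch (not an API of this package, and assuming the usual `data: <json>` framing terminated by `data: [DONE]`), the read loop above could parse each event into a chat-completion chunk instead of logging raw text:
+
+ ```typescript
+ // Illustrative only; adapt to whatever framing the gateway actually emits.
+ const reader = stream.stream.getReader();
+ const decoder = new TextDecoder();
+ let buffer = "";
+ let fullText = "";
+
+ while (true) {
+   const { done, value } = await reader.read();
+   if (done) break;
+   buffer += decoder.decode(value, { stream: true });
+
+   // SSE events are separated by blank lines; keep any partial event in the buffer.
+   const events = buffer.split("\n\n");
+   buffer = events.pop() ?? "";
+
+   for (const event of events) {
+     const data = event.replace(/^data: /, "").trim();
+     if (!data || data === "[DONE]") continue;
+     const chunk = JSON.parse(data); // OpenAI chat.completion.chunk shape
+     fullText += chunk.choices[0]?.delta?.content ?? "";
+   }
+ }
+
+ console.log(fullText);
+ ```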
+
+ ### Tool Calling
+
+ ```typescript
+ const response = await openaiClient.chatCompletions({
+   model: "gpt-4o",
+   messages: [{ role: "user", content: "What's the weather in Paris?" }],
+   tools: [
+     {
+       type: "function",
+       function: {
+         name: "get_weather",
+         description: "Get current weather for a city",
+         parameters: {
+           type: "object",
+           properties: {
+             city: { type: "string" },
+           },
+           required: ["city"],
+         },
+       },
+     },
+   ],
+ });
+
+ const toolCall = response.data.choices[0].message.tool_calls?.[0];
+ if (toolCall) {
+   console.log("Function called:", toolCall.function.name);
+   console.log("Arguments:", JSON.parse(toolCall.function.arguments));
+ }
+ ```
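+
+ To complete the round trip, the standard OpenAI tool-calling pattern is to run the function yourself and send its result back as a `tool` message that references the `tool_call_id`. A minimal sketch (the `getWeather` stub is hypothetical; this package does not execute tools for you):
+
+ ```typescript
+ // Hypothetical stub standing in for your own tool implementation.
+ const getWeather = async (city: string) => ({ city, forecast: "sunny" });
+
+ if (toolCall) {
+   const args = JSON.parse(toolCall.function.arguments);
+   const result = await getWeather(args.city);
+
+   const followUp = await openaiClient.chatCompletions({
+     model: "gpt-4o",
+     messages: [
+       { role: "user", content: "What's the weather in Paris?" },
+       response.data.choices[0].message, // assistant message containing the tool call
+       {
+         role: "tool",
+         tool_call_id: toolCall.id,
+         content: JSON.stringify(result),
+       },
+     ],
+   });
+
+   console.log(followUp.data.choices[0].message.content);
+ }
+ ```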
+
+ ### With OpenAI SDK (Compatibility Mode)
+
+ You can also use the standard OpenAI SDK with the gateway's fetch wrapper:
+
+ ```typescript
+ import OpenAI from "openai";
+ import { GatewayClient } from "@glueco/sdk";
+
+ const client = new GatewayClient({ ... });
+ const gatewayFetch = await client.getFetch();
+ const baseURL = await client.getResourceBaseUrl("llm", "openai");
+
+ const openai = new OpenAI({
+   apiKey: "unused", // Gateway provides the key
+   baseURL,
+   fetch: gatewayFetch,
+ });
+
+ const response = await openai.chat.completions.create({
+   model: "gpt-4o",
+   messages: [{ role: "user", content: "Hello!" }],
+ });
+ ```
+
+ ## Supported Models
+
+ ### GPT-4o Family
+
+ - `gpt-4o` - Latest GPT-4o
+ - `gpt-4o-2024-11-20`
+ - `gpt-4o-2024-08-06`
+ - `gpt-4o-mini` - Smaller, faster, cheaper
+
+ ### GPT-4 Turbo
+
+ - `gpt-4-turbo` - Latest GPT-4 Turbo
+ - `gpt-4-turbo-2024-04-09`
+ - `gpt-4-turbo-preview`
+
+ ### GPT-4
+
+ - `gpt-4`
+ - `gpt-4-0613`
+
+ ### GPT-3.5 Turbo
+
+ - `gpt-3.5-turbo`
+ - `gpt-3.5-turbo-0125`
+
+ ### o1 Reasoning Models
+
+ - `o1` - Latest o1
+ - `o1-preview`
+ - `o1-mini`
+
+ ## Enforcement Fields
+
+ The plugin extracts these enforcement fields during validation:
+
+ | Field             | Type      | Description                                      |
+ | ----------------- | --------- | ------------------------------------------------ |
+ | `model`           | `string`  | The requested model name                         |
+ | `stream`          | `boolean` | Whether streaming is requested                   |
+ | `usesTools`       | `boolean` | Whether tools/functions are used                 |
+ | `maxOutputTokens` | `number?` | Max tokens (max_tokens or max_completion_tokens) |
+
+ These fields are used by the gateway's policy enforcement system to apply access controls.
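+
+ As an illustrative sketch of that mapping (not the plugin's actual implementation, and assuming `ChatCompletionRequestSchema` is importable from the package root), the fields can be derived from a validated request body like this:
+
+ ```typescript
+ import { z } from "zod";
+ import { ChatCompletionRequestSchema } from "@glueco/plugin-llm-openai"; // assumed export path
+
+ type ChatCompletionRequest = z.infer<typeof ChatCompletionRequestSchema>;
+
+ // Maps a validated request onto the enforcement fields listed above.
+ function extractEnforcementFields(body: unknown) {
+   const request: ChatCompletionRequest = ChatCompletionRequestSchema.parse(body);
+   return {
+     model: request.model,
+     stream: request.stream ?? false,
+     usesTools: (request.tools?.length ?? 0) > 0,
+     maxOutputTokens: request.max_tokens ?? request.max_completion_tokens,
+   };
+ }
+ ```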
+
+ ## Plugin Configuration
+
+ When setting up credentials in the gateway admin:
+
+ | Field          | Type   | Required | Description                                    |
+ | -------------- | ------ | -------- | ---------------------------------------------- |
+ | `apiKey`       | secret | Yes      | Your OpenAI API key (starts with sk-)          |
+ | `organization` | string | No       | Optional organization ID                       |
+ | `baseUrl`      | url    | No       | Custom base URL (for Azure OpenAI or proxies)  |
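+
+ For illustration only (field names from the table above; the values are placeholders and the storage format is up to the gateway), a credential entry might look like:
+
+ ```typescript
+ // Placeholder values; the gateway admin collects and stores these fields.
+ const openaiCredential = {
+   apiKey: "sk-...",                     // secret, required
+   organization: "org-...",              // optional organization ID
+   baseUrl: "https://api.openai.com/v1", // optional; override for Azure OpenAI or proxies
+ };
+ ```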
+
+ ## Actions
+
+ | Action             | Description                              |
+ | ------------------ | ---------------------------------------- |
+ | `chat.completions` | Create chat completions with GPT models  |
+
+ ## License
+
+ MIT
@@ -0,0 +1,261 @@
+ // src/contracts.ts
+ import { z } from "zod";
+ var ChatMessageSchema = z.object({
+   role: z.enum(["system", "user", "assistant", "tool", "function"]),
+   content: z.union([
+     z.string(),
+     z.array(
+       z.object({
+         type: z.enum(["text", "image_url"]),
+         text: z.string().optional(),
+         image_url: z.object({
+           url: z.string(),
+           detail: z.enum(["auto", "low", "high"]).optional()
+         }).optional()
+       })
+     )
+   ]).nullable(),
+   name: z.string().optional(),
+   tool_calls: z.array(
+     z.object({
+       id: z.string(),
+       type: z.literal("function"),
+       function: z.object({
+         name: z.string(),
+         arguments: z.string()
+       })
+     })
+   ).optional(),
+   tool_call_id: z.string().optional(),
+   function_call: z.object({
+     name: z.string(),
+     arguments: z.string()
+   }).optional()
+ });
+ var ToolSchema = z.object({
+   type: z.literal("function"),
+   function: z.object({
+     name: z.string(),
+     description: z.string().optional(),
+     parameters: z.record(z.unknown()).optional(),
+     strict: z.boolean().optional()
+   })
+ });
+ var ChatCompletionRequestSchema = z.object({
+   model: z.string(),
+   messages: z.array(ChatMessageSchema),
+   temperature: z.number().min(0).max(2).optional(),
+   top_p: z.number().min(0).max(1).optional(),
+   n: z.number().int().min(1).max(10).optional(),
+   stream: z.boolean().optional(),
+   stream_options: z.object({
+     include_usage: z.boolean().optional()
+   }).optional(),
+   stop: z.union([z.string(), z.array(z.string())]).optional(),
+   max_tokens: z.number().int().positive().optional(),
+   max_completion_tokens: z.number().int().positive().optional(),
+   presence_penalty: z.number().min(-2).max(2).optional(),
+   frequency_penalty: z.number().min(-2).max(2).optional(),
+   logit_bias: z.record(z.number()).optional(),
+   logprobs: z.boolean().optional(),
+   top_logprobs: z.number().int().min(0).max(20).optional(),
+   user: z.string().optional(),
+   tools: z.array(ToolSchema).optional(),
+   tool_choice: z.union([
+     z.literal("none"),
+     z.literal("auto"),
+     z.literal("required"),
+     z.object({
+       type: z.literal("function"),
+       function: z.object({ name: z.string() })
+     })
+   ]).optional(),
+   parallel_tool_calls: z.boolean().optional(),
+   response_format: z.union([
+     z.object({ type: z.literal("text") }),
+     z.object({ type: z.literal("json_object") }),
+     z.object({
+       type: z.literal("json_schema"),
+       json_schema: z.object({
+         name: z.string(),
+         description: z.string().optional(),
+         schema: z.record(z.unknown()),
+         strict: z.boolean().optional()
+       })
+     })
+   ]).optional(),
+   seed: z.number().int().optional(),
+   service_tier: z.enum(["auto", "default"]).optional()
+ });
+ var ChatCompletionChoiceSchema = z.object({
+   index: z.number(),
+   message: z.object({
+     role: z.literal("assistant"),
+     content: z.string().nullable(),
+     tool_calls: z.array(
+       z.object({
+         id: z.string(),
+         type: z.literal("function"),
+         function: z.object({
+           name: z.string(),
+           arguments: z.string()
+         })
+       })
+     ).optional(),
+     function_call: z.object({
+       name: z.string(),
+       arguments: z.string()
+     }).optional(),
+     refusal: z.string().nullable().optional()
+   }),
+   finish_reason: z.enum(["stop", "length", "tool_calls", "content_filter", "function_call"]).nullable(),
+   logprobs: z.object({
+     content: z.array(
+       z.object({
+         token: z.string(),
+         logprob: z.number(),
+         bytes: z.array(z.number()).nullable(),
+         top_logprobs: z.array(
+           z.object({
+             token: z.string(),
+             logprob: z.number(),
+             bytes: z.array(z.number()).nullable()
+           })
+         )
+       })
+     ).nullable()
+   }).nullable().optional()
+ });
+ var UsageSchema = z.object({
+   prompt_tokens: z.number(),
+   completion_tokens: z.number(),
+   total_tokens: z.number(),
+   prompt_tokens_details: z.object({
+     cached_tokens: z.number().optional()
+   }).optional(),
+   completion_tokens_details: z.object({
+     reasoning_tokens: z.number().optional()
+   }).optional()
+ });
+ var ChatCompletionResponseSchema = z.object({
+   id: z.string(),
+   object: z.literal("chat.completion"),
+   created: z.number(),
+   model: z.string(),
+   choices: z.array(ChatCompletionChoiceSchema),
+   usage: UsageSchema.optional(),
+   system_fingerprint: z.string().optional(),
+   service_tier: z.string().optional()
+ });
+ var ChatCompletionChunkSchema = z.object({
+   id: z.string(),
+   object: z.literal("chat.completion.chunk"),
+   created: z.number(),
+   model: z.string(),
+   system_fingerprint: z.string().optional(),
+   choices: z.array(
+     z.object({
+       index: z.number(),
+       delta: z.object({
+         role: z.string().optional(),
+         content: z.string().optional(),
+         tool_calls: z.array(
+           z.object({
+             index: z.number(),
+             id: z.string().optional(),
+             type: z.literal("function").optional(),
+             function: z.object({
+               name: z.string().optional(),
+               arguments: z.string().optional()
+             }).optional()
+           })
+         ).optional(),
+         refusal: z.string().nullable().optional()
+       }),
+       finish_reason: z.enum([
+         "stop",
+         "length",
+         "tool_calls",
+         "content_filter",
+         "function_call"
+       ]).nullable(),
+       logprobs: z.object({
+         content: z.array(
+           z.object({
+             token: z.string(),
+             logprob: z.number(),
+             bytes: z.array(z.number()).nullable(),
+             top_logprobs: z.array(
+               z.object({
+                 token: z.string(),
+                 logprob: z.number(),
+                 bytes: z.array(z.number()).nullable()
+               })
+             )
+           })
+         ).nullable()
+       }).nullable().optional()
+     })
+   ),
+   usage: UsageSchema.optional(),
+   service_tier: z.string().optional()
+ });
+ var PLUGIN_ID = "llm:openai";
+ var RESOURCE_TYPE = "llm";
+ var PROVIDER = "openai";
+ var VERSION = "1.0.0";
+ var DEFAULT_OPENAI_MODELS = [
+   // GPT-4o family
+   "gpt-4o",
+   "gpt-4o-2024-11-20",
+   "gpt-4o-2024-08-06",
+   "gpt-4o-2024-05-13",
+   "gpt-4o-mini",
+   "gpt-4o-mini-2024-07-18",
+   // GPT-4 Turbo
+   "gpt-4-turbo",
+   "gpt-4-turbo-2024-04-09",
+   "gpt-4-turbo-preview",
+   "gpt-4-0125-preview",
+   "gpt-4-1106-preview",
+   // GPT-4
+   "gpt-4",
+   "gpt-4-0613",
+   // GPT-3.5 Turbo
+   "gpt-3.5-turbo",
+   "gpt-3.5-turbo-0125",
+   "gpt-3.5-turbo-1106",
+   // o1 family (reasoning models)
+   "o1",
+   "o1-2024-12-17",
+   "o1-preview",
+   "o1-preview-2024-09-12",
+   "o1-mini",
+   "o1-mini-2024-09-12"
+ ];
+ var ACTIONS = ["chat.completions"];
+ var ENFORCEMENT_SUPPORT = [
+   "model",
+   "max_tokens",
+   "streaming"
+ ];
+ var DEFAULT_API_URL = "https://api.openai.com/v1";
+
+ export {
+   ChatMessageSchema,
+   ToolSchema,
+   ChatCompletionRequestSchema,
+   ChatCompletionChoiceSchema,
+   UsageSchema,
+   ChatCompletionResponseSchema,
+   ChatCompletionChunkSchema,
+   PLUGIN_ID,
+   RESOURCE_TYPE,
+   PROVIDER,
+   VERSION,
+   DEFAULT_OPENAI_MODELS,
+   ACTIONS,
+   ENFORCEMENT_SUPPORT,
+   DEFAULT_API_URL
+ };
+ //# sourceMappingURL=chunk-6FNYHOB5.mjs.map