@simulacra-ai/openai 0.0.2 → 0.0.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,6 +1,6 @@
1
1
  # Simulacra OpenAI Provider
2
2
 
3
- OpenAI provider for the Simulacra conversation engine.
3
+ The OpenAI provider allows Simulacra to use OpenAI models via the OpenAI SDK, including GPT, o1, and o3 series.
4
4
 
5
5
  ## Installation
6
6
 
@@ -15,8 +15,9 @@ import { Conversation } from "@simulacra-ai/core";
15
15
  import { OpenAIProvider } from "@simulacra-ai/openai";
16
16
  import OpenAI from "openai";
17
17
 
18
+ // create a provider and conversation
18
19
  const provider = new OpenAIProvider(new OpenAI(), { model: MODEL_NAME });
19
- const conversation = new Conversation(provider);
20
+ using conversation = new Conversation(provider);
20
21
  ```
21
22
 
22
23
  ### OpenAIProviderConfig
@@ -30,13 +31,6 @@ interface OpenAIProviderConfig {
30
31
 
31
32
  Additional properties (`temperature`, `top_p`, etc.) spread into the API request.
32
33
 
33
- ## System Prompt Handling
34
-
35
- The provider automatically selects the correct system message role based on the model:
36
-
37
- - **GPT models** (`gpt-*`): uses `role: "system"`
38
- - **Other models** (o1, o3, etc.): uses `role: "developer"`
39
-
40
34
  ## License
41
35
 
42
36
  MIT
package/dist/index.cjs ADDED
@@ -0,0 +1,584 @@
1
+ "use strict";
2
+ var __defProp = Object.defineProperty;
3
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
4
+ var __getOwnPropNames = Object.getOwnPropertyNames;
5
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
6
+ var __export = (target, all) => {
7
+ for (var name in all)
8
+ __defProp(target, name, { get: all[name], enumerable: true });
9
+ };
10
+ var __copyProps = (to, from, except, desc) => {
11
+ if (from && typeof from === "object" || typeof from === "function") {
12
+ for (let key of __getOwnPropNames(from))
13
+ if (!__hasOwnProp.call(to, key) && key !== except)
14
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
15
+ }
16
+ return to;
17
+ };
18
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
19
+
20
+ // src/index.ts
21
+ var index_exports = {};
22
+ __export(index_exports, {
23
+ OpenAIProvider: () => OpenAIProvider
24
+ });
25
+ module.exports = __toCommonJS(index_exports);
26
+
27
+ // src/openai-provider.ts
28
// Model provider implementation for OpenAI's chat completion models.
// Wraps the OpenAI SDK to provide streaming completions with tool calling,
// per-chunk delta accumulation, and usage tracking.
var OpenAIProvider = class _OpenAIProvider {
  #sdk;
  #config;
  context_transformers;
  /**
   * Creates a new OpenAI provider instance.
   *
   * @param sdk - The initialized OpenAI SDK client.
   * @param config - Configuration options for the provider.
   * @param context_transformers - Provider-level context transformers.
   */
  constructor(sdk, config, context_transformers = []) {
    this.#sdk = sdk;
    this.#config = config;
    this.context_transformers = context_transformers;
  }
  /**
   * Executes a model request and streams the response through the provided receiver.
   *
   * @param request - The request containing messages, tools, and system prompt.
   * @param receiver - The receiver that handles streaming events.
   * @param cancellation - Token to signal cancellation of the request.
   * @returns A promise that resolves when the request completes.
   */
  async execute_request(request, receiver, cancellation) {
    // Every config field other than `model`/`max_tokens` (temperature,
    // top_p, ...) is spread straight into the API request.
    const { model, max_tokens, ...api_extras } = this.#config;
    const params = {
      ...api_extras,
      model,
      stream: true,
      max_tokens,
      ...request.tools.length > 0 ? {
        tool_choice: "auto",
        tools: request.tools.map((t) => to_openai_tool(t))
      } : {},
      messages: [
        ...get_system_context(model, request.system),
        ...request.messages.flatMap((m) => to_openai_messages(m))
      ],
      // Request a final usage chunk so token counts can be reported.
      stream_options: {
        include_usage: true
      }
    };
    receiver.before_request({ params });
    receiver.request_raw(params);
    const stream = await this.#sdk.chat.completions.create(params);
    // Intentionally not awaited (per the original TypeScript source):
    // streaming proceeds in the background and reports via the receiver,
    // including errors (see the catch in #stream_response).
    this.#stream_response(stream, receiver, cancellation);
  }
  /**
   * Creates a clone of this provider with the same configuration.
   *
   * @returns A new provider instance with identical configuration.
   */
  clone() {
    return new _OpenAIProvider(this.#sdk, this.#config, this.context_transformers);
  }
  // Consumes the SSE stream, accumulating deltas per choice index into a
  // synthetic "full" completion, and emits start/update/complete events on
  // the receiver. All failures are funneled into receiver.error().
  async #stream_response(stream, receiver, cancellation) {
    try {
      let response;
      for await (const response_chunk of stream) {
        if (cancellation.is_cancellation_requested) {
          receiver.cancel();
          return;
        }
        receiver.stream_raw(response_chunk);
        // Merge top-level chunk fields (id, created, usage, ...) while
        // keeping the choices accumulated so far.
        const { choices: choices_chunk, ...rest } = response_chunk;
        response = {
          ...response,
          ...rest,
          choices: response?.choices ?? []
        };
        for (const choice_chunk of choices_chunk) {
          // First chunk for this choice index: seed it and emit start events.
          if (!response.choices[choice_chunk.index]) {
            response.choices[choice_chunk.index] = choice_chunk;
            const message2 = from_openai_completion(response_chunk, choice_chunk);
            for (const content of message2.content) {
              receiver.start_content({ content, message: message2, usage: {} });
            }
            receiver.start_message({ message: message2, usage: {} });
            continue;
          }
          // Subsequent chunk: merge non-delta fields, then fold each delta
          // field into the accumulated choice below.
          const { delta: delta_chunk, ...rest2 } = choice_chunk;
          const choice = response.choices[choice_chunk.index] = {
            ...response.choices[choice_chunk.index],
            ...rest2,
            delta: {
              ...response.choices[choice_chunk.index]?.delta
            }
          };
          if (delta_chunk.role) {
            choice.delta.role = delta_chunk.role;
          }
          // Refusal text accumulates by concatenation.
          if (delta_chunk.refusal) {
            if (!choice.delta.refusal) {
              choice.delta.refusal = "";
            }
            choice.delta.refusal += delta_chunk.refusal;
          }
          // Content text: first fragment emits start_content, later ones
          // emit update_content with the concatenated text.
          if (delta_chunk.content) {
            if (!choice.delta.content) {
              choice.delta.content = delta_chunk.content;
              receiver.start_content({
                content: from_openai_content(choice.delta),
                message: from_openai_completion(response_chunk, choice),
                usage: response?.usage ? from_openai_usage(response.usage) : {}
              });
              receiver.update_message({
                message: from_openai_completion(response_chunk, choice),
                usage: response?.usage ? from_openai_usage(response.usage) : {}
              });
            } else {
              choice.delta.content += delta_chunk.content;
              receiver.update_content({
                content: from_openai_content(choice.delta),
                message: from_openai_completion(response_chunk, choice),
                usage: response?.usage ? from_openai_usage(response.usage) : {}
              });
            }
          }
          // Tool calls accumulate per tool-call index; the `arguments`
          // JSON string is built up fragment by fragment.
          if (delta_chunk.tool_calls) {
            if (!choice.delta.tool_calls) {
              choice.delta.tool_calls = [];
            }
            for (const tool_call_chunk of delta_chunk.tool_calls) {
              if (!choice.delta.tool_calls[tool_call_chunk.index]) {
                choice.delta.tool_calls[tool_call_chunk.index] = tool_call_chunk;
                receiver.start_content({
                  content: from_openai_tool_call(tool_call_chunk),
                  message: from_openai_completion(response_chunk, choice),
                  usage: response?.usage ? from_openai_usage(response.usage) : {}
                });
                receiver.update_message({
                  message: from_openai_completion(response_chunk, choice),
                  usage: response?.usage ? from_openai_usage(response.usage) : {}
                });
              } else {
                const tool_call = choice.delta.tool_calls[tool_call_chunk.index];
                if (tool_call_chunk.id) {
                  tool_call.id = tool_call_chunk.id;
                }
                if (tool_call_chunk.type) {
                  tool_call.type = tool_call_chunk.type;
                }
                if (tool_call_chunk.function) {
                  if (!tool_call.function) {
                    tool_call.function = tool_call_chunk.function;
                  } else {
                    if (tool_call_chunk.function.name) {
                      tool_call.function.name = tool_call_chunk.function.name;
                    }
                    if (tool_call_chunk.function.arguments) {
                      if (!tool_call.function.arguments) {
                        tool_call.function.arguments = "";
                      }
                      tool_call.function.arguments += tool_call_chunk.function.arguments;
                    }
                  }
                }
                receiver.update_content({
                  content: from_openai_tool_call(tool_call),
                  message: from_openai_completion(response_chunk, choice),
                  usage: response?.usage ? from_openai_usage(response.usage) : {}
                });
                receiver.update_message({
                  message: from_openai_completion(response_chunk, choice),
                  usage: response?.usage ? from_openai_usage(response.usage) : {}
                });
              }
            }
          }
        }
      }
      // The stream ended without producing any choice: surface as an error.
      if (!response || !response.choices?.[0]) {
        throw new Error("no data");
      }
      receiver.response_raw({ ...response });
      // Only the first choice is reported as the final message.
      const message = from_openai_completion(response, response.choices[0]);
      const usage = response?.usage ? from_openai_usage(response.usage) : {};
      for (const content of message.content) {
        receiver.complete_content({ content, message, usage });
      }
      receiver.complete_message({ message, usage, ...map_stop_reason(response) });
    } catch (error) {
      receiver.error(error);
    }
  }
};
215
/**
 * Builds the leading system-context message list for a request.
 *
 * GPT-family models (`gpt-*`) take the prompt under `role: "system"`;
 * all other models (o1, o3, ...) expect `role: "developer"` instead.
 * Returns an empty list when no system prompt was supplied.
 */
function get_system_context(model, system) {
  if (!system) {
    return [];
  }
  const role = model.startsWith("gpt") ? "system" : "developer";
  return [{ role, content: system }];
}
234
/**
 * Converts a Simulacra tool definition into OpenAI's function-tool format.
 *
 * To satisfy OpenAI strict mode, every object property is listed in
 * `required` and optional parameters are instead encoded as `[type, "null"]`
 * type unions. A parameter's default value, when present, is appended to its
 * description text.
 */
function to_openai_tool(tool) {
  const to_schema = (parameter) => {
    // Optionality is expressed as a nullable type union, not by omission.
    const type = parameter.required ? parameter.type : [parameter.type, "null"];
    switch (parameter.type) {
      case "object": {
        const entries = Object.entries(parameter.properties);
        return {
          type,
          description: parameter.description,
          properties: Object.fromEntries(entries.map(([name, child]) => [name, to_schema(child)])),
          additionalProperties: false,
          required: entries.map(([name]) => name)
        };
      }
      case "array":
        return {
          type,
          description: parameter.description,
          items: to_schema(parameter.items)
        };
      default: {
        // Scalar leaf: fold the default value into the description.
        let description = parameter.description;
        if (parameter.default !== void 0) {
          description = description ? `${description} (default: ${parameter.default})` : `default: ${parameter.default}`;
        }
        return {
          type,
          description,
          enum: "enum" in parameter ? parameter.enum : void 0
        };
      }
    }
  };
  // The tool's parameter list becomes one required root object schema.
  const root = {
    type: "object",
    required: true,
    properties: Object.fromEntries(tool.parameters.map(({ name, ...rest }) => [name, rest]))
  };
  return {
    type: "function",
    function: {
      name: tool.name,
      description: tool.description,
      parameters: to_schema(root),
      strict: true
    }
  };
}
277
/**
 * Builds an engine message from an (accumulated) completion choice.
 *
 * Text, refusal, and tool-call deltas become typed content entries; any
 * other delta field that is neither undefined nor null is preserved as a
 * `"raw"` content entry (JSON-encoded under its own key) so it can be
 * replayed on a later request. The `role` field is mapped separately.
 */
function from_openai_completion(completion, choice) {
  const contents = [];
  for (const k in choice.delta) {
    const key = k;
    if (key === "role") {
      continue;
    }
    if (key === "content" && choice.delta.content) {
      contents.push(from_openai_content(choice.delta));
    } else if (key === "refusal" && choice.delta.refusal) {
      contents.push(from_openai_refusal(choice.delta));
    } else if (key === "tool_calls" && choice.delta.tool_calls) {
      for (const tool_call of choice.delta.tool_calls) {
        contents.push(from_openai_tool_call(tool_call));
      }
    } else if (choice.delta[key] !== void 0 && choice.delta[key] !== null) {
      // Unknown-but-present delta field: keep it round-trippable.
      const { [key]: data } = choice.delta;
      contents.push({
        type: "raw",
        model_kind: "openai",
        data: JSON.stringify({ [key]: data })
      });
    }
  }
  return {
    id: completion.id,
    timestamp: completion.created,
    role: map_role(choice),
    content: contents
  };
}
309
/**
 * Maps a refusal delta to a text content entry, tagged with
 * `openai_refusal: true` in `extended` so it can be converted back into a
 * refusal field when the message is replayed.
 */
function from_openai_refusal(content) {
  const {
    refusal,
    tool_calls: _tool_calls,
    function_call: _function_call,
    content: _content,
    role: _role,
    ...extended
  } = content;
  return {
    type: "text",
    text: refusal,
    extended: { ...extended, openai_refusal: true }
  };
}
320
/**
 * Maps a text delta to a text content entry. Every delta field other than
 * the text itself (and the role/tool/refusal fields handled elsewhere) is
 * preserved under `extended`.
 */
function from_openai_content(content) {
  const {
    content: text,
    tool_calls: _tool_calls,
    function_call: _function_call,
    refusal: _refusal,
    role: _role,
    ...extended
  } = content;
  return { type: "text", text, extended };
}
335
/**
 * Maps a tool-call delta to a tool content entry. Arguments are parsed as
 * JSON when possible; a partially-streamed (unparseable) argument string is
 * passed through verbatim so callers can still observe progress.
 */
function from_openai_tool_call(tool_call) {
  const { id: tool_request_id, function: fn, type: _type, index: _index, ...extended } = tool_call;
  let params;
  try {
    params = JSON.parse(fn?.arguments ?? "{}");
  } catch {
    // Incomplete JSON mid-stream: expose the raw fragment instead.
    params = fn?.arguments;
  }
  return {
    tool_request_id,
    type: "tool",
    tool: fn?.name,
    params,
    extended
  };
}
351
/**
 * Converts a non-assistant engine message into one or more OpenAI chat
 * messages.
 *
 * Tool results are moved to the front because OpenAI requires
 * `role: "tool"` messages to directly follow the assistant turn that issued
 * the tool calls. Adjacent text entries are merged into one user message;
 * adjacent results for the same `tool_call_id` are merged into one tool
 * message; `"raw"` content is JSON-decoded and spread onto the message
 * currently being built.
 */
function to_openai_messages(message) {
  // Assistant messages have their own, richer mapping.
  if (message.role === "assistant") {
    return [to_openai_assistant_message(message)];
  }
  const tool_result_content = message.content.filter((c) => c.type === "tool_result");
  const other_content = message.content.filter((c) => c.type !== "tool_result");
  const ordered_content = [...tool_result_content, ...other_content];
  const results = [];
  // `result` is the message under construction; it is flushed to `results`
  // whenever the next content entry cannot be merged into it.
  let result;
  for (const content of ordered_content) {
    if (content.type === "text") {
      if (!result) {
        result = {
          role: "user",
          content: content.text
        };
      } else if (result.role === "tool") {
        // Text cannot join a tool message: flush and start a user message.
        results.push(result);
        result = {
          role: "user",
          content: content.text
        };
      } else {
        // Merge into the current user message, promoting a plain string
        // body to the array-of-parts form first.
        if (typeof result.content === "string") {
          result.content = [
            {
              type: "text",
              text: result.content
            }
          ];
        }
        if (!result.content) {
          result.content = [
            {
              type: "text",
              text: content.text
            }
          ];
        } else {
          result.content.push({
            type: "text",
            text: content.text
          });
        }
      }
    } else if (content.type === "tool_result") {
      if (!result) {
        result = {
          role: "tool",
          tool_call_id: content.tool_request_id,
          content: JSON.stringify(content.result)
        };
      } else if (result.role !== "tool" || result.tool_call_id !== content.tool_request_id) {
        // Different message kind or different tool call: flush and start a
        // fresh tool message.
        results.push(result);
        result = {
          role: "tool",
          tool_call_id: content.tool_request_id,
          content: JSON.stringify(content.result)
        };
      } else {
        // Additional result for the same tool call: append as a text part.
        if (typeof result.content === "string") {
          result.content = [
            {
              type: "text",
              text: result.content
            }
          ];
        }
        result.content.push({
          type: "text",
          text: JSON.stringify(content.result)
        });
      }
    } else if (content.type === "raw") {
      // Raw content carries JSON-encoded OpenAI message fields; overlay
      // them onto whatever is being built (or start from scratch).
      result = {
        ...result ?? {},
        ...JSON.parse(content.data)
      };
    }
  }
  if (result) {
    results.push(result);
  }
  return results;
}
436
/**
 * Converts an assistant engine message into a single OpenAI assistant chat
 * message, folding together text, refusal, tool-call, raw, and thinking
 * content entries.
 *
 * Text bodies start as a plain string and are promoted to the
 * array-of-parts form as soon as a second text-like entry arrives.
 * @throws {Error} on an unrecognized content type.
 */
function to_openai_assistant_message(message) {
  let result = {
    role: "assistant"
  };
  for (const content of message.content) {
    switch (content.type) {
      case "text":
        // Text that was tagged by from_openai_refusal round-trips back
        // into the `refusal` field rather than the content body.
        if (content.extended && content.extended.openai_refusal === true) {
          result.refusal = content.text;
        } else {
          if (typeof result.content === "string") {
            result.content = [
              {
                type: "text",
                text: result.content
              }
            ];
          }
          if (!result.content) {
            result.content = content.text;
          } else {
            result.content.push({
              type: "text",
              text: content.text
            });
          }
        }
        break;
      case "tool":
        if (!result.tool_calls) {
          result.tool_calls = [];
        }
        result.tool_calls.push({
          id: content.tool_request_id,
          type: "function",
          function: {
            name: content.tool,
            arguments: JSON.stringify(content.params)
          }
        });
        break;
      case "raw":
        if (content.model_kind !== "openai") {
          // Foreign raw content is kept as literal text in the body.
          if (typeof result.content === "string") {
            result.content = [
              {
                type: "text",
                text: result.content
              }
            ];
          }
          if (!result.content) {
            result.content = content.data;
          } else {
            result.content.push({
              type: "text",
              text: content.data
            });
          }
          break;
        }
        // OpenAI-kind raw content is JSON-decoded and overlaid onto the
        // message fields directly.
        result = {
          ...result,
          ...JSON.parse(content.data)
        };
        break;
      case "thinking":
        // Thinking is replayed as ordinary text content.
        if (typeof result.content === "string") {
          result.content = [
            {
              type: "text",
              text: result.content
            }
          ];
        }
        if (!result.content) {
          result.content = content.thought;
        } else {
          result.content.push({
            type: "text",
            text: content.thought
          });
        }
        break;
      default:
        throw new Error("unexpected content type");
    }
  }
  return result;
}
526
/** Maps OpenAI token-usage counters onto the provider-neutral usage shape. */
function from_openai_usage(usage) {
  const { prompt_tokens, completion_tokens } = usage ?? {};
  return {
    input_tokens: prompt_tokens,
    output_tokens: completion_tokens
  };
}
532
/**
 * Translates the first choice's finish reason into the engine's stop-reason
 * vocabulary. A completion with no choices yields a bare `"other"`.
 * (Only the first choice is ever consulted, matching how the final message
 * is produced from `choices[0]`.)
 */
function map_stop_reason(completion) {
  const [choice] = completion.choices;
  if (!choice) {
    return { stop_reason: "other" };
  }
  switch (choice.finish_reason) {
    case "stop":
      return { stop_reason: "end_turn" };
    case "length":
      return { stop_reason: "max_tokens" };
    case "tool_calls":
    case "function_call":
      return { stop_reason: "tool_use" };
    case "content_filter":
      return { stop_reason: "error", stop_details: choice.finish_reason };
    default:
      // Unknown reason: stringify it so nothing is silently dropped.
      return { stop_reason: "other", stop_details: `${choice.finish_reason}` };
  }
}
567
/**
 * Collapses OpenAI chat roles into the engine's two-role model: anything
 * user-originated (user/developer/system) maps to "user", model-originated
 * (assistant/tool) maps to "assistant".
 * @throws {Error} when the delta carries an unrecognized role.
 */
function map_role(choice) {
  const role = choice.delta.role;
  if (role === "user" || role === "developer" || role === "system") {
    return "user";
  }
  if (role === "assistant" || role === "tool") {
    return "assistant";
  }
  throw new Error("invalid role");
}
580
// Annotate the CommonJS export names for ESM import in node:
// (dead code by design: `0 &&` never executes; it exists only so Node's
// cjs-module-lexer can statically detect the named exports)
0 && (module.exports = {
  OpenAIProvider
});
//# sourceMappingURL=index.cjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/index.ts","../src/openai-provider.ts"],"sourcesContent":["export { OpenAIProvider, type OpenAIProviderConfig } from \"./openai-provider.ts\";\n","import { OpenAI } from \"openai\";\n\nimport type {\n AssistantContent,\n AssistantMessage,\n CancellationToken,\n CompletionResponseData,\n Content,\n Message,\n ModelProvider,\n ModelRequest,\n ParameterType,\n ProviderContextTransformer,\n StreamReceiver,\n ToolContent,\n ToolDefinition,\n Usage,\n} from \"@simulacra-ai/core\";\n\ntype Prettify<T> = { [K in keyof T]: T[K] } & {};\n\n/**\n * Configuration options for the OpenAI provider.\n */\nexport interface OpenAIProviderConfig extends Record<string, unknown> {\n /** The model identifier to use (e.g., \"gpt-4\", \"o1-preview\"). */\n model: string;\n /** The maximum number of tokens to generate in the response. */\n max_tokens?: number;\n}\n\n/**\n * Model provider implementation for OpenAI's chat completion models.\n *\n * This provider wraps the OpenAI SDK to provide streaming completions with support\n * for tool use and function calling. It handles message formatting, content streaming,\n * and usage tracking according to the ModelProvider interface. 
Supports both GPT models\n * (using system messages) and O-series models (using developer messages).\n */\nexport class OpenAIProvider implements ModelProvider {\n readonly #sdk: OpenAI;\n readonly #config: OpenAIProviderConfig;\n readonly context_transformers: ProviderContextTransformer[];\n\n /**\n * Creates a new OpenAI provider instance.\n *\n * @param sdk - The initialized OpenAI SDK client.\n * @param config - Configuration options for the provider.\n * @param context_transformers - Provider-level context transformers.\n */\n constructor(\n sdk: OpenAI,\n config: OpenAIProviderConfig,\n context_transformers: ProviderContextTransformer[] = [],\n ) {\n this.#sdk = sdk;\n this.#config = config;\n this.context_transformers = context_transformers;\n }\n\n /**\n * Executes a model request and streams the response through the provided receiver.\n *\n * @param request - The request containing messages, tools, and system prompt.\n * @param receiver - The receiver that handles streaming events.\n * @param cancellation - Token to signal cancellation of the request.\n * @returns A promise that resolves when the request completes.\n */\n async execute_request(\n request: ModelRequest,\n receiver: StreamReceiver,\n cancellation: CancellationToken,\n ): Promise<void> {\n const { model, max_tokens, ...api_extras } = this.#config;\n const params: OpenAI.ChatCompletionCreateParamsStreaming = {\n ...api_extras,\n model,\n stream: true,\n max_tokens,\n ...(request.tools.length > 0\n ? {\n tool_choice: \"auto\",\n tools: request.tools.map((t) => to_openai_tool(t)),\n }\n : {}),\n messages: [\n ...get_system_context(model, request.system),\n ...request.messages.flatMap((m) => to_openai_messages(m)),\n ],\n stream_options: {\n include_usage: true,\n },\n };\n\n receiver.before_request({ params });\n receiver.request_raw(params);\n\n const stream = await this.#sdk.chat.completions.create(params);\n\n // Intentionally not awaited. 
Streaming is event-driven through the receiver.\n // The policy wraps only connection establishment; chunk processing flows\n // asynchronously via StreamListener events back to the conversation.\n this.#stream_response(stream, receiver, cancellation);\n }\n\n /**\n * Creates a clone of this provider with the same configuration.\n *\n * @returns A new provider instance with identical configuration.\n */\n clone(): ModelProvider {\n return new OpenAIProvider(this.#sdk, this.#config, this.context_transformers);\n }\n\n async #stream_response(\n stream: AsyncIterable<OpenAI.Chat.Completions.ChatCompletionChunk>,\n receiver: StreamReceiver,\n cancellation: CancellationToken,\n ) {\n try {\n let response: OpenAI.Chat.Completions.ChatCompletionChunk | undefined;\n for await (const response_chunk of stream) {\n if (cancellation.is_cancellation_requested) {\n receiver.cancel();\n return;\n }\n receiver.stream_raw(response_chunk);\n\n const { choices: choices_chunk, ...rest } = response_chunk;\n response = {\n ...response,\n ...rest,\n choices: response?.choices ?? 
[],\n };\n\n for (const choice_chunk of choices_chunk) {\n if (!response.choices[choice_chunk.index]) {\n response.choices[choice_chunk.index] = choice_chunk;\n const message = from_openai_completion(response_chunk, choice_chunk);\n for (const content of message.content) {\n receiver.start_content({ content, message, usage: {} });\n }\n receiver.start_message({ message, usage: {} });\n continue;\n }\n\n const { delta: delta_chunk, ...rest } = choice_chunk;\n const choice = (response.choices[choice_chunk.index] = {\n ...response.choices[choice_chunk.index],\n ...rest,\n delta: {\n ...response.choices[choice_chunk.index]?.delta,\n },\n });\n\n if (delta_chunk.role) {\n choice.delta.role = delta_chunk.role;\n }\n if (delta_chunk.refusal) {\n if (!choice.delta.refusal) {\n choice.delta.refusal = \"\";\n }\n choice.delta.refusal += delta_chunk.refusal;\n }\n if (delta_chunk.content) {\n if (!choice.delta.content) {\n choice.delta.content = delta_chunk.content;\n receiver.start_content({\n content: from_openai_content(choice.delta) as AssistantContent,\n message: from_openai_completion(response_chunk, choice),\n usage: response?.usage ? from_openai_usage(response.usage) : {},\n });\n receiver.update_message({\n message: from_openai_completion(response_chunk, choice),\n usage: response?.usage ? from_openai_usage(response.usage) : {},\n });\n } else {\n choice.delta.content += delta_chunk.content;\n receiver.update_content({\n content: from_openai_content(choice.delta) as AssistantContent,\n message: from_openai_completion(response_chunk, choice),\n usage: response?.usage ? 
from_openai_usage(response.usage) : {},\n });\n }\n }\n if (delta_chunk.tool_calls) {\n if (!choice.delta.tool_calls) {\n choice.delta.tool_calls = [];\n }\n for (const tool_call_chunk of delta_chunk.tool_calls) {\n if (!choice.delta.tool_calls[tool_call_chunk.index]) {\n choice.delta.tool_calls[tool_call_chunk.index] = tool_call_chunk;\n receiver.start_content({\n content: from_openai_tool_call(tool_call_chunk),\n message: from_openai_completion(response_chunk, choice),\n usage: response?.usage ? from_openai_usage(response.usage) : {},\n });\n receiver.update_message({\n message: from_openai_completion(response_chunk, choice),\n usage: response?.usage ? from_openai_usage(response.usage) : {},\n });\n } else {\n const tool_call = choice.delta.tool_calls[tool_call_chunk.index];\n\n if (tool_call_chunk.id) {\n tool_call.id = tool_call_chunk.id;\n }\n if (tool_call_chunk.type) {\n tool_call.type = tool_call_chunk.type;\n }\n if (tool_call_chunk.function) {\n if (!tool_call.function) {\n tool_call.function = tool_call_chunk.function;\n } else {\n if (tool_call_chunk.function.name) {\n tool_call.function.name = tool_call_chunk.function.name;\n }\n if (tool_call_chunk.function.arguments) {\n if (!tool_call.function.arguments) {\n tool_call.function.arguments = \"\";\n }\n tool_call.function.arguments += tool_call_chunk.function.arguments;\n }\n }\n }\n receiver.update_content({\n content: from_openai_tool_call(tool_call),\n message: from_openai_completion(response_chunk, choice),\n usage: response?.usage ? from_openai_usage(response.usage) : {},\n });\n receiver.update_message({\n message: from_openai_completion(response_chunk, choice),\n usage: response?.usage ? from_openai_usage(response.usage) : {},\n });\n }\n }\n }\n }\n }\n if (!response || !response.choices?.[0]) {\n throw new Error(\"no data\");\n }\n receiver.response_raw({ ...response });\n\n const message = from_openai_completion(response, response.choices[0]);\n const usage = response?.usage ? 
from_openai_usage(response.usage) : {};\n for (const content of message.content) {\n receiver.complete_content({ content, message, usage });\n }\n receiver.complete_message({ message, usage, ...map_stop_reason(response) });\n } catch (error) {\n receiver.error(error);\n }\n }\n}\n\nfunction get_system_context(model: string, system?: string): OpenAI.ChatCompletionMessageParam[] {\n if (!system) {\n return [];\n }\n if (model.startsWith(\"gpt\")) {\n return [\n {\n role: \"system\",\n content: system,\n } as OpenAI.ChatCompletionSystemMessageParam,\n ];\n }\n return [\n {\n role: \"developer\",\n content: system,\n } as OpenAI.ChatCompletionDeveloperMessageParam,\n ];\n}\n\nfunction to_openai_tool(tool: ToolDefinition): OpenAI.Chat.ChatCompletionTool {\n function map_parameter_type(\n parameter: Prettify<ParameterType & { description?: string }>,\n ): OpenAI.FunctionParameters {\n switch (parameter.type) {\n case \"object\":\n return {\n type: parameter.required ? parameter.type : [parameter.type, \"null\"],\n description: parameter.description,\n properties: Object.fromEntries(\n Object.entries(parameter.properties).map(([k, v]) => [k, map_parameter_type(v)]),\n ),\n additionalProperties: false,\n required: Object.entries(parameter.properties).map(([k]) => k),\n };\n case \"array\":\n return {\n type: parameter.required ? parameter.type : [parameter.type, \"null\"],\n description: parameter.description,\n items: map_parameter_type(parameter.items),\n };\n default:\n return {\n type: parameter.required ? parameter.type : [parameter.type, \"null\"],\n description:\n parameter.default !== undefined\n ? parameter.description\n ? `${parameter.description} (default: ${parameter.default})`\n : `default: ${parameter.default}`\n : parameter.description,\n enum: \"enum\" in parameter ? 
parameter.enum : undefined,\n };\n }\n }\n return {\n type: \"function\",\n function: {\n name: tool.name,\n description: tool.description,\n parameters: map_parameter_type({\n type: \"object\",\n required: true,\n properties: Object.fromEntries(\n tool.parameters.map(({ name, ...parameter }) => [name, parameter]),\n ),\n }),\n strict: true,\n },\n };\n}\n\nfunction from_openai_completion(\n completion: OpenAI.Chat.Completions.ChatCompletionChunk,\n choice: OpenAI.Chat.Completions.ChatCompletionChunk.Choice,\n) {\n let contents: Content[] = [];\n for (const k in choice.delta) {\n const key = k as keyof typeof choice.delta;\n if (key === \"role\") {\n continue;\n }\n if (key === \"content\" && choice.delta.content) {\n contents = [...contents, from_openai_content(choice.delta)];\n } else if (key === \"refusal\" && choice.delta.refusal) {\n contents = [...contents, from_openai_refusal(choice.delta)];\n } else if (key === \"tool_calls\" && choice.delta.tool_calls) {\n contents = [...contents, ...choice.delta.tool_calls.map((t) => from_openai_tool_call(t))];\n } else if (choice.delta[key] !== undefined && choice.delta[key] !== null) {\n const { [key]: data } = choice.delta;\n contents = [\n ...contents,\n {\n type: \"raw\",\n model_kind: \"openai\",\n data: JSON.stringify({ [key]: data }),\n },\n ];\n }\n }\n return {\n id: completion.id,\n timestamp: completion.created,\n role: map_role(choice),\n content: contents,\n } as AssistantMessage;\n}\n\nfunction from_openai_refusal(content: OpenAI.Chat.Completions.ChatCompletionChunk.Choice.Delta) {\n const { refusal, tool_calls: _, function_call: __, content: ___, role: ____, ...rest } = content;\n return {\n type: \"text\",\n text: refusal,\n extended: {\n ...rest,\n openai_refusal: true,\n },\n } as Content;\n}\n\nfunction from_openai_content(content: OpenAI.Chat.Completions.ChatCompletionChunk.Choice.Delta) {\n const {\n content: c,\n tool_calls: _,\n function_call: __,\n refusal: ___,\n role: ____,\n ...rest\n } = 
content;\n return {\n type: \"text\",\n text: c,\n extended: rest,\n } as Content;\n}\n\nfunction from_openai_tool_call(\n tool_call: OpenAI.Chat.Completions.ChatCompletionChunk.Choice.Delta.ToolCall,\n) {\n const { id: tool_request_id, function: fn, type: _, index: __, ...extended } = tool_call;\n let params: unknown;\n try {\n params = JSON.parse(fn?.arguments ?? \"{}\");\n } catch {\n params = fn?.arguments;\n }\n return {\n tool_request_id,\n type: \"tool\",\n tool: fn?.name,\n params,\n extended,\n } as ToolContent;\n}\n\nfunction to_openai_messages(message: Message) {\n if (message.role === \"assistant\") {\n return [to_openai_assistant_message(message)];\n }\n // Partition content so tool_result blocks come before non-tool_result blocks.\n // OpenAI requires all tool-role messages immediately after the assistant message\n // containing the corresponding tool_calls; interleaving user messages between\n // tool messages causes a validation error.\n const tool_result_content = message.content.filter((c) => c.type === \"tool_result\");\n const other_content = message.content.filter((c) => c.type !== \"tool_result\");\n const ordered_content = [...tool_result_content, ...other_content];\n\n const results: OpenAI.ChatCompletionMessageParam[] = [];\n let result: OpenAI.ChatCompletionMessageParam | undefined;\n for (const content of ordered_content) {\n if (content.type === \"text\") {\n if (!result) {\n result = {\n role: \"user\",\n content: content.text,\n };\n } else if (result.role === \"tool\") {\n results.push(result);\n result = {\n role: \"user\",\n content: content.text,\n };\n } else {\n if (typeof result.content === \"string\") {\n result.content = [\n {\n type: \"text\",\n text: result.content,\n },\n ];\n }\n if (!result.content) {\n result.content = [\n {\n type: \"text\",\n text: content.text,\n },\n ];\n } else {\n result.content.push({\n type: \"text\",\n text: content.text,\n });\n }\n }\n } else if (content.type === \"tool_result\") {\n if 
(!result) {\n result = {\n role: \"tool\",\n tool_call_id: content.tool_request_id,\n content: JSON.stringify(content.result),\n };\n } else if (result.role !== \"tool\" || result.tool_call_id !== content.tool_request_id) {\n results.push(result);\n result = {\n role: \"tool\",\n tool_call_id: content.tool_request_id,\n content: JSON.stringify(content.result),\n };\n } else {\n if (typeof result.content === \"string\") {\n result.content = [\n {\n type: \"text\",\n text: result.content,\n },\n ];\n }\n result.content.push({\n type: \"text\",\n text: JSON.stringify(content.result),\n });\n }\n } else if (content.type === \"raw\") {\n result = {\n ...(result ?? {}),\n ...JSON.parse(content.data),\n };\n }\n }\n if (result) {\n results.push(result);\n }\n return results;\n}\n\nfunction to_openai_assistant_message(message: AssistantMessage) {\n let result: OpenAI.ChatCompletionAssistantMessageParam = {\n role: \"assistant\",\n };\n for (const content of message.content) {\n switch (content.type) {\n case \"text\":\n if (content.extended && content.extended.openai_refusal === true) {\n result.refusal = content.text;\n } else {\n if (typeof result.content === \"string\") {\n result.content = [\n {\n type: \"text\",\n text: result.content,\n },\n ];\n }\n if (!result.content) {\n result.content = content.text;\n } else {\n result.content.push({\n type: \"text\",\n text: content.text,\n });\n }\n }\n break;\n case \"tool\":\n if (!result.tool_calls) {\n result.tool_calls = [];\n }\n result.tool_calls.push({\n id: content.tool_request_id,\n type: \"function\",\n function: {\n name: content.tool,\n arguments: JSON.stringify(content.params),\n },\n });\n break;\n case \"raw\":\n if (content.model_kind !== \"openai\") {\n if (typeof result.content === \"string\") {\n result.content = [\n {\n type: \"text\",\n text: result.content,\n },\n ];\n }\n if (!result.content) {\n result.content = content.data;\n } else {\n result.content.push({\n type: \"text\",\n text: content.data,\n 
});\n }\n break;\n }\n result = {\n ...result,\n ...JSON.parse(content.data),\n };\n break;\n case \"thinking\":\n if (typeof result.content === \"string\") {\n result.content = [\n {\n type: \"text\",\n text: result.content,\n },\n ];\n }\n if (!result.content) {\n result.content = content.thought;\n } else {\n result.content.push({\n type: \"text\",\n text: content.thought,\n });\n }\n break;\n default:\n throw new Error(\"unexpected content type\");\n }\n }\n return result;\n}\n\nfunction from_openai_usage(usage: OpenAI.CompletionUsage | null | undefined) {\n return {\n input_tokens: usage?.prompt_tokens,\n output_tokens: usage?.completion_tokens,\n } as Usage;\n}\n\nfunction map_stop_reason(\n completion: OpenAI.ChatCompletionChunk,\n): Pick<CompletionResponseData, \"stop_reason\" | \"stop_details\"> {\n for (const choice of completion.choices) {\n switch (choice.finish_reason) {\n case \"content_filter\":\n return {\n stop_reason: \"error\",\n stop_details: choice.finish_reason,\n };\n case \"function_call\":\n return {\n stop_reason: \"tool_use\",\n };\n case \"length\":\n return {\n stop_reason: \"max_tokens\",\n };\n case \"stop\":\n return {\n stop_reason: \"end_turn\",\n };\n case \"tool_calls\":\n return {\n stop_reason: \"tool_use\",\n };\n default:\n return {\n stop_reason: \"other\",\n stop_details: `${choice.finish_reason}`,\n };\n }\n }\n return {\n stop_reason: \"other\",\n };\n}\n\nfunction map_role(choice: OpenAI.Chat.Completions.ChatCompletionChunk.Choice) {\n switch (choice.delta.role) {\n case \"user\":\n case \"developer\":\n case \"system\":\n return \"user\";\n case \"assistant\":\n case \"tool\":\n return \"assistant\";\n default:\n throw new Error(\"invalid role\");\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACuCO,IAAM,iBAAN,MAAM,gBAAwC;AAAA,EAC1C;AAAA,EACA;AAAA,EACA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAST,YACE,KACA,QACA,uBAAqD,CAAC,GACtD;AACA,SAAK,OAAO;AACZ,SAAK,UAAU;AACf,SAAK,uBAAuB;AAAA,EAC9B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,gBACJ,SACA,UACA,cACe;AACf,UAAM,EAAE,OAAO,YAAY,GAAG,WAAW,IAAI,KAAK;AAClD,UAAM,SAAqD;AAAA,MACzD,GAAG;AAAA,MACH;AAAA,MACA,QAAQ;AAAA,MACR;AAAA,MACA,GAAI,QAAQ,MAAM,SAAS,IACvB;AAAA,QACE,aAAa;AAAA,QACb,OAAO,QAAQ,MAAM,IAAI,CAAC,MAAM,eAAe,CAAC,CAAC;AAAA,MACnD,IACA,CAAC;AAAA,MACL,UAAU;AAAA,QACR,GAAG,mBAAmB,OAAO,QAAQ,MAAM;AAAA,QAC3C,GAAG,QAAQ,SAAS,QAAQ,CAAC,MAAM,mBAAmB,CAAC,CAAC;AAAA,MAC1D;AAAA,MACA,gBAAgB;AAAA,QACd,eAAe;AAAA,MACjB;AAAA,IACF;AAEA,aAAS,eAAe,EAAE,OAAO,CAAC;AAClC,aAAS,YAAY,MAAM;AAE3B,UAAM,SAAS,MAAM,KAAK,KAAK,KAAK,YAAY,OAAO,MAAM;AAK7D,SAAK,iBAAiB,QAAQ,UAAU,YAAY;AAAA,EACtD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,QAAuB;AACrB,WAAO,IAAI,gBAAe,KAAK,MAAM,KAAK,SAAS,KAAK,oBAAoB;AAAA,EAC9E;AAAA,EAEA,MAAM,iBACJ,QACA,UACA,cACA;AACA,QAAI;AACF,UAAI;AACJ,uBAAiB,kBAAkB,QAAQ;AACzC,YAAI,aAAa,2BAA2B;AAC1C,mBAAS,OAAO;AAChB;AAAA,QACF;AACA,iBAAS,WAAW,cAAc;AAElC,cAAM,EAAE,SAAS,eAAe,GAAG,KAAK,IAAI;AAC5C,mBAAW;AAAA,UACT,GAAG;AAAA,UACH,GAAG;AAAA,UACH,SAAS,UAAU,WAAW,CAAC;AAAA,QACjC;AAEA,mBAAW,gBAAgB,eAAe;AACxC,cAAI,CAAC,SAAS,QAAQ,aAAa,KAAK,GAAG;AACzC,qBAAS,QAAQ,aAAa,KAAK,IAAI;AACvC,kBAAMA,WAAU,uBAAuB,gBAAgB,YAAY;AACnE,uBAAW,WAAWA,SAAQ,SAAS;AACrC,uBAAS,cAAc,EAAE,SAAS,SAAAA,UAAS,OAAO,CAAC,EAAE,CAAC;AAAA,YACxD;AACA,qBAAS,cAAc,EAAE,SAAAA,UAAS,OAAO,CAAC,EAAE,CAAC;AAC7C;AAAA,UACF;AAEA,gBAAM,EAAE,OAAO,aAAa,GAAGC,MAAK,IAAI;AACxC,gBAAM,SAAU,SAAS,QAAQ,aAAa,KAAK,IAAI;AAAA,YACrD,GAAG,SAAS,QAAQ,aAAa,KAAK;AAAA,YACtC,GAAGA;AAAA,YACH,OAAO;AAAA,cACL,GAAG,SAAS,QAAQ,aAAa,KAAK,GAAG;AAAA,YAC3C;AAAA,UACF;AAEA,cAAI,YAAY,MAAM;AACpB,mBAAO,MAAM,OAAO,YAAY;AAAA,UAClC;AACA,cAAI,YAAY,SAAS;AACvB,gBAAI,CAAC,OAAO,MAAM,SAAS;AACzB,qBAAO,MAAM,UAAU;AAAA,YACzB;AACA,mBAAO,MAAM,WAAW,YAAY;AAAA,UACtC;AACA,cAAI,YAAY,SAAS;AA
CvB,gBAAI,CAAC,OAAO,MAAM,SAAS;AACzB,qBAAO,MAAM,UAAU,YAAY;AACnC,uBAAS,cAAc;AAAA,gBACrB,SAAS,oBAAoB,OAAO,KAAK;AAAA,gBACzC,SAAS,uBAAuB,gBAAgB,MAAM;AAAA,gBACtD,OAAO,UAAU,QAAQ,kBAAkB,SAAS,KAAK,IAAI,CAAC;AAAA,cAChE,CAAC;AACD,uBAAS,eAAe;AAAA,gBACtB,SAAS,uBAAuB,gBAAgB,MAAM;AAAA,gBACtD,OAAO,UAAU,QAAQ,kBAAkB,SAAS,KAAK,IAAI,CAAC;AAAA,cAChE,CAAC;AAAA,YACH,OAAO;AACL,qBAAO,MAAM,WAAW,YAAY;AACpC,uBAAS,eAAe;AAAA,gBACtB,SAAS,oBAAoB,OAAO,KAAK;AAAA,gBACzC,SAAS,uBAAuB,gBAAgB,MAAM;AAAA,gBACtD,OAAO,UAAU,QAAQ,kBAAkB,SAAS,KAAK,IAAI,CAAC;AAAA,cAChE,CAAC;AAAA,YACH;AAAA,UACF;AACA,cAAI,YAAY,YAAY;AAC1B,gBAAI,CAAC,OAAO,MAAM,YAAY;AAC5B,qBAAO,MAAM,aAAa,CAAC;AAAA,YAC7B;AACA,uBAAW,mBAAmB,YAAY,YAAY;AACpD,kBAAI,CAAC,OAAO,MAAM,WAAW,gBAAgB,KAAK,GAAG;AACnD,uBAAO,MAAM,WAAW,gBAAgB,KAAK,IAAI;AACjD,yBAAS,cAAc;AAAA,kBACrB,SAAS,sBAAsB,eAAe;AAAA,kBAC9C,SAAS,uBAAuB,gBAAgB,MAAM;AAAA,kBACtD,OAAO,UAAU,QAAQ,kBAAkB,SAAS,KAAK,IAAI,CAAC;AAAA,gBAChE,CAAC;AACD,yBAAS,eAAe;AAAA,kBACtB,SAAS,uBAAuB,gBAAgB,MAAM;AAAA,kBACtD,OAAO,UAAU,QAAQ,kBAAkB,SAAS,KAAK,IAAI,CAAC;AAAA,gBAChE,CAAC;AAAA,cACH,OAAO;AACL,sBAAM,YAAY,OAAO,MAAM,WAAW,gBAAgB,KAAK;AAE/D,oBAAI,gBAAgB,IAAI;AACtB,4BAAU,KAAK,gBAAgB;AAAA,gBACjC;AACA,oBAAI,gBAAgB,MAAM;AACxB,4BAAU,OAAO,gBAAgB;AAAA,gBACnC;AACA,oBAAI,gBAAgB,UAAU;AAC5B,sBAAI,CAAC,UAAU,UAAU;AACvB,8BAAU,WAAW,gBAAgB;AAAA,kBACvC,OAAO;AACL,wBAAI,gBAAgB,SAAS,MAAM;AACjC,gCAAU,SAAS,OAAO,gBAAgB,SAAS;AAAA,oBACrD;AACA,wBAAI,gBAAgB,SAAS,WAAW;AACtC,0BAAI,CAAC,UAAU,SAAS,WAAW;AACjC,kCAAU,SAAS,YAAY;AAAA,sBACjC;AACA,gCAAU,SAAS,aAAa,gBAAgB,SAAS;AAAA,oBAC3D;AAAA,kBACF;AAAA,gBACF;AACA,yBAAS,eAAe;AAAA,kBACtB,SAAS,sBAAsB,SAAS;AAAA,kBACxC,SAAS,uBAAuB,gBAAgB,MAAM;AAAA,kBACtD,OAAO,UAAU,QAAQ,kBAAkB,SAAS,KAAK,IAAI,CAAC;AAAA,gBAChE,CAAC;AACD,yBAAS,eAAe;AAAA,kBACtB,SAAS,uBAAuB,gBAAgB,MAAM;AAAA,kBACtD,OAAO,UAAU,QAAQ,kBAAkB,SAAS,KAAK,IAAI,CAAC;AAAA,gBAChE,CAAC;AAAA,cACH;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,MACF;AACA,UAAI,CAAC,YAAY,CAAC,SAAS,UAAU,CAAC,GAAG;AACvC,cAAM,IAAI,MAAM,SAAS;AAAA,MAC3B;AACA,eAAS,aAAa,EAAE,GAAG,SAAS,CAAC;AAErC,YAAM,UAAU,uBA
AuB,UAAU,SAAS,QAAQ,CAAC,CAAC;AACpE,YAAM,QAAQ,UAAU,QAAQ,kBAAkB,SAAS,KAAK,IAAI,CAAC;AACrE,iBAAW,WAAW,QAAQ,SAAS;AACrC,iBAAS,iBAAiB,EAAE,SAAS,SAAS,MAAM,CAAC;AAAA,MACvD;AACA,eAAS,iBAAiB,EAAE,SAAS,OAAO,GAAG,gBAAgB,QAAQ,EAAE,CAAC;AAAA,IAC5E,SAAS,OAAO;AACd,eAAS,MAAM,KAAK;AAAA,IACtB;AAAA,EACF;AACF;AAEA,SAAS,mBAAmB,OAAe,QAAsD;AAC/F,MAAI,CAAC,QAAQ;AACX,WAAO,CAAC;AAAA,EACV;AACA,MAAI,MAAM,WAAW,KAAK,GAAG;AAC3B,WAAO;AAAA,MACL;AAAA,QACE,MAAM;AAAA,QACN,SAAS;AAAA,MACX;AAAA,IACF;AAAA,EACF;AACA,SAAO;AAAA,IACL;AAAA,MACE,MAAM;AAAA,MACN,SAAS;AAAA,IACX;AAAA,EACF;AACF;AAEA,SAAS,eAAe,MAAsD;AAC5E,WAAS,mBACP,WAC2B;AAC3B,YAAQ,UAAU,MAAM;AAAA,MACtB,KAAK;AACH,eAAO;AAAA,UACL,MAAM,UAAU,WAAW,UAAU,OAAO,CAAC,UAAU,MAAM,MAAM;AAAA,UACnE,aAAa,UAAU;AAAA,UACvB,YAAY,OAAO;AAAA,YACjB,OAAO,QAAQ,UAAU,UAAU,EAAE,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,mBAAmB,CAAC,CAAC,CAAC;AAAA,UACjF;AAAA,UACA,sBAAsB;AAAA,UACtB,UAAU,OAAO,QAAQ,UAAU,UAAU,EAAE,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC;AAAA,QAC/D;AAAA,MACF,KAAK;AACH,eAAO;AAAA,UACL,MAAM,UAAU,WAAW,UAAU,OAAO,CAAC,UAAU,MAAM,MAAM;AAAA,UACnE,aAAa,UAAU;AAAA,UACvB,OAAO,mBAAmB,UAAU,KAAK;AAAA,QAC3C;AAAA,MACF;AACE,eAAO;AAAA,UACL,MAAM,UAAU,WAAW,UAAU,OAAO,CAAC,UAAU,MAAM,MAAM;AAAA,UACnE,aACE,UAAU,YAAY,SAClB,UAAU,cACR,GAAG,UAAU,WAAW,cAAc,UAAU,OAAO,MACvD,YAAY,UAAU,OAAO,KAC/B,UAAU;AAAA,UAChB,MAAM,UAAU,YAAY,UAAU,OAAO;AAAA,QAC/C;AAAA,IACJ;AAAA,EACF;AACA,SAAO;AAAA,IACL,MAAM;AAAA,IACN,UAAU;AAAA,MACR,MAAM,KAAK;AAAA,MACX,aAAa,KAAK;AAAA,MAClB,YAAY,mBAAmB;AAAA,QAC7B,MAAM;AAAA,QACN,UAAU;AAAA,QACV,YAAY,OAAO;AAAA,UACjB,KAAK,WAAW,IAAI,CAAC,EAAE,MAAM,GAAG,UAAU,MAAM,CAAC,MAAM,SAAS,CAAC;AAAA,QACnE;AAAA,MACF,CAAC;AAAA,MACD,QAAQ;AAAA,IACV;AAAA,EACF;AACF;AAEA,SAAS,uBACP,YACA,QACA;AACA,MAAI,WAAsB,CAAC;AAC3B,aAAW,KAAK,OAAO,OAAO;AAC5B,UAAM,MAAM;AACZ,QAAI,QAAQ,QAAQ;AAClB;AAAA,IACF;AACA,QAAI,QAAQ,aAAa,OAAO,MAAM,SAAS;AAC7C,iBAAW,CAAC,GAAG,UAAU,oBAAoB,OAAO,KAAK,CAAC;AAAA,IAC5D,WAAW,QAAQ,aAAa,OAAO,MAAM,SAAS;AACpD,iBAAW,CAAC,GAAG,UAAU,oBAAoB,OAAO,KAAK,CAAC;AAAA,IAC5D,WAAW,QAAQ,gBAAgB,OAAO,MAAM,YAAY;AAC1D,iBAAW,CAAC,GAAG,UAA
U,GAAG,OAAO,MAAM,WAAW,IAAI,CAAC,MAAM,sBAAsB,CAAC,CAAC,CAAC;AAAA,IAC1F,WAAW,OAAO,MAAM,GAAG,MAAM,UAAa,OAAO,MAAM,GAAG,MAAM,MAAM;AACxE,YAAM,EAAE,CAAC,GAAG,GAAG,KAAK,IAAI,OAAO;AAC/B,iBAAW;AAAA,QACT,GAAG;AAAA,QACH;AAAA,UACE,MAAM;AAAA,UACN,YAAY;AAAA,UACZ,MAAM,KAAK,UAAU,EAAE,CAAC,GAAG,GAAG,KAAK,CAAC;AAAA,QACtC;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACA,SAAO;AAAA,IACL,IAAI,WAAW;AAAA,IACf,WAAW,WAAW;AAAA,IACtB,MAAM,SAAS,MAAM;AAAA,IACrB,SAAS;AAAA,EACX;AACF;AAEA,SAAS,oBAAoB,SAAmE;AAC9F,QAAM,EAAE,SAAS,YAAY,GAAG,eAAe,IAAI,SAAS,KAAK,MAAM,MAAM,GAAG,KAAK,IAAI;AACzF,SAAO;AAAA,IACL,MAAM;AAAA,IACN,MAAM;AAAA,IACN,UAAU;AAAA,MACR,GAAG;AAAA,MACH,gBAAgB;AAAA,IAClB;AAAA,EACF;AACF;AAEA,SAAS,oBAAoB,SAAmE;AAC9F,QAAM;AAAA,IACJ,SAAS;AAAA,IACT,YAAY;AAAA,IACZ,eAAe;AAAA,IACf,SAAS;AAAA,IACT,MAAM;AAAA,IACN,GAAG;AAAA,EACL,IAAI;AACJ,SAAO;AAAA,IACL,MAAM;AAAA,IACN,MAAM;AAAA,IACN,UAAU;AAAA,EACZ;AACF;AAEA,SAAS,sBACP,WACA;AACA,QAAM,EAAE,IAAI,iBAAiB,UAAU,IAAI,MAAM,GAAG,OAAO,IAAI,GAAG,SAAS,IAAI;AAC/E,MAAI;AACJ,MAAI;AACF,aAAS,KAAK,MAAM,IAAI,aAAa,IAAI;AAAA,EAC3C,QAAQ;AACN,aAAS,IAAI;AAAA,EACf;AACA,SAAO;AAAA,IACL;AAAA,IACA,MAAM;AAAA,IACN,MAAM,IAAI;AAAA,IACV;AAAA,IACA;AAAA,EACF;AACF;AAEA,SAAS,mBAAmB,SAAkB;AAC5C,MAAI,QAAQ,SAAS,aAAa;AAChC,WAAO,CAAC,4BAA4B,OAAO,CAAC;AAAA,EAC9C;AAKA,QAAM,sBAAsB,QAAQ,QAAQ,OAAO,CAAC,MAAM,EAAE,SAAS,aAAa;AAClF,QAAM,gBAAgB,QAAQ,QAAQ,OAAO,CAAC,MAAM,EAAE,SAAS,aAAa;AAC5E,QAAM,kBAAkB,CAAC,GAAG,qBAAqB,GAAG,aAAa;AAEjE,QAAM,UAA+C,CAAC;AACtD,MAAI;AACJ,aAAW,WAAW,iBAAiB;AACrC,QAAI,QAAQ,SAAS,QAAQ;AAC3B,UAAI,CAAC,QAAQ;AACX,iBAAS;AAAA,UACP,MAAM;AAAA,UACN,SAAS,QAAQ;AAAA,QACnB;AAAA,MACF,WAAW,OAAO,SAAS,QAAQ;AACjC,gBAAQ,KAAK,MAAM;AACnB,iBAAS;AAAA,UACP,MAAM;AAAA,UACN,SAAS,QAAQ;AAAA,QACnB;AAAA,MACF,OAAO;AACL,YAAI,OAAO,OAAO,YAAY,UAAU;AACtC,iBAAO,UAAU;AAAA,YACf;AAAA,cACE,MAAM;AAAA,cACN,MAAM,OAAO;AAAA,YACf;AAAA,UACF;AAAA,QACF;AACA,YAAI,CAAC,OAAO,SAAS;AACnB,iBAAO,UAAU;AAAA,YACf;AAAA,cACE,MAAM;AAAA,cACN,MAAM,QAAQ;AAAA,YAChB;AAAA,UACF;AAAA,QACF,OAAO;AACL,iBAAO,QAAQ,KAAK;AAAA,YAClB,MAAM;AAAA,YACN,MAAM,QAAQ;AAAA,UAChB,CAAC;AA
AA,QACH;AAAA,MACF;AAAA,IACF,WAAW,QAAQ,SAAS,eAAe;AACzC,UAAI,CAAC,QAAQ;AACX,iBAAS;AAAA,UACP,MAAM;AAAA,UACN,cAAc,QAAQ;AAAA,UACtB,SAAS,KAAK,UAAU,QAAQ,MAAM;AAAA,QACxC;AAAA,MACF,WAAW,OAAO,SAAS,UAAU,OAAO,iBAAiB,QAAQ,iBAAiB;AACpF,gBAAQ,KAAK,MAAM;AACnB,iBAAS;AAAA,UACP,MAAM;AAAA,UACN,cAAc,QAAQ;AAAA,UACtB,SAAS,KAAK,UAAU,QAAQ,MAAM;AAAA,QACxC;AAAA,MACF,OAAO;AACL,YAAI,OAAO,OAAO,YAAY,UAAU;AACtC,iBAAO,UAAU;AAAA,YACf;AAAA,cACE,MAAM;AAAA,cACN,MAAM,OAAO;AAAA,YACf;AAAA,UACF;AAAA,QACF;AACA,eAAO,QAAQ,KAAK;AAAA,UAClB,MAAM;AAAA,UACN,MAAM,KAAK,UAAU,QAAQ,MAAM;AAAA,QACrC,CAAC;AAAA,MACH;AAAA,IACF,WAAW,QAAQ,SAAS,OAAO;AACjC,eAAS;AAAA,QACP,GAAI,UAAU,CAAC;AAAA,QACf,GAAG,KAAK,MAAM,QAAQ,IAAI;AAAA,MAC5B;AAAA,IACF;AAAA,EACF;AACA,MAAI,QAAQ;AACV,YAAQ,KAAK,MAAM;AAAA,EACrB;AACA,SAAO;AACT;AAEA,SAAS,4BAA4B,SAA2B;AAC9D,MAAI,SAAqD;AAAA,IACvD,MAAM;AAAA,EACR;AACA,aAAW,WAAW,QAAQ,SAAS;AACrC,YAAQ,QAAQ,MAAM;AAAA,MACpB,KAAK;AACH,YAAI,QAAQ,YAAY,QAAQ,SAAS,mBAAmB,MAAM;AAChE,iBAAO,UAAU,QAAQ;AAAA,QAC3B,OAAO;AACL,cAAI,OAAO,OAAO,YAAY,UAAU;AACtC,mBAAO,UAAU;AAAA,cACf;AAAA,gBACE,MAAM;AAAA,gBACN,MAAM,OAAO;AAAA,cACf;AAAA,YACF;AAAA,UACF;AACA,cAAI,CAAC,OAAO,SAAS;AACnB,mBAAO,UAAU,QAAQ;AAAA,UAC3B,OAAO;AACL,mBAAO,QAAQ,KAAK;AAAA,cAClB,MAAM;AAAA,cACN,MAAM,QAAQ;AAAA,YAChB,CAAC;AAAA,UACH;AAAA,QACF;AACA;AAAA,MACF,KAAK;AACH,YAAI,CAAC,OAAO,YAAY;AACtB,iBAAO,aAAa,CAAC;AAAA,QACvB;AACA,eAAO,WAAW,KAAK;AAAA,UACrB,IAAI,QAAQ;AAAA,UACZ,MAAM;AAAA,UACN,UAAU;AAAA,YACR,MAAM,QAAQ;AAAA,YACd,WAAW,KAAK,UAAU,QAAQ,MAAM;AAAA,UAC1C;AAAA,QACF,CAAC;AACD;AAAA,MACF,KAAK;AACH,YAAI,QAAQ,eAAe,UAAU;AACnC,cAAI,OAAO,OAAO,YAAY,UAAU;AACtC,mBAAO,UAAU;AAAA,cACf;AAAA,gBACE,MAAM;AAAA,gBACN,MAAM,OAAO;AAAA,cACf;AAAA,YACF;AAAA,UACF;AACA,cAAI,CAAC,OAAO,SAAS;AACnB,mBAAO,UAAU,QAAQ;AAAA,UAC3B,OAAO;AACL,mBAAO,QAAQ,KAAK;AAAA,cAClB,MAAM;AAAA,cACN,MAAM,QAAQ;AAAA,YAChB,CAAC;AAAA,UACH;AACA;AAAA,QACF;AACA,iBAAS;AAAA,UACP,GAAG;AAAA,UACH,GAAG,KAAK,MAAM,QAAQ,IAAI;AAAA,QAC5B;AACA;AAAA,MACF,KAAK;AACH,YAAI,OAAO,OAAO,YAAY,UAAU;AACtC,iBAAO,UAAU;AAAA,YACf;AAAA,cACE,MAAM;AAAA,cACN,MAAM,OAAO;AAAA,YAC
f;AAAA,UACF;AAAA,QACF;AACA,YAAI,CAAC,OAAO,SAAS;AACnB,iBAAO,UAAU,QAAQ;AAAA,QAC3B,OAAO;AACL,iBAAO,QAAQ,KAAK;AAAA,YAClB,MAAM;AAAA,YACN,MAAM,QAAQ;AAAA,UAChB,CAAC;AAAA,QACH;AACA;AAAA,MACF;AACE,cAAM,IAAI,MAAM,yBAAyB;AAAA,IAC7C;AAAA,EACF;AACA,SAAO;AACT;AAEA,SAAS,kBAAkB,OAAkD;AAC3E,SAAO;AAAA,IACL,cAAc,OAAO;AAAA,IACrB,eAAe,OAAO;AAAA,EACxB;AACF;AAEA,SAAS,gBACP,YAC8D;AAC9D,aAAW,UAAU,WAAW,SAAS;AACvC,YAAQ,OAAO,eAAe;AAAA,MAC5B,KAAK;AACH,eAAO;AAAA,UACL,aAAa;AAAA,UACb,cAAc,OAAO;AAAA,QACvB;AAAA,MACF,KAAK;AACH,eAAO;AAAA,UACL,aAAa;AAAA,QACf;AAAA,MACF,KAAK;AACH,eAAO;AAAA,UACL,aAAa;AAAA,QACf;AAAA,MACF,KAAK;AACH,eAAO;AAAA,UACL,aAAa;AAAA,QACf;AAAA,MACF,KAAK;AACH,eAAO;AAAA,UACL,aAAa;AAAA,QACf;AAAA,MACF;AACE,eAAO;AAAA,UACL,aAAa;AAAA,UACb,cAAc,GAAG,OAAO,aAAa;AAAA,QACvC;AAAA,IACJ;AAAA,EACF;AACA,SAAO;AAAA,IACL,aAAa;AAAA,EACf;AACF;AAEA,SAAS,SAAS,QAA4D;AAC5E,UAAQ,OAAO,MAAM,MAAM;AAAA,IACzB,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT;AACE,YAAM,IAAI,MAAM,cAAc;AAAA,EAClC;AACF;","names":["message","rest"]}