@amux.ai/adapter-zhipu 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,137 @@
1
+ import { LLMAdapter, ToolCall, JSONSchema } from '@amux.ai/llm-bridge';
2
+
3
+ /**
4
+ * Zhipu AI adapter implementation
5
+ * Zhipu API is OpenAI-compatible with some Zhipu-specific features
6
+ */
7
+ declare const zhipuAdapter: LLMAdapter;
8
+
9
+ /**
10
+ * Zhipu message format (OpenAI-compatible)
11
+ */
12
+ interface ZhipuMessage {
13
+ role: 'system' | 'user' | 'assistant' | 'tool';
14
+ content?: string | null;
15
+ name?: string;
16
+ tool_calls?: ToolCall[];
17
+ tool_call_id?: string;
18
+ }
19
+ /**
20
+ * Zhipu tool format (same as OpenAI)
21
+ */
22
+ interface ZhipuTool {
23
+ type: 'function';
24
+ function: {
25
+ name: string;
26
+ description?: string;
27
+ parameters?: JSONSchema;
28
+ };
29
+ }
30
+ /**
31
+ * Zhipu response format configuration
32
+ */
33
+ interface ZhipuResponseFormat {
34
+ type: 'text' | 'json_object';
35
+ }
36
+ /**
37
+ * Zhipu stream options
38
+ */
39
+ interface ZhipuStreamOptions {
40
+ include_usage?: boolean;
41
+ }
42
+ /**
43
+ * Zhipu request format
44
+ * Based on OpenAI with some Zhipu-specific features
45
+ */
46
+ interface ZhipuRequest {
47
+ model: string;
48
+ messages: ZhipuMessage[];
49
+ tools?: ZhipuTool[];
50
+ tool_choice?: 'auto' | 'none' | {
51
+ type: 'function';
52
+ function: {
53
+ name: string;
54
+ };
55
+ };
56
+ stream?: boolean;
57
+ stream_options?: ZhipuStreamOptions;
58
+ temperature?: number;
59
+ top_p?: number;
60
+ max_tokens?: number;
61
+ stop?: string | string[];
62
+ presence_penalty?: number;
63
+ frequency_penalty?: number;
64
+ n?: number;
65
+ response_format?: ZhipuResponseFormat;
66
+ do_sample?: boolean;
67
+ request_id?: string;
68
+ user_id?: string;
69
+ }
70
+ /**
71
+ * Zhipu response usage
72
+ */
73
+ interface ZhipuUsage {
74
+ prompt_tokens: number;
75
+ completion_tokens: number;
76
+ total_tokens: number;
77
+ }
78
+ /**
79
+ * Zhipu response format
80
+ */
81
+ interface ZhipuResponse {
82
+ id: string;
83
+ object: string;
84
+ created: number;
85
+ model: string;
86
+ choices: Array<{
87
+ index: number;
88
+ message: {
89
+ role: string;
90
+ content: string | null;
91
+ tool_calls?: ToolCall[];
92
+ };
93
+ finish_reason: string;
94
+ }>;
95
+ usage?: ZhipuUsage;
96
+ request_id?: string;
97
+ }
98
+ /**
99
+ * Zhipu stream chunk format
100
+ */
101
+ interface ZhipuStreamChunk {
102
+ id: string;
103
+ object: string;
104
+ created: number;
105
+ model: string;
106
+ choices: Array<{
107
+ index: number;
108
+ delta: {
109
+ role?: string;
110
+ content?: string;
111
+ tool_calls?: Array<{
112
+ index: number;
113
+ id?: string;
114
+ type?: string;
115
+ function?: {
116
+ name?: string;
117
+ arguments?: string;
118
+ };
119
+ }>;
120
+ };
121
+ finish_reason?: string | null;
122
+ }>;
123
+ usage?: ZhipuUsage;
124
+ }
125
+ /**
126
+ * Zhipu error format
127
+ */
128
+ interface ZhipuError {
129
+ error: {
130
+ message: string;
131
+ type: string;
132
+ param?: string;
133
+ code?: string;
134
+ };
135
+ }
136
+
137
+ export { type ZhipuError, type ZhipuMessage, type ZhipuRequest, type ZhipuResponse, type ZhipuResponseFormat, type ZhipuStreamChunk, type ZhipuStreamOptions, type ZhipuTool, type ZhipuUsage, zhipuAdapter };
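Taken together, these declarations describe the OpenAI-compatible wire format the adapter accepts and emits. As a rough, non-authoritative sketch, a payload that satisfies them might look as follows; the get_weather tool is invented for illustration, 'glm-4.7' mirrors the default model applied by the request builder in dist/index.js below, and it is assumed that the JSONSchema type from @amux.ai/llm-bridge accepts a plain JSON Schema object literal:

import type { ZhipuRequest } from '@amux.ai/adapter-zhipu';

// Illustrative payload only; the get_weather tool is a made-up example.
const request: ZhipuRequest = {
  model: 'glm-4.7',
  messages: [
    { role: 'system', content: 'You are a weather assistant.' },
    { role: 'user', content: 'What is the weather in Beijing?' },
  ],
  tools: [
    {
      type: 'function',
      function: {
        name: 'get_weather',
        description: 'Look up current weather for a city',
        // Assumption: JSONSchema permits a standard JSON Schema object literal.
        parameters: {
          type: 'object',
          properties: { city: { type: 'string' } },
          required: ['city'],
        },
      },
    },
  ],
  tool_choice: 'auto',
  stream: true,
  stream_options: { include_usage: true },
  temperature: 0.7,
};

The same shape round-trips through parseRequest in dist/index.js, which lifts system messages out of the messages array into the IR's system field.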
package/dist/index.js ADDED
@@ -0,0 +1,474 @@
1
+ import { contentToString, parseOpenAICompatibleError, mapFinishReason, parseOpenAIUsage } from '@amux.ai/llm-bridge';
2
+
3
+ // src/inbound/request-parser.ts
4
+ function parseRequest(request) {
5
+ const req = request;
6
+ let system;
7
+ const messages = [];
8
+ for (const msg of req.messages) {
9
+ if (msg.role === "system") {
10
+ if (typeof msg.content === "string") {
11
+ system = system ? `${system}
12
+ ${msg.content}` : msg.content;
13
+ }
14
+ } else {
15
+ messages.push(parseMessage(msg));
16
+ }
17
+ }
18
+ const tools = req.tools?.map((tool) => parseTool(tool));
19
+ const toolChoice = req.tool_choice ? parseToolChoice(req.tool_choice) : void 0;
20
+ return {
21
+ messages,
22
+ model: req.model,
23
+ tools,
24
+ toolChoice,
25
+ stream: req.stream,
26
+ system,
27
+ generation: {
28
+ temperature: req.temperature,
29
+ topP: req.top_p,
30
+ maxTokens: req.max_tokens,
31
+ stopSequences: req.stop ? Array.isArray(req.stop) ? req.stop : [req.stop] : void 0,
32
+ presencePenalty: req.presence_penalty,
33
+ frequencyPenalty: req.frequency_penalty,
34
+ n: req.n,
35
+ responseFormat: req.response_format ? { type: req.response_format.type } : void 0
36
+ },
37
+ raw: request
38
+ };
39
+ }
40
+ function parseMessage(msg) {
41
+ return {
42
+ role: msg.role,
43
+ content: msg.content ?? "",
44
+ name: msg.name,
45
+ toolCalls: msg.tool_calls,
46
+ toolCallId: msg.tool_call_id
47
+ };
48
+ }
49
+ function parseTool(tool) {
50
+ return {
51
+ type: "function",
52
+ function: {
53
+ name: tool.function.name,
54
+ description: tool.function.description,
55
+ parameters: tool.function.parameters
56
+ }
57
+ };
58
+ }
59
+ function parseToolChoice(choice) {
60
+ if (typeof choice === "string") {
61
+ return choice;
62
+ }
63
+ return {
64
+ type: "function",
65
+ function: {
66
+ name: choice.function.name
67
+ }
68
+ };
69
+ }
70
+ function parseResponse(response) {
71
+ const res = response;
72
+ const choices = res.choices.map((choice) => ({
73
+ index: choice.index,
74
+ message: {
75
+ role: choice.message.role,
76
+ content: choice.message.content ?? "",
77
+ toolCalls: choice.message.tool_calls
78
+ },
79
+ finishReason: mapFinishReason(choice.finish_reason)
80
+ }));
81
+ return {
82
+ id: res.id,
83
+ model: res.model,
84
+ choices,
85
+ created: res.created,
86
+ usage: parseOpenAIUsage(res.usage),
87
+ raw: response
88
+ };
89
+ }
90
+
91
+ // src/inbound/stream-parser.ts
92
+ function mapFinishReason2(reason) {
93
+ const reasonMap = {
94
+ stop: "stop",
95
+ length: "length",
96
+ tool_calls: "tool_calls",
97
+ content_filter: "content_filter",
98
+ sensitive: "content_filter"
99
+ // Zhipu-specific
100
+ };
101
+ return reasonMap[reason] ?? "stop";
102
+ }
103
+ function parseStream(chunk) {
104
+ const data = chunk;
105
+ if (!data.choices || data.choices.length === 0) {
106
+ if (data.usage) {
107
+ return {
108
+ type: "end",
109
+ id: data.id,
110
+ model: data.model,
111
+ usage: {
112
+ promptTokens: data.usage.prompt_tokens,
113
+ completionTokens: data.usage.completion_tokens,
114
+ totalTokens: data.usage.total_tokens
115
+ },
116
+ raw: chunk
117
+ };
118
+ }
119
+ return null;
120
+ }
121
+ const choice = data.choices[0];
122
+ if (!choice) return null;
123
+ const delta = choice.delta;
124
+ if (choice.index === 0 && !delta.content && !delta.tool_calls && !choice.finish_reason) {
125
+ return {
126
+ type: "start",
127
+ id: data.id,
128
+ model: data.model,
129
+ raw: chunk
130
+ };
131
+ }
132
+ if (delta.content) {
133
+ return {
134
+ type: "content",
135
+ id: data.id,
136
+ model: data.model,
137
+ content: {
138
+ type: "content",
139
+ delta: delta.content,
140
+ index: choice.index
141
+ },
142
+ raw: chunk
143
+ };
144
+ }
145
+ if (delta.tool_calls && delta.tool_calls.length > 0) {
146
+ const toolCall = delta.tool_calls[0];
147
+ if (toolCall) {
148
+ return {
149
+ type: "tool_call",
150
+ id: data.id,
151
+ model: data.model,
152
+ toolCall: {
153
+ type: "tool_call",
154
+ id: toolCall.id,
155
+ name: toolCall.function?.name,
156
+ arguments: toolCall.function?.arguments,
157
+ index: toolCall.index
158
+ },
159
+ raw: chunk
160
+ };
161
+ }
162
+ }
163
+ if (choice.finish_reason) {
164
+ return {
165
+ type: "end",
166
+ id: data.id,
167
+ model: data.model,
168
+ finishReason: mapFinishReason2(choice.finish_reason),
169
+ usage: data.usage ? {
170
+ promptTokens: data.usage.prompt_tokens,
171
+ completionTokens: data.usage.completion_tokens,
172
+ totalTokens: data.usage.total_tokens
173
+ } : void 0,
174
+ raw: chunk
175
+ };
176
+ }
177
+ return null;
178
+ }
179
+ function parseError(error) {
180
+ return parseOpenAICompatibleError(error);
181
+ }
182
+
183
+ // src/outbound/request-builder.ts
184
+ function buildRequest(ir) {
185
+ const messages = [];
186
+ if (ir.system) {
187
+ messages.push({
188
+ role: "system",
189
+ content: ir.system
190
+ });
191
+ }
192
+ for (const msg of ir.messages) {
193
+ messages.push({
194
+ role: msg.role,
195
+ content: buildContent(msg.content),
196
+ name: msg.name,
197
+ tool_calls: msg.toolCalls,
198
+ tool_call_id: msg.toolCallId
199
+ });
200
+ }
201
+ const request = {
202
+ model: ir.model ?? "glm-4.7",
203
+ messages,
204
+ stream: ir.stream
205
+ };
206
+ if (ir.tools && ir.tools.length > 0) {
207
+ request.tools = ir.tools.map((tool) => ({
208
+ type: "function",
209
+ function: {
210
+ name: tool.function.name,
211
+ description: tool.function.description,
212
+ parameters: tool.function.parameters
213
+ }
214
+ }));
215
+ }
216
+ if (ir.toolChoice && ir.toolChoice !== "required") {
217
+ request.tool_choice = ir.toolChoice;
218
+ }
219
+ if (ir.generation) {
220
+ if (ir.generation.temperature !== void 0) {
221
+ request.temperature = ir.generation.temperature;
222
+ }
223
+ if (ir.generation.topP !== void 0) {
224
+ request.top_p = ir.generation.topP;
225
+ }
226
+ if (ir.generation.maxTokens !== void 0) {
227
+ request.max_tokens = ir.generation.maxTokens;
228
+ }
229
+ if (ir.generation.stopSequences && ir.generation.stopSequences.length > 0) {
230
+ request.stop = ir.generation.stopSequences;
231
+ }
232
+ if (ir.generation.presencePenalty !== void 0) {
233
+ request.presence_penalty = ir.generation.presencePenalty;
234
+ }
235
+ if (ir.generation.frequencyPenalty !== void 0) {
236
+ request.frequency_penalty = ir.generation.frequencyPenalty;
237
+ }
238
+ if (ir.generation.n !== void 0) {
239
+ request.n = ir.generation.n;
240
+ }
241
+ if (ir.generation.responseFormat) {
242
+ if (ir.generation.responseFormat.type === "json_object") {
243
+ request.response_format = { type: "json_object" };
244
+ }
245
+ }
246
+ }
247
+ if (ir.stream) {
248
+ request.stream_options = { include_usage: true };
249
+ }
250
+ return request;
251
+ }
252
+ function buildContent(content) {
253
+ if (typeof content === "string") {
254
+ return content || null;
255
+ }
256
+ if (!content || content.length === 0) {
257
+ return null;
258
+ }
259
+ return content.filter((part) => part.type === "text").map((part) => part.type === "text" ? part.text : "").join("");
260
+ }
261
+ function buildResponse(ir) {
262
+ return {
263
+ id: ir.id,
264
+ object: "chat.completion",
265
+ created: ir.created ?? Math.floor(Date.now() / 1e3),
266
+ model: ir.model,
267
+ choices: ir.choices.map((choice) => ({
268
+ index: choice.index,
269
+ message: {
270
+ role: choice.message.role,
271
+ content: contentToString(choice.message.content),
272
+ tool_calls: choice.message.toolCalls
273
+ },
274
+ finish_reason: choice.finishReason ?? "stop"
275
+ })),
276
+ usage: ir.usage ? {
277
+ prompt_tokens: ir.usage.promptTokens,
278
+ completion_tokens: ir.usage.completionTokens,
279
+ total_tokens: ir.usage.totalTokens
280
+ } : void 0
281
+ };
282
+ }
283
+
284
+ // src/outbound/stream-builder.ts
285
+ function createStreamBuilder() {
286
+ let chunkId = `chatcmpl-${Date.now()}`;
287
+ let model = "";
288
+ let created = Math.floor(Date.now() / 1e3);
289
+ const toolCallsState = /* @__PURE__ */ new Map();
290
+ return {
291
+ process(event) {
292
+ const events = [];
293
+ if (event.id) chunkId = event.id;
294
+ if (event.model) model = event.model;
295
+ if (event.type === "start") {
296
+ events.push({
297
+ event: "data",
298
+ data: {
299
+ id: chunkId,
300
+ object: "chat.completion.chunk",
301
+ created,
302
+ model,
303
+ choices: [{
304
+ index: 0,
305
+ delta: { role: "assistant", content: "" },
306
+ finish_reason: null
307
+ }]
308
+ }
309
+ });
310
+ }
311
+ if (event.type === "content" && event.content?.delta) {
312
+ events.push({
313
+ event: "data",
314
+ data: {
315
+ id: chunkId,
316
+ object: "chat.completion.chunk",
317
+ created,
318
+ model,
319
+ choices: [{
320
+ index: 0,
321
+ delta: { content: event.content.delta },
322
+ finish_reason: null
323
+ }]
324
+ }
325
+ });
326
+ }
327
+ if (event.type === "tool_call" && event.toolCall) {
328
+ const toolIndex = event.toolCall.index ?? 0;
329
+ const toolCallDelta = { index: toolIndex };
330
+ if (event.toolCall.name) {
331
+ toolCallDelta.id = event.toolCall.id || `call_${Date.now()}_${toolIndex}`;
332
+ toolCallDelta.type = "function";
333
+ toolCallDelta.function = { name: event.toolCall.name };
334
+ toolCallsState.set(toolIndex, {
335
+ id: toolCallDelta.id,
336
+ name: event.toolCall.name
337
+ });
338
+ }
339
+ if (event.toolCall.arguments) {
340
+ toolCallDelta.function = {
341
+ ...toolCallDelta.function,
342
+ arguments: event.toolCall.arguments
343
+ };
344
+ }
345
+ events.push({
346
+ event: "data",
347
+ data: {
348
+ id: chunkId,
349
+ object: "chat.completion.chunk",
350
+ created,
351
+ model,
352
+ choices: [{
353
+ index: 0,
354
+ delta: { tool_calls: [toolCallDelta] },
355
+ finish_reason: null
356
+ }]
357
+ }
358
+ });
359
+ }
360
+ if (event.type === "end") {
361
+ const finishReason = mapFinishReason3(event.finishReason);
362
+ const finalChunk = {
363
+ id: chunkId,
364
+ object: "chat.completion.chunk",
365
+ created,
366
+ model,
367
+ choices: [{
368
+ index: 0,
369
+ delta: {},
370
+ finish_reason: finishReason
371
+ }]
372
+ };
373
+ if (event.usage) {
374
+ finalChunk.usage = {
375
+ prompt_tokens: event.usage.promptTokens ?? 0,
376
+ completion_tokens: event.usage.completionTokens ?? 0,
377
+ total_tokens: event.usage.totalTokens ?? 0
378
+ };
379
+ }
380
+ events.push({ event: "data", data: finalChunk });
381
+ }
382
+ if (event.type === "error" && event.error) {
383
+ events.push({
384
+ event: "data",
385
+ data: {
386
+ error: {
387
+ message: event.error.message,
388
+ type: "server_error",
389
+ code: event.error.code
390
+ }
391
+ }
392
+ });
393
+ }
394
+ return events;
395
+ },
396
+ finalize() {
397
+ return [{ event: "data", data: "[DONE]" }];
398
+ }
399
+ };
400
+ }
401
+ function mapFinishReason3(reason) {
402
+ if (!reason) return "stop";
403
+ const reasonMap = {
404
+ stop: "stop",
405
+ length: "length",
406
+ tool_calls: "tool_calls",
407
+ content_filter: "content_filter",
408
+ end_turn: "stop",
409
+ max_tokens: "length",
410
+ sensitive: "content_filter"
411
+ // Zhipu-specific
412
+ };
413
+ return reasonMap[reason] ?? "stop";
414
+ }
415
+
416
+ // src/adapter.ts
417
+ var zhipuAdapter = {
418
+ name: "zhipu",
419
+ version: "1.0.0",
420
+ capabilities: {
421
+ streaming: true,
422
+ tools: true,
423
+ vision: true,
424
+ // GLM-4V supports vision
425
+ multimodal: true,
426
+ systemPrompt: true,
427
+ toolChoice: true,
428
+ reasoning: false,
429
+ webSearch: true,
430
+ // Zhipu supports web search
431
+ jsonMode: true,
432
+ logprobs: false,
433
+ seed: false
434
+ },
435
+ inbound: {
436
+ parseRequest: (request) => {
437
+ return parseRequest(request);
438
+ },
439
+ parseResponse: (response) => {
440
+ return parseResponse(response);
441
+ },
442
+ parseStream: (chunk) => {
443
+ return parseStream(chunk);
444
+ },
445
+ parseError: (error) => {
446
+ return parseError(error);
447
+ }
448
+ },
449
+ outbound: {
450
+ buildRequest: (ir) => {
451
+ return buildRequest(ir);
452
+ },
453
+ buildResponse: (ir) => {
454
+ return buildResponse(ir);
455
+ },
456
+ createStreamBuilder
457
+ },
458
+ getInfo() {
459
+ return {
460
+ name: this.name,
461
+ version: this.version,
462
+ capabilities: this.capabilities,
463
+ endpoint: {
464
+ baseUrl: "https://open.bigmodel.cn/api/paas",
465
+ chatPath: "/v4/chat/completions",
466
+ modelsPath: "/v4/models"
467
+ }
468
+ };
469
+ }
470
+ };
471
+
472
+ export { zhipuAdapter };
473
+ //# sourceMappingURL=index.js.map
474
+ //# sourceMappingURL=index.js.map
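A minimal, non-authoritative sketch of driving the adapter end to end: build a Zhipu request from a small IR, POST it to the endpoint reported by getInfo(), and normalize the provider's JSON back into the bridge IR. The IR literal assumes that system/messages/stream are sufficient fields for @amux.ai/llm-bridge's LLMRequestIR, and ZHIPU_API_KEY is a placeholder environment variable:

import { zhipuAdapter } from '@amux.ai/adapter-zhipu';

// Sketch only. Assumes a minimal LLMRequestIR (system/messages/stream) is
// accepted; ZHIPU_API_KEY is a placeholder. Omitting `model` lets
// buildRequest fall back to its 'glm-4.7' default.
async function main() {
  const { endpoint } = zhipuAdapter.getInfo();

  // IR -> Zhipu wire format (the system prompt becomes the first messages entry).
  const body = zhipuAdapter.outbound.buildRequest({
    system: 'You are a concise assistant.',
    messages: [{ role: 'user', content: 'Say hello in one sentence.' }],
    stream: false,
  });

  const res = await fetch(`${endpoint.baseUrl}${endpoint.chatPath}`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${process.env.ZHIPU_API_KEY}`,
    },
    body: JSON.stringify(body),
  });

  // Zhipu response -> bridge IR (finish_reason mapped, usage normalized).
  const ir = zhipuAdapter.inbound.parseResponse(await res.json());
  console.log(ir.choices[0]?.message.content);
}

main().catch(console.error);

The opposite direction, inbound.parseRequest plus outbound.buildResponse, would serve callers that already speak the Zhipu/OpenAI wire format.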
package/dist/index.js.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/inbound/request-parser.ts","../src/inbound/response-parser.ts","../src/inbound/stream-parser.ts","../src/inbound/error-parser.ts","../src/outbound/request-builder.ts","../src/outbound/response-builder.ts","../src/outbound/stream-builder.ts","../src/adapter.ts"],"names":["mapFinishReason"],"mappings":";;;AAYO,SAAS,aAAa,OAAA,EAAgC;AAC3D,EAAA,MAAM,GAAA,GAAM,OAAA;AAGZ,EAAA,IAAI,MAAA;AACJ,EAAA,MAAM,WAAsB,EAAC;AAE7B,EAAA,KAAA,MAAW,GAAA,IAAO,IAAI,QAAA,EAAU;AAC9B,IAAA,IAAI,GAAA,CAAI,SAAS,QAAA,EAAU;AACzB,MAAA,IAAI,OAAO,GAAA,CAAI,OAAA,KAAY,QAAA,EAAU;AACnC,QAAA,MAAA,GAAS,MAAA,GAAS,GAAG,MAAM;AAAA,EAAK,GAAA,CAAI,OAAO,CAAA,CAAA,GAAK,GAAA,CAAI,OAAA;AAAA,MACtD;AAAA,IACF,CAAA,MAAO;AACL,MAAA,QAAA,CAAS,IAAA,CAAK,YAAA,CAAa,GAAG,CAAC,CAAA;AAAA,IACjC;AAAA,EACF;AAGA,EAAA,MAAM,KAAA,GAA4B,IAAI,KAAA,EAAO,GAAA,CAAI,CAAC,IAAA,KAAS,SAAA,CAAU,IAAI,CAAC,CAAA;AAG1E,EAAA,MAAM,aAAqC,GAAA,CAAI,WAAA,GAC3C,eAAA,CAAgB,GAAA,CAAI,WAAW,CAAA,GAC/B,MAAA;AAEJ,EAAA,OAAO;AAAA,IACL,QAAA;AAAA,IACA,OAAO,GAAA,CAAI,KAAA;AAAA,IACX,KAAA;AAAA,IACA,UAAA;AAAA,IACA,QAAQ,GAAA,CAAI,MAAA;AAAA,IACZ,MAAA;AAAA,IACA,UAAA,EAAY;AAAA,MACV,aAAa,GAAA,CAAI,WAAA;AAAA,MACjB,MAAM,GAAA,CAAI,KAAA;AAAA,MACV,WAAW,GAAA,CAAI,UAAA;AAAA,MACf,aAAA,EAAe,GAAA,CAAI,IAAA,GACf,KAAA,CAAM,OAAA,CAAQ,GAAA,CAAI,IAAI,CAAA,GACpB,GAAA,CAAI,IAAA,GACJ,CAAC,GAAA,CAAI,IAAI,CAAA,GACX,MAAA;AAAA,MACJ,iBAAiB,GAAA,CAAI,gBAAA;AAAA,MACrB,kBAAkB,GAAA,CAAI,iBAAA;AAAA,MACtB,GAAG,GAAA,CAAI,CAAA;AAAA,MACP,cAAA,EAAgB,IAAI,eAAA,GAChB,EAAE,MAAM,GAAA,CAAI,eAAA,CAAgB,MAAK,GACjC;AAAA,KACN;AAAA,IACA,GAAA,EAAK;AAAA,GACP;AACF;AAEA,SAAS,aAAa,GAAA,EAA4B;AAChD,EAAA,OAAO;AAAA,IACL,MAAM,GAAA,CAAI,IAAA;AAAA,IACV,OAAA,EAAS,IAAI,OAAA,IAAW,EAAA;AAAA,IACxB,MAAM,GAAA,CAAI,IAAA;AAAA,IACV,WAAW,GAAA,CAAI,UAAA;AAAA,IACf,YAAY,GAAA,CAAI;AAAA,GAClB;AACF;AAEA,SAAS,UAAU,IAAA,EAAuB;AACxC,EAAA,OAAO;AAAA,IACL,IAAA,EAAM,UAAA;AAAA,IACN,QAAA,EAAU;AAAA,MACR,IAAA,EAAM,KAAK,QAAA,CAAS,IAAA;AAAA,MACpB,WAAA,EAAa,KAAK,QAAA,CAAS,WAAA;AAAA,MAC3B,UAAA,EAAY,KAAK,QAAA,CAAS;AAAA;AAC5B,GACF;AACF;AAEA,SAAS,gBACP,MAAA,EACY;AACZ,EAAA,IAAI,OAAO,WAAW,QAAA,EAAU;AAC9B,IAAA,OAAO,MAAA;AAAA,EACT;AACA,EAAA,OAAO;AAAA,IACL,IAAA,EAAM,UAAA;AAAA,IACN,QAAA,EAAU;AAAA,MACR,IAAA,EAAM,OAAO,QAAA,CAAS;AAAA;AACxB,GACF;AACF;ACzFO,SAAS,cAAc,QAAA,EAAkC;AAC9D,EAAA,MAAM,GAAA,GAAM,QAAA;AAEZ,EAAA,MAAM,OAAA,GAAoB,GAAA,CAAI,OAAA,CAAQ,GAAA,CAAI,CAAC,MAAA,MAAY;AAAA,IACrD,OAAO,MAAA,CAAO,KAAA;AAAA,IACd,OAAA,EAAS;AAAA,MACP,IAAA,EAAM,OAAO,OAAA,CAAQ,IAAA;AAAA,MACrB,OAAA,EAAS,MAAA,CAAO,OAAA,CAAQ,OAAA,IAAW,EAAA;AAAA,MACnC,SAAA,EAAW,OAAO,OAAA,CAAQ;AAAA,KAC5B;AAAA,IACA,YAAA,EAAc,eAAA,CAAgB,MAAA,CAAO,aAAa;AAAA,GACpD,CAAE,CAAA;AAEF,EAAA,OAAO;AAAA,IACL,IAAI,GAAA,CAAI,EAAA;AAAA,IACR,OAAO,GAAA,CAAI,KAAA;AAAA,IACX,OAAA;AAAA,IACA,SAAS,GAAA,CAAI,OAAA;AAAA,IACb,KAAA,EAAO,gBAAA,CAAiB,GAAA,CAAI,KAAK,CAAA;AAAA,IACjC,GAAA,EAAK;AAAA,GACP;AACF;;;ACtBA,SAASA,iBAAgB,MAAA,EAA8B;AACrD,EAAA,MAAM,SAAA,GAA0C;AAAA,IAC9C,IAAA,EAAM,MAAA;AAAA,IACN,MAAA,EAAQ,QAAA;AAAA,IACR,UAAA,EAAY,YAAA;AAAA,IACZ,cAAA,EAAgB,gBAAA;AAAA,IAChB,SAAA,EAAW;AAAA;AAAA,GACb;AACA,EAAA,OAAO,SAAA,CAAU,MAAM,CAAA,IAAK,MAAA;AAC9B;AAKO,SAAS,YACd,KAAA,EAC0C;AAC1C,EAAA,MAAM,IAAA,GAAO,KAAA;AAEb,EAAA,IAAI,CAAC,IAAA,CAAK,OAAA,IAAW,IAAA,CAAK,OAAA,CAAQ,WAAW,CAAA,EAAG;AAE9C,IAAA,IAAI,KAAK,KAAA,EAAO;AACd,MAAA,OAAO;AAAA,QACL,IAAA,EAAM,KAAA;AAAA,QACN,IAAI,IAAA,CAAK,EAAA;AAAA,QACT,OAAO,IAAA,CAAK,KAAA;AAAA,QACZ,KAAA,EAAO;AAAA,UACL,YAAA,EAAc,KAAK,KAAA,CAAM,aAAA;AAAA,UACzB,gBAAA,EAAkB,KAAK,KAAA,CAAM,iBAAA;AAAA,UAC7B,WAAA,EAAa,KAAK,KAAA,CAAM;AAAA,SAC1B;AAAA,QACA,GAAA,EAAK;AAAA,OACP;AAAA,IACF;AACA,IAAA,OAAO,IAAA;AAAA,EACT;AAEA,EAAA
,MAAM,MAAA,GAAS,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA;AAC7B,EAAA,IAAI,CAAC,QAAQ,OAAO,IAAA;AAEpB,EAAA,MAAM,QAAQ,MAAA,CAAO,KAAA;AAGrB,EAAA,IAAI,MAAA,CAAO,KAAA,KAAU,CAAA,IAAK,CAAC,KAAA,CAAM,OAAA,IAAW,CAAC,KAAA,CAAM,UAAA,IAAc,CAAC,MAAA,CAAO,aAAA,EAAe;AACtF,IAAA,OAAO;AAAA,MACL,IAAA,EAAM,OAAA;AAAA,MACN,IAAI,IAAA,CAAK,EAAA;AAAA,MACT,OAAO,IAAA,CAAK,KAAA;AAAA,MACZ,GAAA,EAAK;AAAA,KACP;AAAA,EACF;AAGA,EAAA,IAAI,MAAM,OAAA,EAAS;AACjB,IAAA,OAAO;AAAA,MACL,IAAA,EAAM,SAAA;AAAA,MACN,IAAI,IAAA,CAAK,EAAA;AAAA,MACT,OAAO,IAAA,CAAK,KAAA;AAAA,MACZ,OAAA,EAAS;AAAA,QACP,IAAA,EAAM,SAAA;AAAA,QACN,OAAO,KAAA,CAAM,OAAA;AAAA,QACb,OAAO,MAAA,CAAO;AAAA,OAChB;AAAA,MACA,GAAA,EAAK;AAAA,KACP;AAAA,EACF;AAGA,EAAA,IAAI,KAAA,CAAM,UAAA,IAAc,KAAA,CAAM,UAAA,CAAW,SAAS,CAAA,EAAG;AACnD,IAAA,MAAM,QAAA,GAAW,KAAA,CAAM,UAAA,CAAW,CAAC,CAAA;AACnC,IAAA,IAAI,QAAA,EAAU;AACZ,MAAA,OAAO;AAAA,QACL,IAAA,EAAM,WAAA;AAAA,QACN,IAAI,IAAA,CAAK,EAAA;AAAA,QACT,OAAO,IAAA,CAAK,KAAA;AAAA,QACZ,QAAA,EAAU;AAAA,UACR,IAAA,EAAM,WAAA;AAAA,UACN,IAAI,QAAA,CAAS,EAAA;AAAA,UACb,IAAA,EAAM,SAAS,QAAA,EAAU,IAAA;AAAA,UACzB,SAAA,EAAW,SAAS,QAAA,EAAU,SAAA;AAAA,UAC9B,OAAO,QAAA,CAAS;AAAA,SAClB;AAAA,QACA,GAAA,EAAK;AAAA,OACP;AAAA,IACF;AAAA,EACF;AAGA,EAAA,IAAI,OAAO,aAAA,EAAe;AACxB,IAAA,OAAO;AAAA,MACL,IAAA,EAAM,KAAA;AAAA,MACN,IAAI,IAAA,CAAK,EAAA;AAAA,MACT,OAAO,IAAA,CAAK,KAAA;AAAA,MACZ,YAAA,EAAcA,gBAAAA,CAAgB,MAAA,CAAO,aAAa,CAAA;AAAA,MAClD,KAAA,EAAO,KAAK,KAAA,GACR;AAAA,QACE,YAAA,EAAc,KAAK,KAAA,CAAM,aAAA;AAAA,QACzB,gBAAA,EAAkB,KAAK,KAAA,CAAM,iBAAA;AAAA,QAC7B,WAAA,EAAa,KAAK,KAAA,CAAM;AAAA,OAC1B,GACA,MAAA;AAAA,MACJ,GAAA,EAAK;AAAA,KACP;AAAA,EACF;AAEA,EAAA,OAAO,IAAA;AACT;AC3GO,SAAS,WAAW,KAAA,EAA4B;AACrD,EAAA,OAAO,2BAA2B,KAAK,CAAA;AACzC;;;ACDO,SAAS,aAAa,EAAA,EAAgC;AAC3D,EAAA,MAAM,WAA2B,EAAC;AAGlC,EAAA,IAAI,GAAG,MAAA,EAAQ;AACb,IAAA,QAAA,CAAS,IAAA,CAAK;AAAA,MACZ,IAAA,EAAM,QAAA;AAAA,MACN,SAAS,EAAA,CAAG;AAAA,KACb,CAAA;AAAA,EACH;AAGA,EAAA,KAAA,MAAW,GAAA,IAAO,GAAG,QAAA,EAAU;AAC7B,IAAA,QAAA,CAAS,IAAA,CAAK;AAAA,MACZ,MAAM,GAAA,CAAI,IAAA;AAAA,MACV,OAAA,EAAS,YAAA,CAAa,GAAA,CAAI,OAAO,CAAA;AAAA,MACjC,MAAM,GAAA,CAAI,IAAA;AAAA,MACV,YAAY,GAAA,CAAI,SAAA;AAAA,MAChB,cAAc,GAAA,CAAI;AAAA,KACnB,CAAA;AAAA,EACH;AAEA,EAAA,MAAM,OAAA,GAAwB;AAAA,IAC5B,KAAA,EAAO,GAAG,KAAA,IAAS,SAAA;AAAA,IACnB,QAAA;AAAA,IACA,QAAQ,EAAA,CAAG;AAAA,GACb;AAGA,EAAA,IAAI,EAAA,CAAG,KAAA,IAAS,EAAA,CAAG,KAAA,CAAM,SAAS,CAAA,EAAG;AACnC,IAAA,OAAA,CAAQ,KAAA,GAAQ,EAAA,CAAG,KAAA,CAAM,GAAA,CAAI,CAAC,IAAA,MAAU;AAAA,MACtC,IAAA,EAAM,UAAA;AAAA,MACN,QAAA,EAAU;AAAA,QACR,IAAA,EAAM,KAAK,QAAA,CAAS,IAAA;AAAA,QACpB,WAAA,EAAa,KAAK,QAAA,CAAS,WAAA;AAAA,QAC3B,UAAA,EAAY,KAAK,QAAA,CAAS;AAAA;AAC5B,KACF,CAAE,CAAA;AAAA,EACJ;AAGA,EAAA,IAAI,EAAA,CAAG,UAAA,IAAc,EAAA,CAAG,UAAA,KAAe,UAAA,EAAY;AACjD,IAAA,OAAA,CAAQ,cAAc,EAAA,CAAG,UAAA;AAAA,EAC3B;AAGA,EAAA,IAAI,GAAG,UAAA,EAAY;AACjB,IAAA,IAAI,EAAA,CAAG,UAAA,CAAW,WAAA,KAAgB,MAAA,EAAW;AAC3C,MAAA,OAAA,CAAQ,WAAA,GAAc,GAAG,UAAA,CAAW,WAAA;AAAA,IACtC;AACA,IAAA,IAAI,EAAA,CAAG,UAAA,CAAW,IAAA,KAAS,MAAA,EAAW;AACpC,MAAA,OAAA,CAAQ,KAAA,GAAQ,GAAG,UAAA,CAAW,IAAA;AAAA,IAChC;AACA,IAAA,IAAI,EAAA,CAAG,UAAA,CAAW,SAAA,KAAc,MAAA,EAAW;AACzC,MAAA,OAAA,CAAQ,UAAA,GAAa,GAAG,UAAA,CAAW,SAAA;AAAA,IACrC;AACA,IAAA,IAAI,GAAG,UAAA,CAAW,aAAA,IAAiB,GAAG,UAAA,CAAW,aAAA,CAAc,SAAS,CAAA,EAAG;AACzE,MAAA,OAAA,CAAQ,IAAA,GAAO,GAAG,UAAA,CAAW,aAAA;AAAA,IAC/B;AACA,IAAA,IAAI,EAAA,CAAG,UAAA,CAAW,eAAA,KAAoB,MAAA,EAAW;AAC/C,MAAA,OAAA,CAAQ,gBAAA,GAAmB,GAAG,UAAA,CAAW,eAAA;AAAA,IAC3C;AACA,IAAA,IAAI,EAAA,CAAG,UAAA,CAAW,gBAAA,KAAqB,MAAA,EAAW;AAChD,MAAA,OAAA,CAAQ,iBAAA,GAAoB,GAAG,UAAA,CAAW,gBAAA;AAAA,IAC5C;AACA,IAAA,IAAI,EAAA,CAAG,UAAA,CAAW,CAAA,KAAM,MAAA,EAAW;AACjC,MAAA,OAAA,CAAQ,CAAA,GAAI,GAAG,UAAA,C
AAW,CAAA;AAAA,IAC5B;AACA,IAAA,IAAI,EAAA,CAAG,WAAW,cAAA,EAAgB;AAChC,MAAA,IAAI,EAAA,CAAG,UAAA,CAAW,cAAA,CAAe,IAAA,KAAS,aAAA,EAAe;AACvD,QAAA,OAAA,CAAQ,eAAA,GAAkB,EAAE,IAAA,EAAM,aAAA,EAAc;AAAA,MAClD;AAAA,IACF;AAAA,EACF;AAGA,EAAA,IAAI,GAAG,MAAA,EAAQ;AACb,IAAA,OAAA,CAAQ,cAAA,GAAiB,EAAE,aAAA,EAAe,IAAA,EAAK;AAAA,EACjD;AAEA,EAAA,OAAO,OAAA;AACT;AAEA,SAAS,aAAa,OAAA,EAAgD;AACpE,EAAA,IAAI,OAAO,YAAY,QAAA,EAAU;AAC/B,IAAA,OAAO,OAAA,IAAW,IAAA;AAAA,EACpB;AAEA,EAAA,IAAI,CAAC,OAAA,IAAW,OAAA,CAAQ,MAAA,KAAW,CAAA,EAAG;AACpC,IAAA,OAAO,IAAA;AAAA,EACT;AAIA,EAAA,OAAO,QACJ,MAAA,CAAO,CAAC,SAAS,IAAA,CAAK,IAAA,KAAS,MAAM,CAAA,CACrC,GAAA,CAAI,CAAC,IAAA,KAAU,IAAA,CAAK,SAAS,MAAA,GAAS,IAAA,CAAK,OAAO,EAAG,CAAA,CACrD,KAAK,EAAE,CAAA;AACZ;ACjGO,SAAS,cAAc,EAAA,EAAkC;AAC9D,EAAA,OAAO;AAAA,IACL,IAAI,EAAA,CAAG,EAAA;AAAA,IACP,MAAA,EAAQ,iBAAA;AAAA,IACR,OAAA,EAAS,GAAG,OAAA,IAAW,IAAA,CAAK,MAAM,IAAA,CAAK,GAAA,KAAQ,GAAI,CAAA;AAAA,IACnD,OAAO,EAAA,CAAG,KAAA;AAAA,IACV,OAAA,EAAS,EAAA,CAAG,OAAA,CAAQ,GAAA,CAAI,CAAC,MAAA,MAAY;AAAA,MACnC,OAAO,MAAA,CAAO,KAAA;AAAA,MACd,OAAA,EAAS;AAAA,QACP,IAAA,EAAM,OAAO,OAAA,CAAQ,IAAA;AAAA,QACrB,OAAA,EAAS,eAAA,CAAgB,MAAA,CAAO,OAAA,CAAQ,OAAO,CAAA;AAAA,QAC/C,UAAA,EAAY,OAAO,OAAA,CAAQ;AAAA,OAC7B;AAAA,MACA,aAAA,EAAe,OAAO,YAAA,IAAgB;AAAA,KACxC,CAAE,CAAA;AAAA,IACF,KAAA,EAAO,GAAG,KAAA,GACN;AAAA,MACE,aAAA,EAAe,GAAG,KAAA,CAAM,YAAA;AAAA,MACxB,iBAAA,EAAmB,GAAG,KAAA,CAAM,gBAAA;AAAA,MAC5B,YAAA,EAAc,GAAG,KAAA,CAAM;AAAA,KACzB,GACA;AAAA,GACN;AACF;;;ACpBO,SAAS,mBAAA,GAA0C;AACxD,EAAA,IAAI,OAAA,GAAU,CAAA,SAAA,EAAY,IAAA,CAAK,GAAA,EAAK,CAAA,CAAA;AACpC,EAAA,IAAI,KAAA,GAAQ,EAAA;AACZ,EAAA,IAAI,UAAU,IAAA,CAAK,KAAA,CAAM,IAAA,CAAK,GAAA,KAAQ,GAAI,CAAA;AAC1C,EAAA,MAAM,cAAA,uBAAgE,GAAA,EAAI;AAE1E,EAAA,OAAO;AAAA,IACL,QAAQ,KAAA,EAAmC;AACzC,MAAA,MAAM,SAAqB,EAAC;AAG5B,MAAA,IAAI,KAAA,CAAM,EAAA,EAAI,OAAA,GAAU,KAAA,CAAM,EAAA;AAC9B,MAAA,IAAI,KAAA,CAAM,KAAA,EAAO,KAAA,GAAQ,KAAA,CAAM,KAAA;AAG/B,MAAA,IAAI,KAAA,CAAM,SAAS,OAAA,EAAS;AAG1B,QAAA,MAAA,CAAO,IAAA,CAAK;AAAA,UACV,KAAA,EAAO,MAAA;AAAA,UACP,IAAA,EAAM;AAAA,YACJ,EAAA,EAAI,OAAA;AAAA,YACJ,MAAA,EAAQ,uBAAA;AAAA,YACR,OAAA;AAAA,YACA,KAAA;AAAA,YACA,SAAS,CAAC;AAAA,cACR,KAAA,EAAO,CAAA;AAAA,cACP,KAAA,EAAO,EAAE,IAAA,EAAM,WAAA,EAAa,SAAS,EAAA,EAAG;AAAA,cACxC,aAAA,EAAe;AAAA,aAChB;AAAA;AACH,SACD,CAAA;AAAA,MACH;AAGA,MAAA,IAAI,KAAA,CAAM,IAAA,KAAS,SAAA,IAAa,KAAA,CAAM,SAAS,KAAA,EAAO;AACpD,QAAA,MAAA,CAAO,IAAA,CAAK;AAAA,UACV,KAAA,EAAO,MAAA;AAAA,UACP,IAAA,EAAM;AAAA,YACJ,EAAA,EAAI,OAAA;AAAA,YACJ,MAAA,EAAQ,uBAAA;AAAA,YACR,OAAA;AAAA,YACA,KAAA;AAAA,YACA,SAAS,CAAC;AAAA,cACR,KAAA,EAAO,CAAA;AAAA,cACP,KAAA,EAAO,EAAE,OAAA,EAAS,KAAA,CAAM,QAAQ,KAAA,EAAM;AAAA,cACtC,aAAA,EAAe;AAAA,aAChB;AAAA;AACH,SACD,CAAA;AAAA,MACH;AAGA,MAAA,IAAI,KAAA,CAAM,IAAA,KAAS,WAAA,IAAe,KAAA,CAAM,QAAA,EAAU;AAChD,QAAA,MAAM,SAAA,GAAY,KAAA,CAAM,QAAA,CAAS,KAAA,IAAS,CAAA;AAC1C,QAAA,MAAM,aAAA,GAKF,EAAE,KAAA,EAAO,SAAA,EAAU;AAGvB,QAAA,IAAI,KAAA,CAAM,SAAS,IAAA,EAAM;AACvB,UAAA,aAAA,CAAc,EAAA,GAAK,MAAM,QAAA,CAAS,EAAA,IAAM,QAAQ,IAAA,CAAK,GAAA,EAAK,CAAA,CAAA,EAAI,SAAS,CAAA,CAAA;AACvE,UAAA,aAAA,CAAc,IAAA,GAAO,UAAA;AACrB,UAAA,aAAA,CAAc,QAAA,GAAW,EAAE,IAAA,EAAM,KAAA,CAAM,SAAS,IAAA,EAAK;AACrD,UAAA,cAAA,CAAe,IAAI,SAAA,EAAW;AAAA,YAC5B,IAAI,aAAA,CAAc,EAAA;AAAA,YAClB,IAAA,EAAM,MAAM,QAAA,CAAS;AAAA,WACtB,CAAA;AAAA,QACH;AAGA,QAAA,IAAI,KAAA,CAAM,SAAS,SAAA,EAAW;AAC5B,UAAA,aAAA,CAAc,QAAA,GAAW;AAAA,YACvB,GAAG,aAAA,CAAc,QAAA;AAAA,YACjB,SAAA,EAAW,MAAM,QAAA,CAAS;AAAA,WAC5B;AAAA,QACF;AAEA,QAAA,MAAA,CAAO,IAAA,CAAK;AAAA,UACV,KAAA,EAAO,MAAA;AAAA,UACP,IAAA,EAAM;AAAA,YACJ,EAAA,EAAI,OAAA;AAAA,YACJ,MAAA,EAAQ,uBAAA;AAAA,YACR,OAAA;AAAA,YACA,KAAA;AAAA,YACA,SAAS,CAAC;AAAA,cACR,KAAA,EAAO,CAAA;AAAA,cACP,KAAA,EAA
O,EAAE,UAAA,EAAY,CAAC,aAAa,CAAA,EAAE;AAAA,cACrC,aAAA,EAAe;AAAA,aAChB;AAAA;AACH,SACD,CAAA;AAAA,MACH;AAGA,MAAA,IAAI,KAAA,CAAM,SAAS,KAAA,EAAO;AACxB,QAAA,MAAM,YAAA,GAAeA,gBAAAA,CAAgB,KAAA,CAAM,YAAY,CAAA;AAGvD,QAAA,MAAM,UAAA,GAeF;AAAA,UACF,EAAA,EAAI,OAAA;AAAA,UACJ,MAAA,EAAQ,uBAAA;AAAA,UACR,OAAA;AAAA,UACA,KAAA;AAAA,UACA,SAAS,CAAC;AAAA,YACR,KAAA,EAAO,CAAA;AAAA,YACP,OAAO,EAAC;AAAA,YACR,aAAA,EAAe;AAAA,WAChB;AAAA,SACH;AAGA,QAAA,IAAI,MAAM,KAAA,EAAO;AACf,UAAA,UAAA,CAAW,KAAA,GAAQ;AAAA,YACjB,aAAA,EAAe,KAAA,CAAM,KAAA,CAAM,YAAA,IAAgB,CAAA;AAAA,YAC3C,iBAAA,EAAmB,KAAA,CAAM,KAAA,CAAM,gBAAA,IAAoB,CAAA;AAAA,YACnD,YAAA,EAAc,KAAA,CAAM,KAAA,CAAM,WAAA,IAAe;AAAA,WAC3C;AAAA,QACF;AAEA,QAAA,MAAA,CAAO,KAAK,EAAE,KAAA,EAAO,MAAA,EAAQ,IAAA,EAAM,YAAY,CAAA;AAAA,MACjD;AAGA,MAAA,IAAI,KAAA,CAAM,IAAA,KAAS,OAAA,IAAW,KAAA,CAAM,KAAA,EAAO;AACzC,QAAA,MAAA,CAAO,IAAA,CAAK;AAAA,UACV,KAAA,EAAO,MAAA;AAAA,UACP,IAAA,EAAM;AAAA,YACJ,KAAA,EAAO;AAAA,cACL,OAAA,EAAS,MAAM,KAAA,CAAM,OAAA;AAAA,cACrB,IAAA,EAAM,cAAA;AAAA,cACN,IAAA,EAAM,MAAM,KAAA,CAAM;AAAA;AACpB;AACF,SACD,CAAA;AAAA,MACH;AAEA,MAAA,OAAO,MAAA;AAAA,IACT,CAAA;AAAA,IAEA,QAAA,GAAuB;AAErB,MAAA,OAAO,CAAC,EAAE,KAAA,EAAO,MAAA,EAAQ,IAAA,EAAM,UAAU,CAAA;AAAA,IAC3C;AAAA,GACF;AACF;AAKA,SAASA,iBAAgB,MAAA,EAAyB;AAChD,EAAA,IAAI,CAAC,QAAQ,OAAO,MAAA;AAEpB,EAAA,MAAM,SAAA,GAAoC;AAAA,IACxC,IAAA,EAAM,MAAA;AAAA,IACN,MAAA,EAAQ,QAAA;AAAA,IACR,UAAA,EAAY,YAAA;AAAA,IACZ,cAAA,EAAgB,gBAAA;AAAA,IAChB,QAAA,EAAU,MAAA;AAAA,IACV,UAAA,EAAY,QAAA;AAAA,IACZ,SAAA,EAAW;AAAA;AAAA,GACb;AAEA,EAAA,OAAO,SAAA,CAAU,MAAM,CAAA,IAAK,MAAA;AAC9B;;;AC5KO,IAAM,YAAA,GAA2B;AAAA,EACtC,IAAA,EAAM,OAAA;AAAA,EACN,OAAA,EAAS,OAAA;AAAA,EACT,YAAA,EAAc;AAAA,IACZ,SAAA,EAAW,IAAA;AAAA,IACX,KAAA,EAAO,IAAA;AAAA,IACP,MAAA,EAAQ,IAAA;AAAA;AAAA,IACR,UAAA,EAAY,IAAA;AAAA,IACZ,YAAA,EAAc,IAAA;AAAA,IACd,UAAA,EAAY,IAAA;AAAA,IACZ,SAAA,EAAW,KAAA;AAAA,IACX,SAAA,EAAW,IAAA;AAAA;AAAA,IACX,QAAA,EAAU,IAAA;AAAA,IACV,QAAA,EAAU,KAAA;AAAA,IACV,IAAA,EAAM;AAAA,GACR;AAAA,EAEA,OAAA,EAAS;AAAA,IACP,YAAA,EAAc,CAAC,OAAA,KAAmC;AAChD,MAAA,OAAO,aAAa,OAAO,CAAA;AAAA,IAC7B,CAAA;AAAA,IAEA,aAAA,EAAe,CAAC,QAAA,KAAqC;AACnD,MAAA,OAAO,cAAc,QAAQ,CAAA;AAAA,IAC/B,CAAA;AAAA,IAEA,WAAA,EAAa,CAAC,KAAA,KAA6D;AACzE,MAAA,OAAO,YAAY,KAAK,CAAA;AAAA,IAC1B,CAAA;AAAA,IAEA,UAAA,EAAY,CAAC,KAAA,KAA+B;AAC1C,MAAA,OAAO,WAAW,KAAK,CAAA;AAAA,IACzB;AAAA,GACF;AAAA,EAEA,QAAA,EAAU;AAAA,IACR,YAAA,EAAc,CAAC,EAAA,KAA8B;AAC3C,MAAA,OAAO,aAAa,EAAE,CAAA;AAAA,IACxB,CAAA;AAAA,IAEA,aAAA,EAAe,CAAC,EAAA,KAA+B;AAC7C,MAAA,OAAO,cAAc,EAAE,CAAA;AAAA,IACzB,CAAA;AAAA,IAEA;AAAA,GACF;AAAA,EAEA,OAAA,GAAuB;AACrB,IAAA,OAAO;AAAA,MACL,MAAM,IAAA,CAAK,IAAA;AAAA,MACX,SAAS,IAAA,CAAK,OAAA;AAAA,MACd,cAAc,IAAA,CAAK,YAAA;AAAA,MACnB,QAAA,EAAU;AAAA,QACR,OAAA,EAAS,mCAAA;AAAA,QACT,QAAA,EAAU,sBAAA;AAAA,QACV,UAAA,EAAY;AAAA;AACd,KACF;AAAA,EACF;AACF","file":"index.js","sourcesContent":["import type {\n LLMRequestIR,\n Message,\n Tool,\n ToolChoice,\n} from '@amux.ai/llm-bridge'\n\nimport type { ZhipuRequest, ZhipuMessage, ZhipuTool } from '../types'\n\n/**\n * Parse Zhipu request to IR\n */\nexport function parseRequest(request: unknown): LLMRequestIR {\n const req = request as ZhipuRequest\n\n // Extract system message if present\n let system: string | undefined\n const messages: Message[] = []\n\n for (const msg of req.messages) {\n if (msg.role === 'system') {\n if (typeof msg.content === 'string') {\n system = system ? 
`${system}\\n${msg.content}` : msg.content\n }\n } else {\n messages.push(parseMessage(msg))\n }\n }\n\n // Parse tools\n const tools: Tool[] | undefined = req.tools?.map((tool) => parseTool(tool))\n\n // Parse tool choice\n const toolChoice: ToolChoice | undefined = req.tool_choice\n ? parseToolChoice(req.tool_choice)\n : undefined\n\n return {\n messages,\n model: req.model,\n tools,\n toolChoice,\n stream: req.stream,\n system,\n generation: {\n temperature: req.temperature,\n topP: req.top_p,\n maxTokens: req.max_tokens,\n stopSequences: req.stop\n ? Array.isArray(req.stop)\n ? req.stop\n : [req.stop]\n : undefined,\n presencePenalty: req.presence_penalty,\n frequencyPenalty: req.frequency_penalty,\n n: req.n,\n responseFormat: req.response_format\n ? { type: req.response_format.type }\n : undefined,\n },\n raw: request,\n }\n}\n\nfunction parseMessage(msg: ZhipuMessage): Message {\n return {\n role: msg.role,\n content: msg.content ?? '',\n name: msg.name,\n toolCalls: msg.tool_calls,\n toolCallId: msg.tool_call_id,\n }\n}\n\nfunction parseTool(tool: ZhipuTool): Tool {\n return {\n type: 'function',\n function: {\n name: tool.function.name,\n description: tool.function.description,\n parameters: tool.function.parameters,\n },\n }\n}\n\nfunction parseToolChoice(\n choice: 'auto' | 'none' | { type: 'function'; function: { name: string } }\n): ToolChoice {\n if (typeof choice === 'string') {\n return choice as ToolChoice\n }\n return {\n type: 'function',\n function: {\n name: choice.function.name,\n },\n }\n}\n","import type { LLMResponseIR, Choice, Role } from '@amux.ai/llm-bridge'\nimport { mapFinishReason, parseOpenAIUsage } from '@amux.ai/llm-bridge'\n\nimport type { ZhipuResponse } from '../types'\n\n/**\n * Parse Zhipu response to IR\n */\nexport function parseResponse(response: unknown): LLMResponseIR {\n const res = response as ZhipuResponse\n\n const choices: Choice[] = res.choices.map((choice) => ({\n index: choice.index,\n message: {\n role: choice.message.role as Role,\n content: choice.message.content ?? '',\n toolCalls: choice.message.tool_calls,\n },\n finishReason: mapFinishReason(choice.finish_reason),\n }))\n\n return {\n id: res.id,\n model: res.model,\n choices,\n created: res.created,\n usage: parseOpenAIUsage(res.usage),\n raw: response,\n }\n}\n","import type { LLMStreamEvent, FinishReason } from '@amux.ai/llm-bridge'\n\nimport type { ZhipuStreamChunk } from '../types'\n\n/**\n * Map Zhipu finish reason to IR finish reason\n */\nfunction mapFinishReason(reason: string): FinishReason {\n const reasonMap: Record<string, FinishReason> = {\n stop: 'stop',\n length: 'length',\n tool_calls: 'tool_calls',\n content_filter: 'content_filter',\n sensitive: 'content_filter', // Zhipu-specific\n }\n return reasonMap[reason] ?? 
'stop'\n}\n\n/**\n * Parse Zhipu stream chunk to IR stream event\n */\nexport function parseStream(\n chunk: unknown\n): LLMStreamEvent | LLMStreamEvent[] | null {\n const data = chunk as ZhipuStreamChunk\n\n if (!data.choices || data.choices.length === 0) {\n // Check for usage-only chunk\n if (data.usage) {\n return {\n type: 'end',\n id: data.id,\n model: data.model,\n usage: {\n promptTokens: data.usage.prompt_tokens,\n completionTokens: data.usage.completion_tokens,\n totalTokens: data.usage.total_tokens,\n },\n raw: chunk,\n }\n }\n return null\n }\n\n const choice = data.choices[0]\n if (!choice) return null\n\n const delta = choice.delta\n\n // Start event (first chunk with role or empty delta at index 0)\n if (choice.index === 0 && !delta.content && !delta.tool_calls && !choice.finish_reason) {\n return {\n type: 'start',\n id: data.id,\n model: data.model,\n raw: chunk,\n }\n }\n\n // Content delta\n if (delta.content) {\n return {\n type: 'content',\n id: data.id,\n model: data.model,\n content: {\n type: 'content',\n delta: delta.content,\n index: choice.index,\n },\n raw: chunk,\n }\n }\n\n // Tool call delta\n if (delta.tool_calls && delta.tool_calls.length > 0) {\n const toolCall = delta.tool_calls[0]\n if (toolCall) {\n return {\n type: 'tool_call',\n id: data.id,\n model: data.model,\n toolCall: {\n type: 'tool_call',\n id: toolCall.id,\n name: toolCall.function?.name,\n arguments: toolCall.function?.arguments,\n index: toolCall.index,\n },\n raw: chunk,\n }\n }\n }\n\n // End event\n if (choice.finish_reason) {\n return {\n type: 'end',\n id: data.id,\n model: data.model,\n finishReason: mapFinishReason(choice.finish_reason),\n usage: data.usage\n ? {\n promptTokens: data.usage.prompt_tokens,\n completionTokens: data.usage.completion_tokens,\n totalTokens: data.usage.total_tokens,\n }\n : undefined,\n raw: chunk,\n }\n }\n\n return null\n}\n","import { parseOpenAICompatibleError } from '@amux.ai/llm-bridge'\nimport type { LLMErrorIR } from '@amux.ai/llm-bridge'\n\n/**\n * Parse Zhipu error to IR\n */\nexport function parseError(error: unknown): LLMErrorIR {\n return parseOpenAICompatibleError(error)\n}\n","import type { LLMRequestIR, ContentPart } from '@amux.ai/llm-bridge'\n\nimport type { ZhipuRequest, ZhipuMessage } from '../types'\n\n/**\n * Build Zhipu request from IR\n */\nexport function buildRequest(ir: LLMRequestIR): ZhipuRequest {\n const messages: ZhipuMessage[] = []\n\n // Add system message if present\n if (ir.system) {\n messages.push({\n role: 'system',\n content: ir.system,\n })\n }\n\n // Add conversation messages\n for (const msg of ir.messages) {\n messages.push({\n role: msg.role,\n content: buildContent(msg.content),\n name: msg.name,\n tool_calls: msg.toolCalls,\n tool_call_id: msg.toolCallId,\n })\n }\n\n const request: ZhipuRequest = {\n model: ir.model ?? 
'glm-4.7',\n messages,\n stream: ir.stream,\n }\n\n // Add tools if present\n if (ir.tools && ir.tools.length > 0) {\n request.tools = ir.tools.map((tool) => ({\n type: 'function',\n function: {\n name: tool.function.name,\n description: tool.function.description,\n parameters: tool.function.parameters,\n },\n }))\n }\n\n // Add tool choice if present (Zhipu doesn't support 'required')\n if (ir.toolChoice && ir.toolChoice !== 'required') {\n request.tool_choice = ir.toolChoice as ZhipuRequest['tool_choice']\n }\n\n // Add generation parameters\n if (ir.generation) {\n if (ir.generation.temperature !== undefined) {\n request.temperature = ir.generation.temperature\n }\n if (ir.generation.topP !== undefined) {\n request.top_p = ir.generation.topP\n }\n if (ir.generation.maxTokens !== undefined) {\n request.max_tokens = ir.generation.maxTokens\n }\n if (ir.generation.stopSequences && ir.generation.stopSequences.length > 0) {\n request.stop = ir.generation.stopSequences\n }\n if (ir.generation.presencePenalty !== undefined) {\n request.presence_penalty = ir.generation.presencePenalty\n }\n if (ir.generation.frequencyPenalty !== undefined) {\n request.frequency_penalty = ir.generation.frequencyPenalty\n }\n if (ir.generation.n !== undefined) {\n request.n = ir.generation.n\n }\n if (ir.generation.responseFormat) {\n if (ir.generation.responseFormat.type === 'json_object') {\n request.response_format = { type: 'json_object' }\n }\n }\n }\n\n // Add stream options for usage in streaming\n if (ir.stream) {\n request.stream_options = { include_usage: true }\n }\n\n return request\n}\n\nfunction buildContent(content: string | ContentPart[]): string | null {\n if (typeof content === 'string') {\n return content || null\n }\n\n if (!content || content.length === 0) {\n return null\n }\n\n // Zhipu GLM-4V supports vision, but for basic adapter we concatenate text parts\n // Vision support can be added in a future version\n return content\n .filter((part) => part.type === 'text')\n .map((part) => (part.type === 'text' ? part.text : ''))\n .join('')\n}\n","import type { LLMResponseIR } from '@amux.ai/llm-bridge'\nimport { contentToString } from '@amux.ai/llm-bridge'\n\nimport type { ZhipuResponse } from '../types'\n\n/**\n * Build Zhipu response from IR\n */\nexport function buildResponse(ir: LLMResponseIR): ZhipuResponse {\n return {\n id: ir.id,\n object: 'chat.completion',\n created: ir.created ?? Math.floor(Date.now() / 1000),\n model: ir.model,\n choices: ir.choices.map((choice) => ({\n index: choice.index,\n message: {\n role: choice.message.role,\n content: contentToString(choice.message.content),\n tool_calls: choice.message.toolCalls,\n },\n finish_reason: choice.finishReason ?? 'stop',\n })),\n usage: ir.usage\n ? 
{\n prompt_tokens: ir.usage.promptTokens,\n completion_tokens: ir.usage.completionTokens,\n total_tokens: ir.usage.totalTokens,\n }\n : undefined,\n }\n}\n","import type { LLMStreamEvent, SSEEvent, StreamEventBuilder } from '@amux.ai/llm-bridge'\n\n/**\n * Zhipu stream event builder\n * Converts IR stream events to OpenAI-compatible SSE format\n *\n * Zhipu uses OpenAI-compatible SSE format:\n * - All events use \"data:\" prefix (no event type)\n * - Each chunk contains the full delta structure\n * - Stream ends with \"data: [DONE]\"\n */\nexport function createStreamBuilder(): StreamEventBuilder {\n let chunkId = `chatcmpl-${Date.now()}`\n let model = ''\n let created = Math.floor(Date.now() / 1000)\n const toolCallsState: Map<number, { id: string; name: string }> = new Map()\n\n return {\n process(event: LLMStreamEvent): SSEEvent[] {\n const events: SSEEvent[] = []\n\n // Update metadata from event\n if (event.id) chunkId = event.id\n if (event.model) model = event.model\n\n // Handle start event\n if (event.type === 'start') {\n // OpenAI doesn't have a separate start event\n // The first content chunk serves as the start\n events.push({\n event: 'data',\n data: {\n id: chunkId,\n object: 'chat.completion.chunk',\n created,\n model,\n choices: [{\n index: 0,\n delta: { role: 'assistant', content: '' },\n finish_reason: null,\n }],\n },\n })\n }\n\n // Handle content delta\n if (event.type === 'content' && event.content?.delta) {\n events.push({\n event: 'data',\n data: {\n id: chunkId,\n object: 'chat.completion.chunk',\n created,\n model,\n choices: [{\n index: 0,\n delta: { content: event.content.delta },\n finish_reason: null,\n }],\n },\n })\n }\n\n // Handle tool call\n if (event.type === 'tool_call' && event.toolCall) {\n const toolIndex = event.toolCall.index ?? 0\n const toolCallDelta: {\n index: number\n id?: string\n type?: string\n function?: { name?: string; arguments?: string }\n } = { index: toolIndex }\n\n // If this is a new tool call (has name)\n if (event.toolCall.name) {\n toolCallDelta.id = event.toolCall.id || `call_${Date.now()}_${toolIndex}`\n toolCallDelta.type = 'function'\n toolCallDelta.function = { name: event.toolCall.name }\n toolCallsState.set(toolIndex, {\n id: toolCallDelta.id,\n name: event.toolCall.name,\n })\n }\n\n // If this has arguments\n if (event.toolCall.arguments) {\n toolCallDelta.function = {\n ...toolCallDelta.function,\n arguments: event.toolCall.arguments,\n }\n }\n\n events.push({\n event: 'data',\n data: {\n id: chunkId,\n object: 'chat.completion.chunk',\n created,\n model,\n choices: [{\n index: 0,\n delta: { tool_calls: [toolCallDelta] },\n finish_reason: null,\n }],\n },\n })\n }\n\n // Handle end event\n if (event.type === 'end') {\n const finishReason = mapFinishReason(event.finishReason)\n\n // Emit final chunk with finish_reason\n const finalChunk: {\n id: string\n object: string\n created: number\n model: string\n choices: Array<{\n index: number\n delta: Record<string, never>\n finish_reason: string\n }>\n usage?: {\n prompt_tokens: number\n completion_tokens: number\n total_tokens: number\n }\n } = {\n id: chunkId,\n object: 'chat.completion.chunk',\n created,\n model,\n choices: [{\n index: 0,\n delta: {},\n finish_reason: finishReason,\n }],\n }\n\n // Include usage if available\n if (event.usage) {\n finalChunk.usage = {\n prompt_tokens: event.usage.promptTokens ?? 0,\n completion_tokens: event.usage.completionTokens ?? 0,\n total_tokens: event.usage.totalTokens ?? 
0,\n }\n }\n\n events.push({ event: 'data', data: finalChunk })\n }\n\n // Handle error event\n if (event.type === 'error' && event.error) {\n events.push({\n event: 'data',\n data: {\n error: {\n message: event.error.message,\n type: 'server_error',\n code: event.error.code,\n },\n },\n })\n }\n\n return events\n },\n\n finalize(): SSEEvent[] {\n // OpenAI streams end with [DONE]\n return [{ event: 'data', data: '[DONE]' }]\n },\n }\n}\n\n/**\n * Map IR finish reason to OpenAI finish reason\n */\nfunction mapFinishReason(reason?: string): string {\n if (!reason) return 'stop'\n\n const reasonMap: Record<string, string> = {\n stop: 'stop',\n length: 'length',\n tool_calls: 'tool_calls',\n content_filter: 'content_filter',\n end_turn: 'stop',\n max_tokens: 'length',\n sensitive: 'content_filter', // Zhipu-specific\n }\n\n return reasonMap[reason] ?? 'stop'\n}\n","import type {\n LLMAdapter,\n LLMRequestIR,\n LLMResponseIR,\n LLMStreamEvent,\n LLMErrorIR,\n AdapterInfo,\n} from '@amux.ai/llm-bridge'\n\nimport { parseRequest } from './inbound/request-parser'\nimport { parseResponse } from './inbound/response-parser'\nimport { parseStream } from './inbound/stream-parser'\nimport { parseError } from './inbound/error-parser'\nimport { buildRequest } from './outbound/request-builder'\nimport { buildResponse } from './outbound/response-builder'\nimport { createStreamBuilder } from './outbound/stream-builder'\n\n/**\n * Zhipu AI adapter implementation\n * Zhipu API is OpenAI-compatible with some Zhipu-specific features\n */\nexport const zhipuAdapter: LLMAdapter = {\n name: 'zhipu',\n version: '1.0.0',\n capabilities: {\n streaming: true,\n tools: true,\n vision: true, // GLM-4V supports vision\n multimodal: true,\n systemPrompt: true,\n toolChoice: true,\n reasoning: false,\n webSearch: true, // Zhipu supports web search\n jsonMode: true,\n logprobs: false,\n seed: false,\n },\n\n inbound: {\n parseRequest: (request: unknown): LLMRequestIR => {\n return parseRequest(request)\n },\n\n parseResponse: (response: unknown): LLMResponseIR => {\n return parseResponse(response)\n },\n\n parseStream: (chunk: unknown): LLMStreamEvent | LLMStreamEvent[] | null => {\n return parseStream(chunk)\n },\n\n parseError: (error: unknown): LLMErrorIR => {\n return parseError(error)\n },\n },\n\n outbound: {\n buildRequest: (ir: LLMRequestIR): unknown => {\n return buildRequest(ir)\n },\n\n buildResponse: (ir: LLMResponseIR): unknown => {\n return buildResponse(ir)\n },\n\n createStreamBuilder,\n },\n\n getInfo(): AdapterInfo {\n return {\n name: this.name,\n version: this.version,\n capabilities: this.capabilities,\n endpoint: {\n baseUrl: 'https://open.bigmodel.cn/api/paas',\n chatPath: '/v4/chat/completions',\n modelsPath: '/v4/models',\n },\n }\n },\n}\n"]}
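For streaming, the pieces in dist/index.js compose into a small pipeline: inbound.parseStream turns each decoded Zhipu SSE chunk into an IR event, and the builder returned by outbound.createStreamBuilder() re-emits those events as OpenAI-style chunks terminated by [DONE]. A sketch under the assumption that the hand-written chunks below stand in for JSON already parsed out of a real "data:" stream ('glm-4.7' again mirrors the builder's default model name):

import { zhipuAdapter } from '@amux.ai/adapter-zhipu';

// Hand-written stand-ins for decoded Zhipu SSE chunks (illustration only).
const zhipuChunks: unknown[] = [
  { id: 'c1', object: 'chat.completion.chunk', created: 0, model: 'glm-4.7',
    choices: [{ index: 0, delta: { role: 'assistant' } }] },
  { id: 'c1', object: 'chat.completion.chunk', created: 0, model: 'glm-4.7',
    choices: [{ index: 0, delta: { content: 'Hello' }, finish_reason: null }] },
  { id: 'c1', object: 'chat.completion.chunk', created: 0, model: 'glm-4.7',
    choices: [{ index: 0, delta: {}, finish_reason: 'stop' }],
    usage: { prompt_tokens: 5, completion_tokens: 1, total_tokens: 6 } },
];

const builder = zhipuAdapter.outbound.createStreamBuilder();

for (const chunk of zhipuChunks) {
  const parsed = zhipuAdapter.inbound.parseStream(chunk);
  if (!parsed) continue;
  // parseStream may return one event or an array; normalize to an array.
  for (const event of Array.isArray(parsed) ? parsed : [parsed]) {
    for (const sse of builder.process(event)) {
      console.log(`data: ${JSON.stringify(sse.data)}`);
    }
  }
}

// Emit the trailing "data: [DONE]" marker.
for (const sse of builder.finalize()) {
  console.log(`data: ${String(sse.data)}`);
}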