llama-cpp-capacitor 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,531 @@
1
+ 'use strict';
2
+
3
+ var core = require('@capacitor/core');
4
+
5
// TypeScript "__rest" emit helper: collects every own enumerable property of
// `source` whose key is NOT listed in `excluded` (implements object-rest
// destructuring, e.g. `const { a, ...rest } = obj`, in downleveled output).
var __rest = (undefined && undefined.__rest) || function (source, excluded) {
    var result = {};
    if (source == null)
        return result;
    // Own enumerable string keys not present in the exclusion list.
    for (var key in source) {
        if (Object.prototype.hasOwnProperty.call(source, key) && excluded.indexOf(key) < 0) {
            result[key] = source[key];
        }
    }
    // Own enumerable symbol keys (exclusion list holds strings, so symbols
    // effectively always pass the indexOf check; kept for fidelity).
    if (typeof Object.getOwnPropertySymbols === "function") {
        var symbols = Object.getOwnPropertySymbols(source);
        for (var i = 0; i < symbols.length; i += 1) {
            var sym = symbols[i];
            if (excluded.indexOf(sym) < 0 && Object.prototype.propertyIsEnumerable.call(source, sym)) {
                result[sym] = source[sym];
            }
        }
    }
    return result;
};
16
// Constants
// Marker token inserted into chat text wherever an image/audio attachment
// belongs; the native side replaces it with the actual media embedding.
const LLAMACPP_MTMD_DEFAULT_MEDIA_MARKER = '<__media__>';
// Event names used on the native -> JS bridge.
const EVENT_ON_INIT_CONTEXT_PROGRESS = '@LlamaCpp_onInitContextProgress';
const EVENT_ON_TOKEN = '@LlamaCpp_onToken';
const EVENT_ON_NATIVE_LOG = '@LlamaCpp_onNativeLog';
// Register the plugin with Capacitor to obtain the native bridge object.
const LlamaCpp = core.registerPlugin('LlamaCpp');
// Subscribers registered through addNativeLogListener().
const logListeners = [];
// Fan each native log event out to every registered listener.
LlamaCpp.addListener(EVENT_ON_NATIVE_LOG, (evt) => {
    logListeners.forEach((listener) => listener(evt.level, evt.text));
});
// Trigger unset to use the default native log callback. Every step is
// null-guarded (and the rejection swallowed) because the method may be
// missing or unimplemented on some platforms.
const toggleNativeLogFn = LlamaCpp == null ? undefined : LlamaCpp.toggleNativeLog;
const togglePromise = toggleNativeLogFn == null
    ? undefined
    : toggleNativeLogFn.call(LlamaCpp, { enabled: false });
const toggleCatch = togglePromise == null ? undefined : togglePromise.catch;
if (toggleCatch != null) {
    toggleCatch.call(togglePromise, () => { });
}
// Back-compat alias kept from the react-native (rn-llama) lineage.
const RNLLAMA_MTMD_DEFAULT_MEDIA_MARKER = LLAMACPP_MTMD_DEFAULT_MEDIA_MARKER;
34
// KV-cache quantization types accepted by the native runtime; anything else
// is dropped with a warning and falls back to f16 (see initLlama).
const validCacheTypes = [
    'f16',
    'f32',
    'bf16',
    'q8_0',
    'q4_0',
    'q4_1',
    'iq4_nl',
    'q5_0',
    'q5_1',
];
/**
 * Extract a JSON schema object from an OpenAI-style response_format value.
 * Returns the wrapped schema for type 'json_schema', the (possibly empty)
 * schema object for type 'json_object', and null when no schema-constrained
 * output was requested.
 */
const getJsonSchema = (responseFormat) => {
    if (responseFormat == null) {
        return null;
    }
    if (responseFormat.type === 'json_schema') {
        const wrapper = responseFormat.json_schema;
        return wrapper == null ? undefined : wrapper.schema;
    }
    if (responseFormat.type === 'json_object') {
        return responseFormat.schema || {};
    }
    return null;
};
55
/**
 * JS handle for a single native llama.cpp context. Instances are created by
 * initLlama(); the numeric `id` routes every method call and streaming event
 * to the matching native context.
 */
class LlamaContext {
    constructor({ contextId, gpu, reasonNoGPU, model }) {
        // Field defaults (overwritten immediately below); kept for parity with
        // the original TypeScript class-field initializers.
        this.gpu = false;
        this.reasonNoGPU = '';
        this.id = contextId;
        this.gpu = gpu;
        this.reasonNoGPU = reasonNoGPU;
        this.model = model;
        // NOTE(review): initLlama() also passes an `androidLib` property which
        // is silently dropped here — confirm whether it should be stored.
    }
    /**
     * Load cached prompt & completion state from a file.
     * @param filepath Path or file:// URI of a previously saved session
     */
    async loadSession(filepath) {
        let path = filepath;
        if (path.startsWith('file://'))
            path = path.slice(7);
        return LlamaCpp.loadSession({ contextId: this.id, filepath: path });
    }
    /**
     * Save current cached prompt & completion state to a file.
     * @param filepath Destination path (passed through unmodified)
     * @param options.tokenSize Max number of tokens to persist; -1 (default) saves all
     */
    async saveSession(filepath, options) {
        return LlamaCpp.saveSession({
            contextId: this.id,
            filepath,
            size: (options === null || options === void 0 ? void 0 : options.tokenSize) || -1
        });
    }
    // True when the model ships a llama-chat (non-Jinja) chat template.
    isLlamaChatSupported() {
        return !!this.model.chatTemplates.llamaChat;
    }
    // True when the model ships a Jinja (minja) template — either the
    // tool-use variant or the default one.
    isJinjaSupported() {
        const { minja } = this.model.chatTemplates;
        return !!(minja === null || minja === void 0 ? void 0 : minja.toolUse) || !!(minja === null || minja === void 0 ? void 0 : minja.default);
    }
    /**
     * Render chat messages into a prompt using the model's chat template.
     * Multimodal parts (image_url / input_audio) are replaced in the text by
     * the media marker token and their paths/data are collected separately.
     * @param messages OpenAI-style message array (content may be string or parts)
     * @param template Optional template string overriding the model's own
     * @param params Formatting options (jinja, tools, response_format, ...)
     * @returns A 'llama-chat' result (plain prompt string) or a 'jinja' result
     *          (prompt plus grammar/stop metadata), each tagged with
     *          has_media / media_paths
     * @throws Error when an input_audio part is missing or not wav/mp3
     */
    async getFormattedChat(messages, template, params) {
        var _a;
        const mediaPaths = [];
        const chat = messages.map((msg) => {
            if (Array.isArray(msg.content)) {
                const content = msg.content.map((part) => {
                    var _a;
                    // Handle multimodal content
                    if (part.type === 'image_url') {
                        let path = ((_a = part.image_url) === null || _a === void 0 ? void 0 : _a.url) || '';
                        if (path === null || path === void 0 ? void 0 : path.startsWith('file://'))
                            path = path.slice(7);
                        mediaPaths.push(path);
                        // Replace the image part with the marker; the native
                        // side substitutes the actual embedding.
                        return {
                            type: 'text',
                            text: RNLLAMA_MTMD_DEFAULT_MEDIA_MARKER,
                        };
                    }
                    else if (part.type === 'input_audio') {
                        const { input_audio: audio } = part;
                        if (!audio)
                            throw new Error('input_audio is required');
                        const { format } = audio;
                        if (format != 'wav' && format != 'mp3') {
                            throw new Error(`Unsupported audio format: ${format}`);
                        }
                        // Either a file path (file:// prefix stripped) or raw
                        // base64 data may be supplied.
                        if (audio.url) {
                            const path = audio.url.replace(/file:\/\//, '');
                            mediaPaths.push(path);
                        }
                        else if (audio.data) {
                            mediaPaths.push(audio.data);
                        }
                        return {
                            type: 'text',
                            text: RNLLAMA_MTMD_DEFAULT_MEDIA_MARKER,
                        };
                    }
                    // Non-media parts pass through untouched.
                    return part;
                });
                return Object.assign(Object.assign({}, msg), { content });
            }
            return msg;
        });
        const useJinja = this.isJinjaSupported() && (params === null || params === void 0 ? void 0 : params.jinja);
        let tmpl;
        if (template)
            tmpl = template; // Force replace if provided
        const jsonSchema = getJsonSchema(params === null || params === void 0 ? void 0 : params.response_format);
        // Complex values are pre-serialized to JSON strings for the bridge.
        const result = await LlamaCpp.getFormattedChat({
            contextId: this.id,
            messages: JSON.stringify(chat),
            chatTemplate: tmpl,
            params: {
                jinja: useJinja,
                json_schema: jsonSchema ? JSON.stringify(jsonSchema) : undefined,
                tools: (params === null || params === void 0 ? void 0 : params.tools) ? JSON.stringify(params.tools) : undefined,
                parallel_tool_calls: (params === null || params === void 0 ? void 0 : params.parallel_tool_calls) ? JSON.stringify(params.parallel_tool_calls)
                    : undefined,
                tool_choice: params === null || params === void 0 ? void 0 : params.tool_choice,
                // Thinking defaults to enabled unless explicitly turned off.
                enable_thinking: (_a = params === null || params === void 0 ? void 0 : params.enable_thinking) !== null && _a !== void 0 ? _a : true,
                add_generation_prompt: params === null || params === void 0 ? void 0 : params.add_generation_prompt,
                now: typeof (params === null || params === void 0 ? void 0 : params.now) === 'number' ? params.now.toString() : params === null || params === void 0 ? void 0 : params.now,
                chat_template_kwargs: (params === null || params === void 0 ? void 0 : params.chat_template_kwargs) ? JSON.stringify(Object.entries(params.chat_template_kwargs).reduce((acc, [key, value]) => {
                    acc[key] = JSON.stringify(value); // Each value is a stringified JSON object
                    return acc;
                }, {})) : undefined,
            },
        });
        if (!useJinja) {
            // llama-chat path: the native call returns the prompt string itself.
            return {
                type: 'llama-chat',
                prompt: result,
                has_media: mediaPaths.length > 0,
                media_paths: mediaPaths,
            };
        }
        // Jinja path: the native call returns an object (prompt + grammar
        // metadata); tag it in place with type and media info.
        const jinjaResult = result;
        jinjaResult.type = 'jinja';
        jinjaResult.has_media = mediaPaths.length > 0;
        jinjaResult.media_paths = mediaPaths;
        return jinjaResult;
    }
    /**
     * Generate a completion based on the provided parameters
     * @param params Completion parameters including prompt or messages
     * @param callback Optional callback for token-by-token streaming
     * @returns Promise resolving to the completion result
     *
     * Note: For multimodal support, you can include a media_paths parameter.
     * This will process the images and add them to the context before generating text.
     * Multimodal support must be enabled via initMultimodal() first.
     */
    async completion(params, callback) {
        const nativeParams = Object.assign(Object.assign({}, params), { prompt: params.prompt || '', emit_partial_completion: !!callback });
        if (params.messages) {
            // Chat mode: render messages into a prompt first.
            // (chat_template is preferred; chatTemplate kept for back-compat.)
            const formattedResult = await this.getFormattedChat(params.messages, params.chat_template || params.chatTemplate, {
                jinja: params.jinja,
                tools: params.tools,
                parallel_tool_calls: params.parallel_tool_calls,
                tool_choice: params.tool_choice,
                enable_thinking: params.enable_thinking,
                add_generation_prompt: params.add_generation_prompt,
                now: params.now,
                chat_template_kwargs: params.chat_template_kwargs,
            });
            if (formattedResult.type === 'jinja') {
                // Copy over every grammar/stop constraint the template produced.
                const jinjaResult = formattedResult;
                nativeParams.prompt = jinjaResult.prompt || '';
                if (typeof jinjaResult.chat_format === 'number')
                    nativeParams.chat_format = jinjaResult.chat_format;
                if (jinjaResult.grammar)
                    nativeParams.grammar = jinjaResult.grammar;
                if (typeof jinjaResult.grammar_lazy === 'boolean')
                    nativeParams.grammar_lazy = jinjaResult.grammar_lazy;
                if (jinjaResult.grammar_triggers)
                    nativeParams.grammar_triggers = jinjaResult.grammar_triggers;
                if (jinjaResult.preserved_tokens)
                    nativeParams.preserved_tokens = jinjaResult.preserved_tokens;
                if (jinjaResult.additional_stops) {
                    if (!nativeParams.stop)
                        nativeParams.stop = [];
                    nativeParams.stop.push(...jinjaResult.additional_stops);
                }
                if (jinjaResult.has_media) {
                    nativeParams.media_paths = jinjaResult.media_paths;
                }
            }
            else if (formattedResult.type === 'llama-chat') {
                const llamaChatResult = formattedResult;
                nativeParams.prompt = llamaChatResult.prompt || '';
                if (llamaChatResult.has_media) {
                    nativeParams.media_paths = llamaChatResult.media_paths;
                }
            }
        }
        else {
            nativeParams.prompt = params.prompt || '';
        }
        // If media_paths were explicitly provided or extracted from messages, use them
        if (!nativeParams.media_paths && params.media_paths) {
            nativeParams.media_paths = params.media_paths;
        }
        // response_format is translated to a json_schema constraint unless an
        // explicit grammar already takes precedence.
        if (nativeParams.response_format && !nativeParams.grammar) {
            const jsonSchema = getJsonSchema(params.response_format);
            if (jsonSchema)
                nativeParams.json_schema = JSON.stringify(jsonSchema);
        }
        // Streaming: subscribe before starting and filter by context id, since
        // the token event channel is shared by all contexts.
        let tokenListener = callback &&
            LlamaCpp.addListener(EVENT_ON_TOKEN, (evt) => {
                const { contextId, tokenResult } = evt;
                if (contextId !== this.id)
                    return;
                callback(tokenResult);
            });
        if (!nativeParams.prompt)
            throw new Error('Prompt is required');
        const promise = LlamaCpp.completion({ contextId: this.id, params: nativeParams });
        // Always detach the token listener, on success and on failure alike.
        return promise
            .then((completionResult) => {
            tokenListener === null || tokenListener === void 0 ? void 0 : tokenListener.remove();
            tokenListener = null;
            return completionResult;
        })
            .catch((err) => {
            tokenListener === null || tokenListener === void 0 ? void 0 : tokenListener.remove();
            tokenListener = null;
            throw err;
        });
    }
    /** Abort an in-flight completion() on the native side. */
    stopCompletion() {
        return LlamaCpp.stopCompletion({ contextId: this.id });
    }
    /**
     * Tokenize text or text with images
     * @param text Text to tokenize
     * @param params.media_paths Array of image paths to tokenize (if multimodal is enabled)
     * @returns Promise resolving to the tokenize result
     */
    tokenize(text, { media_paths: mediaPaths, } = {}) {
        return LlamaCpp.tokenize({ contextId: this.id, text, imagePaths: mediaPaths });
    }
    /** Convert a token-id array back into text. */
    detokenize(tokens) {
        return LlamaCpp.detokenize({ contextId: this.id, tokens });
    }
    /** Compute an embedding vector for the given text. */
    embedding(text, params) {
        return LlamaCpp.embedding({ contextId: this.id, text, params: params || {} });
    }
    /**
     * Rerank documents based on relevance to a query
     * @param query The query text to rank documents against
     * @param documents Array of document texts to rank
     * @param params Optional reranking parameters
     * @returns Promise resolving to an array of ranking results with scores and indices
     */
    async rerank(query, documents, params) {
        const results = await LlamaCpp.rerank({
            contextId: this.id,
            query,
            documents,
            params: params || {}
        });
        // Sort by score descending and attach the original document text.
        return results
            .map((result) => (Object.assign(Object.assign({}, result), { document: documents[result.index] })))
            .sort((a, b) => b.score - a.score);
    }
    /**
     * Run the native benchmark.
     * @param pp Prompt-processing token count
     * @param tg Text-generation token count
     * @param pl Parallel sequences
     * @param nr Number of repetitions
     * @returns Parsed benchmark stats (avg/std for pp and tg)
     */
    async bench(pp, tg, pl, nr) {
        const result = await LlamaCpp.bench({ contextId: this.id, pp, tg, pl, nr });
        // The native side returns a JSON-encoded positional array.
        const [modelDesc, modelSize, modelNParams, ppAvg, ppStd, tgAvg, tgStd] = JSON.parse(result);
        return {
            modelDesc,
            modelSize,
            modelNParams,
            ppAvg,
            ppStd,
            tgAvg,
            tgStd,
        };
    }
    /**
     * Apply LoRA adapters to the context.
     * @param loraList Array of { path, scaled } entries; file:// prefixes are stripped
     */
    async applyLoraAdapters(loraList) {
        let loraAdapters = [];
        if (loraList)
            loraAdapters = loraList.map((l) => ({
                path: l.path.replace(/file:\/\//, ''),
                scaled: l.scaled,
            }));
        return LlamaCpp.applyLoraAdapters({ contextId: this.id, loraAdapters });
    }
    /** Detach all currently applied LoRA adapters. */
    async removeLoraAdapters() {
        return LlamaCpp.removeLoraAdapters({ contextId: this.id });
    }
    /** List the LoRA adapters currently applied to this context. */
    async getLoadedLoraAdapters() {
        return LlamaCpp.getLoadedLoraAdapters({ contextId: this.id });
    }
    /**
     * Initialize multimodal support with a mmproj file
     * @param params Parameters for multimodal support
     * @param params.path Path to the multimodal projector file
     * @param params.use_gpu Whether to use GPU (defaults to true)
     * @returns Promise resolving to true if initialization was successful
     */
    async initMultimodal({ path, use_gpu: useGpu, }) {
        if (path.startsWith('file://'))
            path = path.slice(7);
        return LlamaCpp.initMultimodal({
            contextId: this.id,
            params: {
                path,
                use_gpu: useGpu !== null && useGpu !== void 0 ? useGpu : true,
            },
        });
    }
    /**
     * Check if multimodal support is enabled
     * @returns Promise resolving to true if multimodal is enabled
     */
    async isMultimodalEnabled() {
        return await LlamaCpp.isMultimodalEnabled({ contextId: this.id });
    }
    /**
     * Check multimodal support
     * @returns Promise resolving to an object with vision and audio support
     */
    async getMultimodalSupport() {
        return await LlamaCpp.getMultimodalSupport({ contextId: this.id });
    }
    /**
     * Release multimodal support
     * @returns Promise resolving to void
     */
    async releaseMultimodal() {
        return await LlamaCpp.releaseMultimodal({ contextId: this.id });
    }
    /**
     * Initialize TTS support with a vocoder model
     * @param params Parameters for TTS support
     * @param params.path Path to the vocoder model
     * @param params.n_batch Batch size for the vocoder model
     * @returns Promise resolving to true if initialization was successful
     */
    async initVocoder({ path, n_batch: nBatch }) {
        if (path.startsWith('file://'))
            path = path.slice(7);
        return await LlamaCpp.initVocoder({
            contextId: this.id,
            params: { path, n_batch: nBatch }
        });
    }
    /**
     * Check if TTS support is enabled
     * @returns Promise resolving to true if TTS is enabled
     */
    async isVocoderEnabled() {
        return await LlamaCpp.isVocoderEnabled({ contextId: this.id });
    }
    /**
     * Get a formatted audio completion prompt
     * @param speaker Speaker description object (serialized to JSON for the
     *                native side; empty string when omitted)
     * @param textToSpeak Text to speak
     * @returns Promise resolving to the formatted audio completion result with prompt and grammar
     */
    async getFormattedAudioCompletion(speaker, textToSpeak) {
        return await LlamaCpp.getFormattedAudioCompletion({
            contextId: this.id,
            speakerJsonStr: speaker ? JSON.stringify(speaker) : '',
            textToSpeak,
        });
    }
    /**
     * Get guide tokens for audio completion
     * @param textToSpeak Text to speak
     * @returns Promise resolving to the guide tokens
     */
    async getAudioCompletionGuideTokens(textToSpeak) {
        return await LlamaCpp.getAudioCompletionGuideTokens({ contextId: this.id, textToSpeak });
    }
    /**
     * Decode audio tokens
     * @param tokens Array of audio tokens
     * @returns Promise resolving to the decoded audio tokens
     */
    async decodeAudioTokens(tokens) {
        return await LlamaCpp.decodeAudioTokens({ contextId: this.id, tokens });
    }
    /**
     * Release TTS support
     * @returns Promise resolving to void
     */
    async releaseVocoder() {
        return await LlamaCpp.releaseVocoder({ contextId: this.id });
    }
    /** Release this context's native resources. The handle is unusable afterwards. */
    async release() {
        return LlamaCpp.releaseContext({ contextId: this.id });
    }
}
426
/**
 * Enable or disable forwarding of native llama.cpp log lines to JS listeners.
 * @param enabled true to emit native log events; false to restore the
 *                default native log callback
 * @returns Promise resolving once the native side has applied the setting
 */
async function toggleNativeLog(enabled) {
    return LlamaCpp.toggleNativeLog({ enabled });
}
429
/**
 * Register a listener for native log events.
 * @param listener Called with (level, text) for every native log line
 * @returns Handle whose remove() unregisters the listener; safe to call
 *          more than once
 */
function addNativeLogListener(listener) {
    logListeners.push(listener);
    return {
        remove: () => {
            // Guard against a stale handle: the previous unconditional
            // splice(indexOf(listener), 1) turned a second remove() call
            // (indexOf === -1) into splice(-1, 1), deleting the LAST
            // registered listener instead of being a no-op.
            const index = logListeners.indexOf(listener);
            if (index !== -1) {
                logListeners.splice(index, 1);
            }
        },
    };
}
437
/**
 * Limit how many llama contexts may be active at once on the native side.
 * @param limit Maximum number of concurrently initialized contexts
 * @returns Promise resolving once the limit has been applied
 */
async function setContextLimit(limit) {
    return LlamaCpp.setContextLimit({ limit });
}
440
// Monotonic component of generated context ids (incremented per initLlama call).
let contextIdCounter = 0;
// Random component of context ids; pinned to 0 under NODE_ENV=test so that
// generated ids are reproducible in unit tests.
const contextIdRandom = () => {
    if (process.env.NODE_ENV === 'test') {
        return 0;
    }
    return Math.floor(Math.random() * 100000);
};
// GGUF metadata keys skipped by loadLlamaModelInfo because their values are
// far too large to ship across the bridge.
const modelInfoSkip = [
    // Large fields
    'tokenizer.ggml.tokens',
    'tokenizer.ggml.token_type',
    'tokenizer.ggml.merges',
    'tokenizer.ggml.scores',
];
449
/**
 * Read model metadata (GGUF key/value pairs) without creating a full context.
 * Oversized tokenizer fields are skipped (see modelInfoSkip).
 * @param model Path or file:// URI of the model file
 * @returns Promise resolving to the model info object from the native side
 */
async function loadLlamaModelInfo(model) {
    const path = model.startsWith('file://') ? model.slice(7) : model;
    return LlamaCpp.modelInfo({ path, skip: modelInfoSkip });
}
455
// Maps pooling_type option strings to the native enum values used by
// initContext; an unknown/omitted pooling_type yields undefined here and is
// treated as unspecified by the native side.
const poolTypeMap = {
    // -1 is unspecified as undefined
    none: 0,
    mean: 1,
    cls: 2,
    last: 3,
    rank: 4,
};
463
/**
 * Initialize a llama.cpp context from a model file.
 * @param _a Init params: model (path or file:// URI), is_model_asset,
 *           pooling_type, lora, lora_list, plus any native context options
 *           (n_ctx, cache_type_k/v, ...) forwarded via `rest`
 * @param onProgress Optional callback receiving load progress (0-100)
 * @returns Promise resolving to a LlamaContext handle
 */
async function initLlama(_a, onProgress) {
    var { model, is_model_asset: isModelAsset, pooling_type: poolingType, lora, lora_list: loraList } = _a, rest = __rest(_a, ["model", "is_model_asset", "pooling_type", "lora", "lora_list"]);
    // Normalize file:// URIs to plain filesystem paths.
    let path = model;
    if (path.startsWith('file://'))
        path = path.slice(7);
    let loraPath = lora;
    if (loraPath === null || loraPath === void 0 ? void 0 : loraPath.startsWith('file://'))
        loraPath = loraPath.slice(7);
    let loraAdapters = [];
    if (loraList)
        loraAdapters = loraList.map((l) => ({
            path: l.path.replace(/file:\/\//, ''),
            scaled: l.scaled,
        }));
    // Context id = counter + random component (random pinned to 0 in tests).
    const contextId = contextIdCounter + contextIdRandom();
    contextIdCounter += 1;
    // Progress events are shared across contexts, so filter by contextId.
    let removeProgressListener = null;
    if (onProgress) {
        removeProgressListener = LlamaCpp.addListener(EVENT_ON_INIT_CONTEXT_PROGRESS, (evt) => {
            if (evt.contextId !== contextId)
                return;
            onProgress(evt.progress);
        });
    }
    // Unknown pooling_type maps to undefined (native treats as unspecified).
    const poolType = poolTypeMap[poolingType];
    // Drop invalid KV-cache types so the native side falls back to f16.
    if (rest.cache_type_k && !validCacheTypes.includes(rest.cache_type_k)) {
        console.warn(`[LlamaCpp] initLlama: Invalid cache K type: ${rest.cache_type_k}, falling back to f16`);
        delete rest.cache_type_k;
    }
    if (rest.cache_type_v && !validCacheTypes.includes(rest.cache_type_v)) {
        console.warn(`[LlamaCpp] initLlama: Invalid cache V type: ${rest.cache_type_v}, falling back to f16`);
        delete rest.cache_type_v;
    }
    const { gpu, reasonNoGPU, model: modelDetails, androidLib, } = await LlamaCpp.initContext({
        contextId,
        params: Object.assign({ model: path, is_model_asset: !!isModelAsset, use_progress_callback: !!onProgress, pooling_type: poolType, lora: loraPath, lora_list: loraAdapters }, rest),
    }).catch((err) => {
        // Detach the progress listener even when init fails.
        removeProgressListener === null || removeProgressListener === void 0 ? void 0 : removeProgressListener.remove();
        throw err;
    });
    removeProgressListener === null || removeProgressListener === void 0 ? void 0 : removeProgressListener.remove();
    // NOTE(review): androidLib is forwarded here but the LlamaContext
    // constructor only destructures contextId/gpu/reasonNoGPU/model, so it is
    // currently discarded — confirm intent.
    return new LlamaContext({
        contextId,
        gpu,
        reasonNoGPU,
        model: modelDetails,
        androidLib,
    });
}
512
/**
 * Release every native llama context created through this module.
 * @returns Promise resolving once all contexts are freed
 */
async function releaseAllLlama() {
    return LlamaCpp.releaseAllContexts();
}
515
// Build metadata exposed to consumers; static placeholders for the
// Capacitor port rather than values stamped in at build time.
const BuildInfo = {
    number: '1.0.0',
    commit: 'capacitor-llama-cpp',
};
519
+
520
// Public CommonJS surface of the plugin bundle.
exports.BuildInfo = BuildInfo;
exports.LLAMACPP_MTMD_DEFAULT_MEDIA_MARKER = LLAMACPP_MTMD_DEFAULT_MEDIA_MARKER;
exports.LlamaContext = LlamaContext;
exports.LlamaCpp = LlamaCpp;
// Back-compat alias from the react-native (rn-llama) lineage.
exports.RNLLAMA_MTMD_DEFAULT_MEDIA_MARKER = RNLLAMA_MTMD_DEFAULT_MEDIA_MARKER;
exports.addNativeLogListener = addNativeLogListener;
exports.initLlama = initLlama;
exports.loadLlamaModelInfo = loadLlamaModelInfo;
exports.releaseAllLlama = releaseAllLlama;
exports.setContextLimit = setContextLimit;
exports.toggleNativeLog = toggleNativeLog;
//# sourceMappingURL=plugin.cjs.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"plugin.cjs.js","sources":["esm/index.js"],"sourcesContent":["var __rest = (this && this.__rest) || function (s, e) {\r\n var t = {};\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\r\n t[p] = s[p];\r\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\r\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\r\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\r\n t[p[i]] = s[p[i]];\r\n }\r\n return t;\r\n};\r\nvar _a, _b, _c;\r\nimport { registerPlugin } from '@capacitor/core';\r\n// Constants\r\nexport const LLAMACPP_MTMD_DEFAULT_MEDIA_MARKER = '<__media__>';\r\n// Event names\r\nconst EVENT_ON_INIT_CONTEXT_PROGRESS = '@LlamaCpp_onInitContextProgress';\r\nconst EVENT_ON_TOKEN = '@LlamaCpp_onToken';\r\nconst EVENT_ON_NATIVE_LOG = '@LlamaCpp_onNativeLog';\r\n// Register the plugin\r\nconst LlamaCpp = registerPlugin('LlamaCpp');\r\n// Log listeners management\r\nconst logListeners = [];\r\n// Set up native log listener\r\nLlamaCpp.addListener(EVENT_ON_NATIVE_LOG, (evt) => {\r\n logListeners.forEach((listener) => listener(evt.level, evt.text));\r\n});\r\n// Trigger unset to use default log callback\r\n(_c = (_b = (_a = LlamaCpp === null || LlamaCpp === void 0 ? void 0 : LlamaCpp.toggleNativeLog) === null || _a === void 0 ? void 0 : _a.call(LlamaCpp, { enabled: false })) === null || _b === void 0 ? void 0 : _b.catch) === null || _c === void 0 ? void 0 : _c.call(_b, () => { });\r\nexport const RNLLAMA_MTMD_DEFAULT_MEDIA_MARKER = LLAMACPP_MTMD_DEFAULT_MEDIA_MARKER;\r\nconst validCacheTypes = [\r\n 'f16',\r\n 'f32',\r\n 'bf16',\r\n 'q8_0',\r\n 'q4_0',\r\n 'q4_1',\r\n 'iq4_nl',\r\n 'q5_0',\r\n 'q5_1',\r\n];\r\nconst getJsonSchema = (responseFormat) => {\r\n var _a;\r\n if ((responseFormat === null || responseFormat === void 0 ? 
void 0 : responseFormat.type) === 'json_schema') {\r\n return (_a = responseFormat.json_schema) === null || _a === void 0 ? void 0 : _a.schema;\r\n }\r\n if ((responseFormat === null || responseFormat === void 0 ? void 0 : responseFormat.type) === 'json_object') {\r\n return responseFormat.schema || {};\r\n }\r\n return null;\r\n};\r\nexport class LlamaContext {\r\n constructor({ contextId, gpu, reasonNoGPU, model }) {\r\n this.gpu = false;\r\n this.reasonNoGPU = '';\r\n this.id = contextId;\r\n this.gpu = gpu;\r\n this.reasonNoGPU = reasonNoGPU;\r\n this.model = model;\r\n }\r\n /**\r\n * Load cached prompt & completion state from a file.\r\n */\r\n async loadSession(filepath) {\r\n let path = filepath;\r\n if (path.startsWith('file://'))\r\n path = path.slice(7);\r\n return LlamaCpp.loadSession({ contextId: this.id, filepath: path });\r\n }\r\n /**\r\n * Save current cached prompt & completion state to a file.\r\n */\r\n async saveSession(filepath, options) {\r\n return LlamaCpp.saveSession({\r\n contextId: this.id,\r\n filepath,\r\n size: (options === null || options === void 0 ? void 0 : options.tokenSize) || -1\r\n });\r\n }\r\n isLlamaChatSupported() {\r\n return !!this.model.chatTemplates.llamaChat;\r\n }\r\n isJinjaSupported() {\r\n const { minja } = this.model.chatTemplates;\r\n return !!(minja === null || minja === void 0 ? void 0 : minja.toolUse) || !!(minja === null || minja === void 0 ? void 0 : minja.default);\r\n }\r\n async getFormattedChat(messages, template, params) {\r\n var _a;\r\n const mediaPaths = [];\r\n const chat = messages.map((msg) => {\r\n if (Array.isArray(msg.content)) {\r\n const content = msg.content.map((part) => {\r\n var _a;\r\n // Handle multimodal content\r\n if (part.type === 'image_url') {\r\n let path = ((_a = part.image_url) === null || _a === void 0 ? void 0 : _a.url) || '';\r\n if (path === null || path === void 0 ? 
void 0 : path.startsWith('file://'))\r\n path = path.slice(7);\r\n mediaPaths.push(path);\r\n return {\r\n type: 'text',\r\n text: RNLLAMA_MTMD_DEFAULT_MEDIA_MARKER,\r\n };\r\n }\r\n else if (part.type === 'input_audio') {\r\n const { input_audio: audio } = part;\r\n if (!audio)\r\n throw new Error('input_audio is required');\r\n const { format } = audio;\r\n if (format != 'wav' && format != 'mp3') {\r\n throw new Error(`Unsupported audio format: ${format}`);\r\n }\r\n if (audio.url) {\r\n const path = audio.url.replace(/file:\\/\\//, '');\r\n mediaPaths.push(path);\r\n }\r\n else if (audio.data) {\r\n mediaPaths.push(audio.data);\r\n }\r\n return {\r\n type: 'text',\r\n text: RNLLAMA_MTMD_DEFAULT_MEDIA_MARKER,\r\n };\r\n }\r\n return part;\r\n });\r\n return Object.assign(Object.assign({}, msg), { content });\r\n }\r\n return msg;\r\n });\r\n const useJinja = this.isJinjaSupported() && (params === null || params === void 0 ? void 0 : params.jinja);\r\n let tmpl;\r\n if (template)\r\n tmpl = template; // Force replace if provided\r\n const jsonSchema = getJsonSchema(params === null || params === void 0 ? void 0 : params.response_format);\r\n const result = await LlamaCpp.getFormattedChat({\r\n contextId: this.id,\r\n messages: JSON.stringify(chat),\r\n chatTemplate: tmpl,\r\n params: {\r\n jinja: useJinja,\r\n json_schema: jsonSchema ? JSON.stringify(jsonSchema) : undefined,\r\n tools: (params === null || params === void 0 ? void 0 : params.tools) ? JSON.stringify(params.tools) : undefined,\r\n parallel_tool_calls: (params === null || params === void 0 ? void 0 : params.parallel_tool_calls) ? JSON.stringify(params.parallel_tool_calls)\r\n : undefined,\r\n tool_choice: params === null || params === void 0 ? void 0 : params.tool_choice,\r\n enable_thinking: (_a = params === null || params === void 0 ? void 0 : params.enable_thinking) !== null && _a !== void 0 ? _a : true,\r\n add_generation_prompt: params === null || params === void 0 ? 
void 0 : params.add_generation_prompt,\r\n now: typeof (params === null || params === void 0 ? void 0 : params.now) === 'number' ? params.now.toString() : params === null || params === void 0 ? void 0 : params.now,\r\n chat_template_kwargs: (params === null || params === void 0 ? void 0 : params.chat_template_kwargs) ? JSON.stringify(Object.entries(params.chat_template_kwargs).reduce((acc, [key, value]) => {\r\n acc[key] = JSON.stringify(value); // Each value is a stringified JSON object\r\n return acc;\r\n }, {})) : undefined,\r\n },\r\n });\r\n if (!useJinja) {\r\n return {\r\n type: 'llama-chat',\r\n prompt: result,\r\n has_media: mediaPaths.length > 0,\r\n media_paths: mediaPaths,\r\n };\r\n }\r\n const jinjaResult = result;\r\n jinjaResult.type = 'jinja';\r\n jinjaResult.has_media = mediaPaths.length > 0;\r\n jinjaResult.media_paths = mediaPaths;\r\n return jinjaResult;\r\n }\r\n /**\r\n * Generate a completion based on the provided parameters\r\n * @param params Completion parameters including prompt or messages\r\n * @param callback Optional callback for token-by-token streaming\r\n * @returns Promise resolving to the completion result\r\n *\r\n * Note: For multimodal support, you can include an media_paths parameter.\r\n * This will process the images and add them to the context before generating text.\r\n * Multimodal support must be enabled via initMultimodal() first.\r\n */\r\n async completion(params, callback) {\r\n const nativeParams = Object.assign(Object.assign({}, params), { prompt: params.prompt || '', emit_partial_completion: !!callback });\r\n if (params.messages) {\r\n const formattedResult = await this.getFormattedChat(params.messages, params.chat_template || params.chatTemplate, {\r\n jinja: params.jinja,\r\n tools: params.tools,\r\n parallel_tool_calls: params.parallel_tool_calls,\r\n tool_choice: params.tool_choice,\r\n enable_thinking: params.enable_thinking,\r\n add_generation_prompt: params.add_generation_prompt,\r\n now: params.now,\r\n 
chat_template_kwargs: params.chat_template_kwargs,\r\n });\r\n if (formattedResult.type === 'jinja') {\r\n const jinjaResult = formattedResult;\r\n nativeParams.prompt = jinjaResult.prompt || '';\r\n if (typeof jinjaResult.chat_format === 'number')\r\n nativeParams.chat_format = jinjaResult.chat_format;\r\n if (jinjaResult.grammar)\r\n nativeParams.grammar = jinjaResult.grammar;\r\n if (typeof jinjaResult.grammar_lazy === 'boolean')\r\n nativeParams.grammar_lazy = jinjaResult.grammar_lazy;\r\n if (jinjaResult.grammar_triggers)\r\n nativeParams.grammar_triggers = jinjaResult.grammar_triggers;\r\n if (jinjaResult.preserved_tokens)\r\n nativeParams.preserved_tokens = jinjaResult.preserved_tokens;\r\n if (jinjaResult.additional_stops) {\r\n if (!nativeParams.stop)\r\n nativeParams.stop = [];\r\n nativeParams.stop.push(...jinjaResult.additional_stops);\r\n }\r\n if (jinjaResult.has_media) {\r\n nativeParams.media_paths = jinjaResult.media_paths;\r\n }\r\n }\r\n else if (formattedResult.type === 'llama-chat') {\r\n const llamaChatResult = formattedResult;\r\n nativeParams.prompt = llamaChatResult.prompt || '';\r\n if (llamaChatResult.has_media) {\r\n nativeParams.media_paths = llamaChatResult.media_paths;\r\n }\r\n }\r\n }\r\n else {\r\n nativeParams.prompt = params.prompt || '';\r\n }\r\n // If media_paths were explicitly provided or extracted from messages, use them\r\n if (!nativeParams.media_paths && params.media_paths) {\r\n nativeParams.media_paths = params.media_paths;\r\n }\r\n if (nativeParams.response_format && !nativeParams.grammar) {\r\n const jsonSchema = getJsonSchema(params.response_format);\r\n if (jsonSchema)\r\n nativeParams.json_schema = JSON.stringify(jsonSchema);\r\n }\r\n let tokenListener = callback &&\r\n LlamaCpp.addListener(EVENT_ON_TOKEN, (evt) => {\r\n const { contextId, tokenResult } = evt;\r\n if (contextId !== this.id)\r\n return;\r\n callback(tokenResult);\r\n });\r\n if (!nativeParams.prompt)\r\n throw new Error('Prompt is required');\r\n 
const promise = LlamaCpp.completion({ contextId: this.id, params: nativeParams });\r\n return promise\r\n .then((completionResult) => {\r\n tokenListener === null || tokenListener === void 0 ? void 0 : tokenListener.remove();\r\n tokenListener = null;\r\n return completionResult;\r\n })\r\n .catch((err) => {\r\n tokenListener === null || tokenListener === void 0 ? void 0 : tokenListener.remove();\r\n tokenListener = null;\r\n throw err;\r\n });\r\n }\r\n stopCompletion() {\r\n return LlamaCpp.stopCompletion({ contextId: this.id });\r\n }\r\n /**\r\n * Tokenize text or text with images\r\n * @param text Text to tokenize\r\n * @param params.media_paths Array of image paths to tokenize (if multimodal is enabled)\r\n * @returns Promise resolving to the tokenize result\r\n */\r\n tokenize(text, { media_paths: mediaPaths, } = {}) {\r\n return LlamaCpp.tokenize({ contextId: this.id, text, imagePaths: mediaPaths });\r\n }\r\n detokenize(tokens) {\r\n return LlamaCpp.detokenize({ contextId: this.id, tokens });\r\n }\r\n embedding(text, params) {\r\n return LlamaCpp.embedding({ contextId: this.id, text, params: params || {} });\r\n }\r\n /**\r\n * Rerank documents based on relevance to a query\r\n * @param query The query text to rank documents against\r\n * @param documents Array of document texts to rank\r\n * @param params Optional reranking parameters\r\n * @returns Promise resolving to an array of ranking results with scores and indices\r\n */\r\n async rerank(query, documents, params) {\r\n const results = await LlamaCpp.rerank({\r\n contextId: this.id,\r\n query,\r\n documents,\r\n params: params || {}\r\n });\r\n // Sort by score descending and add document text if requested\r\n return results\r\n .map((result) => (Object.assign(Object.assign({}, result), { document: documents[result.index] })))\r\n .sort((a, b) => b.score - a.score);\r\n }\r\n async bench(pp, tg, pl, nr) {\r\n const result = await LlamaCpp.bench({ contextId: this.id, pp, tg, pl, nr });\r\n const 
[modelDesc, modelSize, modelNParams, ppAvg, ppStd, tgAvg, tgStd] = JSON.parse(result);\r\n return {\r\n modelDesc,\r\n modelSize,\r\n modelNParams,\r\n ppAvg,\r\n ppStd,\r\n tgAvg,\r\n tgStd,\r\n };\r\n }\r\n async applyLoraAdapters(loraList) {\r\n let loraAdapters = [];\r\n if (loraList)\r\n loraAdapters = loraList.map((l) => ({\r\n path: l.path.replace(/file:\\/\\//, ''),\r\n scaled: l.scaled,\r\n }));\r\n return LlamaCpp.applyLoraAdapters({ contextId: this.id, loraAdapters });\r\n }\r\n async removeLoraAdapters() {\r\n return LlamaCpp.removeLoraAdapters({ contextId: this.id });\r\n }\r\n async getLoadedLoraAdapters() {\r\n return LlamaCpp.getLoadedLoraAdapters({ contextId: this.id });\r\n }\r\n /**\r\n * Initialize multimodal support with a mmproj file\r\n * @param params Parameters for multimodal support\r\n * @param params.path Path to the multimodal projector file\r\n * @param params.use_gpu Whether to use GPU\r\n * @returns Promise resolving to true if initialization was successful\r\n */\r\n async initMultimodal({ path, use_gpu: useGpu, }) {\r\n if (path.startsWith('file://'))\r\n path = path.slice(7);\r\n return LlamaCpp.initMultimodal({\r\n contextId: this.id,\r\n params: {\r\n path,\r\n use_gpu: useGpu !== null && useGpu !== void 0 ? 
useGpu : true,\r\n },\r\n });\r\n }\r\n /**\r\n * Check if multimodal support is enabled\r\n * @returns Promise resolving to true if multimodal is enabled\r\n */\r\n async isMultimodalEnabled() {\r\n return await LlamaCpp.isMultimodalEnabled({ contextId: this.id });\r\n }\r\n /**\r\n * Check multimodal support\r\n * @returns Promise resolving to an object with vision and audio support\r\n */\r\n async getMultimodalSupport() {\r\n return await LlamaCpp.getMultimodalSupport({ contextId: this.id });\r\n }\r\n /**\r\n * Release multimodal support\r\n * @returns Promise resolving to void\r\n */\r\n async releaseMultimodal() {\r\n return await LlamaCpp.releaseMultimodal({ contextId: this.id });\r\n }\r\n /**\r\n * Initialize TTS support with a vocoder model\r\n * @param params Parameters for TTS support\r\n * @param params.path Path to the vocoder model\r\n * @param params.n_batch Batch size for the vocoder model\r\n * @returns Promise resolving to true if initialization was successful\r\n */\r\n async initVocoder({ path, n_batch: nBatch }) {\r\n if (path.startsWith('file://'))\r\n path = path.slice(7);\r\n return await LlamaCpp.initVocoder({\r\n contextId: this.id,\r\n params: { path, n_batch: nBatch }\r\n });\r\n }\r\n /**\r\n * Check if TTS support is enabled\r\n * @returns Promise resolving to true if TTS is enabled\r\n */\r\n async isVocoderEnabled() {\r\n return await LlamaCpp.isVocoderEnabled({ contextId: this.id });\r\n }\r\n /**\r\n * Get a formatted audio completion prompt\r\n * @param speakerJsonStr JSON string representing the speaker\r\n * @param textToSpeak Text to speak\r\n * @returns Promise resolving to the formatted audio completion result with prompt and grammar\r\n */\r\n async getFormattedAudioCompletion(speaker, textToSpeak) {\r\n return await LlamaCpp.getFormattedAudioCompletion({\r\n contextId: this.id,\r\n speakerJsonStr: speaker ? 
JSON.stringify(speaker) : '',\r\n textToSpeak,\r\n });\r\n }\r\n /**\r\n * Get guide tokens for audio completion\r\n * @param textToSpeak Text to speak\r\n * @returns Promise resolving to the guide tokens\r\n */\r\n async getAudioCompletionGuideTokens(textToSpeak) {\r\n return await LlamaCpp.getAudioCompletionGuideTokens({ contextId: this.id, textToSpeak });\r\n }\r\n /**\r\n * Decode audio tokens\r\n * @param tokens Array of audio tokens\r\n * @returns Promise resolving to the decoded audio tokens\r\n */\r\n async decodeAudioTokens(tokens) {\r\n return await LlamaCpp.decodeAudioTokens({ contextId: this.id, tokens });\r\n }\r\n /**\r\n * Release TTS support\r\n * @returns Promise resolving to void\r\n */\r\n async releaseVocoder() {\r\n return await LlamaCpp.releaseVocoder({ contextId: this.id });\r\n }\r\n async release() {\r\n return LlamaCpp.releaseContext({ contextId: this.id });\r\n }\r\n}\r\nexport async function toggleNativeLog(enabled) {\r\n return LlamaCpp.toggleNativeLog({ enabled });\r\n}\r\nexport function addNativeLogListener(listener) {\r\n logListeners.push(listener);\r\n return {\r\n remove: () => {\r\n logListeners.splice(logListeners.indexOf(listener), 1);\r\n },\r\n };\r\n}\r\nexport async function setContextLimit(limit) {\r\n return LlamaCpp.setContextLimit({ limit });\r\n}\r\nlet contextIdCounter = 0;\r\nconst contextIdRandom = () => process.env.NODE_ENV === 'test' ? 
0 : Math.floor(Math.random() * 100000);\r\nconst modelInfoSkip = [\r\n // Large fields\r\n 'tokenizer.ggml.tokens',\r\n 'tokenizer.ggml.token_type',\r\n 'tokenizer.ggml.merges',\r\n 'tokenizer.ggml.scores',\r\n];\r\nexport async function loadLlamaModelInfo(model) {\r\n let path = model;\r\n if (path.startsWith('file://'))\r\n path = path.slice(7);\r\n return LlamaCpp.modelInfo({ path, skip: modelInfoSkip });\r\n}\r\nconst poolTypeMap = {\r\n // -1 is unspecified as undefined\r\n none: 0,\r\n mean: 1,\r\n cls: 2,\r\n last: 3,\r\n rank: 4,\r\n};\r\nexport async function initLlama(_a, onProgress) {\r\n var { model, is_model_asset: isModelAsset, pooling_type: poolingType, lora, lora_list: loraList } = _a, rest = __rest(_a, [\"model\", \"is_model_asset\", \"pooling_type\", \"lora\", \"lora_list\"]);\r\n let path = model;\r\n if (path.startsWith('file://'))\r\n path = path.slice(7);\r\n let loraPath = lora;\r\n if (loraPath === null || loraPath === void 0 ? void 0 : loraPath.startsWith('file://'))\r\n loraPath = loraPath.slice(7);\r\n let loraAdapters = [];\r\n if (loraList)\r\n loraAdapters = loraList.map((l) => ({\r\n path: l.path.replace(/file:\\/\\//, ''),\r\n scaled: l.scaled,\r\n }));\r\n const contextId = contextIdCounter + contextIdRandom();\r\n contextIdCounter += 1;\r\n let removeProgressListener = null;\r\n if (onProgress) {\r\n removeProgressListener = LlamaCpp.addListener(EVENT_ON_INIT_CONTEXT_PROGRESS, (evt) => {\r\n if (evt.contextId !== contextId)\r\n return;\r\n onProgress(evt.progress);\r\n });\r\n }\r\n const poolType = poolTypeMap[poolingType];\r\n if (rest.cache_type_k && !validCacheTypes.includes(rest.cache_type_k)) {\r\n console.warn(`[LlamaCpp] initLlama: Invalid cache K type: ${rest.cache_type_k}, falling back to f16`);\r\n delete rest.cache_type_k;\r\n }\r\n if (rest.cache_type_v && !validCacheTypes.includes(rest.cache_type_v)) {\r\n console.warn(`[LlamaCpp] initLlama: Invalid cache V type: ${rest.cache_type_v}, falling back to f16`);\r\n delete 
rest.cache_type_v;\r\n }\r\n const { gpu, reasonNoGPU, model: modelDetails, androidLib, } = await LlamaCpp.initContext({\r\n contextId,\r\n params: Object.assign({ model: path, is_model_asset: !!isModelAsset, use_progress_callback: !!onProgress, pooling_type: poolType, lora: loraPath, lora_list: loraAdapters }, rest),\r\n }).catch((err) => {\r\n removeProgressListener === null || removeProgressListener === void 0 ? void 0 : removeProgressListener.remove();\r\n throw err;\r\n });\r\n removeProgressListener === null || removeProgressListener === void 0 ? void 0 : removeProgressListener.remove();\r\n return new LlamaContext({\r\n contextId,\r\n gpu,\r\n reasonNoGPU,\r\n model: modelDetails,\r\n androidLib,\r\n });\r\n}\r\nexport async function releaseAllLlama() {\r\n return LlamaCpp.releaseAllContexts();\r\n}\r\nexport const BuildInfo = {\r\n number: '1.0.0',\r\n commit: 'capacitor-llama-cpp',\r\n};\r\n// Re-export the plugin for direct access\r\nexport { LlamaCpp };\r\n//# sourceMappingURL=index.js.map"],"names":["this","registerPlugin"],"mappings":";;;;AAAA,IAAI,MAAM,GAAG,CAACA,SAAI,IAAIA,SAAI,CAAC,MAAM,KAAK,UAAU,CAAC,EAAE,CAAC,EAAE;AACtD,IAAI,IAAI,CAAC,GAAG,EAAE,CAAC;AACf,IAAI,KAAK,IAAI,CAAC,IAAI,CAAC,EAAE,IAAI,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC;AACvF,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACpB,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,OAAO,MAAM,CAAC,qBAAqB,KAAK,UAAU;AACvE,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,qBAAqB,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;AAChF,YAAY,IAAI,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,MAAM,CAAC,SAAS,CAAC,oBAAoB,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1F,gBAAgB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAClC,QAAQ,CAAC;AACT,IAAI,OAAO,CAAC,CAAC;AACb,CAAC,CAAC;AACF,IAAI,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC;AAEf;AACY,MAAC,kCAAkC,GAAG,cAAc;AAChE;AACA,MAAM,8BAA8B,GAAG,
iCAAiC,CAAC;AACzE,MAAM,cAAc,GAAG,mBAAmB,CAAC;AAC3C,MAAM,mBAAmB,GAAG,uBAAuB,CAAC;AACpD;AACK,MAAC,QAAQ,GAAGC,mBAAc,CAAC,UAAU,EAAE;AAC5C;AACA,MAAM,YAAY,GAAG,EAAE,CAAC;AACxB;AACA,QAAQ,CAAC,WAAW,CAAC,mBAAmB,EAAE,CAAC,GAAG,KAAK;AACnD,IAAI,YAAY,CAAC,OAAO,CAAC,CAAC,QAAQ,KAAK,QAAQ,CAAC,GAAG,CAAC,KAAK,EAAE,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC;AACtE,CAAC,CAAC,CAAC;AACH;AACA,CAAC,EAAE,GAAG,CAAC,EAAE,GAAG,CAAC,EAAE,GAAG,QAAQ,KAAK,IAAI,IAAI,QAAQ,KAAK,MAAM,GAAG,MAAM,GAAG,QAAQ,CAAC,eAAe,MAAM,IAAI,IAAI,EAAE,KAAK,MAAM,GAAG,MAAM,GAAG,EAAE,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,OAAO,EAAE,KAAK,EAAE,CAAC,MAAM,IAAI,IAAI,EAAE,KAAK,MAAM,GAAG,MAAM,GAAG,EAAE,CAAC,KAAK,MAAM,IAAI,IAAI,EAAE,KAAK,MAAM,GAAG,MAAM,GAAG,EAAE,CAAC,IAAI,CAAC,EAAE,EAAE,MAAM,EAAE,CAAC,CAAC,CAAC;AAC3Q,MAAC,iCAAiC,GAAG,mCAAmC;AACpF,MAAM,eAAe,GAAG;AACxB,IAAI,KAAK;AACT,IAAI,KAAK;AACT,IAAI,MAAM;AACV,IAAI,MAAM;AACV,IAAI,MAAM;AACV,IAAI,MAAM;AACV,IAAI,QAAQ;AACZ,IAAI,MAAM;AACV,IAAI,MAAM;AACV,CAAC,CAAC;AACF,MAAM,aAAa,GAAG,CAAC,cAAc,KAAK;AAC1C,IAAI,IAAI,EAAE,CAAC;AACX,IAAI,IAAI,CAAC,cAAc,KAAK,IAAI,IAAI,cAAc,KAAK,MAAM,GAAG,MAAM,GAAG,cAAc,CAAC,IAAI,MAAM,aAAa,EAAE;AACjH,QAAQ,OAAO,CAAC,EAAE,GAAG,cAAc,CAAC,WAAW,MAAM,IAAI,IAAI,EAAE,KAAK,MAAM,GAAG,MAAM,GAAG,EAAE,CAAC,MAAM,CAAC;AAChG,IAAI,CAAC;AACL,IAAI,IAAI,CAAC,cAAc,KAAK,IAAI,IAAI,cAAc,KAAK,MAAM,GAAG,MAAM,GAAG,cAAc,CAAC,IAAI,MAAM,aAAa,EAAE;AACjH,QAAQ,OAAO,cAAc,CAAC,MAAM,IAAI,EAAE,CAAC;AAC3C,IAAI,CAAC;AACL,IAAI,OAAO,IAAI,CAAC;AAChB,CAAC,CAAC;AACK,MAAM,YAAY,CAAC;AAC1B,IAAI,WAAW,CAAC,EAAE,SAAS,EAAE,GAAG,EAAE,WAAW,EAAE,KAAK,EAAE,EAAE;AACxD,QAAQ,IAAI,CAAC,GAAG,GAAG,KAAK,CAAC;AACzB,QAAQ,IAAI,CAAC,WAAW,GAAG,EAAE,CAAC;AAC9B,QAAQ,IAAI,CAAC,EAAE,GAAG,SAAS,CAAC;AAC5B,QAAQ,IAAI,CAAC,GAAG,GAAG,GAAG,CAAC;AACvB,QAAQ,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;AACvC,QAAQ,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;AAC3B,IAAI,CAAC;AACL;AACA;AACA;AACA,IAAI,MAAM,WAAW,CAAC,QAAQ,EAAE;AAChC,QAAQ,IAAI,IAAI,GAAG,QAAQ,CAAC;AAC5B,QAAQ,IAAI,IAAI,CAAC,UAAU,CAAC,SAAS,CAAC;AACtC,YAAY,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;AACjC,QAAQ,OAAO,QAAQ,CAAC,WAAW,C
AAC,EAAE,SAAS,EAAE,IAAI,CAAC,EAAE,EAAE,QAAQ,EAAE,IAAI,EAAE,CAAC,CAAC;AAC5E,IAAI,CAAC;AACL;AACA;AACA;AACA,IAAI,MAAM,WAAW,CAAC,QAAQ,EAAE,OAAO,EAAE;AACzC,QAAQ,OAAO,QAAQ,CAAC,WAAW,CAAC;AACpC,YAAY,SAAS,EAAE,IAAI,CAAC,EAAE;AAC9B,YAAY,QAAQ;AACpB,YAAY,IAAI,EAAE,CAAC,OAAO,KAAK,IAAI,IAAI,OAAO,KAAK,MAAM,GAAG,MAAM,GAAG,OAAO,CAAC,SAAS,KAAK,EAAE;AAC7F,SAAS,CAAC,CAAC;AACX,IAAI,CAAC;AACL,IAAI,oBAAoB,GAAG;AAC3B,QAAQ,OAAO,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,aAAa,CAAC,SAAS,CAAC;AACpD,IAAI,CAAC;AACL,IAAI,gBAAgB,GAAG;AACvB,QAAQ,MAAM,EAAE,KAAK,EAAE,GAAG,IAAI,CAAC,KAAK,CAAC,aAAa,CAAC;AACnD,QAAQ,OAAO,CAAC,EAAE,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,MAAM,GAAG,MAAM,GAAG,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,MAAM,GAAG,MAAM,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC;AAClJ,IAAI,CAAC;AACL,IAAI,MAAM,gBAAgB,CAAC,QAAQ,EAAE,QAAQ,EAAE,MAAM,EAAE;AACvD,QAAQ,IAAI,EAAE,CAAC;AACf,QAAQ,MAAM,UAAU,GAAG,EAAE,CAAC;AAC9B,QAAQ,MAAM,IAAI,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,GAAG,KAAK;AAC3C,YAAY,IAAI,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,OAAO,CAAC,EAAE;AAC5C,gBAAgB,MAAM,OAAO,GAAG,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,IAAI,KAAK;AAC1D,oBAAoB,IAAI,EAAE,CAAC;AAC3B;AACA,oBAAoB,IAAI,IAAI,CAAC,IAAI,KAAK,WAAW,EAAE;AACnD,wBAAwB,IAAI,IAAI,GAAG,CAAC,CAAC,EAAE,GAAG,IAAI,CAAC,SAAS,MAAM,IAAI,IAAI,EAAE,KAAK,MAAM,GAAG,MAAM,GAAG,EAAE,CAAC,GAAG,KAAK,EAAE,CAAC;AAC7G,wBAAwB,IAAI,IAAI,KAAK,IAAI,IAAI,IAAI,KAAK,MAAM,GAAG,MAAM,GAAG,IAAI,CAAC,UAAU,CAAC,SAAS,CAAC;AAClG,4BAA4B,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;AACjD,wBAAwB,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC9C,wBAAwB,OAAO;AAC/B,4BAA4B,IAAI,EAAE,MAAM;AACxC,4BAA4B,IAAI,EAAE,iCAAiC;AACnE,yBAAyB,CAAC;AAC1B,oBAAoB,CAAC;AACrB,yBAAyB,IAAI,IAAI,CAAC,IAAI,KAAK,aAAa,EAAE;AAC1D,wBAAwB,MAAM,EAAE,WAAW,EAAE,KAAK,EAAE,GAAG,IAAI,CAAC;AAC5D,wBAAwB,IAAI,CAAC,KAAK;AAClC,4BAA4B,MAAM,IAAI,KAAK,CAAC,yBAAyB,CAAC,CAAC;AACvE,wBAAwB,MAAM,EAAE,MAAM,EAAE,GAAG,KAAK,CAAC;AACjD,wBAAwB,IAAI,MAAM,IAAI,KAAK,IAAI,MAAM,IAAI,KAAK,EAAE;AAChE,4BAA4B,MAAM,IAAI,KAAK,CAAC,CAAC,0BAA0B,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC;AACnF,wBAAwB,CAAC;AACzB
,wBAAwB,IAAI,KAAK,CAAC,GAAG,EAAE;AACvC,4BAA4B,MAAM,IAAI,GAAG,KAAK,CAAC,GAAG,CAAC,OAAO,CAAC,WAAW,EAAE,EAAE,CAAC,CAAC;AAC5E,4BAA4B,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAClD,wBAAwB,CAAC;AACzB,6BAA6B,IAAI,KAAK,CAAC,IAAI,EAAE;AAC7C,4BAA4B,UAAU,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;AACxD,wBAAwB,CAAC;AACzB,wBAAwB,OAAO;AAC/B,4BAA4B,IAAI,EAAE,MAAM;AACxC,4BAA4B,IAAI,EAAE,iCAAiC;AACnE,yBAAyB,CAAC;AAC1B,oBAAoB,CAAC;AACrB,oBAAoB,OAAO,IAAI,CAAC;AAChC,gBAAgB,CAAC,CAAC,CAAC;AACnB,gBAAgB,OAAO,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,GAAG,CAAC,EAAE,EAAE,OAAO,EAAE,CAAC,CAAC;AAC1E,YAAY,CAAC;AACb,YAAY,OAAO,GAAG,CAAC;AACvB,QAAQ,CAAC,CAAC,CAAC;AACX,QAAQ,MAAM,QAAQ,GAAG,IAAI,CAAC,gBAAgB,EAAE,KAAK,MAAM,KAAK,IAAI,IAAI,MAAM,KAAK,MAAM,GAAG,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC;AACnH,QAAQ,IAAI,IAAI,CAAC;AACjB,QAAQ,IAAI,QAAQ;AACpB,YAAY,IAAI,GAAG,QAAQ,CAAC;AAC5B,QAAQ,MAAM,UAAU,GAAG,aAAa,CAAC,MAAM,KAAK,IAAI,IAAI,MAAM,KAAK,MAAM,GAAG,MAAM,GAAG,MAAM,CAAC,eAAe,CAAC,CAAC;AACjH,QAAQ,MAAM,MAAM,GAAG,MAAM,QAAQ,CAAC,gBAAgB,CAAC;AACvD,YAAY,SAAS,EAAE,IAAI,CAAC,EAAE;AAC9B,YAAY,QAAQ,EAAE,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC;AAC1C,YAAY,YAAY,EAAE,IAAI;AAC9B,YAAY,MAAM,EAAE;AACpB,gBAAgB,KAAK,EAAE,QAAQ;AAC/B,gBAAgB,WAAW,EAAE,UAAU,GAAG,IAAI,CAAC,SAAS,CAAC,UAAU,CAAC,GAAG,SAAS;AAChF,gBAAgB,KAAK,EAAE,CAAC,MAAM,KAAK,IAAI,IAAI,MAAM,KAAK,MAAM,GAAG,MAAM,GAAG,MAAM,CAAC,KAAK,IAAI,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,SAAS;AAChI,gBAAgB,mBAAmB,EAAE,CAAC,MAAM,KAAK,IAAI,IAAI,MAAM,KAAK,MAAM,GAAG,MAAM,GAAG,MAAM,CAAC,mBAAmB,IAAI,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,mBAAmB,CAAC;AAC9J,sBAAsB,SAAS;AAC/B,gBAAgB,WAAW,EAAE,MAAM,KAAK,IAAI,IAAI,MAAM,KAAK,MAAM,GAAG,MAAM,GAAG,MAAM,CAAC,WAAW;AAC/F,gBAAgB,eAAe,EAAE,CAAC,EAAE,GAAG,MAAM,KAAK,IAAI,IAAI,MAAM,KAAK,MAAM,GAAG,MAAM,GAAG,MAAM,CAAC,eAAe,MAAM,IAAI,IAAI,EAAE,KAAK,MAAM,GAAG,EAAE,GAAG,IAAI;AACpJ,gBAAgB,qBAAqB,EAAE,MAAM,KAAK,IAAI,IAAI,MAAM,KAAK,MAAM,GAAG,MAAM,GAAG,MAAM,CAAC,qBAAqB;AACnH,gBAAgB,GAAG,EAAE,QAAQ,MAAM,KAAK,IAAI,IAAI,MAAM,KAAK,MAAM,GAAG,MAAM,GAAG,MAAM,CAAC,GAAG,CAAC,KAAK,QAAQ,GAAG,M
AAM,CAAC,GAAG,CAAC,QAAQ,EAAE,GAAG,MAAM,KAAK,IAAI,IAAI,MAAM,KAAK,MAAM,GAAG,MAAM,GAAG,MAAM,CAAC,GAAG;AAC1L,gBAAgB,oBAAoB,EAAE,CAAC,MAAM,KAAK,IAAI,IAAI,MAAM,KAAK,MAAM,GAAG,MAAM,GAAG,MAAM,CAAC,oBAAoB,IAAI,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC,oBAAoB,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,CAAC,GAAG,EAAE,KAAK,CAAC,KAAK;AAC/M,oBAAoB,GAAG,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;AACrD,oBAAoB,OAAO,GAAG,CAAC;AAC/B,gBAAgB,CAAC,EAAE,EAAE,CAAC,CAAC,GAAG,SAAS;AACnC,aAAa;AACb,SAAS,CAAC,CAAC;AACX,QAAQ,IAAI,CAAC,QAAQ,EAAE;AACvB,YAAY,OAAO;AACnB,gBAAgB,IAAI,EAAE,YAAY;AAClC,gBAAgB,MAAM,EAAE,MAAM;AAC9B,gBAAgB,SAAS,EAAE,UAAU,CAAC,MAAM,GAAG,CAAC;AAChD,gBAAgB,WAAW,EAAE,UAAU;AACvC,aAAa,CAAC;AACd,QAAQ,CAAC;AACT,QAAQ,MAAM,WAAW,GAAG,MAAM,CAAC;AACnC,QAAQ,WAAW,CAAC,IAAI,GAAG,OAAO,CAAC;AACnC,QAAQ,WAAW,CAAC,SAAS,GAAG,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC;AACtD,QAAQ,WAAW,CAAC,WAAW,GAAG,UAAU,CAAC;AAC7C,QAAQ,OAAO,WAAW,CAAC;AAC3B,IAAI,CAAC;AACL;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,IAAI,MAAM,UAAU,CAAC,MAAM,EAAE,QAAQ,EAAE;AACvC,QAAQ,MAAM,YAAY,GAAG,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,MAAM,CAAC,EAAE,EAAE,MAAM,EAAE,MAAM,CAAC,MAAM,IAAI,EAAE,EAAE,uBAAuB,EAAE,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AAC5I,QAAQ,IAAI,MAAM,CAAC,QAAQ,EAAE;AAC7B,YAAY,MAAM,eAAe,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,QAAQ,EAAE,MAAM,CAAC,aAAa,IAAI,MAAM,CAAC,YAAY,EAAE;AAC9H,gBAAgB,KAAK,EAAE,MAAM,CAAC,KAAK;AACnC,gBAAgB,KAAK,EAAE,MAAM,CAAC,KAAK;AACnC,gBAAgB,mBAAmB,EAAE,MAAM,CAAC,mBAAmB;AAC/D,gBAAgB,WAAW,EAAE,MAAM,CAAC,WAAW;AAC/C,gBAAgB,eAAe,EAAE,MAAM,CAAC,eAAe;AACvD,gBAAgB,qBAAqB,EAAE,MAAM,CAAC,qBAAqB;AACnE,gBAAgB,GAAG,EAAE,MAAM,CAAC,GAAG;AAC/B,gBAAgB,oBAAoB,EAAE,MAAM,CAAC,oBAAoB;AACjE,aAAa,CAAC,CAAC;AACf,YAAY,IAAI,eAAe,CAAC,IAAI,KAAK,OAAO,EAAE;AAClD,gBAAgB,MAAM,WAAW,GAAG,eAAe,CAAC;AACpD,gBAAgB,YAAY,CAAC,MAAM,GAAG,WAAW,CAAC,MAAM,IAAI,EAAE,CAAC;AAC/D,gBAAgB,IAAI,OAAO,WAAW,CAAC,WAAW,KAAK,QAAQ;AAC/D,oBAAoB,YAAY,CAAC,WAAW,GAAG,WAAW,CAAC,WAAW,CAAC;AACvE,gBAAgB,IAAI,WAAW,CAAC,OAAO;AACvC,oBAAoB,YAAY,CAAC,OAAO,G
AAG,WAAW,CAAC,OAAO,CAAC;AAC/D,gBAAgB,IAAI,OAAO,WAAW,CAAC,YAAY,KAAK,SAAS;AACjE,oBAAoB,YAAY,CAAC,YAAY,GAAG,WAAW,CAAC,YAAY,CAAC;AACzE,gBAAgB,IAAI,WAAW,CAAC,gBAAgB;AAChD,oBAAoB,YAAY,CAAC,gBAAgB,GAAG,WAAW,CAAC,gBAAgB,CAAC;AACjF,gBAAgB,IAAI,WAAW,CAAC,gBAAgB;AAChD,oBAAoB,YAAY,CAAC,gBAAgB,GAAG,WAAW,CAAC,gBAAgB,CAAC;AACjF,gBAAgB,IAAI,WAAW,CAAC,gBAAgB,EAAE;AAClD,oBAAoB,IAAI,CAAC,YAAY,CAAC,IAAI;AAC1C,wBAAwB,YAAY,CAAC,IAAI,GAAG,EAAE,CAAC;AAC/C,oBAAoB,YAAY,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,WAAW,CAAC,gBAAgB,CAAC,CAAC;AAC5E,gBAAgB,CAAC;AACjB,gBAAgB,IAAI,WAAW,CAAC,SAAS,EAAE;AAC3C,oBAAoB,YAAY,CAAC,WAAW,GAAG,WAAW,CAAC,WAAW,CAAC;AACvE,gBAAgB,CAAC;AACjB,YAAY,CAAC;AACb,iBAAiB,IAAI,eAAe,CAAC,IAAI,KAAK,YAAY,EAAE;AAC5D,gBAAgB,MAAM,eAAe,GAAG,eAAe,CAAC;AACxD,gBAAgB,YAAY,CAAC,MAAM,GAAG,eAAe,CAAC,MAAM,IAAI,EAAE,CAAC;AACnE,gBAAgB,IAAI,eAAe,CAAC,SAAS,EAAE;AAC/C,oBAAoB,YAAY,CAAC,WAAW,GAAG,eAAe,CAAC,WAAW,CAAC;AAC3E,gBAAgB,CAAC;AACjB,YAAY,CAAC;AACb,QAAQ,CAAC;AACT,aAAa;AACb,YAAY,YAAY,CAAC,MAAM,GAAG,MAAM,CAAC,MAAM,IAAI,EAAE,CAAC;AACtD,QAAQ,CAAC;AACT;AACA,QAAQ,IAAI,CAAC,YAAY,CAAC,WAAW,IAAI,MAAM,CAAC,WAAW,EAAE;AAC7D,YAAY,YAAY,CAAC,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC;AAC1D,QAAQ,CAAC;AACT,QAAQ,IAAI,YAAY,CAAC,eAAe,IAAI,CAAC,YAAY,CAAC,OAAO,EAAE;AACnE,YAAY,MAAM,UAAU,GAAG,aAAa,CAAC,MAAM,CAAC,eAAe,CAAC,CAAC;AACrE,YAAY,IAAI,UAAU;AAC1B,gBAAgB,YAAY,CAAC,WAAW,GAAG,IAAI,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC;AACtE,QAAQ,CAAC;AACT,QAAQ,IAAI,aAAa,GAAG,QAAQ;AACpC,YAAY,QAAQ,CAAC,WAAW,CAAC,cAAc,EAAE,CAAC,GAAG,KAAK;AAC1D,gBAAgB,MAAM,EAAE,SAAS,EAAE,WAAW,EAAE,GAAG,GAAG,CAAC;AACvD,gBAAgB,IAAI,SAAS,KAAK,IAAI,CAAC,EAAE;AACzC,oBAAoB,OAAO;AAC3B,gBAAgB,QAAQ,CAAC,WAAW,CAAC,CAAC;AACtC,YAAY,CAAC,CAAC,CAAC;AACf,QAAQ,IAAI,CAAC,YAAY,CAAC,MAAM;AAChC,YAAY,MAAM,IAAI,KAAK,CAAC,oBAAoB,CAAC,CAAC;AAClD,QAAQ,MAAM,OAAO,GAAG,QAAQ,CAAC,UAAU,CAAC,EAAE,SAAS,EAAE,IAAI,CAAC,EAAE,EAAE,MAAM,EAAE,YAAY,EAAE,CAAC,CAAC;AAC1F,QAAQ,OAAO,OAAO;AACtB,aAAa,IAAI,CAAC,CAAC,gBAAgB,KAAK;AACxC,YAAY,aAAa,KAAK,IAAI,IAAI,aAAa,KAAK,MAAM,GAAG,MAAM,GAAG,aAAa,CAAC,MAAM,EAAE,CAAC;AACjG,YAAY,a
AAa,GAAG,IAAI,CAAC;AACjC,YAAY,OAAO,gBAAgB,CAAC;AACpC,QAAQ,CAAC,CAAC;AACV,aAAa,KAAK,CAAC,CAAC,GAAG,KAAK;AAC5B,YAAY,aAAa,KAAK,IAAI,IAAI,aAAa,KAAK,MAAM,GAAG,MAAM,GAAG,aAAa,CAAC,MAAM,EAAE,CAAC;AACjG,YAAY,aAAa,GAAG,IAAI,CAAC;AACjC,YAAY,MAAM,GAAG,CAAC;AACtB,QAAQ,CAAC,CAAC,CAAC;AACX,IAAI,CAAC;AACL,IAAI,cAAc,GAAG;AACrB,QAAQ,OAAO,QAAQ,CAAC,cAAc,CAAC,EAAE,SAAS,EAAE,IAAI,CAAC,EAAE,EAAE,CAAC,CAAC;AAC/D,IAAI,CAAC;AACL;AACA;AACA;AACA;AACA;AACA;AACA,IAAI,QAAQ,CAAC,IAAI,EAAE,EAAE,WAAW,EAAE,UAAU,GAAG,GAAG,EAAE,EAAE;AACtD,QAAQ,OAAO,QAAQ,CAAC,QAAQ,CAAC,EAAE,SAAS,EAAE,IAAI,CAAC,EAAE,EAAE,IAAI,EAAE,UAAU,EAAE,UAAU,EAAE,CAAC,CAAC;AACvF,IAAI,CAAC;AACL,IAAI,UAAU,CAAC,MAAM,EAAE;AACvB,QAAQ,OAAO,QAAQ,CAAC,UAAU,CAAC,EAAE,SAAS,EAAE,IAAI,CAAC,EAAE,EAAE,MAAM,EAAE,CAAC,CAAC;AACnE,IAAI,CAAC;AACL,IAAI,SAAS,CAAC,IAAI,EAAE,MAAM,EAAE;AAC5B,QAAQ,OAAO,QAAQ,CAAC,SAAS,CAAC,EAAE,SAAS,EAAE,IAAI,CAAC,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,MAAM,IAAI,EAAE,EAAE,CAAC,CAAC;AACtF,IAAI,CAAC;AACL;AACA;AACA;AACA;AACA;AACA;AACA;AACA,IAAI,MAAM,MAAM,CAAC,KAAK,EAAE,SAAS,EAAE,MAAM,EAAE;AAC3C,QAAQ,MAAM,OAAO,GAAG,MAAM,QAAQ,CAAC,MAAM,CAAC;AAC9C,YAAY,SAAS,EAAE,IAAI,CAAC,EAAE;AAC9B,YAAY,KAAK;AACjB,YAAY,SAAS;AACrB,YAAY,MAAM,EAAE,MAAM,IAAI,EAAE;AAChC,SAAS,CAAC,CAAC;AACX;AACA,QAAQ,OAAO,OAAO;AACtB,aAAa,GAAG,CAAC,CAAC,MAAM,MAAM,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,MAAM,CAAC,EAAE,EAAE,QAAQ,EAAE,SAAS,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC;AAC/G,aAAa,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,KAAK,CAAC,CAAC;AAC/C,IAAI,CAAC;AACL,IAAI,MAAM,KAAK,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE;AAChC,QAAQ,MAAM,MAAM,GAAG,MAAM,QAAQ,CAAC,KAAK,CAAC,EAAE,SAAS,EAAE,IAAI,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC;AACpF,QAAQ,MAAM,CAAC,SAAS,EAAE,SAAS,EAAE,YAAY,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;AACpG,QAAQ,OAAO;AACf,YAAY,SAAS;AACrB,YAAY,SAAS;AACrB,YAAY,YAAY;AACxB,YAAY,KAAK;AACjB,YAAY,KAAK;AACjB,YAAY,KAAK;AACjB,YAAY,KAAK;AACjB,SAAS,CAAC;AACV,IAAI,CAAC;A
ACL,IAAI,MAAM,iBAAiB,CAAC,QAAQ,EAAE;AACtC,QAAQ,IAAI,YAAY,GAAG,EAAE,CAAC;AAC9B,QAAQ,IAAI,QAAQ;AACpB,YAAY,YAAY,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM;AAChD,gBAAgB,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,EAAE,EAAE,CAAC;AACrD,gBAAgB,MAAM,EAAE,CAAC,CAAC,MAAM;AAChC,aAAa,CAAC,CAAC,CAAC;AAChB,QAAQ,OAAO,QAAQ,CAAC,iBAAiB,CAAC,EAAE,SAAS,EAAE,IAAI,CAAC,EAAE,EAAE,YAAY,EAAE,CAAC,CAAC;AAChF,IAAI,CAAC;AACL,IAAI,MAAM,kBAAkB,GAAG;AAC/B,QAAQ,OAAO,QAAQ,CAAC,kBAAkB,CAAC,EAAE,SAAS,EAAE,IAAI,CAAC,EAAE,EAAE,CAAC,CAAC;AACnE,IAAI,CAAC;AACL,IAAI,MAAM,qBAAqB,GAAG;AAClC,QAAQ,OAAO,QAAQ,CAAC,qBAAqB,CAAC,EAAE,SAAS,EAAE,IAAI,CAAC,EAAE,EAAE,CAAC,CAAC;AACtE,IAAI,CAAC;AACL;AACA;AACA;AACA;AACA;AACA;AACA;AACA,IAAI,MAAM,cAAc,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,GAAG,EAAE;AACrD,QAAQ,IAAI,IAAI,CAAC,UAAU,CAAC,SAAS,CAAC;AACtC,YAAY,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;AACjC,QAAQ,OAAO,QAAQ,CAAC,cAAc,CAAC;AACvC,YAAY,SAAS,EAAE,IAAI,CAAC,EAAE;AAC9B,YAAY,MAAM,EAAE;AACpB,gBAAgB,IAAI;AACpB,gBAAgB,OAAO,EAAE,MAAM,KAAK,IAAI,IAAI,MAAM,KAAK,MAAM,GAAG,MAAM,GAAG,IAAI;AAC7E,aAAa;AACb,SAAS,CAAC,CAAC;AACX,IAAI,CAAC;AACL;AACA;AACA;AACA;AACA,IAAI,MAAM,mBAAmB,GAAG;AAChC,QAAQ,OAAO,MAAM,QAAQ,CAAC,mBAAmB,CAAC,EAAE,SAAS,EAAE,IAAI,CAAC,EAAE,EAAE,CAAC,CAAC;AAC1E,IAAI,CAAC;AACL;AACA;AACA;AACA;AACA,IAAI,MAAM,oBAAoB,GAAG;AACjC,QAAQ,OAAO,MAAM,QAAQ,CAAC,oBAAoB,CAAC,EAAE,SAAS,EAAE,IAAI,CAAC,EAAE,EAAE,CAAC,CAAC;AAC3E,IAAI,CAAC;AACL;AACA;AACA;AACA;AACA,IAAI,MAAM,iBAAiB,GAAG;AAC9B,QAAQ,OAAO,MAAM,QAAQ,CAAC,iBAAiB,CAAC,EAAE,SAAS,EAAE,IAAI,CAAC,EAAE,EAAE,CAAC,CAAC;AACxE,IAAI,CAAC;AACL;AACA;AACA;AACA;AACA;AACA;AACA;AACA,IAAI,MAAM,WAAW,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,EAAE,EAAE;AACjD,QAAQ,IAAI,IAAI,CAAC,UAAU,CAAC,SAAS,CAAC;AACtC,YAAY,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;AACjC,QAAQ,OAAO,MAAM,QAAQ,CAAC,WAAW,CAAC;AAC1C,YAAY,SAAS,EAAE,IAAI,CAAC,EAAE;AAC9B,YAAY,MAAM,EAAE,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,EAAE;AAC7C,SAAS,CAAC,CAAC;AACX,IAAI,CAAC;AACL;AACA;AACA;AACA;AACA,IAAI,MAAM,gBAAgB,GAAG;AAC7B,QAAQ,OAAO,MAAM,QAAQ,CAAC,gBAAgB,CAAC,EA
AE,SAAS,EAAE,IAAI,CAAC,EAAE,EAAE,CAAC,CAAC;AACvE,IAAI,CAAC;AACL;AACA;AACA;AACA;AACA;AACA;AACA,IAAI,MAAM,2BAA2B,CAAC,OAAO,EAAE,WAAW,EAAE;AAC5D,QAAQ,OAAO,MAAM,QAAQ,CAAC,2BAA2B,CAAC;AAC1D,YAAY,SAAS,EAAE,IAAI,CAAC,EAAE;AAC9B,YAAY,cAAc,EAAE,OAAO,GAAG,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,GAAG,EAAE;AAClE,YAAY,WAAW;AACvB,SAAS,CAAC,CAAC;AACX,IAAI,CAAC;AACL;AACA;AACA;AACA;AACA;AACA,IAAI,MAAM,6BAA6B,CAAC,WAAW,EAAE;AACrD,QAAQ,OAAO,MAAM,QAAQ,CAAC,6BAA6B,CAAC,EAAE,SAAS,EAAE,IAAI,CAAC,EAAE,EAAE,WAAW,EAAE,CAAC,CAAC;AACjG,IAAI,CAAC;AACL;AACA;AACA;AACA;AACA;AACA,IAAI,MAAM,iBAAiB,CAAC,MAAM,EAAE;AACpC,QAAQ,OAAO,MAAM,QAAQ,CAAC,iBAAiB,CAAC,EAAE,SAAS,EAAE,IAAI,CAAC,EAAE,EAAE,MAAM,EAAE,CAAC,CAAC;AAChF,IAAI,CAAC;AACL;AACA;AACA;AACA;AACA,IAAI,MAAM,cAAc,GAAG;AAC3B,QAAQ,OAAO,MAAM,QAAQ,CAAC,cAAc,CAAC,EAAE,SAAS,EAAE,IAAI,CAAC,EAAE,EAAE,CAAC,CAAC;AACrE,IAAI,CAAC;AACL,IAAI,MAAM,OAAO,GAAG;AACpB,QAAQ,OAAO,QAAQ,CAAC,cAAc,CAAC,EAAE,SAAS,EAAE,IAAI,CAAC,EAAE,EAAE,CAAC,CAAC;AAC/D,IAAI,CAAC;AACL,CAAC;AACM,eAAe,eAAe,CAAC,OAAO,EAAE;AAC/C,IAAI,OAAO,QAAQ,CAAC,eAAe,CAAC,EAAE,OAAO,EAAE,CAAC,CAAC;AACjD,CAAC;AACM,SAAS,oBAAoB,CAAC,QAAQ,EAAE;AAC/C,IAAI,YAAY,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;AAChC,IAAI,OAAO;AACX,QAAQ,MAAM,EAAE,MAAM;AACtB,YAAY,YAAY,CAAC,MAAM,CAAC,YAAY,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC;AACnE,QAAQ,CAAC;AACT,KAAK,CAAC;AACN,CAAC;AACM,eAAe,eAAe,CAAC,KAAK,EAAE;AAC7C,IAAI,OAAO,QAAQ,CAAC,eAAe,CAAC,EAAE,KAAK,EAAE,CAAC,CAAC;AAC/C,CAAC;AACD,IAAI,gBAAgB,GAAG,CAAC,CAAC;AACzB,MAAM,eAAe,GAAG,MAAM,OAAO,CAAC,GAAG,CAAC,QAAQ,KAAK,MAAM,GAAG,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,MAAM,CAAC,CAAC;AACvG,MAAM,aAAa,GAAG;AACtB;AACA,IAAI,uBAAuB;AAC3B,IAAI,2BAA2B;AAC/B,IAAI,uBAAuB;AAC3B,IAAI,uBAAuB;AAC3B,CAAC,CAAC;AACK,eAAe,kBAAkB,CAAC,KAAK,EAAE;AAChD,IAAI,IAAI,IAAI,GAAG,KAAK,CAAC;AACrB,IAAI,IAAI,IAAI,CAAC,UAAU,CAAC,SAAS,CAAC;AAClC,QAAQ,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;AAC7B,IAAI,OAAO,QAAQ,CAAC,SAAS,CAAC,EAAE,IAAI,EAAE,IAAI,EAAE,aAAa,EAAE,CAAC,CAAC;AAC7D,CAAC;AACD,MAAM,WAAW,GAAG;AACpB;AACA,IAAI,IAAI,E
AAE,CAAC;AACX,IAAI,IAAI,EAAE,CAAC;AACX,IAAI,GAAG,EAAE,CAAC;AACV,IAAI,IAAI,EAAE,CAAC;AACX,IAAI,IAAI,EAAE,CAAC;AACX,CAAC,CAAC;AACK,eAAe,SAAS,CAAC,EAAE,EAAE,UAAU,EAAE;AAChD,IAAI,IAAI,EAAE,KAAK,EAAE,cAAc,EAAE,YAAY,EAAE,YAAY,EAAE,WAAW,EAAE,IAAI,EAAE,SAAS,EAAE,QAAQ,EAAE,GAAG,EAAE,EAAE,IAAI,GAAG,MAAM,CAAC,EAAE,EAAE,CAAC,OAAO,EAAE,gBAAgB,EAAE,cAAc,EAAE,MAAM,EAAE,WAAW,CAAC,CAAC,CAAC;AAChM,IAAI,IAAI,IAAI,GAAG,KAAK,CAAC;AACrB,IAAI,IAAI,IAAI,CAAC,UAAU,CAAC,SAAS,CAAC;AAClC,QAAQ,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;AAC7B,IAAI,IAAI,QAAQ,GAAG,IAAI,CAAC;AACxB,IAAI,IAAI,QAAQ,KAAK,IAAI,IAAI,QAAQ,KAAK,MAAM,GAAG,MAAM,GAAG,QAAQ,CAAC,UAAU,CAAC,SAAS,CAAC;AAC1F,QAAQ,QAAQ,GAAG,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;AACrC,IAAI,IAAI,YAAY,GAAG,EAAE,CAAC;AAC1B,IAAI,IAAI,QAAQ;AAChB,QAAQ,YAAY,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM;AAC5C,YAAY,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,EAAE,EAAE,CAAC;AACjD,YAAY,MAAM,EAAE,CAAC,CAAC,MAAM;AAC5B,SAAS,CAAC,CAAC,CAAC;AACZ,IAAI,MAAM,SAAS,GAAG,gBAAgB,GAAG,eAAe,EAAE,CAAC;AAC3D,IAAI,gBAAgB,IAAI,CAAC,CAAC;AAC1B,IAAI,IAAI,sBAAsB,GAAG,IAAI,CAAC;AACtC,IAAI,IAAI,UAAU,EAAE;AACpB,QAAQ,sBAAsB,GAAG,QAAQ,CAAC,WAAW,CAAC,8BAA8B,EAAE,CAAC,GAAG,KAAK;AAC/F,YAAY,IAAI,GAAG,CAAC,SAAS,KAAK,SAAS;AAC3C,gBAAgB,OAAO;AACvB,YAAY,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC;AACrC,QAAQ,CAAC,CAAC,CAAC;AACX,IAAI,CAAC;AACL,IAAI,MAAM,QAAQ,GAAG,WAAW,CAAC,WAAW,CAAC,CAAC;AAC9C,IAAI,IAAI,IAAI,CAAC,YAAY,IAAI,CAAC,eAAe,CAAC,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,EAAE;AAC3E,QAAQ,OAAO,CAAC,IAAI,CAAC,CAAC,4CAA4C,EAAE,IAAI,CAAC,YAAY,CAAC,qBAAqB,CAAC,CAAC,CAAC;AAC9G,QAAQ,OAAO,IAAI,CAAC,YAAY,CAAC;AACjC,IAAI,CAAC;AACL,IAAI,IAAI,IAAI,CAAC,YAAY,IAAI,CAAC,eAAe,CAAC,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,EAAE;AAC3E,QAAQ,OAAO,CAAC,IAAI,CAAC,CAAC,4CAA4C,EAAE,IAAI,CAAC,YAAY,CAAC,qBAAqB,CAAC,CAAC,CAAC;AAC9G,QAAQ,OAAO,IAAI,CAAC,YAAY,CAAC;AACjC,IAAI,CAAC;AACL,IAAI,MAAM,EAAE,GAAG,EAAE,WAAW,EAAE,KAAK,EAAE,YAAY,EAAE,UAAU,GAAG,GAAG,MAAM,QAAQ,CAAC,WAAW,CAAC;AAC9F,QAAQ,SAAS;AACjB,QAAQ,MAAM,EAAE,MAAM,CAAC,MAAM,CAAC,EAAE,KAAK,EAAE,IAAI,EAAE
,cAAc,EAAE,CAAC,CAAC,YAAY,EAAE,qBAAqB,EAAE,CAAC,CAAC,UAAU,EAAE,YAAY,EAAE,QAAQ,EAAE,IAAI,EAAE,QAAQ,EAAE,SAAS,EAAE,YAAY,EAAE,EAAE,IAAI,CAAC;AAC1L,KAAK,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,KAAK;AACtB,QAAQ,sBAAsB,KAAK,IAAI,IAAI,sBAAsB,KAAK,MAAM,GAAG,MAAM,GAAG,sBAAsB,CAAC,MAAM,EAAE,CAAC;AACxH,QAAQ,MAAM,GAAG,CAAC;AAClB,IAAI,CAAC,CAAC,CAAC;AACP,IAAI,sBAAsB,KAAK,IAAI,IAAI,sBAAsB,KAAK,MAAM,GAAG,MAAM,GAAG,sBAAsB,CAAC,MAAM,EAAE,CAAC;AACpH,IAAI,OAAO,IAAI,YAAY,CAAC;AAC5B,QAAQ,SAAS;AACjB,QAAQ,GAAG;AACX,QAAQ,WAAW;AACnB,QAAQ,KAAK,EAAE,YAAY;AAC3B,QAAQ,UAAU;AAClB,KAAK,CAAC,CAAC;AACP,CAAC;AACM,eAAe,eAAe,GAAG;AACxC,IAAI,OAAO,QAAQ,CAAC,kBAAkB,EAAE,CAAC;AACzC,CAAC;AACW,MAAC,SAAS,GAAG;AACzB,IAAI,MAAM,EAAE,OAAO;AACnB,IAAI,MAAM,EAAE,qBAAqB;AACjC;;;;;;;;;;;;;;"}