@almadar/llm 1.0.16 → 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE CHANGED
@@ -1,72 +1,21 @@
- Business Source License 1.1
-
- Parameters
-
- Licensor: Almadar FZE
- Licensed Work: KFlow Builder / Almadar
- The Licensed Work is (c) 2025-2026 Almadar FZE.
- Additional Use Grant: You may make production use of the Licensed Work for
- non-commercial purposes and for internal evaluation.
- Production use for commercial purposes requires a
- commercial license from the Licensor.
- Change Date: 2030-02-01
- Change License: Apache License, Version 2.0
-
- Terms
-
- The Licensor hereby grants you the right to copy, modify, create derivative
- works, redistribute, and make non-production use of the Licensed Work. The
- Licensor may make an Additional Use Grant, above, permitting limited
- production use.
-
- Effective on the Change Date, or the fourth anniversary of the first publicly
- available distribution of a specific version of the Licensed Work under this
- License, whichever comes first, the Licensor hereby grants you rights under
- the terms of the Change License, and the rights granted in the paragraph
- above terminate.
-
- If your use of the Licensed Work does not comply with the requirements
- currently in effect as described in this License, you must purchase a
- commercial license from the Licensor, its affiliated entities, or authorized
- resellers, or you must refrain from using the Licensed Work.
-
- All copies of the original and modified Licensed Work, and derivative works
- of the Licensed Work, are subject to this License. This License applies
- separately for each version of the Licensed Work and the Change Date may vary
- for each version of the Licensed Work released by Licensor.
-
- You must conspicuously display this License on each original or modified copy
- of the Licensed Work. If you receive the Licensed Work in original or
- modified form from a third party, the terms and conditions set forth in this
- License apply to your use of that work.
-
- Any use of the Licensed Work in violation of this License will automatically
- terminate your rights under this License for the current and all other
- versions of the Licensed Work.
-
- This License does not grant you any right in any trademark or logo of
- Licensor or its affiliates (provided that you may use a trademark or logo of
- Licensor as expressly required by this License).
-
- TO THE EXTENT PERMITTED BY APPLICABLE LAW, THE LICENSED WORK IS PROVIDED ON
- AN "AS IS" BASIS. LICENSOR HEREBY DISCLAIMS ALL WARRANTIES AND CONDITIONS,
- EXPRESS OR IMPLIED, INCLUDING (WITHOUT LIMITATION) WARRANTIES OF
- MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT, AND
- TITLE.
-
- ---
-
- License text copyright (c) 2017 MariaDB Corporation Ab, All Rights Reserved.
- "Business Source License" is a trademark of MariaDB Corporation Ab.
-
- ADDITIONAL TERMS:
-
- Documentation (builder/packages/website/docs/) is licensed under CC BY 4.0.
-
- TRADEMARKS:
-
- "Orbital", "KFlow", "Almadar", and the Almadar logo are trademarks of
- Almadar FZE. You may not use these trademarks without prior written
- permission from Almadar FZE.
-
- For licensing inquiries: licensing@almadar.io
+ MIT License
+
+ Copyright (c) 2026 Almadar Team
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,25 @@
+ # @almadar/llm
+
+ > PKG_DESCRIPTION
+
+ Part of the [Almadar](https://github.com/almadar-io/almadar) platform.
+
+ ## Installation
+
+ ```bash
+ npm install @almadar/llm
+ ```
+
+ ## Usage
+
+ ```typescript
+ import { /* ... */ } from '@almadar/llm';
+ ```
+
+ ## API
+
+ <!-- Document public exports here -->
+
+ ## License
+
+ MIT
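
The usage section of the new README is left as a placeholder. Based on the exports added later in this diff (`createOpenRouterClient`, `OPENROUTER_MODELS`) and the bundled `client.ts` source, a minimal usage sketch might look like the following; the exact option shapes are assumptions read from the dist code, not documented API.

```typescript
// Sketch only: createOpenRouterClient, OPENROUTER_MODELS and callRaw are taken
// from the exports and bundled client source visible in this diff; treat the
// option shapes as assumptions rather than documented API.
import { createOpenRouterClient, OPENROUTER_MODELS } from '@almadar/llm';

async function main() {
  // The provider config in this diff reads OPEN_ROUTER_API_KEY from the environment.
  const client = createOpenRouterClient({
    model: OPENROUTER_MODELS.QWEN_2_5_72B,
    temperature: 0.3,
  });

  const answer = await client.callRaw({
    systemPrompt: 'You are a concise assistant.',
    userPrompt: 'Reply with a one-sentence greeting.',
  });
  console.log(answer);
}

main().catch(console.error);
```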
@@ -9,55 +9,25 @@ import {
 
  // src/client.ts
  import { ChatOpenAI } from "@langchain/openai";
- import {
- ChatAnthropic
- } from "@langchain/anthropic";
+ import { ChatAnthropic } from "@langchain/anthropic";
  import Anthropic from "@anthropic-ai/sdk";
- var CachingChatAnthropic = class extends ChatAnthropic {
- async invoke(input, options) {
- let messages;
- if (typeof input === "string") {
- messages = [{ role: "user", content: input }];
- } else {
- messages = input;
+ function addCacheControlToSystemMessages(messages) {
+ return messages.map((msg) => {
+ if (msg.role !== "system") {
+ return msg;
  }
- const transformedMessages = messages.map((msg) => {
- const msgType = msg._getType?.() || msg.role || "unknown";
- const isSystem = msgType === "system";
- if (!isSystem) return msg;
- if (typeof msg.content === "string") {
- return {
- ...msg,
- content: [
- {
- type: "text",
- text: msg.content,
- cache_control: { type: "ephemeral" }
- }
- ]
- };
- }
- if (Array.isArray(msg.content)) {
- const blocks = msg.content;
- const hasAnyCacheControl = blocks.some((b) => b.cache_control);
- if (!hasAnyCacheControl) {
- const transformedBlocks = blocks.map((block, idx) => {
- if (block.type === "text" && idx === blocks.length - 1) {
- return {
- ...block,
- cache_control: { type: "ephemeral" }
- };
- }
- return block;
- });
- return { ...msg, content: transformedBlocks };
+ return {
+ role: msg.role,
+ content: [
+ {
+ type: "text",
+ text: msg.content,
+ cache_control: { type: "ephemeral" }
  }
- }
- return msg;
- });
- return super.invoke(transformedMessages, options);
- }
- };
+ ]
+ };
+ });
+ }
  var PROVIDER_CONFIGS = {
  openai: () => {
  const apiKey = process.env.OPENAI_API_KEY;
@@ -103,9 +73,23 @@ var PROVIDER_CONFIGS = {
  }
  return {
  apiKey,
- baseUrl: "https://api.moonshot.cn/v1",
+ baseUrl: "https://api.moonshot.ai/v1",
  defaultModel: "kimi-k2.5"
  };
+ },
+ openrouter: () => {
+ const apiKey = process.env.OPEN_ROUTER_API_KEY;
+ if (!apiKey) {
+ throw new Error(
+ "OPEN_ROUTER_API_KEY environment variable is not set. Please set it in your .env file or environment."
+ );
+ }
+ return {
+ apiKey,
+ baseUrl: "https://openrouter.ai/api/v1",
+ defaultModel: "qwen/qwen-2.5-72b-instruct"
+ // Default to Qwen 2.5
+ };
  }
  };
  var DEEPSEEK_MODELS = {
@@ -129,11 +113,22 @@ var ANTHROPIC_MODELS = {
  var KIMI_MODELS = {
  K2_5: "kimi-k2.5"
  };
+ var OPENROUTER_MODELS = {
+ // Qwen models - JSON/structured data specialists
+ QWEN_2_5_72B: "qwen/qwen-2.5-72b-instruct",
+ QWEN_2_5_CODER_32B: "qwen/qwen-2.5-coder-32b-instruct",
+ QWEN_3_235B: "qwen/qwen3-235b-a22b",
+ // Llama models - agentic workhorses
+ LLAMA_3_3_70B: "meta-llama/llama-3.3-70b-instruct",
+ LLAMA_3_1_405B: "meta-llama/llama-3.1-405b-instruct",
+ LLAMA_4_MAVERICK: "meta-llama/llama-4-maverick",
+ LLAMA_4_SCOUT: "meta-llama/llama-4-scout"
+ };
  var DEFAULT_TEMPERATURE = 0.3;
  var LLMClient = class {
  constructor(options = {}) {
  this.provider = options.provider || "openai";
- this.temperature = options.temperature ?? DEFAULT_TEMPERATURE;
+ this.temperature = options.temperature ?? (this.provider === "kimi" ? 0.6 : DEFAULT_TEMPERATURE);
  this.streaming = options.streaming ?? false;
  this.providerConfig = PROVIDER_CONFIGS[this.provider]();
  this.modelName = options.model || this.providerConfig.defaultModel;
@@ -158,7 +153,7 @@ var LLMClient = class {
  const maxTokens = options?.maxTokens;
  const temperature = options?.temperature ?? this.temperature;
  if (this.provider === "anthropic") {
- return new CachingChatAnthropic({
+ return new ChatAnthropic({
  apiKey: this.providerConfig.apiKey,
  model: this.modelName,
  temperature,
@@ -207,13 +202,26 @@ var LLMClient = class {
  const useCompletionTokens = this.usesMaxCompletionTokens();
  const tokenConfig = maxTokens ? useCompletionTokens ? { modelKwargs: { max_completion_tokens: maxTokens } } : { maxTokens } : {};
  const timeout = this.provider === "deepseek" ? 6e5 : void 0;
+ const isKimi = this.provider === "kimi";
+ const effectiveTemp = isKimi ? 0.6 : temperature;
+ const modelKwargs = {};
+ if (useCompletionTokens && maxTokens) {
+ modelKwargs.max_completion_tokens = maxTokens;
+ }
+ if (isKimi) {
+ modelKwargs.thinking = { type: "disabled" };
+ }
+ if (this.provider === "openrouter") {
+ modelKwargs.tool_choice = "auto";
+ }
  return new ChatOpenAI({
  apiKey: this.providerConfig.apiKey,
  model: this.modelName,
- temperature: useCompletionTokens ? void 0 : temperature,
+ temperature: useCompletionTokens ? void 0 : effectiveTemp,
  streaming: this.streaming,
  timeout,
- ...tokenConfig,
+ ...Object.keys(modelKwargs).length > 0 ? { modelKwargs } : {},
+ ...useCompletionTokens ? {} : maxTokens ? { maxTokens } : {},
  configuration: {
  apiKey: this.providerConfig.apiKey,
  ...this.providerConfig.baseUrl ? { baseURL: this.providerConfig.baseUrl } : {}
@@ -272,10 +280,13 @@ var LLMClient = class {
  console.log(`[LLMClient:call] Invoking model...`);
  const invokeStartTime = Date.now();
  const modelToUse = maxTokens || temperature !== void 0 ? this.getModelWithOptions({ maxTokens, temperature }) : this.model;
- const response = await modelToUse.invoke([
+ const messages = [
  { role: "system", content: systemPrompt },
  { role: "user", content: currentPrompt }
- ]);
+ ];
+ const response = await modelToUse.invoke(
+ this.provider === "anthropic" ? addCacheControlToSystemMessages(messages) : messages
+ );
  console.log(
  `[LLMClient:call] Model responded in ${Date.now() - invokeStartTime}ms`
  );
@@ -362,10 +373,13 @@ Please output valid JSON that matches the expected schema.`;
  const { systemPrompt, userPrompt, maxTokens } = options;
  return this.rateLimiter.execute(async () => {
  const modelToUse = maxTokens ? this.getModelWithOptions({ maxTokens }) : this.model;
- const response = await modelToUse.invoke([
+ const messages = [
  { role: "system", content: systemPrompt },
  { role: "user", content: userPrompt }
- ]);
+ ];
+ const response = await modelToUse.invoke(
+ this.provider === "anthropic" ? addCacheControlToSystemMessages(messages) : messages
+ );
  let usage = null;
  if (response.usage_metadata) {
  const usageMeta = response.usage_metadata;
@@ -611,12 +625,20 @@ function createKimiClient(options) {
  ...options
  });
  }
+ function createOpenRouterClient(options) {
+ return new LLMClient({
+ provider: "openrouter",
+ model: OPENROUTER_MODELS.QWEN_2_5_72B,
+ ...options
+ });
+ }
 
  export {
  DEEPSEEK_MODELS,
  OPENAI_MODELS,
  ANTHROPIC_MODELS,
  KIMI_MODELS,
+ OPENROUTER_MODELS,
  LLMClient,
  getSharedLLMClient,
  resetSharedLLMClient,
@@ -628,6 +650,7 @@ export {
  createDeepSeekClient,
  createOpenAIClient,
  createAnthropicClient,
- createKimiClient
+ createKimiClient,
+ createOpenRouterClient
  };
- //# sourceMappingURL=chunk-56H37PN5.js.map
+ //# sourceMappingURL=chunk-YJVZ6ZWO.js.map
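
The hunks above replace the `CachingChatAnthropic` subclass with an `addCacheControlToSystemMessages` helper that is applied only when the provider is `anthropic`. A minimal sketch of the message rewrite it performs, mirroring the shapes shown in this chunk (the helper itself is internal to the bundle, not a public export):

```typescript
// Illustration of the rewrite applied to system messages before invoke()
// when the provider is "anthropic"; mirrors addCacheControlToSystemMessages
// from this diff and is not part of the package's public API.
type SimpleMessage = { role: string; content: string };

const messages: SimpleMessage[] = [
  { role: 'system', content: 'You are a helpful assistant.' },
  { role: 'user', content: 'Hello' },
];

const withCacheControl = messages.map((msg) =>
  msg.role !== 'system'
    ? msg
    : {
        role: msg.role,
        // The system string becomes a content-block array carrying an
        // ephemeral cache_control marker, which enables Anthropic prompt caching.
        content: [
          { type: 'text', text: msg.content, cache_control: { type: 'ephemeral' } },
        ],
      },
);

console.log(JSON.stringify(withCacheControl, null, 2));
```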
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/client.ts"],"sourcesContent":["/**\n * Shared LLM Client\n *\n * Multi-provider LLM client with:\n * - OpenAI, DeepSeek, Anthropic, and Kimi support\n * - Anthropic prompt caching (CachingChatAnthropic)\n * - Rate limiting and retry logic\n * - Token tracking\n * - Structured output parsing with Zod\n *\n * @packageDocumentation\n */\n\nimport { ChatOpenAI } from '@langchain/openai';\nimport { ChatAnthropic } from '@langchain/anthropic';\nimport type { BaseMessageLike } from '@langchain/core/messages';\nimport Anthropic from '@anthropic-ai/sdk';\nimport { z } from 'zod';\nimport {\n RateLimiter,\n getGlobalRateLimiter,\n type RateLimiterOptions,\n} from './rate-limiter.js';\nimport { TokenTracker, getGlobalTokenTracker } from './token-tracker.js';\nimport { parseJsonResponse } from './json-parser.js';\n\n// ============================================================================\n// Anthropic Cache Control Helper\n// ============================================================================\n\nfunction addCacheControlToSystemMessages(\n messages: Array<{ role: string; content: string }>,\n): BaseMessageLike[] {\n return messages.map((msg) => {\n if (msg.role !== 'system') {\n return msg as BaseMessageLike;\n }\n\n return {\n role: msg.role,\n content: [\n {\n type: 'text' as const,\n text: msg.content,\n cache_control: { type: 'ephemeral' },\n },\n ],\n } as BaseMessageLike;\n });\n}\n\ntype ChatModel = ChatOpenAI | ChatAnthropic;\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport type LLMProvider = 'openai' | 'deepseek' | 'anthropic' | 'kimi' | 'openrouter';\n\nexport interface ProviderConfig {\n apiKey: string;\n baseUrl?: string;\n defaultModel: string;\n}\n\nexport interface LLMClientOptions {\n provider?: LLMProvider;\n model?: string;\n temperature?: number;\n streaming?: boolean;\n rateLimiter?: RateLimiterOptions;\n useGlobalRateLimiter?: boolean;\n trackTokens?: boolean;\n}\n\nexport interface LLMCallOptions<T = unknown> {\n systemPrompt: string;\n userPrompt: string;\n schema?: z.ZodSchema<T>;\n maxRetries?: number;\n retryWithContext?: boolean;\n maxTokens?: number;\n skipSchemaValidation?: boolean;\n temperature?: number;\n}\n\nexport interface CacheableBlock {\n type: 'text';\n text: string;\n cache_control?: { type: 'ephemeral' };\n}\n\nexport interface CacheAwareLLMCallOptions<T = unknown>\n extends LLMCallOptions<T> {\n systemBlocks?: CacheableBlock[];\n userBlocks?: CacheableBlock[];\n rawText?: boolean;\n}\n\nexport interface LLMUsage {\n promptTokens: number;\n completionTokens: number;\n totalTokens: number;\n}\n\nexport type LLMFinishReason =\n | 'stop'\n | 'length'\n | 'content_filter'\n | 'tool_calls'\n | null;\n\nexport interface LLMResponse<T> {\n data: T;\n raw: string;\n finishReason: LLMFinishReason;\n usage: LLMUsage | null;\n}\n\n// ============================================================================\n// Provider Configuration\n// ============================================================================\n\nconst PROVIDER_CONFIGS: Record<LLMProvider, () => ProviderConfig> = {\n openai: () => {\n const apiKey = process.env.OPENAI_API_KEY;\n if (!apiKey) {\n throw new Error(\n 'OPENAI_API_KEY environment variable is not set. 
' +\n 'Please set it in your .env file or environment.',\n );\n }\n return { apiKey, baseUrl: undefined, defaultModel: 'gpt-4o' };\n },\n deepseek: () => {\n const apiKey = process.env.DEEPSEEK_API_KEY;\n if (!apiKey) {\n throw new Error(\n 'DEEPSEEK_API_KEY environment variable is not set. ' +\n 'Please set it in your .env file or environment.',\n );\n }\n return {\n apiKey,\n baseUrl: 'https://api.deepseek.com/v1',\n defaultModel: 'deepseek-chat',\n };\n },\n anthropic: () => {\n const apiKey = process.env.ANTHROPIC_API_KEY;\n if (!apiKey) {\n throw new Error(\n 'ANTHROPIC_API_KEY environment variable is not set. ' +\n 'Please set it in your .env file or environment.',\n );\n }\n return {\n apiKey,\n baseUrl: undefined,\n defaultModel: 'claude-sonnet-4-5-20250929',\n };\n },\n kimi: () => {\n const apiKey = process.env.KIMI_API_KEY;\n if (!apiKey) {\n throw new Error(\n 'KIMI_API_KEY environment variable is not set. ' +\n 'Please set it in your .env file or environment.',\n );\n }\n return {\n apiKey,\n baseUrl: 'https://api.moonshot.ai/v1',\n defaultModel: 'kimi-k2.5',\n };\n },\n openrouter: () => {\n const apiKey = process.env.OPEN_ROUTER_API_KEY;\n if (!apiKey) {\n throw new Error(\n 'OPEN_ROUTER_API_KEY environment variable is not set. ' +\n 'Please set it in your .env file or environment.',\n );\n }\n return {\n apiKey,\n baseUrl: 'https://openrouter.ai/api/v1',\n defaultModel: 'qwen/qwen-2.5-72b-instruct', // Default to Qwen 2.5\n };\n },\n};\n\nexport const DEEPSEEK_MODELS = {\n CHAT: 'deepseek-chat',\n CODER: 'deepseek-coder',\n REASONER: 'deepseek-reasoner',\n} as const;\n\nexport const OPENAI_MODELS = {\n GPT4O: 'gpt-4o',\n GPT4O_MINI: 'gpt-4o-mini',\n GPT4_TURBO: 'gpt-4-turbo',\n GPT35_TURBO: 'gpt-3.5-turbo',\n GPT_5_1: 'gpt-5.1',\n} as const;\n\nexport const ANTHROPIC_MODELS = {\n CLAUDE_SONNET_4_5: 'claude-sonnet-4-5-20250929',\n CLAUDE_SONNET_4: 'claude-sonnet-4-20250514',\n CLAUDE_OPUS_4_5: 'claude-opus-4-5-20250929',\n CLAUDE_3_5_HAIKU: 'claude-3-5-haiku-20241022',\n} as const;\n\nexport const KIMI_MODELS = {\n K2_5: 'kimi-k2.5',\n} as const;\n\nexport const OPENROUTER_MODELS = {\n // Qwen models - JSON/structured data specialists\n QWEN_2_5_72B: 'qwen/qwen-2.5-72b-instruct',\n QWEN_2_5_CODER_32B: 'qwen/qwen-2.5-coder-32b-instruct',\n QWEN_3_235B: 'qwen/qwen3-235b-a22b',\n \n // Llama models - agentic workhorses\n LLAMA_3_3_70B: 'meta-llama/llama-3.3-70b-instruct',\n LLAMA_3_1_405B: 'meta-llama/llama-3.1-405b-instruct',\n LLAMA_4_MAVERICK: 'meta-llama/llama-4-maverick',\n LLAMA_4_SCOUT: 'meta-llama/llama-4-scout',\n} as const;\n\nconst DEFAULT_TEMPERATURE = 0.3;\n\n// ============================================================================\n// LLM Client\n// ============================================================================\n\nexport class LLMClient {\n private model: ChatModel;\n private rateLimiter: RateLimiter;\n private tokenTracker: TokenTracker | null;\n private modelName: string;\n private provider: LLMProvider;\n private providerConfig: ProviderConfig;\n private temperature: number;\n private streaming: boolean;\n\n constructor(options: LLMClientOptions = {}) {\n this.provider = options.provider || 'openai';\n // Kimi: 0.6 when thinking disabled (our default), 1.0 when thinking enabled\n this.temperature = options.temperature ?? \n (this.provider === 'kimi' ? 0.6 : DEFAULT_TEMPERATURE);\n this.streaming = options.streaming ?? 
false;\n\n this.providerConfig = PROVIDER_CONFIGS[this.provider]();\n this.modelName = options.model || this.providerConfig.defaultModel;\n\n const keyPreview = this.providerConfig.apiKey.slice(-4);\n console.log(\n `[LLMClient] Provider: ${this.provider}, Model: ${this.modelName}, Key: ****${keyPreview}`,\n );\n if (this.providerConfig.baseUrl) {\n console.log(\n `[LLMClient] Using custom base URL: ${this.providerConfig.baseUrl}`,\n );\n }\n\n this.model = this.createModel();\n\n this.rateLimiter =\n options.useGlobalRateLimiter !== false\n ? getGlobalRateLimiter(options.rateLimiter)\n : new RateLimiter(options.rateLimiter);\n\n this.tokenTracker =\n options.trackTokens !== false\n ? getGlobalTokenTracker(this.modelName)\n : null;\n }\n\n private usesMaxCompletionTokens(): boolean {\n const model = this.modelName.toLowerCase();\n return (\n model.startsWith('o1') ||\n model.startsWith('gpt-5') ||\n model.includes('o1-') ||\n model.includes('o3')\n );\n }\n\n private createModel(options?: {\n maxTokens?: number;\n temperature?: number;\n }): ChatModel {\n const maxTokens = options?.maxTokens;\n const temperature = options?.temperature ?? this.temperature;\n\n if (this.provider === 'anthropic') {\n return new ChatAnthropic({\n apiKey: this.providerConfig.apiKey,\n model: this.modelName,\n temperature,\n streaming: this.streaming,\n maxTokens: maxTokens || 8192,\n callbacks: [\n {\n handleLLMEnd: (output) => {\n const generation = output.generations?.[0]?.[0];\n const usage = (\n generation as unknown as {\n message?: {\n usage_metadata?: {\n cache_creation_input_tokens?: number;\n cache_read_input_tokens?: number;\n input_tokens?: number;\n output_tokens?: number;\n };\n };\n }\n )?.message?.usage_metadata;\n\n if (usage) {\n const cacheCreated = usage.cache_creation_input_tokens ?? 0;\n const cacheRead = usage.cache_read_input_tokens ?? 0;\n const inputTokens = usage.input_tokens ?? 0;\n const outputTokens = usage.output_tokens ?? 0;\n\n if (cacheCreated > 0) {\n console.log(\n `[LLMClient:Anthropic] Cache WRITE: ${cacheCreated} tokens cached`,\n );\n }\n if (cacheRead > 0) {\n const savingsPercent = Math.round(\n (cacheRead / (cacheRead + inputTokens)) * 100,\n );\n console.log(\n `[LLMClient:Anthropic] Cache HIT: ${cacheRead} tokens (~${savingsPercent}% of prompt)`,\n );\n }\n if (cacheCreated === 0 && cacheRead === 0 && inputTokens > 0) {\n if (inputTokens < 500) {\n console.log(\n `[LLMClient:Anthropic] ${inputTokens} input, ${outputTokens} output tokens (likely cached)`,\n );\n } else {\n console.log(\n `[LLMClient:Anthropic] ${inputTokens} input, ${outputTokens} output tokens`,\n );\n }\n }\n }\n },\n },\n ],\n });\n }\n\n const useCompletionTokens = this.usesMaxCompletionTokens();\n\n const tokenConfig = maxTokens\n ? useCompletionTokens\n ? { modelKwargs: { max_completion_tokens: maxTokens } }\n : { maxTokens }\n : {};\n\n const timeout = this.provider === 'deepseek' ? 600000 : undefined;\n\n // Kimi-k2.5: disable thinking to avoid reasoning_content issues with tool calls\n // When thinking is disabled, temperature must be 0.6 (not 1.0)\n const isKimi = this.provider === 'kimi';\n const effectiveTemp = isKimi ? 
0.6 : temperature;\n\n // Build modelKwargs incrementally to avoid spread conflicts\n const modelKwargs: Record<string, unknown> = {};\n if (useCompletionTokens && maxTokens) {\n modelKwargs.max_completion_tokens = maxTokens;\n }\n if (isKimi) {\n modelKwargs.thinking = { type: 'disabled' };\n }\n // OpenRouter (Qwen): explicit tool_choice so the model doesn't ignore tool definitions\n if (this.provider === 'openrouter') {\n modelKwargs.tool_choice = 'auto';\n }\n\n return new ChatOpenAI({\n apiKey: this.providerConfig.apiKey,\n model: this.modelName,\n temperature: useCompletionTokens ? undefined : effectiveTemp,\n streaming: this.streaming,\n timeout,\n ...(Object.keys(modelKwargs).length > 0 ? { modelKwargs } : {}),\n ...(useCompletionTokens ? {} : maxTokens ? { maxTokens } : {}),\n configuration: {\n apiKey: this.providerConfig.apiKey,\n ...(this.providerConfig.baseUrl\n ? { baseURL: this.providerConfig.baseUrl }\n : {}),\n },\n });\n }\n\n private getModelWithOptions(options: {\n maxTokens?: number;\n temperature?: number;\n }): ChatModel {\n return this.createModel(options);\n }\n\n getProvider(): LLMProvider {\n return this.provider;\n }\n\n getModelName(): string {\n return this.modelName;\n }\n\n getModel(): ChatModel {\n return this.model;\n }\n\n getRateLimiterStatus() {\n return this.rateLimiter.getStatus();\n }\n\n getTokenUsage() {\n return this.tokenTracker?.getSummary() ?? null;\n }\n\n async call<T>(options: LLMCallOptions<T>): Promise<T> {\n const response = await this.callWithMetadata(options);\n return response.data;\n }\n\n async callWithMetadata<T>(options: LLMCallOptions<T>): Promise<LLMResponse<T>> {\n const {\n systemPrompt,\n userPrompt,\n schema,\n maxRetries = 2,\n retryWithContext = true,\n maxTokens,\n skipSchemaValidation = false,\n temperature,\n } = options;\n\n let currentPrompt = userPrompt;\n let lastError: Error | null = null;\n\n console.log(\n `[LLMClient:call] Starting call to ${this.provider}/${this.modelName}`,\n );\n console.log(`[LLMClient:call] Prompt length: ${userPrompt.length} chars`);\n if (maxTokens) {\n console.log(`[LLMClient:call] Max tokens: ${maxTokens}`);\n }\n\n for (let attempt = 0; attempt <= maxRetries; attempt++) {\n try {\n console.log(\n `[LLMClient:call] Attempt ${attempt + 1}/${maxRetries + 1}...`,\n );\n const attemptStartTime = Date.now();\n\n const result = await this.rateLimiter.execute(async () => {\n console.log(`[LLMClient:call] Invoking model...`);\n const invokeStartTime = Date.now();\n\n const modelToUse =\n maxTokens || temperature !== undefined\n ? this.getModelWithOptions({ maxTokens, temperature })\n : this.model;\n\n const messages = [\n { role: 'system', content: systemPrompt },\n { role: 'user', content: currentPrompt },\n ];\n const response = await modelToUse.invoke(\n this.provider === 'anthropic'\n ? 
addCacheControlToSystemMessages(messages)\n : messages,\n );\n\n console.log(\n `[LLMClient:call] Model responded in ${Date.now() - invokeStartTime}ms`,\n );\n\n let usage: LLMUsage | null = null;\n if (response.usage_metadata) {\n const usageMeta = response.usage_metadata as {\n input_tokens?: number;\n output_tokens?: number;\n };\n usage = {\n promptTokens: usageMeta.input_tokens || 0,\n completionTokens: usageMeta.output_tokens || 0,\n totalTokens:\n (usageMeta.input_tokens || 0) +\n (usageMeta.output_tokens || 0),\n };\n console.log(\n `[LLMClient:call] Tokens used: ${usage.promptTokens} in, ${usage.completionTokens} out`,\n );\n\n if (this.tokenTracker) {\n this.tokenTracker.addUsage(\n usage.promptTokens,\n usage.completionTokens,\n );\n }\n }\n\n const finishReason = this.extractFinishReason(response);\n if (finishReason === 'length') {\n console.warn(\n `[LLMClient:call] Response truncated (finish_reason=length)`,\n );\n }\n\n const content =\n typeof response.content === 'string'\n ? response.content\n : JSON.stringify(response.content);\n\n console.log(\n `[LLMClient:call] Response length: ${content.length} chars, finish_reason: ${finishReason}`,\n );\n\n return { content, finishReason, usage };\n });\n\n console.log(\n `[LLMClient:call] Attempt ${attempt + 1} completed in ${Date.now() - attemptStartTime}ms, parsing response...`,\n );\n\n const parsed = skipSchemaValidation\n ? (parseJsonResponse(result.content, undefined) as T)\n : parseJsonResponse(result.content, schema);\n console.log(\n `[LLMClient:call] Response parsed successfully${skipSchemaValidation ? ' (schema validation skipped)' : ''}`,\n );\n\n return {\n data: parsed,\n raw: result.content,\n finishReason: result.finishReason,\n usage: result.usage,\n };\n } catch (error) {\n lastError = error instanceof Error ? error : new Error(String(error));\n console.error(\n `[LLMClient:call] Attempt ${attempt + 1} failed:`,\n lastError.message,\n );\n\n if (this.isRateLimitError(lastError)) {\n console.error(`[LLMClient:call] Rate limit error, not retrying`);\n throw lastError;\n }\n\n if (attempt < maxRetries && retryWithContext) {\n console.log(`[LLMClient:call] Will retry with error context`);\n currentPrompt =\n `${userPrompt}\\n\\n` +\n `[Previous attempt failed with: ${lastError.message}]\\n` +\n `Please output valid JSON that matches the expected schema.`;\n }\n }\n }\n\n console.error(`[LLMClient:call] All attempts exhausted, throwing error`);\n throw lastError;\n }\n\n private extractFinishReason(\n response: Awaited<ReturnType<ChatOpenAI['invoke']>>,\n ): LLMFinishReason {\n const metadata = response.response_metadata as\n | Record<string, unknown>\n | undefined;\n if (metadata?.finish_reason) {\n const reason = metadata.finish_reason as string;\n if (\n reason === 'stop' ||\n reason === 'length' ||\n reason === 'content_filter' ||\n reason === 'tool_calls'\n ) {\n return reason;\n }\n }\n return null;\n }\n\n async callRaw(options: {\n systemPrompt: string;\n userPrompt: string;\n maxTokens?: number;\n }): Promise<string> {\n const response = await this.callRawWithMetadata(options);\n return response.raw;\n }\n\n async callRawWithMetadata(options: {\n systemPrompt: string;\n userPrompt: string;\n maxTokens?: number;\n }): Promise<Omit<LLMResponse<string>, 'data'> & { raw: string }> {\n const { systemPrompt, userPrompt, maxTokens } = options;\n\n return this.rateLimiter.execute(async () => {\n const modelToUse = maxTokens\n ? 
this.getModelWithOptions({ maxTokens })\n : this.model;\n\n const messages = [\n { role: 'system', content: systemPrompt },\n { role: 'user', content: userPrompt },\n ];\n const response = await modelToUse.invoke(\n this.provider === 'anthropic'\n ? addCacheControlToSystemMessages(messages)\n : messages,\n );\n\n let usage: LLMUsage | null = null;\n if (response.usage_metadata) {\n const usageMeta = response.usage_metadata as {\n input_tokens?: number;\n output_tokens?: number;\n };\n usage = {\n promptTokens: usageMeta.input_tokens || 0,\n completionTokens: usageMeta.output_tokens || 0,\n totalTokens:\n (usageMeta.input_tokens || 0) + (usageMeta.output_tokens || 0),\n };\n\n if (this.tokenTracker) {\n this.tokenTracker.addUsage(\n usage.promptTokens,\n usage.completionTokens,\n );\n }\n }\n\n const finishReason = this.extractFinishReason(response);\n const content =\n typeof response.content === 'string'\n ? response.content\n : JSON.stringify(response.content);\n\n return { raw: content, finishReason, usage };\n });\n }\n\n private isRateLimitError(error: Error): boolean {\n const message = error.message.toLowerCase();\n return (\n message.includes('rate limit') ||\n message.includes('429') ||\n message.includes('quota exceeded')\n );\n }\n\n // ==========================================================================\n // Anthropic Cache Control Support\n // ==========================================================================\n\n async callWithCache<T>(\n options: CacheAwareLLMCallOptions<T>,\n ): Promise<LLMResponse<T>> {\n const {\n systemPrompt,\n userPrompt,\n systemBlocks,\n userBlocks,\n schema,\n maxRetries = 2,\n maxTokens,\n skipSchemaValidation = false,\n temperature,\n rawText = false,\n } = options;\n\n if (this.provider !== 'anthropic') {\n console.log(\n `[LLMClient:callWithCache] Provider ${this.provider} doesn't support caching, using regular call`,\n );\n return this.callWithMetadata(options);\n }\n\n const cacheableCount =\n (systemBlocks || []).filter((b) => b.cache_control).length +\n (userBlocks || []).filter((b) => b.cache_control).length;\n console.log(\n `[LLMClient:callWithCache] ${cacheableCount} cacheable block(s)`,\n );\n\n let lastError: Error | null = null;\n\n for (let attempt = 0; attempt <= maxRetries; attempt++) {\n try {\n console.log(\n `[LLMClient:callWithCache] Attempt ${attempt + 1}/${maxRetries + 1}...`,\n );\n\n const result = await this.rateLimiter.execute(async () => {\n const anthropic = new Anthropic();\n\n const systemContent =\n systemBlocks && systemBlocks.length > 0\n ? systemBlocks.map((b) => ({\n type: 'text' as const,\n text: b.text,\n ...(b.cache_control\n ? { cache_control: b.cache_control }\n : {}),\n }))\n : systemPrompt\n ? [{ type: 'text' as const, text: systemPrompt }]\n : [];\n\n const userContent =\n userBlocks && userBlocks.length > 0\n ? userBlocks.map((b) => ({\n type: 'text' as const,\n text: b.text,\n ...(b.cache_control\n ? { cache_control: b.cache_control }\n : {}),\n }))\n : userPrompt\n ? [{ type: 'text' as const, text: userPrompt }]\n : [];\n\n const response = await anthropic.messages.create({\n model: this.modelName,\n max_tokens: maxTokens || 8192,\n temperature: temperature ?? 0,\n system: systemContent,\n messages: [{ role: 'user', content: userContent }],\n });\n\n const textContent = response.content.find((c) => c.type === 'text');\n const content =\n textContent && 'text' in textContent ? 
textContent.text : '';\n\n const apiUsage = response.usage as {\n input_tokens: number;\n output_tokens: number;\n cache_creation_input_tokens?: number;\n cache_read_input_tokens?: number;\n };\n\n const cacheRead = apiUsage.cache_read_input_tokens || 0;\n const cacheCreation = apiUsage.cache_creation_input_tokens || 0;\n\n if (cacheCreation > 0) {\n console.log(\n `[LLMClient:callWithCache] Cache WRITE: ${cacheCreation} tokens`,\n );\n }\n if (cacheRead > 0) {\n const savingsPercent = Math.round(\n (cacheRead / (cacheRead + apiUsage.input_tokens)) * 100,\n );\n console.log(\n `[LLMClient:callWithCache] Cache HIT: ${cacheRead} tokens (~${savingsPercent}% of prompt)`,\n );\n }\n if (cacheCreation === 0 && cacheRead === 0) {\n console.log(\n `[LLMClient:callWithCache] No caching: ${apiUsage.input_tokens} input tokens`,\n );\n }\n\n const usage: LLMUsage = {\n promptTokens: apiUsage.input_tokens,\n completionTokens: apiUsage.output_tokens,\n totalTokens: apiUsage.input_tokens + apiUsage.output_tokens,\n };\n\n if (this.tokenTracker) {\n this.tokenTracker.addUsage(\n usage.promptTokens,\n usage.completionTokens,\n );\n }\n\n const finishReason =\n response.stop_reason === 'end_turn'\n ? 'stop'\n : response.stop_reason;\n\n return {\n content,\n finishReason: finishReason as LLMFinishReason,\n usage,\n };\n });\n\n let parsed: T;\n if (rawText) {\n parsed = result.content as unknown as T;\n } else if (skipSchemaValidation) {\n parsed = parseJsonResponse(result.content, undefined) as T;\n } else {\n parsed = parseJsonResponse(result.content, schema);\n }\n\n return {\n data: parsed,\n raw: result.content,\n finishReason: result.finishReason,\n usage: result.usage,\n };\n } catch (error) {\n lastError = error instanceof Error ? error : new Error(String(error));\n console.error(\n `[LLMClient:callWithCache] Attempt ${attempt + 1} failed:`,\n lastError.message,\n );\n\n if (this.isRateLimitError(lastError)) {\n throw lastError;\n }\n }\n }\n\n throw lastError;\n }\n\n static cacheableBlock(text: string, cache = true): CacheableBlock {\n return cache\n ? { type: 'text', text, cache_control: { type: 'ephemeral' } }\n : { type: 'text', text };\n }\n}\n\n// ============================================================================\n// Singleton Instances\n// ============================================================================\n\nconst sharedClients: Partial<Record<LLMProvider, LLMClient>> = {};\n\nexport function getSharedLLMClient(options?: LLMClientOptions): LLMClient {\n const provider = options?.provider || 'openai';\n if (!sharedClients[provider]) {\n sharedClients[provider] = new LLMClient(options);\n }\n return sharedClients[provider]!;\n}\n\nexport function resetSharedLLMClient(provider?: LLMProvider): void {\n if (provider) {\n delete sharedClients[provider];\n } else {\n for (const key of Object.keys(sharedClients) as LLMProvider[]) {\n delete sharedClients[key];\n }\n }\n}\n\n// ============================================================================\n// Provider Detection\n// ============================================================================\n\nexport function getAvailableProvider(): LLMProvider {\n if (process.env.ANTHROPIC_API_KEY) return 'anthropic';\n if (process.env.DEEPSEEK_API_KEY) return 'deepseek';\n if (process.env.KIMI_API_KEY) return 'kimi';\n if (process.env.OPENAI_API_KEY) return 'openai';\n throw new Error(\n 'No LLM API key found. 
Please set ANTHROPIC_API_KEY, OPENAI_API_KEY, DEEPSEEK_API_KEY, or KIMI_API_KEY.',\n );\n}\n\nexport function isProviderAvailable(provider: LLMProvider): boolean {\n switch (provider) {\n case 'openai':\n return !!process.env.OPENAI_API_KEY;\n case 'deepseek':\n return !!process.env.DEEPSEEK_API_KEY;\n case 'anthropic':\n return !!process.env.ANTHROPIC_API_KEY;\n case 'kimi':\n return !!process.env.KIMI_API_KEY;\n default:\n return false;\n }\n}\n\n// ============================================================================\n// Convenience Functions\n// ============================================================================\n\nexport function createRequirementsClient(\n options?: Partial<LLMClientOptions>,\n): LLMClient {\n const provider = options?.provider || getAvailableProvider();\n const defaultModel =\n provider === 'deepseek' ? DEEPSEEK_MODELS.CHAT : OPENAI_MODELS.GPT_5_1;\n return new LLMClient({\n provider,\n model: defaultModel,\n temperature: 0.3,\n ...options,\n });\n}\n\nexport function createCreativeClient(\n options?: Partial<LLMClientOptions>,\n): LLMClient {\n const provider = options?.provider || getAvailableProvider();\n const defaultModel =\n provider === 'deepseek' ? DEEPSEEK_MODELS.REASONER : OPENAI_MODELS.GPT4O;\n return new LLMClient({\n provider,\n model: defaultModel,\n temperature: 0.7,\n ...options,\n });\n}\n\nexport function createFixClient(\n options?: Partial<LLMClientOptions>,\n): LLMClient {\n const provider = options?.provider || getAvailableProvider();\n const defaultModel =\n provider === 'deepseek'\n ? DEEPSEEK_MODELS.CHAT\n : OPENAI_MODELS.GPT4O_MINI;\n return new LLMClient({\n provider,\n model: defaultModel,\n temperature: 0.2,\n ...options,\n });\n}\n\nexport function createDeepSeekClient(\n options?: Partial<Omit<LLMClientOptions, 'provider'>>,\n): LLMClient {\n return new LLMClient({\n provider: 'deepseek',\n model: DEEPSEEK_MODELS.CHAT,\n ...options,\n });\n}\n\nexport function createOpenAIClient(\n options?: Partial<Omit<LLMClientOptions, 'provider'>>,\n): LLMClient {\n return new LLMClient({\n provider: 'openai',\n model: OPENAI_MODELS.GPT4O,\n ...options,\n });\n}\n\nexport function createAnthropicClient(\n options?: Partial<Omit<LLMClientOptions, 'provider'>>,\n): LLMClient {\n return new LLMClient({\n provider: 'anthropic',\n model: ANTHROPIC_MODELS.CLAUDE_SONNET_4_5,\n ...options,\n });\n}\n\nexport function createKimiClient(\n options?: Partial<Omit<LLMClientOptions, 'provider'>>,\n): LLMClient {\n return new LLMClient({\n provider: 'kimi',\n model: KIMI_MODELS.K2_5,\n ...options,\n });\n}\n\nexport function createOpenRouterClient(\n options?: Partial<Omit<LLMClientOptions, 'provider'>>,\n): LLMClient {\n return new LLMClient({\n provider: 'openrouter',\n model: OPENROUTER_MODELS.QWEN_2_5_72B,\n ...options,\n 
});\n}\n"],"mappings":";;;;;;;;;;AAaA,SAAS,kBAAkB;AAC3B,SAAS,qBAAqB;AAE9B,OAAO,eAAe;AActB,SAAS,gCACP,UACmB;AACnB,SAAO,SAAS,IAAI,CAAC,QAAQ;AAC3B,QAAI,IAAI,SAAS,UAAU;AACzB,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,MACL,MAAM,IAAI;AAAA,MACV,SAAS;AAAA,QACP;AAAA,UACE,MAAM;AAAA,UACN,MAAM,IAAI;AAAA,UACV,eAAe,EAAE,MAAM,YAAY;AAAA,QACrC;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AACH;AA0EA,IAAM,mBAA8D;AAAA,EAClE,QAAQ,MAAM;AACZ,UAAM,SAAS,QAAQ,IAAI;AAC3B,QAAI,CAAC,QAAQ;AACX,YAAM,IAAI;AAAA,QACR;AAAA,MAEF;AAAA,IACF;AACA,WAAO,EAAE,QAAQ,SAAS,QAAW,cAAc,SAAS;AAAA,EAC9D;AAAA,EACA,UAAU,MAAM;AACd,UAAM,SAAS,QAAQ,IAAI;AAC3B,QAAI,CAAC,QAAQ;AACX,YAAM,IAAI;AAAA,QACR;AAAA,MAEF;AAAA,IACF;AACA,WAAO;AAAA,MACL;AAAA,MACA,SAAS;AAAA,MACT,cAAc;AAAA,IAChB;AAAA,EACF;AAAA,EACA,WAAW,MAAM;AACf,UAAM,SAAS,QAAQ,IAAI;AAC3B,QAAI,CAAC,QAAQ;AACX,YAAM,IAAI;AAAA,QACR;AAAA,MAEF;AAAA,IACF;AACA,WAAO;AAAA,MACL;AAAA,MACA,SAAS;AAAA,MACT,cAAc;AAAA,IAChB;AAAA,EACF;AAAA,EACA,MAAM,MAAM;AACV,UAAM,SAAS,QAAQ,IAAI;AAC3B,QAAI,CAAC,QAAQ;AACX,YAAM,IAAI;AAAA,QACR;AAAA,MAEF;AAAA,IACF;AACA,WAAO;AAAA,MACL;AAAA,MACA,SAAS;AAAA,MACT,cAAc;AAAA,IAChB;AAAA,EACF;AAAA,EACA,YAAY,MAAM;AAChB,UAAM,SAAS,QAAQ,IAAI;AAC3B,QAAI,CAAC,QAAQ;AACX,YAAM,IAAI;AAAA,QACR;AAAA,MAEF;AAAA,IACF;AACA,WAAO;AAAA,MACL;AAAA,MACA,SAAS;AAAA,MACT,cAAc;AAAA;AAAA,IAChB;AAAA,EACF;AACF;AAEO,IAAM,kBAAkB;AAAA,EAC7B,MAAM;AAAA,EACN,OAAO;AAAA,EACP,UAAU;AACZ;AAEO,IAAM,gBAAgB;AAAA,EAC3B,OAAO;AAAA,EACP,YAAY;AAAA,EACZ,YAAY;AAAA,EACZ,aAAa;AAAA,EACb,SAAS;AACX;AAEO,IAAM,mBAAmB;AAAA,EAC9B,mBAAmB;AAAA,EACnB,iBAAiB;AAAA,EACjB,iBAAiB;AAAA,EACjB,kBAAkB;AACpB;AAEO,IAAM,cAAc;AAAA,EACzB,MAAM;AACR;AAEO,IAAM,oBAAoB;AAAA;AAAA,EAE/B,cAAc;AAAA,EACd,oBAAoB;AAAA,EACpB,aAAa;AAAA;AAAA,EAGb,eAAe;AAAA,EACf,gBAAgB;AAAA,EAChB,kBAAkB;AAAA,EAClB,eAAe;AACjB;AAEA,IAAM,sBAAsB;AAMrB,IAAM,YAAN,MAAgB;AAAA,EAUrB,YAAY,UAA4B,CAAC,GAAG;AAC1C,SAAK,WAAW,QAAQ,YAAY;AAEpC,SAAK,cAAc,QAAQ,gBACxB,KAAK,aAAa,SAAS,MAAM;AACpC,SAAK,YAAY,QAAQ,aAAa;AAEtC,SAAK,iBAAiB,iBAAiB,KAAK,QAAQ,EAAE;AACtD,SAAK,YAAY,QAAQ,SAAS,KAAK,eAAe;AAEtD,UAAM,aAAa,KAAK,eAAe,OAAO,MAAM,EAAE;AACtD,YAAQ;AAAA,MACN,yBAAyB,KAAK,QAAQ,YAAY,KAAK,SAAS,cAAc,UAAU;AAAA,IAC1F;AACA,QAAI,KAAK,eAAe,SAAS;AAC/B,cAAQ;AAAA,QACN,sCAAsC,KAAK,eAAe,OAAO;AAAA,MACnE;AAAA,IACF;AAEA,SAAK,QAAQ,KAAK,YAAY;AAE9B,SAAK,cACH,QAAQ,yBAAyB,QAC7B,qBAAqB,QAAQ,WAAW,IACxC,IAAI,YAAY,QAAQ,WAAW;AAEzC,SAAK,eACH,QAAQ,gBAAgB,QACpB,sBAAsB,KAAK,SAAS,IACpC;AAAA,EACR;AAAA,EAEQ,0BAAmC;AACzC,UAAM,QAAQ,KAAK,UAAU,YAAY;AACzC,WACE,MAAM,WAAW,IAAI,KACrB,MAAM,WAAW,OAAO,KACxB,MAAM,SAAS,KAAK,KACpB,MAAM,SAAS,IAAI;AAAA,EAEvB;AAAA,EAEQ,YAAY,SAGN;AACZ,UAAM,YAAY,SAAS;AAC3B,UAAM,cAAc,SAAS,eAAe,KAAK;AAEjD,QAAI,KAAK,aAAa,aAAa;AACjC,aAAO,IAAI,cAAc;AAAA,QACvB,QAAQ,KAAK,eAAe;AAAA,QAC5B,OAAO,KAAK;AAAA,QACZ;AAAA,QACA,WAAW,KAAK;AAAA,QAChB,WAAW,aAAa;AAAA,QACxB,WAAW;AAAA,UACT;AAAA,YACE,cAAc,CAAC,WAAW;AACxB,oBAAM,aAAa,OAAO,cAAc,CAAC,IAAI,CAAC;AAC9C,oBAAM,QACJ,YAUC,SAAS;AAEZ,kBAAI,OAAO;AACT,sBAAM,eAAe,MAAM,+BAA+B;AAC1D,sBAAM,YAAY,MAAM,2BAA2B;AACnD,sBAAM,cAAc,MAAM,gBAAgB;AAC1C,sBAAM,eAAe,MAAM,iBAAiB;AAE5C,oBAAI,eAAe,GAAG;AACpB,0BAAQ;AAAA,oBACN,sCAAsC,YAAY;AAAA,kBACpD;AAAA,gBACF;AACA,oBAAI,YAAY,GAAG;AACjB,wBAAM,iBAAiB,KAAK;AAAA,oBACzB,aAAa,YAAY,eAAgB;AAAA,kBAC5C;AACA,0BAAQ;AAAA,oBACN,oCAAoC,SAAS,aAAa,cAAc;AAAA,kBAC1E;AAAA,gBACF;AACA,oBAAI,iBAAiB,KAAK,cAAc,KAAK,cAAc,GAAG;AAC5D,sBAAI,cAAc,KAAK;AACrB,4BAAQ;AAAA,sBACN,yBAAyB,WAAW,WAAW,YAAY;AAAA,oBAC7D;AAAA,kBACF,OAAO;AACL,4BAAQ;AAAA,sBACN,yBAAyB,WAAW,WAAW,YAAY;AAAA,oBAC7D;AAAA,kBACF;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,sBAAsB,KAAK,wBAAwB;AAEzD,UAAM,cAAc,YAChB,sBACE,EAAE,aAAa,EAAE,uBAAuB,UAAU,EA
AE,IACpD,EAAE,UAAU,IACd,CAAC;AAEL,UAAM,UAAU,KAAK,aAAa,aAAa,MAAS;AAIxD,UAAM,SAAS,KAAK,aAAa;AACjC,UAAM,gBAAgB,SAAS,MAAM;AAGrC,UAAM,cAAuC,CAAC;AAC9C,QAAI,uBAAuB,WAAW;AACpC,kBAAY,wBAAwB;AAAA,IACtC;AACA,QAAI,QAAQ;AACV,kBAAY,WAAW,EAAE,MAAM,WAAW;AAAA,IAC5C;AAEA,QAAI,KAAK,aAAa,cAAc;AAClC,kBAAY,cAAc;AAAA,IAC5B;AAEA,WAAO,IAAI,WAAW;AAAA,MACpB,QAAQ,KAAK,eAAe;AAAA,MAC5B,OAAO,KAAK;AAAA,MACZ,aAAa,sBAAsB,SAAY;AAAA,MAC/C,WAAW,KAAK;AAAA,MAChB;AAAA,MACA,GAAI,OAAO,KAAK,WAAW,EAAE,SAAS,IAAI,EAAE,YAAY,IAAI,CAAC;AAAA,MAC7D,GAAI,sBAAsB,CAAC,IAAI,YAAY,EAAE,UAAU,IAAI,CAAC;AAAA,MAC5D,eAAe;AAAA,QACb,QAAQ,KAAK,eAAe;AAAA,QAC5B,GAAI,KAAK,eAAe,UACpB,EAAE,SAAS,KAAK,eAAe,QAAQ,IACvC,CAAC;AAAA,MACP;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EAEQ,oBAAoB,SAGd;AACZ,WAAO,KAAK,YAAY,OAAO;AAAA,EACjC;AAAA,EAEA,cAA2B;AACzB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,eAAuB;AACrB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,WAAsB;AACpB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,uBAAuB;AACrB,WAAO,KAAK,YAAY,UAAU;AAAA,EACpC;AAAA,EAEA,gBAAgB;AACd,WAAO,KAAK,cAAc,WAAW,KAAK;AAAA,EAC5C;AAAA,EAEA,MAAM,KAAQ,SAAwC;AACpD,UAAM,WAAW,MAAM,KAAK,iBAAiB,OAAO;AACpD,WAAO,SAAS;AAAA,EAClB;AAAA,EAEA,MAAM,iBAAoB,SAAqD;AAC7E,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA,aAAa;AAAA,MACb,mBAAmB;AAAA,MACnB;AAAA,MACA,uBAAuB;AAAA,MACvB;AAAA,IACF,IAAI;AAEJ,QAAI,gBAAgB;AACpB,QAAI,YAA0B;AAE9B,YAAQ;AAAA,MACN,qCAAqC,KAAK,QAAQ,IAAI,KAAK,SAAS;AAAA,IACtE;AACA,YAAQ,IAAI,mCAAmC,WAAW,MAAM,QAAQ;AACxE,QAAI,WAAW;AACb,cAAQ,IAAI,gCAAgC,SAAS,EAAE;AAAA,IACzD;AAEA,aAAS,UAAU,GAAG,WAAW,YAAY,WAAW;AACtD,UAAI;AACF,gBAAQ;AAAA,UACN,4BAA4B,UAAU,CAAC,IAAI,aAAa,CAAC;AAAA,QAC3D;AACA,cAAM,mBAAmB,KAAK,IAAI;AAElC,cAAM,SAAS,MAAM,KAAK,YAAY,QAAQ,YAAY;AACxD,kBAAQ,IAAI,oCAAoC;AAChD,gBAAM,kBAAkB,KAAK,IAAI;AAEjC,gBAAM,aACJ,aAAa,gBAAgB,SACzB,KAAK,oBAAoB,EAAE,WAAW,YAAY,CAAC,IACnD,KAAK;AAEX,gBAAM,WAAW;AAAA,YACf,EAAE,MAAM,UAAU,SAAS,aAAa;AAAA,YACxC,EAAE,MAAM,QAAQ,SAAS,cAAc;AAAA,UACzC;AACA,gBAAM,WAAW,MAAM,WAAW;AAAA,YAChC,KAAK,aAAa,cACd,gCAAgC,QAAQ,IACxC;AAAA,UACN;AAEA,kBAAQ;AAAA,YACN,uCAAuC,KAAK,IAAI,IAAI,eAAe;AAAA,UACrE;AAEA,cAAI,QAAyB;AAC7B,cAAI,SAAS,gBAAgB;AAC3B,kBAAM,YAAY,SAAS;AAI3B,oBAAQ;AAAA,cACN,cAAc,UAAU,gBAAgB;AAAA,cACxC,kBAAkB,UAAU,iBAAiB;AAAA,cAC7C,cACG,UAAU,gBAAgB,MAC1B,UAAU,iBAAiB;AAAA,YAChC;AACA,oBAAQ;AAAA,cACN,iCAAiC,MAAM,YAAY,QAAQ,MAAM,gBAAgB;AAAA,YACnF;AAEA,gBAAI,KAAK,cAAc;AACrB,mBAAK,aAAa;AAAA,gBAChB,MAAM;AAAA,gBACN,MAAM;AAAA,cACR;AAAA,YACF;AAAA,UACF;AAEA,gBAAM,eAAe,KAAK,oBAAoB,QAAQ;AACtD,cAAI,iBAAiB,UAAU;AAC7B,oBAAQ;AAAA,cACN;AAAA,YACF;AAAA,UACF;AAEA,gBAAM,UACJ,OAAO,SAAS,YAAY,WACxB,SAAS,UACT,KAAK,UAAU,SAAS,OAAO;AAErC,kBAAQ;AAAA,YACN,qCAAqC,QAAQ,MAAM,0BAA0B,YAAY;AAAA,UAC3F;AAEA,iBAAO,EAAE,SAAS,cAAc,MAAM;AAAA,QACxC,CAAC;AAED,gBAAQ;AAAA,UACN,4BAA4B,UAAU,CAAC,iBAAiB,KAAK,IAAI,IAAI,gBAAgB;AAAA,QACvF;AAEA,cAAM,SAAS,uBACV,kBAAkB,OAAO,SAAS,MAAS,IAC5C,kBAAkB,OAAO,SAAS,MAAM;AAC5C,gBAAQ;AAAA,UACN,gDAAgD,uBAAuB,iCAAiC,EAAE;AAAA,QAC5G;AAEA,eAAO;AAAA,UACL,MAAM;AAAA,UACN,KAAK,OAAO;AAAA,UACZ,cAAc,OAAO;AAAA,UACrB,OAAO,OAAO;AAAA,QAChB;AAAA,MACF,SAAS,OAAO;AACd,oBAAY,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AACpE,gBAAQ;AAAA,UACN,4BAA4B,UAAU,CAAC;AAAA,UACvC,UAAU;AAAA,QACZ;AAEA,YAAI,KAAK,iBAAiB,SAAS,GAAG;AACpC,kBAAQ,MAAM,iDAAiD;AAC/D,gBAAM;AAAA,QACR;AAEA,YAAI,UAAU,cAAc,kBAAkB;AAC5C,kBAAQ,IAAI,gDAAgD;AAC5D,0BACE,GAAG,UAAU;AAAA;AAAA,iCACqB,UAAU,OAAO;AAAA;AAAA,QAEvD;AAAA,MACF;AAAA,IACF;AAEA,YAAQ,MAAM,yDAAyD;AACvE,UAAM;AAAA,EACR;AAAA,EAEQ,oBACN,UACiB;AACjB,UAAM,WAAW,SAAS;AAG1B,QAAI,UAAU,eAAe;AAC3B,YAAM,SAAS,SAAS;AACxB,UACE,WAAW,UACX,WAAW,YACX,WAAW,oBACX,WAAW,cACX;AACA,eAAO;AAAA,MACT;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,QAAQ,SAIM;AAClB,UAAM,WAAW,MAAM,KAAK,oBAAoB,OAAO
;AACvD,WAAO,SAAS;AAAA,EAClB;AAAA,EAEA,MAAM,oBAAoB,SAIuC;AAC/D,UAAM,EAAE,cAAc,YAAY,UAAU,IAAI;AAEhD,WAAO,KAAK,YAAY,QAAQ,YAAY;AAC1C,YAAM,aAAa,YACf,KAAK,oBAAoB,EAAE,UAAU,CAAC,IACtC,KAAK;AAET,YAAM,WAAW;AAAA,QACf,EAAE,MAAM,UAAU,SAAS,aAAa;AAAA,QACxC,EAAE,MAAM,QAAQ,SAAS,WAAW;AAAA,MACtC;AACA,YAAM,WAAW,MAAM,WAAW;AAAA,QAChC,KAAK,aAAa,cACd,gCAAgC,QAAQ,IACxC;AAAA,MACN;AAEA,UAAI,QAAyB;AAC7B,UAAI,SAAS,gBAAgB;AAC3B,cAAM,YAAY,SAAS;AAI3B,gBAAQ;AAAA,UACN,cAAc,UAAU,gBAAgB;AAAA,UACxC,kBAAkB,UAAU,iBAAiB;AAAA,UAC7C,cACG,UAAU,gBAAgB,MAAM,UAAU,iBAAiB;AAAA,QAChE;AAEA,YAAI,KAAK,cAAc;AACrB,eAAK,aAAa;AAAA,YAChB,MAAM;AAAA,YACN,MAAM;AAAA,UACR;AAAA,QACF;AAAA,MACF;AAEA,YAAM,eAAe,KAAK,oBAAoB,QAAQ;AACtD,YAAM,UACJ,OAAO,SAAS,YAAY,WACxB,SAAS,UACT,KAAK,UAAU,SAAS,OAAO;AAErC,aAAO,EAAE,KAAK,SAAS,cAAc,MAAM;AAAA,IAC7C,CAAC;AAAA,EACH;AAAA,EAEQ,iBAAiB,OAAuB;AAC9C,UAAM,UAAU,MAAM,QAAQ,YAAY;AAC1C,WACE,QAAQ,SAAS,YAAY,KAC7B,QAAQ,SAAS,KAAK,KACtB,QAAQ,SAAS,gBAAgB;AAAA,EAErC;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,cACJ,SACyB;AACzB,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,aAAa;AAAA,MACb;AAAA,MACA,uBAAuB;AAAA,MACvB;AAAA,MACA,UAAU;AAAA,IACZ,IAAI;AAEJ,QAAI,KAAK,aAAa,aAAa;AACjC,cAAQ;AAAA,QACN,sCAAsC,KAAK,QAAQ;AAAA,MACrD;AACA,aAAO,KAAK,iBAAiB,OAAO;AAAA,IACtC;AAEA,UAAM,kBACH,gBAAgB,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,aAAa,EAAE,UACnD,cAAc,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,aAAa,EAAE;AACpD,YAAQ;AAAA,MACN,6BAA6B,cAAc;AAAA,IAC7C;AAEA,QAAI,YAA0B;AAE9B,aAAS,UAAU,GAAG,WAAW,YAAY,WAAW;AACtD,UAAI;AACF,gBAAQ;AAAA,UACN,qCAAqC,UAAU,CAAC,IAAI,aAAa,CAAC;AAAA,QACpE;AAEA,cAAM,SAAS,MAAM,KAAK,YAAY,QAAQ,YAAY;AACxD,gBAAM,YAAY,IAAI,UAAU;AAEhC,gBAAM,gBACJ,gBAAgB,aAAa,SAAS,IAClC,aAAa,IAAI,CAAC,OAAO;AAAA,YACvB,MAAM;AAAA,YACN,MAAM,EAAE;AAAA,YACR,GAAI,EAAE,gBACF,EAAE,eAAe,EAAE,cAAc,IACjC,CAAC;AAAA,UACP,EAAE,IACF,eACE,CAAC,EAAE,MAAM,QAAiB,MAAM,aAAa,CAAC,IAC9C,CAAC;AAET,gBAAM,cACJ,cAAc,WAAW,SAAS,IAC9B,WAAW,IAAI,CAAC,OAAO;AAAA,YACrB,MAAM;AAAA,YACN,MAAM,EAAE;AAAA,YACR,GAAI,EAAE,gBACF,EAAE,eAAe,EAAE,cAAc,IACjC,CAAC;AAAA,UACP,EAAE,IACF,aACE,CAAC,EAAE,MAAM,QAAiB,MAAM,WAAW,CAAC,IAC5C,CAAC;AAET,gBAAM,WAAW,MAAM,UAAU,SAAS,OAAO;AAAA,YAC/C,OAAO,KAAK;AAAA,YACZ,YAAY,aAAa;AAAA,YACzB,aAAa,eAAe;AAAA,YAC5B,QAAQ;AAAA,YACR,UAAU,CAAC,EAAE,MAAM,QAAQ,SAAS,YAAY,CAAC;AAAA,UACnD,CAAC;AAED,gBAAM,cAAc,SAAS,QAAQ,KAAK,CAAC,MAAM,EAAE,SAAS,MAAM;AAClE,gBAAM,UACJ,eAAe,UAAU,cAAc,YAAY,OAAO;AAE5D,gBAAM,WAAW,SAAS;AAO1B,gBAAM,YAAY,SAAS,2BAA2B;AACtD,gBAAM,gBAAgB,SAAS,+BAA+B;AAE9D,cAAI,gBAAgB,GAAG;AACrB,oBAAQ;AAAA,cACN,0CAA0C,aAAa;AAAA,YACzD;AAAA,UACF;AACA,cAAI,YAAY,GAAG;AACjB,kBAAM,iBAAiB,KAAK;AAAA,cACzB,aAAa,YAAY,SAAS,gBAAiB;AAAA,YACtD;AACA,oBAAQ;AAAA,cACN,wCAAwC,SAAS,aAAa,cAAc;AAAA,YAC9E;AAAA,UACF;AACA,cAAI,kBAAkB,KAAK,cAAc,GAAG;AAC1C,oBAAQ;AAAA,cACN,yCAAyC,SAAS,YAAY;AAAA,YAChE;AAAA,UACF;AAEA,gBAAM,QAAkB;AAAA,YACtB,cAAc,SAAS;AAAA,YACvB,kBAAkB,SAAS;AAAA,YAC3B,aAAa,SAAS,eAAe,SAAS;AAAA,UAChD;AAEA,cAAI,KAAK,cAAc;AACrB,iBAAK,aAAa;AAAA,cAChB,MAAM;AAAA,cACN,MAAM;AAAA,YACR;AAAA,UACF;AAEA,gBAAM,eACJ,SAAS,gBAAgB,aACrB,SACA,SAAS;AAEf,iBAAO;AAAA,YACL;AAAA,YACA;AAAA,YACA;AAAA,UACF;AAAA,QACF,CAAC;AAED,YAAI;AACJ,YAAI,SAAS;AACX,mBAAS,OAAO;AAAA,QAClB,WAAW,sBAAsB;AAC/B,mBAAS,kBAAkB,OAAO,SAAS,MAAS;AAAA,QACtD,OAAO;AACL,mBAAS,kBAAkB,OAAO,SAAS,MAAM;AAAA,QACnD;AAEA,eAAO;AAAA,UACL,MAAM;AAAA,UACN,KAAK,OAAO;AAAA,UACZ,cAAc,OAAO;AAAA,UACrB,OAAO,OAAO;AAAA,QAChB;AAAA,MACF,SAAS,OAAO;AACd,oBAAY,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AACpE,gBAAQ;AAAA,UACN,qCAAqC,UAAU,CAAC;AAAA,UAChD,UAAU;AAAA,QACZ;AAEA,YAAI,KAAK,iBAAiB,SAAS,GAAG;AACpC,gBAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,UAAM;AAAA,EACR;AAAA,EAEA,OAAO,eAAe,MAAc,QAAQ,MAAsB;AAChE,WAAO,QACH,
EAAE,MAAM,QAAQ,MAAM,eAAe,EAAE,MAAM,YAAY,EAAE,IAC3D,EAAE,MAAM,QAAQ,KAAK;AAAA,EAC3B;AACF;AAMA,IAAM,gBAAyD,CAAC;AAEzD,SAAS,mBAAmB,SAAuC;AACxE,QAAM,WAAW,SAAS,YAAY;AACtC,MAAI,CAAC,cAAc,QAAQ,GAAG;AAC5B,kBAAc,QAAQ,IAAI,IAAI,UAAU,OAAO;AAAA,EACjD;AACA,SAAO,cAAc,QAAQ;AAC/B;AAEO,SAAS,qBAAqB,UAA8B;AACjE,MAAI,UAAU;AACZ,WAAO,cAAc,QAAQ;AAAA,EAC/B,OAAO;AACL,eAAW,OAAO,OAAO,KAAK,aAAa,GAAoB;AAC7D,aAAO,cAAc,GAAG;AAAA,IAC1B;AAAA,EACF;AACF;AAMO,SAAS,uBAAoC;AAClD,MAAI,QAAQ,IAAI,kBAAmB,QAAO;AAC1C,MAAI,QAAQ,IAAI,iBAAkB,QAAO;AACzC,MAAI,QAAQ,IAAI,aAAc,QAAO;AACrC,MAAI,QAAQ,IAAI,eAAgB,QAAO;AACvC,QAAM,IAAI;AAAA,IACR;AAAA,EACF;AACF;AAEO,SAAS,oBAAoB,UAAgC;AAClE,UAAQ,UAAU;AAAA,IAChB,KAAK;AACH,aAAO,CAAC,CAAC,QAAQ,IAAI;AAAA,IACvB,KAAK;AACH,aAAO,CAAC,CAAC,QAAQ,IAAI;AAAA,IACvB,KAAK;AACH,aAAO,CAAC,CAAC,QAAQ,IAAI;AAAA,IACvB,KAAK;AACH,aAAO,CAAC,CAAC,QAAQ,IAAI;AAAA,IACvB;AACE,aAAO;AAAA,EACX;AACF;AAMO,SAAS,yBACd,SACW;AACX,QAAM,WAAW,SAAS,YAAY,qBAAqB;AAC3D,QAAM,eACJ,aAAa,aAAa,gBAAgB,OAAO,cAAc;AACjE,SAAO,IAAI,UAAU;AAAA,IACnB;AAAA,IACA,OAAO;AAAA,IACP,aAAa;AAAA,IACb,GAAG;AAAA,EACL,CAAC;AACH;AAEO,SAAS,qBACd,SACW;AACX,QAAM,WAAW,SAAS,YAAY,qBAAqB;AAC3D,QAAM,eACJ,aAAa,aAAa,gBAAgB,WAAW,cAAc;AACrE,SAAO,IAAI,UAAU;AAAA,IACnB;AAAA,IACA,OAAO;AAAA,IACP,aAAa;AAAA,IACb,GAAG;AAAA,EACL,CAAC;AACH;AAEO,SAAS,gBACd,SACW;AACX,QAAM,WAAW,SAAS,YAAY,qBAAqB;AAC3D,QAAM,eACJ,aAAa,aACT,gBAAgB,OAChB,cAAc;AACpB,SAAO,IAAI,UAAU;AAAA,IACnB;AAAA,IACA,OAAO;AAAA,IACP,aAAa;AAAA,IACb,GAAG;AAAA,EACL,CAAC;AACH;AAEO,SAAS,qBACd,SACW;AACX,SAAO,IAAI,UAAU;AAAA,IACnB,UAAU;AAAA,IACV,OAAO,gBAAgB;AAAA,IACvB,GAAG;AAAA,EACL,CAAC;AACH;AAEO,SAAS,mBACd,SACW;AACX,SAAO,IAAI,UAAU;AAAA,IACnB,UAAU;AAAA,IACV,OAAO,cAAc;AAAA,IACrB,GAAG;AAAA,EACL,CAAC;AACH;AAEO,SAAS,sBACd,SACW;AACX,SAAO,IAAI,UAAU;AAAA,IACnB,UAAU;AAAA,IACV,OAAO,iBAAiB;AAAA,IACxB,GAAG;AAAA,EACL,CAAC;AACH;AAEO,SAAS,iBACd,SACW;AACX,SAAO,IAAI,UAAU;AAAA,IACnB,UAAU;AAAA,IACV,OAAO,YAAY;AAAA,IACnB,GAAG;AAAA,EACL,CAAC;AACH;AAEO,SAAS,uBACd,SACW;AACX,SAAO,IAAI,UAAU;AAAA,IACnB,UAAU;AAAA,IACV,OAAO,kBAAkB;AAAA,IACzB,GAAG;AAAA,EACL,CAAC;AACH;","names":[]}
package/dist/client.js CHANGED
@@ -4,18 +4,20 @@ import {
  KIMI_MODELS,
  LLMClient,
  OPENAI_MODELS,
+ OPENROUTER_MODELS,
  createAnthropicClient,
  createCreativeClient,
  createDeepSeekClient,
  createFixClient,
  createKimiClient,
  createOpenAIClient,
+ createOpenRouterClient,
  createRequirementsClient,
  getAvailableProvider,
  getSharedLLMClient,
  isProviderAvailable,
  resetSharedLLMClient
- } from "./chunk-56H37PN5.js";
+ } from "./chunk-YJVZ6ZWO.js";
  import "./chunk-WM7QVK2Z.js";
  import "./chunk-MJS33AAS.js";
  export {
@@ -24,12 +26,14 @@ export {
  KIMI_MODELS,
  LLMClient,
  OPENAI_MODELS,
+ OPENROUTER_MODELS,
  createAnthropicClient,
  createCreativeClient,
  createDeepSeekClient,
  createFixClient,
  createKimiClient,
  createOpenAIClient,
+ createOpenRouterClient,
  createRequirementsClient,
  getAvailableProvider,
  getSharedLLMClient,
package/dist/index.js CHANGED
@@ -4,18 +4,20 @@ import {
  KIMI_MODELS,
  LLMClient,
  OPENAI_MODELS,
+ OPENROUTER_MODELS,
  createAnthropicClient,
  createCreativeClient,
  createDeepSeekClient,
  createFixClient,
  createKimiClient,
  createOpenAIClient,
+ createOpenRouterClient,
  createRequirementsClient,
  getAvailableProvider,
  getSharedLLMClient,
  isProviderAvailable,
  resetSharedLLMClient
- } from "./chunk-56H37PN5.js";
+ } from "./chunk-YJVZ6ZWO.js";
  import {
  autoCloseJson,
  extractJsonFromText,
@@ -439,6 +441,7 @@ export {
  KIMI_MODELS,
  LLMClient,
  OPENAI_MODELS,
+ OPENROUTER_MODELS,
  RateLimiter,
  STRUCTURED_OUTPUT_MODELS,
  StructuredOutputClient,
@@ -452,6 +455,7 @@ export {
  createFixClient,
  createKimiClient,
  createOpenAIClient,
+ createOpenRouterClient,
  createRequirementsClient,
  detectTruncation,
  extractJsonFromText,
package/dist/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/truncation-detector.ts","../src/continuation.ts"],"sourcesContent":["/**\n * Truncation Detector\n *\n * Utilities for detecting when LLM output has been truncated and\n * extracting usable content from partial responses.\n *\n * @packageDocumentation\n */\n\nimport type { LLMFinishReason } from './client.js';\nimport { autoCloseJson } from './json-parser.js';\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport type TruncationReason =\n | 'finish_reason'\n | 'json_incomplete'\n | 'bracket_mismatch'\n | 'none';\n\nexport interface TruncationResult {\n isTruncated: boolean;\n reason: TruncationReason;\n partialContent?: string;\n lastCompleteElement?: unknown;\n missingCloseBrackets?: number;\n missingCloseBraces?: number;\n}\n\n// ============================================================================\n// Main Detection Function\n// ============================================================================\n\nexport function detectTruncation(\n response: string,\n finishReason: LLMFinishReason,\n): TruncationResult {\n if (finishReason === 'length') {\n const bracketInfo = countBrackets(response);\n return {\n isTruncated: true,\n reason: 'finish_reason',\n partialContent: response,\n lastCompleteElement: findLastCompleteElement(response),\n missingCloseBrackets: bracketInfo.missingCloseBrackets,\n missingCloseBraces: bracketInfo.missingCloseBraces,\n };\n }\n\n try {\n JSON.parse(response);\n return { isTruncated: false, reason: 'none' };\n } catch {\n // JSON is invalid, check if due to truncation\n }\n\n if (finishReason === 'stop' || finishReason === null) {\n const trimmed = response.trim();\n\n const isMidContent =\n trimmed.endsWith(',') ||\n trimmed.endsWith(':') ||\n trimmed.endsWith('\": ') ||\n /:\\s*$/.test(trimmed) ||\n /,\\s*$/.test(trimmed);\n\n if (isMidContent) {\n const bracketInfo = countBrackets(response);\n return {\n isTruncated: true,\n reason: 'json_incomplete',\n partialContent: response,\n lastCompleteElement: findLastCompleteElement(response),\n missingCloseBrackets: bracketInfo.missingCloseBrackets,\n missingCloseBraces: bracketInfo.missingCloseBraces,\n };\n }\n\n try {\n const closed = autoCloseJson(trimmed);\n JSON.parse(closed);\n return { isTruncated: false, reason: 'none' };\n } catch {\n return { isTruncated: false, reason: 'none' };\n }\n }\n\n const bracketInfo = countBrackets(response);\n if (\n bracketInfo.missingCloseBrackets > 0 ||\n bracketInfo.missingCloseBraces > 0\n ) {\n return {\n isTruncated: true,\n reason: 'bracket_mismatch',\n partialContent: response,\n lastCompleteElement: findLastCompleteElement(response),\n missingCloseBrackets: bracketInfo.missingCloseBrackets,\n missingCloseBraces: bracketInfo.missingCloseBraces,\n };\n }\n\n return { isTruncated: false, reason: 'none' };\n}\n\n// ============================================================================\n// Helper Functions\n// ============================================================================\n\nfunction countBrackets(json: string): {\n openBrackets: number;\n closeBrackets: number;\n openBraces: number;\n closeBraces: number;\n missingCloseBrackets: number;\n missingCloseBraces: number;\n} {\n let inString = false;\n let escaped = false;\n let openBrackets = 0;\n let closeBrackets = 0;\n let openBraces = 0;\n let closeBraces = 0;\n\n for (const char of json) {\n if (escaped) {\n escaped = false;\n 
continue;\n }\n if (char === '\\\\' && inString) {\n escaped = true;\n continue;\n }\n if (char === '\"') {\n inString = !inString;\n continue;\n }\n if (inString) continue;\n\n switch (char) {\n case '[':\n openBrackets++;\n break;\n case ']':\n closeBrackets++;\n break;\n case '{':\n openBraces++;\n break;\n case '}':\n closeBraces++;\n break;\n }\n }\n\n return {\n openBrackets,\n closeBrackets,\n openBraces,\n closeBraces,\n missingCloseBrackets: Math.max(0, openBrackets - closeBrackets),\n missingCloseBraces: Math.max(0, openBraces - closeBraces),\n };\n}\n\nexport function findLastCompleteElement(json: string): unknown | null {\n const autoClosed = autoCloseJson(json);\n try {\n return JSON.parse(autoClosed);\n } catch {\n // Auto-close didn't work\n }\n\n const trimmed = json.trim();\n\n if (trimmed.startsWith('[')) {\n const lastCompleteIndex = findLastCompleteArrayElement(trimmed);\n if (lastCompleteIndex > 0) {\n const subset = trimmed.substring(0, lastCompleteIndex) + ']';\n try {\n return JSON.parse(subset);\n } catch {\n // Continue\n }\n }\n }\n\n if (trimmed.startsWith('{')) {\n const closed = autoCloseJson(trimmed);\n try {\n return JSON.parse(closed);\n } catch {\n const lastCompleteIndex = findLastCompleteObjectProperty(trimmed);\n if (lastCompleteIndex > 0) {\n const subset = trimmed.substring(0, lastCompleteIndex) + '}';\n try {\n return JSON.parse(subset);\n } catch {\n // Give up\n }\n }\n }\n }\n\n return null;\n}\n\nfunction findLastCompleteArrayElement(json: string): number {\n let depth = 0;\n let inString = false;\n let escaped = false;\n let lastCompleteElementEnd = -1;\n\n for (let i = 0; i < json.length; i++) {\n const char = json[i];\n\n if (escaped) {\n escaped = false;\n continue;\n }\n if (char === '\\\\' && inString) {\n escaped = true;\n continue;\n }\n if (char === '\"') {\n inString = !inString;\n continue;\n }\n if (inString) continue;\n\n if (char === '[' || char === '{') {\n depth++;\n } else if (char === ']' || char === '}') {\n depth--;\n if (depth === 1) {\n lastCompleteElementEnd = i + 1;\n }\n } else if (char === ',' && depth === 1) {\n lastCompleteElementEnd = i;\n }\n }\n\n return lastCompleteElementEnd > 0 ? lastCompleteElementEnd : -1;\n}\n\nfunction findLastCompleteObjectProperty(json: string): number {\n let depth = 0;\n let inString = false;\n let escaped = false;\n let lastCommaIndex = -1;\n\n for (let i = 0; i < json.length; i++) {\n const char = json[i];\n\n if (escaped) {\n escaped = false;\n continue;\n }\n if (char === '\\\\' && inString) {\n escaped = true;\n continue;\n }\n if (char === '\"') {\n inString = !inString;\n continue;\n }\n if (inString) continue;\n\n if (char === '[' || char === '{') {\n depth++;\n } else if (char === ']' || char === '}') {\n depth--;\n } else if (char === ',' && depth === 1) {\n lastCommaIndex = i;\n }\n }\n\n return lastCommaIndex > 0 ? 
lastCommaIndex : -1;\n}\n\nexport function isLikelyTruncated(content: string): boolean {\n const trimmed = content.trim();\n if (!trimmed) return false;\n\n const brackets = countBrackets(trimmed);\n if (\n brackets.missingCloseBrackets > 0 ||\n brackets.missingCloseBraces > 0\n ) {\n return true;\n }\n\n const abruptEndings = [\n /,\\s*$/,\n /:\\s*$/,\n /\"\\s*:\\s*$/,\n /\\[\\s*$/,\n /{\\s*$/,\n ];\n\n for (const pattern of abruptEndings) {\n if (pattern.test(trimmed)) return true;\n }\n\n return false;\n}\n","/**\n * LLM Continuation Utility\n *\n * Handles truncated LLM responses with automatic continuation.\n * - Detects truncation via finish_reason and JSON structure\n * - Automatically continues with full context\n * - Merges partial and continuation responses\n * - Salvages partial data if max continuations reached\n *\n * @packageDocumentation\n */\n\nimport { z } from 'zod';\nimport { LLMClient, type LLMFinishReason } from './client.js';\nimport { detectTruncation } from './truncation-detector.js';\nimport { extractJsonFromText, autoCloseJson, isValidJson } from './json-parser.js';\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport interface ContinuationOptions<T> {\n client: LLMClient;\n systemPrompt: string;\n userPrompt: string;\n schema?: z.ZodSchema<T>;\n maxTokens?: number;\n maxContinuations?: number;\n maxRetries?: number;\n buildContinuationPrompt: (\n partialResponse: string,\n attempt: number,\n ) => string;\n continuationSystemPrompt?: string;\n}\n\nexport interface ContinuationResult<T> {\n data: T;\n raw: string;\n continuationCount: number;\n warnings: string[];\n wasSalvaged: boolean;\n}\n\n// ============================================================================\n// Constants\n// ============================================================================\n\nconst DEFAULT_MAX_TOKENS = 8192;\nconst DEFAULT_MAX_CONTINUATIONS = 3;\n\n/**\n * Default continuation system prompt.\n * Used when no custom continuationSystemPrompt is provided.\n */\nconst DEFAULT_CONTINUATION_SYSTEM_PROMPT = `You are a JSON continuation assistant. Your ONLY job is to continue generating JSON from where the previous response was truncated.\n\nRules:\n1. Continue from EXACTLY where the previous output stopped\n2. Do NOT repeat any content already generated\n3. Complete the JSON structure properly with all closing brackets\n4. Do NOT wrap in markdown code blocks\n5. 
Output ONLY the continuation JSON, nothing else`;\n\n// ============================================================================\n// Helper Functions\n// ============================================================================\n\nexport function mergeResponses(\n previous: string,\n continuation: string,\n): string {\n const trimmedPrev = previous.trimEnd();\n const trimmedCont = continuation.trimStart();\n\n let cleanedCont = trimmedCont\n .replace(/^```json?\\s*/i, '')\n .replace(/```\\s*$/i, '')\n .trim();\n\n if (cleanedCont.startsWith('{')) {\n try {\n const contParsed = JSON.parse(autoCloseJson(cleanedCont));\n const keys = Object.keys(contParsed);\n if (keys.length === 1 && Array.isArray(contParsed[keys[0]])) {\n cleanedCont = contParsed[keys[0]]\n .map((item: unknown) => JSON.stringify(item))\n .join(',\\n');\n }\n } catch {\n // Continue with original cleaning\n }\n }\n\n if (cleanedCont.startsWith('}') || cleanedCont.startsWith(']')) {\n return trimmedPrev + cleanedCont;\n }\n\n const prevEndsWithValue = /[\\}\\]\\\"\\d]$/.test(trimmedPrev);\n const contStartsWithValue = /^[\\{\\[\\\"]/.test(cleanedCont);\n\n if (prevEndsWithValue && contStartsWithValue) {\n return trimmedPrev + ',\\n' + cleanedCont;\n }\n\n return trimmedPrev + cleanedCont;\n}\n\nexport function salvagePartialResponse<T>(rawResponse: string): T | null {\n console.warn('[Continuation] Attempting to salvage partial response');\n\n try {\n const cleanedResponse = extractJsonFromText(rawResponse) || rawResponse;\n const closed = autoCloseJson(cleanedResponse);\n const parsed = JSON.parse(closed) as T;\n console.log('[Continuation] Successfully salvaged partial response');\n return parsed;\n } catch (error) {\n console.error('[Continuation] Could not salvage response:', error);\n }\n\n return null;\n}\n\n// ============================================================================\n// Main Function\n// ============================================================================\n\nexport async function callWithContinuation<T>(\n options: ContinuationOptions<T>,\n): Promise<ContinuationResult<T>> {\n const {\n client,\n systemPrompt,\n userPrompt,\n schema,\n maxTokens = DEFAULT_MAX_TOKENS,\n maxContinuations = DEFAULT_MAX_CONTINUATIONS,\n buildContinuationPrompt,\n continuationSystemPrompt = DEFAULT_CONTINUATION_SYSTEM_PROMPT,\n } = options;\n\n let rawResponse = '';\n let continuationCount = 0;\n const warnings: string[] = [];\n let wasSalvaged = false;\n\n console.log('[Continuation] Starting LLM call with continuation support');\n console.log(\n `[Continuation] Max tokens: ${maxTokens}, Max continuations: ${maxContinuations}`,\n );\n\n try {\n const response = await client.callRawWithMetadata({\n systemPrompt,\n userPrompt,\n maxTokens,\n });\n\n rawResponse = extractJsonFromText(response.raw) || response.raw;\n\n console.log(\n `[Continuation] Initial response: ${rawResponse.length} chars, finish_reason: ${response.finishReason}`,\n );\n\n let truncation = detectTruncation(rawResponse, response.finishReason);\n\n while (truncation.isTruncated && continuationCount < maxContinuations) {\n continuationCount++;\n const warningMsg = `Response truncated (${truncation.reason}), continuing (attempt ${continuationCount}/${maxContinuations})`;\n console.log(`[Continuation] ${warningMsg}`);\n warnings.push(warningMsg);\n\n const contPrompt = buildContinuationPrompt(\n rawResponse,\n continuationCount,\n );\n\n const contResponse = await client.callRawWithMetadata({\n systemPrompt: continuationSystemPrompt,\n 
userPrompt: contPrompt,\n maxTokens,\n });\n\n console.log(\n `[Continuation] Continuation response: ${contResponse.raw.length} chars, finish_reason: ${contResponse.finishReason}`,\n );\n\n const cleanedContResponse =\n extractJsonFromText(contResponse.raw) || contResponse.raw;\n rawResponse = mergeResponses(rawResponse, cleanedContResponse);\n\n truncation = detectTruncation(rawResponse, contResponse.finishReason);\n }\n\n if (\n continuationCount >= maxContinuations &&\n truncation.isTruncated\n ) {\n console.warn(\n `[Continuation] Reached max continuations (${maxContinuations}), attempting to salvage...`,\n );\n warnings.push(\n `Reached max continuations - some content may be incomplete`,\n );\n wasSalvaged = true;\n }\n\n const cleanedResponse =\n extractJsonFromText(rawResponse) || rawResponse;\n let data: T;\n\n try {\n if (isValidJson(cleanedResponse)) {\n data = JSON.parse(cleanedResponse) as T;\n } else {\n const closed = autoCloseJson(cleanedResponse);\n data = JSON.parse(closed) as T;\n if (!wasSalvaged) {\n warnings.push('Response required auto-closing of JSON brackets');\n }\n }\n } catch (parseError) {\n const salvaged = salvagePartialResponse<T>(cleanedResponse);\n if (salvaged) {\n data = salvaged;\n wasSalvaged = true;\n warnings.push('Response was salvaged from partial data');\n } else {\n throw new Error(\n `Failed to parse response after ${continuationCount} continuations: ${parseError}`,\n );\n }\n }\n\n if (schema) {\n try {\n data = schema.parse(data);\n } catch (validationError) {\n console.warn(\n '[Continuation] Schema validation failed:',\n validationError,\n );\n warnings.push(`Schema validation issue: ${validationError}`);\n }\n }\n\n console.log(\n `[Continuation] Complete. Continuations: ${continuationCount}, Warnings: ${warnings.length}`,\n );\n\n return {\n data,\n raw: rawResponse,\n continuationCount,\n warnings,\n wasSalvaged,\n };\n } catch (error) {\n console.error('[Continuation] Error during LLM call:', error);\n throw error;\n }\n}\n\nexport function buildGenericContinuationPrompt(\n context: string,\n partialResponse: string,\n attempt: number,\n maxAttempts: number = DEFAULT_MAX_CONTINUATIONS,\n): string {\n return `## CONTINUATION REQUEST (Attempt ${attempt}/${maxAttempts})\n\nYour previous response was truncated. Continue generating from where you left off.\n\n### ORIGINAL CONTEXT\n${context}\n\n### WHAT YOU GENERATED SO FAR\n\\`\\`\\`json\n${partialResponse}\n\\`\\`\\`\n\n### INSTRUCTIONS\n1. Continue from EXACTLY where the response was cut off\n2. Do NOT repeat any content already generated\n3. Complete the JSON structure properly\n4. 
Do NOT wrap your response in markdown code blocks\n\nContinue generating now:`;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAmCO,SAAS,iBACd,UACA,cACkB;AAClB,MAAI,iBAAiB,UAAU;AAC7B,UAAMA,eAAc,cAAc,QAAQ;AAC1C,WAAO;AAAA,MACL,aAAa;AAAA,MACb,QAAQ;AAAA,MACR,gBAAgB;AAAA,MAChB,qBAAqB,wBAAwB,QAAQ;AAAA,MACrD,sBAAsBA,aAAY;AAAA,MAClC,oBAAoBA,aAAY;AAAA,IAClC;AAAA,EACF;AAEA,MAAI;AACF,SAAK,MAAM,QAAQ;AACnB,WAAO,EAAE,aAAa,OAAO,QAAQ,OAAO;AAAA,EAC9C,QAAQ;AAAA,EAER;AAEA,MAAI,iBAAiB,UAAU,iBAAiB,MAAM;AACpD,UAAM,UAAU,SAAS,KAAK;AAE9B,UAAM,eACJ,QAAQ,SAAS,GAAG,KACpB,QAAQ,SAAS,GAAG,KACpB,QAAQ,SAAS,KAAK,KACtB,QAAQ,KAAK,OAAO,KACpB,QAAQ,KAAK,OAAO;AAEtB,QAAI,cAAc;AAChB,YAAMA,eAAc,cAAc,QAAQ;AAC1C,aAAO;AAAA,QACL,aAAa;AAAA,QACb,QAAQ;AAAA,QACR,gBAAgB;AAAA,QAChB,qBAAqB,wBAAwB,QAAQ;AAAA,QACrD,sBAAsBA,aAAY;AAAA,QAClC,oBAAoBA,aAAY;AAAA,MAClC;AAAA,IACF;AAEA,QAAI;AACF,YAAM,SAAS,cAAc,OAAO;AACpC,WAAK,MAAM,MAAM;AACjB,aAAO,EAAE,aAAa,OAAO,QAAQ,OAAO;AAAA,IAC9C,QAAQ;AACN,aAAO,EAAE,aAAa,OAAO,QAAQ,OAAO;AAAA,IAC9C;AAAA,EACF;AAEA,QAAM,cAAc,cAAc,QAAQ;AAC1C,MACE,YAAY,uBAAuB,KACnC,YAAY,qBAAqB,GACjC;AACA,WAAO;AAAA,MACL,aAAa;AAAA,MACb,QAAQ;AAAA,MACR,gBAAgB;AAAA,MAChB,qBAAqB,wBAAwB,QAAQ;AAAA,MACrD,sBAAsB,YAAY;AAAA,MAClC,oBAAoB,YAAY;AAAA,IAClC;AAAA,EACF;AAEA,SAAO,EAAE,aAAa,OAAO,QAAQ,OAAO;AAC9C;AAMA,SAAS,cAAc,MAOrB;AACA,MAAI,WAAW;AACf,MAAI,UAAU;AACd,MAAI,eAAe;AACnB,MAAI,gBAAgB;AACpB,MAAI,aAAa;AACjB,MAAI,cAAc;AAElB,aAAW,QAAQ,MAAM;AACvB,QAAI,SAAS;AACX,gBAAU;AACV;AAAA,IACF;AACA,QAAI,SAAS,QAAQ,UAAU;AAC7B,gBAAU;AACV;AAAA,IACF;AACA,QAAI,SAAS,KAAK;AAChB,iBAAW,CAAC;AACZ;AAAA,IACF;AACA,QAAI,SAAU;AAEd,YAAQ,MAAM;AAAA,MACZ,KAAK;AACH;AACA;AAAA,MACF,KAAK;AACH;AACA;AAAA,MACF,KAAK;AACH;AACA;AAAA,MACF,KAAK;AACH;AACA;AAAA,IACJ;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,sBAAsB,KAAK,IAAI,GAAG,eAAe,aAAa;AAAA,IAC9D,oBAAoB,KAAK,IAAI,GAAG,aAAa,WAAW;AAAA,EAC1D;AACF;AAEO,SAAS,wBAAwB,MAA8B;AACpE,QAAM,aAAa,cAAc,IAAI;AACrC,MAAI;AACF,WAAO,KAAK,MAAM,UAAU;AAAA,EAC9B,QAAQ;AAAA,EAER;AAEA,QAAM,UAAU,KAAK,KAAK;AAE1B,MAAI,QAAQ,WAAW,GAAG,GAAG;AAC3B,UAAM,oBAAoB,6BAA6B,OAAO;AAC9D,QAAI,oBAAoB,GAAG;AACzB,YAAM,SAAS,QAAQ,UAAU,GAAG,iBAAiB,IAAI;AACzD,UAAI;AACF,eAAO,KAAK,MAAM,MAAM;AAAA,MAC1B,QAAQ;AAAA,MAER;AAAA,IACF;AAAA,EACF;AAEA,MAAI,QAAQ,WAAW,GAAG,GAAG;AAC3B,UAAM,SAAS,cAAc,OAAO;AACpC,QAAI;AACF,aAAO,KAAK,MAAM,MAAM;AAAA,IAC1B,QAAQ;AACN,YAAM,oBAAoB,+BAA+B,OAAO;AAChE,UAAI,oBAAoB,GAAG;AACzB,cAAM,SAAS,QAAQ,UAAU,GAAG,iBAAiB,IAAI;AACzD,YAAI;AACF,iBAAO,KAAK,MAAM,MAAM;AAAA,QAC1B,QAAQ;AAAA,QAER;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAEA,SAAS,6BAA6B,MAAsB;AAC1D,MAAI,QAAQ;AACZ,MAAI,WAAW;AACf,MAAI,UAAU;AACd,MAAI,yBAAyB;AAE7B,WAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,UAAM,OAAO,KAAK,CAAC;AAEnB,QAAI,SAAS;AACX,gBAAU;AACV;AAAA,IACF;AACA,QAAI,SAAS,QAAQ,UAAU;AAC7B,gBAAU;AACV;AAAA,IACF;AACA,QAAI,SAAS,KAAK;AAChB,iBAAW,CAAC;AACZ;AAAA,IACF;AACA,QAAI,SAAU;AAEd,QAAI,SAAS,OAAO,SAAS,KAAK;AAChC;AAAA,IACF,WAAW,SAAS,OAAO,SAAS,KAAK;AACvC;AACA,UAAI,UAAU,GAAG;AACf,iCAAyB,IAAI;AAAA,MAC/B;AAAA,IACF,WAAW,SAAS,OAAO,UAAU,GAAG;AACtC,+BAAyB;AAAA,IAC3B;AAAA,EACF;AAEA,SAAO,yBAAyB,IAAI,yBAAyB;AAC/D;AAEA,SAAS,+BAA+B,MAAsB;AAC5D,MAAI,QAAQ;AACZ,MAAI,WAAW;AACf,MAAI,UAAU;AACd,MAAI,iBAAiB;AAErB,WAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,UAAM,OAAO,KAAK,CAAC;AAEnB,QAAI,SAAS;AACX,gBAAU;AACV;AAAA,IACF;AACA,QAAI,SAAS,QAAQ,UAAU;AAC7B,gBAAU;AACV;AAAA,IACF;AACA,QAAI,SAAS,KAAK;AAChB,iBAAW,CAAC;AACZ;AAAA,IACF;AACA,QAAI,SAAU;AAEd,QAAI,SAAS,OAAO,SAAS,KAAK;AAChC;AAAA,IACF,WAAW,SAAS,OAAO,SAAS,KAAK;AACvC;AAAA,IACF,WAAW,SAAS,OAAO,UAAU,GAAG;AACtC,uBAAiB;AAAA,IACnB;AAAA,EACF;AAEA,SAAO,iBAAiB,IAAI,iBAAiB;AAC/C;AAE
O,SAAS,kBAAkB,SAA0B;AAC1D,QAAM,UAAU,QAAQ,KAAK;AAC7B,MAAI,CAAC,QAAS,QAAO;AAErB,QAAM,WAAW,cAAc,OAAO;AACtC,MACE,SAAS,uBAAuB,KAChC,SAAS,qBAAqB,GAC9B;AACA,WAAO;AAAA,EACT;AAEA,QAAM,gBAAgB;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAEA,aAAW,WAAW,eAAe;AACnC,QAAI,QAAQ,KAAK,OAAO,EAAG,QAAO;AAAA,EACpC;AAEA,SAAO;AACT;;;ACnQA,IAAM,qBAAqB;AAC3B,IAAM,4BAA4B;AAMlC,IAAM,qCAAqC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAapC,SAAS,eACd,UACA,cACQ;AACR,QAAM,cAAc,SAAS,QAAQ;AACrC,QAAM,cAAc,aAAa,UAAU;AAE3C,MAAI,cAAc,YACf,QAAQ,iBAAiB,EAAE,EAC3B,QAAQ,YAAY,EAAE,EACtB,KAAK;AAER,MAAI,YAAY,WAAW,GAAG,GAAG;AAC/B,QAAI;AACF,YAAM,aAAa,KAAK,MAAM,cAAc,WAAW,CAAC;AACxD,YAAM,OAAO,OAAO,KAAK,UAAU;AACnC,UAAI,KAAK,WAAW,KAAK,MAAM,QAAQ,WAAW,KAAK,CAAC,CAAC,CAAC,GAAG;AAC3D,sBAAc,WAAW,KAAK,CAAC,CAAC,EAC7B,IAAI,CAAC,SAAkB,KAAK,UAAU,IAAI,CAAC,EAC3C,KAAK,KAAK;AAAA,MACf;AAAA,IACF,QAAQ;AAAA,IAER;AAAA,EACF;AAEA,MAAI,YAAY,WAAW,GAAG,KAAK,YAAY,WAAW,GAAG,GAAG;AAC9D,WAAO,cAAc;AAAA,EACvB;AAEA,QAAM,oBAAoB,cAAc,KAAK,WAAW;AACxD,QAAM,sBAAsB,YAAY,KAAK,WAAW;AAExD,MAAI,qBAAqB,qBAAqB;AAC5C,WAAO,cAAc,QAAQ;AAAA,EAC/B;AAEA,SAAO,cAAc;AACvB;AAEO,SAAS,uBAA0B,aAA+B;AACvE,UAAQ,KAAK,uDAAuD;AAEpE,MAAI;AACF,UAAM,kBAAkB,oBAAoB,WAAW,KAAK;AAC5D,UAAM,SAAS,cAAc,eAAe;AAC5C,UAAM,SAAS,KAAK,MAAM,MAAM;AAChC,YAAQ,IAAI,uDAAuD;AACnE,WAAO;AAAA,EACT,SAAS,OAAO;AACd,YAAQ,MAAM,8CAA8C,KAAK;AAAA,EACnE;AAEA,SAAO;AACT;AAMA,eAAsB,qBACpB,SACgC;AAChC,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,YAAY;AAAA,IACZ,mBAAmB;AAAA,IACnB;AAAA,IACA,2BAA2B;AAAA,EAC7B,IAAI;AAEJ,MAAI,cAAc;AAClB,MAAI,oBAAoB;AACxB,QAAM,WAAqB,CAAC;AAC5B,MAAI,cAAc;AAElB,UAAQ,IAAI,4DAA4D;AACxE,UAAQ;AAAA,IACN,8BAA8B,SAAS,wBAAwB,gBAAgB;AAAA,EACjF;AAEA,MAAI;AACF,UAAM,WAAW,MAAM,OAAO,oBAAoB;AAAA,MAChD;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAED,kBAAc,oBAAoB,SAAS,GAAG,KAAK,SAAS;AAE5D,YAAQ;AAAA,MACN,oCAAoC,YAAY,MAAM,0BAA0B,SAAS,YAAY;AAAA,IACvG;AAEA,QAAI,aAAa,iBAAiB,aAAa,SAAS,YAAY;AAEpE,WAAO,WAAW,eAAe,oBAAoB,kBAAkB;AACrE;AACA,YAAM,aAAa,uBAAuB,WAAW,MAAM,0BAA0B,iBAAiB,IAAI,gBAAgB;AAC1H,cAAQ,IAAI,kBAAkB,UAAU,EAAE;AAC1C,eAAS,KAAK,UAAU;AAExB,YAAM,aAAa;AAAA,QACjB;AAAA,QACA;AAAA,MACF;AAEA,YAAM,eAAe,MAAM,OAAO,oBAAoB;AAAA,QACpD,cAAc;AAAA,QACd,YAAY;AAAA,QACZ;AAAA,MACF,CAAC;AAED,cAAQ;AAAA,QACN,yCAAyC,aAAa,IAAI,MAAM,0BAA0B,aAAa,YAAY;AAAA,MACrH;AAEA,YAAM,sBACJ,oBAAoB,aAAa,GAAG,KAAK,aAAa;AACxD,oBAAc,eAAe,aAAa,mBAAmB;AAE7D,mBAAa,iBAAiB,aAAa,aAAa,YAAY;AAAA,IACtE;AAEA,QACE,qBAAqB,oBACrB,WAAW,aACX;AACA,cAAQ;AAAA,QACN,6CAA6C,gBAAgB;AAAA,MAC/D;AACA,eAAS;AAAA,QACP;AAAA,MACF;AACA,oBAAc;AAAA,IAChB;AAEA,UAAM,kBACJ,oBAAoB,WAAW,KAAK;AACtC,QAAI;AAEJ,QAAI;AACF,UAAI,YAAY,eAAe,GAAG;AAChC,eAAO,KAAK,MAAM,eAAe;AAAA,MACnC,OAAO;AACL,cAAM,SAAS,cAAc,eAAe;AAC5C,eAAO,KAAK,MAAM,MAAM;AACxB,YAAI,CAAC,aAAa;AAChB,mBAAS,KAAK,iDAAiD;AAAA,QACjE;AAAA,MACF;AAAA,IACF,SAAS,YAAY;AACnB,YAAM,WAAW,uBAA0B,eAAe;AAC1D,UAAI,UAAU;AACZ,eAAO;AACP,sBAAc;AACd,iBAAS,KAAK,yCAAyC;AAAA,MACzD,OAAO;AACL,cAAM,IAAI;AAAA,UACR,kCAAkC,iBAAiB,mBAAmB,UAAU;AAAA,QAClF;AAAA,MACF;AAAA,IACF;AAEA,QAAI,QAAQ;AACV,UAAI;AACF,eAAO,OAAO,MAAM,IAAI;AAAA,MAC1B,SAAS,iBAAiB;AACxB,gBAAQ;AAAA,UACN;AAAA,UACA;AAAA,QACF;AACA,iBAAS,KAAK,4BAA4B,eAAe,EAAE;AAAA,MAC7D;AAAA,IACF;AAEA,YAAQ;AAAA,MACN,2CAA2C,iBAAiB,eAAe,SAAS,MAAM;AAAA,IAC5F;AAEA,WAAO;AAAA,MACL;AAAA,MACA,KAAK;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF,SAAS,OAAO;AACd,YAAQ,MAAM,yCAAyC,KAAK;AAC5D,UAAM;AAAA,EACR;AACF;AAEO,SAAS,+BACd,SACA,iBACA,SACA,cAAsB,2BACd;AACR,SAAO,oCAAoC,OAAO,IAAI,WAAW;AAAA;AAAA;AAAA;AAAA;AAAA,EAKjE,OAAO;AAAA;AAAA;AAAA;AAAA,EAIP,eAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAUjB;","names":["bracketInfo"]}
1
+ {"version":3,"sources":["../src/truncation-detector.ts","../src/continuation.ts"],"sourcesContent":["/**\n * Truncation Detector\n *\n * Utilities for detecting when LLM output has been truncated and\n * extracting usable content from partial responses.\n *\n * @packageDocumentation\n */\n\nimport type { LLMFinishReason } from './client.js';\nimport { autoCloseJson } from './json-parser.js';\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport type TruncationReason =\n | 'finish_reason'\n | 'json_incomplete'\n | 'bracket_mismatch'\n | 'none';\n\nexport interface TruncationResult {\n isTruncated: boolean;\n reason: TruncationReason;\n partialContent?: string;\n lastCompleteElement?: unknown;\n missingCloseBrackets?: number;\n missingCloseBraces?: number;\n}\n\n// ============================================================================\n// Main Detection Function\n// ============================================================================\n\nexport function detectTruncation(\n response: string,\n finishReason: LLMFinishReason,\n): TruncationResult {\n if (finishReason === 'length') {\n const bracketInfo = countBrackets(response);\n return {\n isTruncated: true,\n reason: 'finish_reason',\n partialContent: response,\n lastCompleteElement: findLastCompleteElement(response),\n missingCloseBrackets: bracketInfo.missingCloseBrackets,\n missingCloseBraces: bracketInfo.missingCloseBraces,\n };\n }\n\n try {\n JSON.parse(response);\n return { isTruncated: false, reason: 'none' };\n } catch {\n // JSON is invalid, check if due to truncation\n }\n\n if (finishReason === 'stop' || finishReason === null) {\n const trimmed = response.trim();\n\n const isMidContent =\n trimmed.endsWith(',') ||\n trimmed.endsWith(':') ||\n trimmed.endsWith('\": ') ||\n /:\\s*$/.test(trimmed) ||\n /,\\s*$/.test(trimmed);\n\n if (isMidContent) {\n const bracketInfo = countBrackets(response);\n return {\n isTruncated: true,\n reason: 'json_incomplete',\n partialContent: response,\n lastCompleteElement: findLastCompleteElement(response),\n missingCloseBrackets: bracketInfo.missingCloseBrackets,\n missingCloseBraces: bracketInfo.missingCloseBraces,\n };\n }\n\n try {\n const closed = autoCloseJson(trimmed);\n JSON.parse(closed);\n return { isTruncated: false, reason: 'none' };\n } catch {\n return { isTruncated: false, reason: 'none' };\n }\n }\n\n const bracketInfo = countBrackets(response);\n if (\n bracketInfo.missingCloseBrackets > 0 ||\n bracketInfo.missingCloseBraces > 0\n ) {\n return {\n isTruncated: true,\n reason: 'bracket_mismatch',\n partialContent: response,\n lastCompleteElement: findLastCompleteElement(response),\n missingCloseBrackets: bracketInfo.missingCloseBrackets,\n missingCloseBraces: bracketInfo.missingCloseBraces,\n };\n }\n\n return { isTruncated: false, reason: 'none' };\n}\n\n// ============================================================================\n// Helper Functions\n// ============================================================================\n\nfunction countBrackets(json: string): {\n openBrackets: number;\n closeBrackets: number;\n openBraces: number;\n closeBraces: number;\n missingCloseBrackets: number;\n missingCloseBraces: number;\n} {\n let inString = false;\n let escaped = false;\n let openBrackets = 0;\n let closeBrackets = 0;\n let openBraces = 0;\n let closeBraces = 0;\n\n for (const char of json) {\n if (escaped) {\n escaped = false;\n 
continue;\n }\n if (char === '\\\\' && inString) {\n escaped = true;\n continue;\n }\n if (char === '\"') {\n inString = !inString;\n continue;\n }\n if (inString) continue;\n\n switch (char) {\n case '[':\n openBrackets++;\n break;\n case ']':\n closeBrackets++;\n break;\n case '{':\n openBraces++;\n break;\n case '}':\n closeBraces++;\n break;\n }\n }\n\n return {\n openBrackets,\n closeBrackets,\n openBraces,\n closeBraces,\n missingCloseBrackets: Math.max(0, openBrackets - closeBrackets),\n missingCloseBraces: Math.max(0, openBraces - closeBraces),\n };\n}\n\nexport function findLastCompleteElement(json: string): unknown | null {\n const autoClosed = autoCloseJson(json);\n try {\n return JSON.parse(autoClosed);\n } catch {\n // Auto-close didn't work\n }\n\n const trimmed = json.trim();\n\n if (trimmed.startsWith('[')) {\n const lastCompleteIndex = findLastCompleteArrayElement(trimmed);\n if (lastCompleteIndex > 0) {\n const subset = trimmed.substring(0, lastCompleteIndex) + ']';\n try {\n return JSON.parse(subset);\n } catch {\n // Continue\n }\n }\n }\n\n if (trimmed.startsWith('{')) {\n const closed = autoCloseJson(trimmed);\n try {\n return JSON.parse(closed);\n } catch {\n const lastCompleteIndex = findLastCompleteObjectProperty(trimmed);\n if (lastCompleteIndex > 0) {\n const subset = trimmed.substring(0, lastCompleteIndex) + '}';\n try {\n return JSON.parse(subset);\n } catch {\n // Give up\n }\n }\n }\n }\n\n return null;\n}\n\nfunction findLastCompleteArrayElement(json: string): number {\n let depth = 0;\n let inString = false;\n let escaped = false;\n let lastCompleteElementEnd = -1;\n\n for (let i = 0; i < json.length; i++) {\n const char = json[i];\n\n if (escaped) {\n escaped = false;\n continue;\n }\n if (char === '\\\\' && inString) {\n escaped = true;\n continue;\n }\n if (char === '\"') {\n inString = !inString;\n continue;\n }\n if (inString) continue;\n\n if (char === '[' || char === '{') {\n depth++;\n } else if (char === ']' || char === '}') {\n depth--;\n if (depth === 1) {\n lastCompleteElementEnd = i + 1;\n }\n } else if (char === ',' && depth === 1) {\n lastCompleteElementEnd = i;\n }\n }\n\n return lastCompleteElementEnd > 0 ? lastCompleteElementEnd : -1;\n}\n\nfunction findLastCompleteObjectProperty(json: string): number {\n let depth = 0;\n let inString = false;\n let escaped = false;\n let lastCommaIndex = -1;\n\n for (let i = 0; i < json.length; i++) {\n const char = json[i];\n\n if (escaped) {\n escaped = false;\n continue;\n }\n if (char === '\\\\' && inString) {\n escaped = true;\n continue;\n }\n if (char === '\"') {\n inString = !inString;\n continue;\n }\n if (inString) continue;\n\n if (char === '[' || char === '{') {\n depth++;\n } else if (char === ']' || char === '}') {\n depth--;\n } else if (char === ',' && depth === 1) {\n lastCommaIndex = i;\n }\n }\n\n return lastCommaIndex > 0 ? 
lastCommaIndex : -1;\n}\n\nexport function isLikelyTruncated(content: string): boolean {\n const trimmed = content.trim();\n if (!trimmed) return false;\n\n const brackets = countBrackets(trimmed);\n if (\n brackets.missingCloseBrackets > 0 ||\n brackets.missingCloseBraces > 0\n ) {\n return true;\n }\n\n const abruptEndings = [\n /,\\s*$/,\n /:\\s*$/,\n /\"\\s*:\\s*$/,\n /\\[\\s*$/,\n /{\\s*$/,\n ];\n\n for (const pattern of abruptEndings) {\n if (pattern.test(trimmed)) return true;\n }\n\n return false;\n}\n","/**\n * LLM Continuation Utility\n *\n * Handles truncated LLM responses with automatic continuation.\n * - Detects truncation via finish_reason and JSON structure\n * - Automatically continues with full context\n * - Merges partial and continuation responses\n * - Salvages partial data if max continuations reached\n *\n * @packageDocumentation\n */\n\nimport { z } from 'zod';\nimport { LLMClient, type LLMFinishReason } from './client.js';\nimport { detectTruncation } from './truncation-detector.js';\nimport { extractJsonFromText, autoCloseJson, isValidJson } from './json-parser.js';\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport interface ContinuationOptions<T> {\n client: LLMClient;\n systemPrompt: string;\n userPrompt: string;\n schema?: z.ZodSchema<T>;\n maxTokens?: number;\n maxContinuations?: number;\n maxRetries?: number;\n buildContinuationPrompt: (\n partialResponse: string,\n attempt: number,\n ) => string;\n continuationSystemPrompt?: string;\n}\n\nexport interface ContinuationResult<T> {\n data: T;\n raw: string;\n continuationCount: number;\n warnings: string[];\n wasSalvaged: boolean;\n}\n\n// ============================================================================\n// Constants\n// ============================================================================\n\nconst DEFAULT_MAX_TOKENS = 8192;\nconst DEFAULT_MAX_CONTINUATIONS = 3;\n\n/**\n * Default continuation system prompt.\n * Used when no custom continuationSystemPrompt is provided.\n */\nconst DEFAULT_CONTINUATION_SYSTEM_PROMPT = `You are a JSON continuation assistant. Your ONLY job is to continue generating JSON from where the previous response was truncated.\n\nRules:\n1. Continue from EXACTLY where the previous output stopped\n2. Do NOT repeat any content already generated\n3. Complete the JSON structure properly with all closing brackets\n4. Do NOT wrap in markdown code blocks\n5. 
Output ONLY the continuation JSON, nothing else`;\n\n// ============================================================================\n// Helper Functions\n// ============================================================================\n\nexport function mergeResponses(\n previous: string,\n continuation: string,\n): string {\n const trimmedPrev = previous.trimEnd();\n const trimmedCont = continuation.trimStart();\n\n let cleanedCont = trimmedCont\n .replace(/^```json?\\s*/i, '')\n .replace(/```\\s*$/i, '')\n .trim();\n\n if (cleanedCont.startsWith('{')) {\n try {\n const contParsed = JSON.parse(autoCloseJson(cleanedCont));\n const keys = Object.keys(contParsed);\n if (keys.length === 1 && Array.isArray(contParsed[keys[0]])) {\n cleanedCont = contParsed[keys[0]]\n .map((item: unknown) => JSON.stringify(item))\n .join(',\\n');\n }\n } catch {\n // Continue with original cleaning\n }\n }\n\n if (cleanedCont.startsWith('}') || cleanedCont.startsWith(']')) {\n return trimmedPrev + cleanedCont;\n }\n\n const prevEndsWithValue = /[\\}\\]\\\"\\d]$/.test(trimmedPrev);\n const contStartsWithValue = /^[\\{\\[\\\"]/.test(cleanedCont);\n\n if (prevEndsWithValue && contStartsWithValue) {\n return trimmedPrev + ',\\n' + cleanedCont;\n }\n\n return trimmedPrev + cleanedCont;\n}\n\nexport function salvagePartialResponse<T>(rawResponse: string): T | null {\n console.warn('[Continuation] Attempting to salvage partial response');\n\n try {\n const cleanedResponse = extractJsonFromText(rawResponse) || rawResponse;\n const closed = autoCloseJson(cleanedResponse);\n const parsed = JSON.parse(closed) as T;\n console.log('[Continuation] Successfully salvaged partial response');\n return parsed;\n } catch (error) {\n console.error('[Continuation] Could not salvage response:', error);\n }\n\n return null;\n}\n\n// ============================================================================\n// Main Function\n// ============================================================================\n\nexport async function callWithContinuation<T>(\n options: ContinuationOptions<T>,\n): Promise<ContinuationResult<T>> {\n const {\n client,\n systemPrompt,\n userPrompt,\n schema,\n maxTokens = DEFAULT_MAX_TOKENS,\n maxContinuations = DEFAULT_MAX_CONTINUATIONS,\n buildContinuationPrompt,\n continuationSystemPrompt = DEFAULT_CONTINUATION_SYSTEM_PROMPT,\n } = options;\n\n let rawResponse = '';\n let continuationCount = 0;\n const warnings: string[] = [];\n let wasSalvaged = false;\n\n console.log('[Continuation] Starting LLM call with continuation support');\n console.log(\n `[Continuation] Max tokens: ${maxTokens}, Max continuations: ${maxContinuations}`,\n );\n\n try {\n const response = await client.callRawWithMetadata({\n systemPrompt,\n userPrompt,\n maxTokens,\n });\n\n rawResponse = extractJsonFromText(response.raw) || response.raw;\n\n console.log(\n `[Continuation] Initial response: ${rawResponse.length} chars, finish_reason: ${response.finishReason}`,\n );\n\n let truncation = detectTruncation(rawResponse, response.finishReason);\n\n while (truncation.isTruncated && continuationCount < maxContinuations) {\n continuationCount++;\n const warningMsg = `Response truncated (${truncation.reason}), continuing (attempt ${continuationCount}/${maxContinuations})`;\n console.log(`[Continuation] ${warningMsg}`);\n warnings.push(warningMsg);\n\n const contPrompt = buildContinuationPrompt(\n rawResponse,\n continuationCount,\n );\n\n const contResponse = await client.callRawWithMetadata({\n systemPrompt: continuationSystemPrompt,\n 
userPrompt: contPrompt,\n maxTokens,\n });\n\n console.log(\n `[Continuation] Continuation response: ${contResponse.raw.length} chars, finish_reason: ${contResponse.finishReason}`,\n );\n\n const cleanedContResponse =\n extractJsonFromText(contResponse.raw) || contResponse.raw;\n rawResponse = mergeResponses(rawResponse, cleanedContResponse);\n\n truncation = detectTruncation(rawResponse, contResponse.finishReason);\n }\n\n if (\n continuationCount >= maxContinuations &&\n truncation.isTruncated\n ) {\n console.warn(\n `[Continuation] Reached max continuations (${maxContinuations}), attempting to salvage...`,\n );\n warnings.push(\n `Reached max continuations - some content may be incomplete`,\n );\n wasSalvaged = true;\n }\n\n const cleanedResponse =\n extractJsonFromText(rawResponse) || rawResponse;\n let data: T;\n\n try {\n if (isValidJson(cleanedResponse)) {\n data = JSON.parse(cleanedResponse) as T;\n } else {\n const closed = autoCloseJson(cleanedResponse);\n data = JSON.parse(closed) as T;\n if (!wasSalvaged) {\n warnings.push('Response required auto-closing of JSON brackets');\n }\n }\n } catch (parseError) {\n const salvaged = salvagePartialResponse<T>(cleanedResponse);\n if (salvaged) {\n data = salvaged;\n wasSalvaged = true;\n warnings.push('Response was salvaged from partial data');\n } else {\n throw new Error(\n `Failed to parse response after ${continuationCount} continuations: ${parseError}`,\n );\n }\n }\n\n if (schema) {\n try {\n data = schema.parse(data);\n } catch (validationError) {\n console.warn(\n '[Continuation] Schema validation failed:',\n validationError,\n );\n warnings.push(`Schema validation issue: ${validationError}`);\n }\n }\n\n console.log(\n `[Continuation] Complete. Continuations: ${continuationCount}, Warnings: ${warnings.length}`,\n );\n\n return {\n data,\n raw: rawResponse,\n continuationCount,\n warnings,\n wasSalvaged,\n };\n } catch (error) {\n console.error('[Continuation] Error during LLM call:', error);\n throw error;\n }\n}\n\nexport function buildGenericContinuationPrompt(\n context: string,\n partialResponse: string,\n attempt: number,\n maxAttempts: number = DEFAULT_MAX_CONTINUATIONS,\n): string {\n return `## CONTINUATION REQUEST (Attempt ${attempt}/${maxAttempts})\n\nYour previous response was truncated. Continue generating from where you left off.\n\n### ORIGINAL CONTEXT\n${context}\n\n### WHAT YOU GENERATED SO FAR\n\\`\\`\\`json\n${partialResponse}\n\\`\\`\\`\n\n### INSTRUCTIONS\n1. Continue from EXACTLY where the response was cut off\n2. Do NOT repeat any content already generated\n3. Complete the JSON structure properly\n4. 
Do NOT wrap your response in markdown code blocks\n\nContinue generating now:`;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAmCO,SAAS,iBACd,UACA,cACkB;AAClB,MAAI,iBAAiB,UAAU;AAC7B,UAAMA,eAAc,cAAc,QAAQ;AAC1C,WAAO;AAAA,MACL,aAAa;AAAA,MACb,QAAQ;AAAA,MACR,gBAAgB;AAAA,MAChB,qBAAqB,wBAAwB,QAAQ;AAAA,MACrD,sBAAsBA,aAAY;AAAA,MAClC,oBAAoBA,aAAY;AAAA,IAClC;AAAA,EACF;AAEA,MAAI;AACF,SAAK,MAAM,QAAQ;AACnB,WAAO,EAAE,aAAa,OAAO,QAAQ,OAAO;AAAA,EAC9C,QAAQ;AAAA,EAER;AAEA,MAAI,iBAAiB,UAAU,iBAAiB,MAAM;AACpD,UAAM,UAAU,SAAS,KAAK;AAE9B,UAAM,eACJ,QAAQ,SAAS,GAAG,KACpB,QAAQ,SAAS,GAAG,KACpB,QAAQ,SAAS,KAAK,KACtB,QAAQ,KAAK,OAAO,KACpB,QAAQ,KAAK,OAAO;AAEtB,QAAI,cAAc;AAChB,YAAMA,eAAc,cAAc,QAAQ;AAC1C,aAAO;AAAA,QACL,aAAa;AAAA,QACb,QAAQ;AAAA,QACR,gBAAgB;AAAA,QAChB,qBAAqB,wBAAwB,QAAQ;AAAA,QACrD,sBAAsBA,aAAY;AAAA,QAClC,oBAAoBA,aAAY;AAAA,MAClC;AAAA,IACF;AAEA,QAAI;AACF,YAAM,SAAS,cAAc,OAAO;AACpC,WAAK,MAAM,MAAM;AACjB,aAAO,EAAE,aAAa,OAAO,QAAQ,OAAO;AAAA,IAC9C,QAAQ;AACN,aAAO,EAAE,aAAa,OAAO,QAAQ,OAAO;AAAA,IAC9C;AAAA,EACF;AAEA,QAAM,cAAc,cAAc,QAAQ;AAC1C,MACE,YAAY,uBAAuB,KACnC,YAAY,qBAAqB,GACjC;AACA,WAAO;AAAA,MACL,aAAa;AAAA,MACb,QAAQ;AAAA,MACR,gBAAgB;AAAA,MAChB,qBAAqB,wBAAwB,QAAQ;AAAA,MACrD,sBAAsB,YAAY;AAAA,MAClC,oBAAoB,YAAY;AAAA,IAClC;AAAA,EACF;AAEA,SAAO,EAAE,aAAa,OAAO,QAAQ,OAAO;AAC9C;AAMA,SAAS,cAAc,MAOrB;AACA,MAAI,WAAW;AACf,MAAI,UAAU;AACd,MAAI,eAAe;AACnB,MAAI,gBAAgB;AACpB,MAAI,aAAa;AACjB,MAAI,cAAc;AAElB,aAAW,QAAQ,MAAM;AACvB,QAAI,SAAS;AACX,gBAAU;AACV;AAAA,IACF;AACA,QAAI,SAAS,QAAQ,UAAU;AAC7B,gBAAU;AACV;AAAA,IACF;AACA,QAAI,SAAS,KAAK;AAChB,iBAAW,CAAC;AACZ;AAAA,IACF;AACA,QAAI,SAAU;AAEd,YAAQ,MAAM;AAAA,MACZ,KAAK;AACH;AACA;AAAA,MACF,KAAK;AACH;AACA;AAAA,MACF,KAAK;AACH;AACA;AAAA,MACF,KAAK;AACH;AACA;AAAA,IACJ;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,sBAAsB,KAAK,IAAI,GAAG,eAAe,aAAa;AAAA,IAC9D,oBAAoB,KAAK,IAAI,GAAG,aAAa,WAAW;AAAA,EAC1D;AACF;AAEO,SAAS,wBAAwB,MAA8B;AACpE,QAAM,aAAa,cAAc,IAAI;AACrC,MAAI;AACF,WAAO,KAAK,MAAM,UAAU;AAAA,EAC9B,QAAQ;AAAA,EAER;AAEA,QAAM,UAAU,KAAK,KAAK;AAE1B,MAAI,QAAQ,WAAW,GAAG,GAAG;AAC3B,UAAM,oBAAoB,6BAA6B,OAAO;AAC9D,QAAI,oBAAoB,GAAG;AACzB,YAAM,SAAS,QAAQ,UAAU,GAAG,iBAAiB,IAAI;AACzD,UAAI;AACF,eAAO,KAAK,MAAM,MAAM;AAAA,MAC1B,QAAQ;AAAA,MAER;AAAA,IACF;AAAA,EACF;AAEA,MAAI,QAAQ,WAAW,GAAG,GAAG;AAC3B,UAAM,SAAS,cAAc,OAAO;AACpC,QAAI;AACF,aAAO,KAAK,MAAM,MAAM;AAAA,IAC1B,QAAQ;AACN,YAAM,oBAAoB,+BAA+B,OAAO;AAChE,UAAI,oBAAoB,GAAG;AACzB,cAAM,SAAS,QAAQ,UAAU,GAAG,iBAAiB,IAAI;AACzD,YAAI;AACF,iBAAO,KAAK,MAAM,MAAM;AAAA,QAC1B,QAAQ;AAAA,QAER;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAEA,SAAS,6BAA6B,MAAsB;AAC1D,MAAI,QAAQ;AACZ,MAAI,WAAW;AACf,MAAI,UAAU;AACd,MAAI,yBAAyB;AAE7B,WAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,UAAM,OAAO,KAAK,CAAC;AAEnB,QAAI,SAAS;AACX,gBAAU;AACV;AAAA,IACF;AACA,QAAI,SAAS,QAAQ,UAAU;AAC7B,gBAAU;AACV;AAAA,IACF;AACA,QAAI,SAAS,KAAK;AAChB,iBAAW,CAAC;AACZ;AAAA,IACF;AACA,QAAI,SAAU;AAEd,QAAI,SAAS,OAAO,SAAS,KAAK;AAChC;AAAA,IACF,WAAW,SAAS,OAAO,SAAS,KAAK;AACvC;AACA,UAAI,UAAU,GAAG;AACf,iCAAyB,IAAI;AAAA,MAC/B;AAAA,IACF,WAAW,SAAS,OAAO,UAAU,GAAG;AACtC,+BAAyB;AAAA,IAC3B;AAAA,EACF;AAEA,SAAO,yBAAyB,IAAI,yBAAyB;AAC/D;AAEA,SAAS,+BAA+B,MAAsB;AAC5D,MAAI,QAAQ;AACZ,MAAI,WAAW;AACf,MAAI,UAAU;AACd,MAAI,iBAAiB;AAErB,WAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,UAAM,OAAO,KAAK,CAAC;AAEnB,QAAI,SAAS;AACX,gBAAU;AACV;AAAA,IACF;AACA,QAAI,SAAS,QAAQ,UAAU;AAC7B,gBAAU;AACV;AAAA,IACF;AACA,QAAI,SAAS,KAAK;AAChB,iBAAW,CAAC;AACZ;AAAA,IACF;AACA,QAAI,SAAU;AAEd,QAAI,SAAS,OAAO,SAAS,KAAK;AAChC;AAAA,IACF,WAAW,SAAS,OAAO,SAAS,KAAK;AACvC;AAAA,IACF,WAAW,SAAS,OAAO,UAAU,GAAG;AACtC,uBAAiB;AAAA,IACnB;AAAA,EACF;AAEA,SAAO,iBAAiB,IAAI,iBAAiB;AAC/C;A
AEO,SAAS,kBAAkB,SAA0B;AAC1D,QAAM,UAAU,QAAQ,KAAK;AAC7B,MAAI,CAAC,QAAS,QAAO;AAErB,QAAM,WAAW,cAAc,OAAO;AACtC,MACE,SAAS,uBAAuB,KAChC,SAAS,qBAAqB,GAC9B;AACA,WAAO;AAAA,EACT;AAEA,QAAM,gBAAgB;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAEA,aAAW,WAAW,eAAe;AACnC,QAAI,QAAQ,KAAK,OAAO,EAAG,QAAO;AAAA,EACpC;AAEA,SAAO;AACT;;;ACnQA,IAAM,qBAAqB;AAC3B,IAAM,4BAA4B;AAMlC,IAAM,qCAAqC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAapC,SAAS,eACd,UACA,cACQ;AACR,QAAM,cAAc,SAAS,QAAQ;AACrC,QAAM,cAAc,aAAa,UAAU;AAE3C,MAAI,cAAc,YACf,QAAQ,iBAAiB,EAAE,EAC3B,QAAQ,YAAY,EAAE,EACtB,KAAK;AAER,MAAI,YAAY,WAAW,GAAG,GAAG;AAC/B,QAAI;AACF,YAAM,aAAa,KAAK,MAAM,cAAc,WAAW,CAAC;AACxD,YAAM,OAAO,OAAO,KAAK,UAAU;AACnC,UAAI,KAAK,WAAW,KAAK,MAAM,QAAQ,WAAW,KAAK,CAAC,CAAC,CAAC,GAAG;AAC3D,sBAAc,WAAW,KAAK,CAAC,CAAC,EAC7B,IAAI,CAAC,SAAkB,KAAK,UAAU,IAAI,CAAC,EAC3C,KAAK,KAAK;AAAA,MACf;AAAA,IACF,QAAQ;AAAA,IAER;AAAA,EACF;AAEA,MAAI,YAAY,WAAW,GAAG,KAAK,YAAY,WAAW,GAAG,GAAG;AAC9D,WAAO,cAAc;AAAA,EACvB;AAEA,QAAM,oBAAoB,cAAc,KAAK,WAAW;AACxD,QAAM,sBAAsB,YAAY,KAAK,WAAW;AAExD,MAAI,qBAAqB,qBAAqB;AAC5C,WAAO,cAAc,QAAQ;AAAA,EAC/B;AAEA,SAAO,cAAc;AACvB;AAEO,SAAS,uBAA0B,aAA+B;AACvE,UAAQ,KAAK,uDAAuD;AAEpE,MAAI;AACF,UAAM,kBAAkB,oBAAoB,WAAW,KAAK;AAC5D,UAAM,SAAS,cAAc,eAAe;AAC5C,UAAM,SAAS,KAAK,MAAM,MAAM;AAChC,YAAQ,IAAI,uDAAuD;AACnE,WAAO;AAAA,EACT,SAAS,OAAO;AACd,YAAQ,MAAM,8CAA8C,KAAK;AAAA,EACnE;AAEA,SAAO;AACT;AAMA,eAAsB,qBACpB,SACgC;AAChC,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,YAAY;AAAA,IACZ,mBAAmB;AAAA,IACnB;AAAA,IACA,2BAA2B;AAAA,EAC7B,IAAI;AAEJ,MAAI,cAAc;AAClB,MAAI,oBAAoB;AACxB,QAAM,WAAqB,CAAC;AAC5B,MAAI,cAAc;AAElB,UAAQ,IAAI,4DAA4D;AACxE,UAAQ;AAAA,IACN,8BAA8B,SAAS,wBAAwB,gBAAgB;AAAA,EACjF;AAEA,MAAI;AACF,UAAM,WAAW,MAAM,OAAO,oBAAoB;AAAA,MAChD;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAED,kBAAc,oBAAoB,SAAS,GAAG,KAAK,SAAS;AAE5D,YAAQ;AAAA,MACN,oCAAoC,YAAY,MAAM,0BAA0B,SAAS,YAAY;AAAA,IACvG;AAEA,QAAI,aAAa,iBAAiB,aAAa,SAAS,YAAY;AAEpE,WAAO,WAAW,eAAe,oBAAoB,kBAAkB;AACrE;AACA,YAAM,aAAa,uBAAuB,WAAW,MAAM,0BAA0B,iBAAiB,IAAI,gBAAgB;AAC1H,cAAQ,IAAI,kBAAkB,UAAU,EAAE;AAC1C,eAAS,KAAK,UAAU;AAExB,YAAM,aAAa;AAAA,QACjB;AAAA,QACA;AAAA,MACF;AAEA,YAAM,eAAe,MAAM,OAAO,oBAAoB;AAAA,QACpD,cAAc;AAAA,QACd,YAAY;AAAA,QACZ;AAAA,MACF,CAAC;AAED,cAAQ;AAAA,QACN,yCAAyC,aAAa,IAAI,MAAM,0BAA0B,aAAa,YAAY;AAAA,MACrH;AAEA,YAAM,sBACJ,oBAAoB,aAAa,GAAG,KAAK,aAAa;AACxD,oBAAc,eAAe,aAAa,mBAAmB;AAE7D,mBAAa,iBAAiB,aAAa,aAAa,YAAY;AAAA,IACtE;AAEA,QACE,qBAAqB,oBACrB,WAAW,aACX;AACA,cAAQ;AAAA,QACN,6CAA6C,gBAAgB;AAAA,MAC/D;AACA,eAAS;AAAA,QACP;AAAA,MACF;AACA,oBAAc;AAAA,IAChB;AAEA,UAAM,kBACJ,oBAAoB,WAAW,KAAK;AACtC,QAAI;AAEJ,QAAI;AACF,UAAI,YAAY,eAAe,GAAG;AAChC,eAAO,KAAK,MAAM,eAAe;AAAA,MACnC,OAAO;AACL,cAAM,SAAS,cAAc,eAAe;AAC5C,eAAO,KAAK,MAAM,MAAM;AACxB,YAAI,CAAC,aAAa;AAChB,mBAAS,KAAK,iDAAiD;AAAA,QACjE;AAAA,MACF;AAAA,IACF,SAAS,YAAY;AACnB,YAAM,WAAW,uBAA0B,eAAe;AAC1D,UAAI,UAAU;AACZ,eAAO;AACP,sBAAc;AACd,iBAAS,KAAK,yCAAyC;AAAA,MACzD,OAAO;AACL,cAAM,IAAI;AAAA,UACR,kCAAkC,iBAAiB,mBAAmB,UAAU;AAAA,QAClF;AAAA,MACF;AAAA,IACF;AAEA,QAAI,QAAQ;AACV,UAAI;AACF,eAAO,OAAO,MAAM,IAAI;AAAA,MAC1B,SAAS,iBAAiB;AACxB,gBAAQ;AAAA,UACN;AAAA,UACA;AAAA,QACF;AACA,iBAAS,KAAK,4BAA4B,eAAe,EAAE;AAAA,MAC7D;AAAA,IACF;AAEA,YAAQ;AAAA,MACN,2CAA2C,iBAAiB,eAAe,SAAS,MAAM;AAAA,IAC5F;AAEA,WAAO;AAAA,MACL;AAAA,MACA,KAAK;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF,SAAS,OAAO;AACd,YAAQ,MAAM,yCAAyC,KAAK;AAC5D,UAAM;AAAA,EACR;AACF;AAEO,SAAS,+BACd,SACA,iBACA,SACA,cAAsB,2BACd;AACR,SAAO,oCAAoC,OAAO,IAAI,WAAW;AAAA;AAAA;AAAA;AAAA;AAAA,EAKjE,OAAO;AAAA;AAAA;AAAA;AAAA,EAIP,eAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAUjB;","names":["bracketInfo"]}