@doclo/providers-generic-or 0.1.0 → 0.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +21 -4
- package/dist/index.js.map +1 -1
- package/package.json +3 -3
package/dist/index.js
CHANGED
@@ -1,5 +1,5 @@
 // src/generic-or.ts
-import { SchemaTranslator, combineSchemaAndUserPrompt } from "@doclo/providers-llm";
+import { SchemaTranslator, combineSchemaAndUserPrompt, calculateCacheSavings } from "@doclo/providers-llm";
 import { fetchWithTimeout, DEFAULT_LIMITS, safeJsonParse } from "@doclo/core/security";
 
 // src/known-models.ts
@@ -78,6 +78,11 @@ var KNOWN_MODEL_PREFIXES = {
   "mistralai/pixtral": { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },
   "mistralai/devstral": { supportsVision: false, supportsReasoning: false, maxContextTokens: 262144 },
   "mistralai/codestral": { supportsVision: false, supportsReasoning: false, maxContextTokens: 256e3 },
+  // Magistral reasoning models
+  "mistralai/magistral-medium-2506": { supportsVision: false, supportsReasoning: true, maxContextTokens: 40960 },
+  "mistralai/magistral-small-2506": { supportsVision: false, supportsReasoning: true, maxContextTokens: 40960 },
+  "mistralai/magistral-medium": { supportsVision: false, supportsReasoning: true, maxContextTokens: 40960 },
+  "mistralai/magistral-small": { supportsVision: false, supportsReasoning: true, maxContextTokens: 40960 },
   "mistralai/mistral-large": { supportsVision: false, supportsReasoning: false, maxContextTokens: 131072 },
   "mistralai/mistral-small": { supportsVision: false, supportsReasoning: false, maxContextTokens: 32768 },
   "mistralai/mistral-nemo": { supportsVision: false, supportsReasoning: false, maxContextTokens: 131072 },
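For context, the bundled getModelInfo helper (visible in the source map further down) resolves a model ID by longest matching prefix, so the dated Magistral IDs added here take precedence over the shorter undated prefixes. A minimal sketch of how the new entries resolve, assuming the package's public exports:

```typescript
import { getModelInfo, modelSupportsReasoning } from "@doclo/providers-generic-or";

// Longest prefix wins: the dated ID matches its own entry before the shorter
// "mistralai/magistral-medium" prefix could apply.
const info = getModelInfo("mistralai/magistral-medium-2506");
// -> { supportsVision: false, supportsReasoning: true, maxContextTokens: 40960 }

// The undated prefixes cover other Magistral variants routed through OpenRouter.
modelSupportsReasoning("mistralai/magistral-small"); // true
```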
@@ -297,19 +302,26 @@ Respond with valid JSON.`
     const reasoning_details = message?.reasoning_details;
     const costUSD = data.usage?.total_cost ?? data.usage?.cost;
     const vendor = extractVendorFromModel(this.config.model);
+    const cacheReadInputTokens = data.usage?.prompt_tokens_details?.cached_tokens ?? data.usage?.cached_tokens ?? data.usage?.cache_read_input_tokens;
+    const cacheCreationInputTokens = data.usage?.prompt_tokens_details?.cache_write_tokens ?? data.usage?.cache_creation_input_tokens;
+    const inputTokens = data.usage?.prompt_tokens;
+    const cacheSavingsPercent = calculateCacheSavings(vendor, inputTokens, cacheReadInputTokens);
     return {
       json: parsed,
       rawText: content,
       metrics: {
         costUSD,
-        inputTokens
+        inputTokens,
         outputTokens: data.usage?.completion_tokens,
         latencyMs,
         attemptNumber: 1,
         provider: vendor,
         model: this.config.model,
         responseId: data.id,
-        modelUsed: data.model
+        modelUsed: data.model,
+        cacheCreationInputTokens,
+        cacheReadInputTokens,
+        cacheSavingsPercent
       },
       reasoning,
       reasoning_details
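The new cache fields mirror OpenRouter's usage object, which exposes cached-token counts under different names depending on the upstream provider (OpenAI-style prompt_tokens_details.cached_tokens, Google-style cached_tokens, Anthropic-native cache_read_input_tokens). A minimal sketch of the fallback chain with an invented payload; calculateCacheSavings itself lives in @doclo/providers-llm and is not shown in this diff:

```typescript
// Shape of the usage block the provider now inspects (field names from the diff,
// values invented for illustration).
interface ORUsage {
  prompt_tokens?: number;
  completion_tokens?: number;
  prompt_tokens_details?: { cached_tokens?: number; cache_write_tokens?: number };
  cached_tokens?: number;             // Google-style
  cache_read_input_tokens?: number;   // Anthropic-native
  cache_creation_input_tokens?: number;
}

const usage: ORUsage = {
  prompt_tokens: 12000,
  completion_tokens: 450,
  prompt_tokens_details: { cached_tokens: 9000 },
};

// Same fallback order as the new code above:
const cacheReadInputTokens =
  usage.prompt_tokens_details?.cached_tokens ??
  usage.cached_tokens ??
  usage.cache_read_input_tokens;           // 9000

const cacheCreationInputTokens =
  usage.prompt_tokens_details?.cache_write_tokens ??
  usage.cache_creation_input_tokens;       // undefined: nothing written to cache here
```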
@@ -368,6 +380,10 @@ Respond with valid JSON.`
    * Build OpenAI-compatible message format.
    */
   async buildMessages(input) {
+    const messages = [];
+    if (input.systemPrompt) {
+      messages.push({ role: "system", content: input.systemPrompt });
+    }
     const content = [];
     if (input.text) {
       content.push({ type: "text", text: input.text });
@@ -389,7 +405,8 @@ Respond with valid JSON.`
         }
       }
     }
-    return [{ role: "user", content }];
+    messages.push({ role: "user", content });
+    return messages;
   }
   /**
    * Build reasoning configuration for models that support it.
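Taken together, these two hunks change the internal buildMessages helper from returning a single user message to returning an array that can lead with a system message; before 0.1.2 the systemPrompt field was ignored here. A sketch of the resulting shape, with hypothetical input values:

```typescript
// Hypothetical multimodal input; only systemPrompt handling is new in 0.1.2.
const input = {
  systemPrompt: "You are a document extraction assistant.",
  text: "Extract the invoice total as JSON.",
};

// With 0.1.2, buildMessages(input) resolves to roughly:
const expected = [
  { role: "system", content: input.systemPrompt },
  { role: "user", content: [{ type: "text", text: input.text }] },
];
console.log(JSON.stringify(expected, null, 2));
```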
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/generic-or.ts","../src/known-models.ts","../src/index.ts"],"sourcesContent":["import type {\n LLMProvider,\n ProviderConfig,\n MultimodalInput,\n UnifiedSchema,\n LLMResponse,\n ProviderCapabilities,\n JsonMode,\n ReasoningConfig,\n ImageInput,\n PDFInput,\n ReasoningDetail\n} from \"@doclo/providers-llm\";\nimport { SchemaTranslator, combineSchemaAndUserPrompt } from \"@doclo/providers-llm\";\nimport { fetchWithTimeout, DEFAULT_LIMITS, safeJsonParse } from \"@doclo/core/security\";\nimport { getModelInfo, type KnownModelInfo } from \"./known-models.js\";\n\n/**\n * Extract vendor name from OpenRouter model identifier.\n * Example: \"qwen/qwen3-235b-a22b\" -> \"qwen\"\n */\nfunction extractVendorFromModel(model: string): string {\n const slashIndex = model.indexOf('/');\n return slashIndex > 0 ? model.substring(0, slashIndex) : 'generic';\n}\n\n/**\n * GenericORProvider - A universal provider for any OpenRouter model.\n *\n * Works with all OpenRouter models using OpenAI-compatible API format.\n * Supports Qwen, Llama, DeepSeek, GLM, Kimi, Mistral, and any other model\n * available through OpenRouter.\n *\n * Features:\n * - Automatic capability detection for known model families\n * - Response healing plugin for better JSON reliability\n * - PDF-to-image conversion for VLM models\n * - Reasoning token extraction for supported models\n *\n * @example\n * ```typescript\n * const provider = new GenericORProvider({\n * provider: 'generic-or',\n * model: 'qwen/qwen3-235b-a22b',\n * apiKey: process.env.OPENROUTER_API_KEY!,\n * });\n *\n * const result = await provider.completeJson({\n * input: { text: \"Extract the main topics from this text...\" },\n * schema: TopicsSchema,\n * mode: 'strict',\n * });\n * ```\n */\nexport class GenericORProvider implements LLMProvider {\n readonly name: string;\n readonly capabilities: ProviderCapabilities;\n\n private config: ProviderConfig;\n private translator: SchemaTranslator;\n private limits: typeof DEFAULT_LIMITS;\n private modelInfo: KnownModelInfo;\n\n constructor(config: ProviderConfig) {\n this.config = config;\n this.modelInfo = getModelInfo(config.model);\n\n const vendor = extractVendorFromModel(config.model);\n this.name = `${vendor}:${config.model}`;\n this.translator = new SchemaTranslator();\n\n this.capabilities = {\n supportsStructuredOutput: true, // All OpenRouter models support this via response_format\n supportsStreaming: false, // Not with structured outputs\n supportsImages: this.modelInfo.supportsVision,\n supportsPDFs: this.modelInfo.supportsVision, // Via image conversion\n maxPDFPages: undefined,\n maxPDFSize: undefined,\n maxContextTokens: this.modelInfo.maxContextTokens || 32768\n };\n\n // Merge custom limits with defaults\n this.limits = {\n ...DEFAULT_LIMITS,\n ...(config.limits || {})\n };\n }\n\n async completeJson<T>(params: {\n // Support both interfaces:\n // - Internal: input: MultimodalInput\n // - Node/CoreVLM: prompt: string | MultimodalInput\n input?: MultimodalInput;\n prompt?: string | MultimodalInput;\n schema?: UnifiedSchema<T>;\n mode?: JsonMode;\n max_tokens?: number;\n reasoning?: ReasoningConfig;\n embedSchemaInPrompt?: boolean;\n }): Promise<LLMResponse<T>> {\n const startTime = Date.now();\n\n // Normalize input: handle both 'input' and 'prompt' parameters\n let rawInput: MultimodalInput;\n if (params.input) {\n rawInput = params.input;\n } else if (params.prompt) {\n if (typeof params.prompt === 'string') {\n rawInput = { text: params.prompt };\n } else {\n rawInput = 
params.prompt as MultimodalInput;\n }\n } else {\n rawInput = { text: '' };\n }\n\n // Determine mode: default to 'strict', auto-relaxed if schema omitted\n const mode = params.mode || (params.schema ? 'strict' : 'relaxed');\n\n // Validate: strict mode requires schema\n if (mode === 'strict' && !params.schema) {\n throw new Error('schema is required when mode is \"strict\"');\n }\n\n // Convert PDFs to images if model supports vision\n const processedInput = await this.preprocessInput(rawInput);\n\n // Embed schema in prompt if enabled (default: true) and schema exists\n const shouldEmbedSchema = params.embedSchemaInPrompt !== false && params.schema;\n let enhancedInput = processedInput;\n\n if (shouldEmbedSchema) {\n const jsonSchema = this.translator.convertZodIfNeeded(params.schema!);\n const enhancedText = combineSchemaAndUserPrompt(\n jsonSchema,\n processedInput.text || ''\n );\n enhancedInput = {\n ...processedInput,\n text: enhancedText\n };\n } else if (mode === 'relaxed') {\n // In relaxed mode without schema, we still need to mention \"JSON\" in the prompt\n // because some providers (e.g., Alibaba/Qwen) require the word \"json\" in messages\n // when using response_format: { type: \"json_object\" }\n const text = processedInput.text || '';\n const needsJsonHint = !text.toLowerCase().includes('json');\n if (needsJsonHint) {\n enhancedInput = {\n ...processedInput,\n text: `${text}\\n\\nRespond with valid JSON.`\n };\n }\n }\n\n // Build messages with multimodal content\n const messages = await this.buildMessages(enhancedInput);\n\n // Build request body\n const requestBody: Record<string, unknown> = {\n model: this.config.model,\n messages,\n max_tokens: params.max_tokens || 4096,\n stream: false,\n // Enable usage tracking for OpenRouter cost info\n usage: { include: true },\n // Enable response healing plugin for better JSON reliability\n plugins: [{ id: 'response-healing' }]\n };\n\n if (mode === 'relaxed') {\n // Relaxed mode: just request valid JSON without strict schema\n requestBody.response_format = { type: \"json_object\" };\n\n if (process.env.DEBUG_PROVIDERS) {\n console.log('[GenericORProvider] Using relaxed JSON mode (json_object)');\n }\n } else {\n // Strict mode: use json_schema with strict validation\n // Note: Not all models support json_schema natively, but response-healing\n // plugin will extract JSON from models that only support instruction-following.\n // We don't use require_parameters: true to allow broader model compatibility.\n const schema = this.translator.toOpenAISchema(params.schema!);\n\n // Recursively fix schema for strict mode requirements\n this.fixSchemaRecursive(schema as Record<string, unknown>);\n\n if (process.env.DEBUG_PROVIDERS) {\n console.log('[GenericORProvider] Using strict JSON mode (json_schema)');\n }\n\n requestBody.response_format = {\n type: \"json_schema\",\n json_schema: {\n name: \"extraction\",\n schema\n }\n };\n }\n\n // Add reasoning configuration if provided and model supports it\n if (params.reasoning && this.modelInfo.supportsReasoning) {\n requestBody.reasoning = this.buildReasoningConfig(params.reasoning);\n }\n\n // Make API call to OpenRouter\n const headers: Record<string, string> = {\n \"Content-Type\": \"application/json\",\n \"Authorization\": `Bearer ${this.config.apiKey}`,\n \"HTTP-Referer\": \"https://github.com/docloai/sdk\",\n \"X-Title\": \"Doclo SDK\"\n };\n\n const response = await fetchWithTimeout(\n \"https://openrouter.ai/api/v1/chat/completions\",\n {\n method: \"POST\",\n headers,\n body: 
JSON.stringify(requestBody)\n },\n this.limits.REQUEST_TIMEOUT\n );\n\n if (!response.ok) {\n const error = await response.text();\n throw new Error(`OpenRouter API error (${response.status}): ${error}`);\n }\n\n const data = await response.json() as {\n choices?: Array<{\n message?: {\n content?: string;\n reasoning?: string;\n reasoning_details?: Array<{ type: string; summary?: string; text?: string; data?: string; id: string | null; format: string; index?: number }>;\n };\n }>;\n usage?: {\n prompt_tokens?: number;\n completion_tokens?: number;\n total_cost?: number;\n cost?: number;\n };\n id?: string;\n model?: string;\n };\n const latencyMs = Date.now() - startTime;\n\n // Parse response\n const message = data.choices?.[0]?.message;\n const content = message?.content ?? \"{}\";\n const parsed = safeJsonParse(content) as T;\n\n // Extract reasoning fields if present\n const reasoning = message?.reasoning;\n const reasoning_details = message?.reasoning_details as ReasoningDetail[] | undefined;\n\n // Get cost from OpenRouter response\n const costUSD = data.usage?.total_cost ?? data.usage?.cost;\n\n const vendor = extractVendorFromModel(this.config.model);\n\n return {\n json: parsed as T,\n rawText: content,\n metrics: {\n costUSD,\n inputTokens: data.usage?.prompt_tokens,\n outputTokens: data.usage?.completion_tokens,\n latencyMs,\n attemptNumber: 1,\n provider: vendor,\n model: this.config.model,\n responseId: data.id,\n modelUsed: data.model\n },\n reasoning,\n reasoning_details\n };\n }\n\n /**\n * Preprocess input to convert PDFs to images for VLM models.\n */\n private async preprocessInput(input: MultimodalInput): Promise<MultimodalInput> {\n // Handle undefined/null input\n if (!input) {\n return { text: '' };\n }\n\n if (!input.pdfs?.length || !this.modelInfo.supportsVision) {\n return input;\n }\n\n // Convert PDFs to images\n const pdfImages = await this.convertPDFsToImages(input.pdfs);\n\n return {\n ...input,\n images: [...(input.images || []), ...pdfImages],\n pdfs: undefined // Consumed\n };\n }\n\n /**\n * Convert PDFs to images using pdf-to-img.\n */\n private async convertPDFsToImages(pdfs: PDFInput[]): Promise<ImageInput[]> {\n const images: ImageInput[] = [];\n\n // Dynamically import pdf-to-img to avoid bundling issues\n const { pdf } = await import('pdf-to-img');\n\n for (const pdfInput of pdfs) {\n let pdfBuffer: Buffer;\n\n if (pdfInput.base64) {\n // Extract raw base64 from data URL if needed\n const base64Data = this.extractBase64(pdfInput.base64);\n pdfBuffer = Buffer.from(base64Data, 'base64');\n } else if (pdfInput.url) {\n // Fetch PDF from URL\n const response = await fetchWithTimeout(pdfInput.url, {}, this.limits.REQUEST_TIMEOUT);\n if (!response.ok) {\n throw new Error(`Failed to fetch PDF from ${pdfInput.url}: ${response.status}`);\n }\n const arrayBuffer = await response.arrayBuffer();\n pdfBuffer = Buffer.from(arrayBuffer);\n } else {\n continue; // Skip invalid entries\n }\n\n // Convert PDF pages to images\n const pages = await pdf(pdfBuffer, { scale: 2 });\n\n for await (const page of pages) {\n images.push({\n base64: page.toString('base64'),\n mimeType: 'image/png'\n });\n }\n }\n\n return images;\n }\n\n /**\n * Build OpenAI-compatible message format.\n */\n private async buildMessages(input: MultimodalInput): Promise<Array<{ role: string; content: Array<{ type: string; text?: string; image_url?: { url: string } }> }>> {\n const content: Array<{ type: string; text?: string; image_url?: { url: string } }> = [];\n\n // Add text\n if (input.text) 
{\n content.push({ type: \"text\", text: input.text });\n }\n\n // Add images\n if (input.images && input.images.length > 0) {\n for (const image of input.images) {\n if (image.url) {\n content.push({\n type: \"image_url\",\n image_url: { url: image.url }\n });\n } else if (image.base64) {\n content.push({\n type: \"image_url\",\n image_url: {\n url: `data:${image.mimeType};base64,${this.extractBase64(image.base64)}`\n }\n });\n }\n }\n }\n\n return [{ role: \"user\", content }];\n }\n\n /**\n * Build reasoning configuration for models that support it.\n */\n private buildReasoningConfig(reasoning: ReasoningConfig): Record<string, unknown> {\n const config: Record<string, unknown> = {};\n\n // Most open source models use effort-based reasoning\n if (reasoning.effort) {\n config.effort = reasoning.effort;\n } else if (reasoning.enabled) {\n config.effort = 'medium'; // Default to medium\n }\n\n // Add exclude flag if specified\n if (reasoning.exclude !== undefined) {\n config.exclude = reasoning.exclude;\n }\n\n return Object.keys(config).length > 0 ? config : {};\n }\n\n /**\n * Recursively fix schema for strict mode requirements.\n * - All properties must be required\n * - additionalProperties must be false\n */\n private fixSchemaRecursive(obj: Record<string, unknown>): void {\n if (obj && typeof obj === 'object') {\n if (obj.type === 'object' && obj.properties) {\n const properties = obj.properties as Record<string, unknown>;\n const allProps = Object.keys(properties);\n obj.required = allProps;\n obj.additionalProperties = false;\n\n // Recursively fix nested properties\n for (const key of allProps) {\n this.fixSchemaRecursive(properties[key] as Record<string, unknown>);\n }\n } else if (obj.type === 'array' && obj.items) {\n this.fixSchemaRecursive(obj.items as Record<string, unknown>);\n }\n }\n }\n\n /**\n * Extract base64 data from a data URL or return as-is if already raw base64.\n */\n private extractBase64(input: string): string {\n if (input.startsWith('data:')) {\n const base64Part = input.split(',')[1];\n if (!base64Part) {\n throw new Error(`Invalid data URL format: ${input.substring(0, 50)}`);\n }\n return base64Part;\n }\n return input;\n }\n}\n","/**\n * Known model capabilities for accurate reporting.\n * Unknown models get sensible defaults and work with OpenRouter.\n */\n\nexport interface KnownModelInfo {\n supportsVision: boolean;\n supportsReasoning: boolean;\n maxContextTokens?: number;\n}\n\n/**\n * Prefix-based matching for model families.\n * More specific prefixes should come before less specific ones.\n * Data sourced from OpenRouter API: https://openrouter.ai/api/v1/models\n */\nexport const KNOWN_MODEL_PREFIXES: Record<string, KnownModelInfo> = {\n // Qwen models - VL variants support vision, thinking variants support reasoning\n 'qwen/qwen3-vl-235b-a22b-thinking': { supportsVision: true, supportsReasoning: true, maxContextTokens: 262144 },\n 'qwen/qwen3-vl-235b-a22b': { supportsVision: true, supportsReasoning: false, maxContextTokens: 262144 },\n 'qwen/qwen3-vl-30b-a3b-thinking': { supportsVision: true, supportsReasoning: true, maxContextTokens: 131072 },\n 'qwen/qwen3-vl-30b-a3b': { supportsVision: true, supportsReasoning: false, maxContextTokens: 262144 },\n 'qwen/qwen3-vl-8b-thinking': { supportsVision: true, supportsReasoning: true, maxContextTokens: 256000 },\n 'qwen/qwen3-vl-8b': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },\n 'qwen/qwen3-vl': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 
},\n 'qwen/qwen-vl-max': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },\n 'qwen/qwen-vl-plus': { supportsVision: true, supportsReasoning: false, maxContextTokens: 7500 },\n 'qwen/qwen2.5-vl': { supportsVision: true, supportsReasoning: false, maxContextTokens: 32768 },\n 'qwen/qwq': { supportsVision: false, supportsReasoning: true, maxContextTokens: 32768 },\n 'qwen/qwen3-235b-a22b-thinking': { supportsVision: false, supportsReasoning: true, maxContextTokens: 262144 },\n 'qwen/qwen3-235b-a22b': { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },\n 'qwen/qwen3-30b-a3b-thinking': { supportsVision: false, supportsReasoning: true, maxContextTokens: 32768 },\n 'qwen/qwen3-30b-a3b': { supportsVision: false, supportsReasoning: true, maxContextTokens: 262144 },\n 'qwen/qwen3-next-80b-a3b-thinking': { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },\n 'qwen/qwen3-next-80b-a3b': { supportsVision: false, supportsReasoning: false, maxContextTokens: 262144 },\n 'qwen/qwen3-coder': { supportsVision: false, supportsReasoning: true, maxContextTokens: 262144 },\n 'qwen/qwen3-max': { supportsVision: false, supportsReasoning: false, maxContextTokens: 256000 },\n 'qwen/qwen3-32b': { supportsVision: false, supportsReasoning: true, maxContextTokens: 40960 },\n 'qwen/qwen3-14b': { supportsVision: false, supportsReasoning: true, maxContextTokens: 40960 },\n 'qwen/qwen3-8b': { supportsVision: false, supportsReasoning: true, maxContextTokens: 128000 },\n 'qwen/qwen3-4b': { supportsVision: false, supportsReasoning: true, maxContextTokens: 40960 },\n 'qwen/qwen-plus-2025-07-28:thinking': { supportsVision: false, supportsReasoning: true, maxContextTokens: 1000000 },\n 'qwen/qwen-plus': { supportsVision: false, supportsReasoning: false, maxContextTokens: 1000000 },\n 'qwen/qwen-turbo': { supportsVision: false, supportsReasoning: false, maxContextTokens: 1000000 },\n 'qwen/qwen-max': { supportsVision: false, supportsReasoning: false, maxContextTokens: 32768 },\n 'qwen/qwen-2.5': { supportsVision: false, supportsReasoning: false, maxContextTokens: 32768 },\n\n // Llama 4 models (vision capable)\n 'meta-llama/llama-4-maverick': { supportsVision: true, supportsReasoning: false, maxContextTokens: 1048576 },\n 'meta-llama/llama-4-scout': { supportsVision: true, supportsReasoning: false, maxContextTokens: 327680 },\n 'meta-llama/llama-guard-4': { supportsVision: true, supportsReasoning: false, maxContextTokens: 163840 },\n\n // Llama 3.x models - 3.2 vision variants support images\n 'meta-llama/llama-3.2-90b-vision': { supportsVision: true, supportsReasoning: false, maxContextTokens: 32768 },\n 'meta-llama/llama-3.2-11b-vision': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },\n 'meta-llama/llama-3.3': { supportsVision: false, supportsReasoning: false, maxContextTokens: 131072 },\n 'meta-llama/llama-3.2': { supportsVision: false, supportsReasoning: false, maxContextTokens: 131072 },\n 'meta-llama/llama-3.1': { supportsVision: false, supportsReasoning: false, maxContextTokens: 131072 },\n 'meta-llama/llama-3': { supportsVision: false, supportsReasoning: false, maxContextTokens: 8192 },\n\n // DeepSeek models - all support reasoning except base chat\n 'deepseek/deepseek-r1': { supportsVision: false, supportsReasoning: true, maxContextTokens: 163840 },\n 'deepseek/deepseek-v3': { supportsVision: false, supportsReasoning: true, maxContextTokens: 163840 },\n 'deepseek/deepseek-chat-v3': { supportsVision: false, 
supportsReasoning: true, maxContextTokens: 163840 },\n 'deepseek/deepseek-chat': { supportsVision: false, supportsReasoning: false, maxContextTokens: 163840 },\n 'deepseek/deepseek-prover': { supportsVision: false, supportsReasoning: false, maxContextTokens: 163840 },\n\n // GLM models (Zhipu AI / Z.AI)\n 'z-ai/glm-4.6v': { supportsVision: true, supportsReasoning: true, maxContextTokens: 131072 },\n 'z-ai/glm-4.5v': { supportsVision: true, supportsReasoning: true, maxContextTokens: 65536 },\n 'z-ai/glm-4.6': { supportsVision: false, supportsReasoning: true, maxContextTokens: 202752 },\n 'z-ai/glm-4.5-air': { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },\n 'z-ai/glm-4.5': { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },\n 'z-ai/glm-4-32b': { supportsVision: false, supportsReasoning: false, maxContextTokens: 128000 },\n 'thudm/glm-4.1v-9b-thinking': { supportsVision: true, supportsReasoning: true, maxContextTokens: 65536 },\n 'thudm/glm': { supportsVision: false, supportsReasoning: false, maxContextTokens: 65536 },\n\n // Kimi / Moonshot models\n 'moonshotai/kimi-dev-72b': { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },\n 'moonshotai/kimi-k2-thinking': { supportsVision: false, supportsReasoning: true, maxContextTokens: 262144 },\n 'moonshotai/kimi-k2-0905': { supportsVision: false, supportsReasoning: false, maxContextTokens: 262144 },\n 'moonshotai/kimi-k2': { supportsVision: false, supportsReasoning: false, maxContextTokens: 131072 },\n\n // Mistral models - Mistral 3.x family supports vision\n 'mistralai/mistral-large-2512': { supportsVision: true, supportsReasoning: false, maxContextTokens: 262144 },\n 'mistralai/mistral-medium-3.1': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },\n 'mistralai/mistral-medium-3': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },\n 'mistralai/mistral-small-3.2': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },\n 'mistralai/mistral-small-3.1': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },\n 'mistralai/ministral-14b-2512': { supportsVision: true, supportsReasoning: false, maxContextTokens: 262144 },\n 'mistralai/ministral-8b-2512': { supportsVision: true, supportsReasoning: false, maxContextTokens: 262144 },\n 'mistralai/ministral-3b-2512': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },\n 'mistralai/pixtral-large': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },\n 'mistralai/pixtral-12b': { supportsVision: true, supportsReasoning: false, maxContextTokens: 32768 },\n 'mistralai/pixtral': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },\n 'mistralai/devstral': { supportsVision: false, supportsReasoning: false, maxContextTokens: 262144 },\n 'mistralai/codestral': { supportsVision: false, supportsReasoning: false, maxContextTokens: 256000 },\n 'mistralai/mistral-large': { supportsVision: false, supportsReasoning: false, maxContextTokens: 131072 },\n 'mistralai/mistral-small': { supportsVision: false, supportsReasoning: false, maxContextTokens: 32768 },\n 'mistralai/mistral-nemo': { supportsVision: false, supportsReasoning: false, maxContextTokens: 131072 },\n 'mistralai/mixtral': { supportsVision: false, supportsReasoning: false, maxContextTokens: 65536 },\n 'mistralai/mistral-7b': { supportsVision: false, supportsReasoning: false, maxContextTokens: 32768 },\n\n // xAI 
Grok models - Grok 4 supports vision and reasoning\n 'x-ai/grok-4.1-fast': { supportsVision: true, supportsReasoning: true, maxContextTokens: 2000000 },\n 'x-ai/grok-4-fast': { supportsVision: true, supportsReasoning: true, maxContextTokens: 2000000 },\n 'x-ai/grok-4': { supportsVision: true, supportsReasoning: true, maxContextTokens: 256000 },\n 'x-ai/grok-code-fast': { supportsVision: false, supportsReasoning: true, maxContextTokens: 256000 },\n 'x-ai/grok-3-mini': { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },\n 'x-ai/grok-3': { supportsVision: false, supportsReasoning: false, maxContextTokens: 131072 },\n\n // Cohere Command models\n 'cohere/command-a': { supportsVision: false, supportsReasoning: false, maxContextTokens: 256000 },\n 'cohere/command-r': { supportsVision: false, supportsReasoning: false, maxContextTokens: 128000 },\n\n // Gemma models (Google open source) - Gemma 3 supports vision\n 'google/gemma-3': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },\n 'google/gemma-3n': { supportsVision: false, supportsReasoning: false, maxContextTokens: 32768 },\n 'google/gemma-2': { supportsVision: false, supportsReasoning: false, maxContextTokens: 8192 },\n\n // Phi models (Microsoft) - phi-4-multimodal supports vision, phi-4-reasoning supports reasoning\n 'microsoft/phi-4-multimodal': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },\n 'microsoft/phi-4-reasoning': { supportsVision: false, supportsReasoning: true, maxContextTokens: 32768 },\n 'microsoft/phi-4': { supportsVision: false, supportsReasoning: false, maxContextTokens: 16384 },\n 'microsoft/phi-3': { supportsVision: false, supportsReasoning: false, maxContextTokens: 128000 },\n\n // OpenGVLab InternVL\n 'opengvlab/internvl3': { supportsVision: true, supportsReasoning: false, maxContextTokens: 32768 },\n 'opengvlab/internvl2': { supportsVision: true, supportsReasoning: false, maxContextTokens: 32768 },\n\n // StepFun AI - step3 supports both vision and reasoning\n 'stepfun-ai/step3': { supportsVision: true, supportsReasoning: true, maxContextTokens: 65536 },\n 'stepfun-ai/step2': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },\n 'stepfun-ai/step1': { supportsVision: true, supportsReasoning: false, maxContextTokens: 32768 },\n\n // NVIDIA Nemotron - VL and ultra models support reasoning\n 'nvidia/nemotron-nano-12b-v2-vl': { supportsVision: true, supportsReasoning: true, maxContextTokens: 131072 },\n 'nvidia/nemotron-nano-9b-v2': { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },\n 'nvidia/llama-3.3-nemotron-super': { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },\n 'nvidia/llama-3.1-nemotron-ultra': { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },\n 'nvidia/llama-3.1-nemotron': { supportsVision: false, supportsReasoning: false, maxContextTokens: 131072 },\n\n // Amazon Nova models - vision capable\n 'amazon/nova-2-lite': { supportsVision: true, supportsReasoning: true, maxContextTokens: 1000000 },\n 'amazon/nova-premier': { supportsVision: true, supportsReasoning: false, maxContextTokens: 1000000 },\n 'amazon/nova-pro': { supportsVision: true, supportsReasoning: false, maxContextTokens: 300000 },\n 'amazon/nova-lite': { supportsVision: true, supportsReasoning: false, maxContextTokens: 300000 },\n 'amazon/nova-micro': { supportsVision: false, supportsReasoning: false, maxContextTokens: 128000 },\n\n // Baidu ERNIE models - VL 
variants support vision\n 'baidu/ernie-4.5-vl': { supportsVision: true, supportsReasoning: true, maxContextTokens: 123000 },\n 'baidu/ernie-4.5-thinking': { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },\n 'baidu/ernie-4.5': { supportsVision: false, supportsReasoning: false, maxContextTokens: 123000 },\n\n // ByteDance models\n 'bytedance/ui-tars': { supportsVision: true, supportsReasoning: false, maxContextTokens: 128000 },\n\n // MiniMax models\n 'minimax/minimax-01': { supportsVision: true, supportsReasoning: false, maxContextTokens: 1000192 },\n 'minimax/minimax-m': { supportsVision: false, supportsReasoning: true, maxContextTokens: 1000000 },\n\n // Tencent Hunyuan\n 'tencent/hunyuan': { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },\n\n // Alibaba Tongyi\n 'alibaba/tongyi-deepresearch': { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },\n\n // Other notable models\n 'databricks/dbrx': { supportsVision: false, supportsReasoning: false, maxContextTokens: 32768 },\n};\n\n/**\n * Default capabilities for unknown models.\n * Assumes a basic text-only model with reasonable defaults.\n */\nexport const DEFAULT_MODEL_INFO: KnownModelInfo = {\n supportsVision: false,\n supportsReasoning: false,\n maxContextTokens: 32768,\n};\n\n/**\n * Get model capabilities from registry or return defaults.\n * Uses prefix matching - more specific prefixes should be listed first.\n *\n * @param model - Full model ID (e.g., \"qwen/qwen3-235b-a22b\")\n * @returns Model capabilities (known or default)\n */\nexport function getModelInfo(model: string): KnownModelInfo {\n // Sort prefixes by length (longest first) for most specific match\n const sortedPrefixes = Object.keys(KNOWN_MODEL_PREFIXES).sort((a, b) => b.length - a.length);\n\n for (const prefix of sortedPrefixes) {\n if (model.startsWith(prefix)) {\n return KNOWN_MODEL_PREFIXES[prefix];\n }\n }\n\n return DEFAULT_MODEL_INFO;\n}\n\n/**\n * Check if a model is known to support vision.\n *\n * @param model - Full model ID\n * @returns true if model supports vision input\n */\nexport function modelSupportsVision(model: string): boolean {\n return getModelInfo(model).supportsVision;\n}\n\n/**\n * Check if a model is known to support reasoning tokens.\n *\n * @param model - Full model ID\n * @returns true if model supports reasoning/thinking mode\n */\nexport function modelSupportsReasoning(model: string): boolean {\n return getModelInfo(model).supportsReasoning;\n}\n","export { GenericORProvider } from './generic-or.js';\nexport {\n getModelInfo,\n modelSupportsVision,\n modelSupportsReasoning,\n KNOWN_MODEL_PREFIXES,\n DEFAULT_MODEL_INFO,\n type KnownModelInfo\n} from './known-models.js';\n\nimport { GenericORProvider } from './generic-or.js';\nimport { registerProvider } from '@doclo/providers-llm';\n\n// Auto-register the provider when this package is imported\nregisterProvider('generic-or', (config) => new 
GenericORProvider(config));\n"],"mappings":";AAaA,SAAS,kBAAkB,kCAAkC;AAC7D,SAAS,kBAAkB,gBAAgB,qBAAqB;;;ACEzD,IAAM,uBAAuD;AAAA;AAAA,EAElE,oCAAoC,EAAE,gBAAgB,MAAM,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EAC9G,2BAA2B,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EACtG,kCAAkC,EAAE,gBAAgB,MAAM,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EAC5G,yBAAyB,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EACpG,6BAA6B,EAAE,gBAAgB,MAAM,mBAAmB,MAAM,kBAAkB,MAAO;AAAA,EACvG,oBAAoB,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EAC/F,iBAAiB,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EAC5F,oBAAoB,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EAC/F,qBAAqB,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,KAAK;AAAA,EAC9F,mBAAmB,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,MAAM;AAAA,EAC7F,YAAY,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,MAAM;AAAA,EACtF,iCAAiC,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EAC5G,wBAAwB,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EACnG,+BAA+B,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,MAAM;AAAA,EACzG,sBAAsB,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EACjG,oCAAoC,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EAC/G,2BAA2B,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EACvG,oBAAoB,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EAC/F,kBAAkB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,MAAO;AAAA,EAC9F,kBAAkB,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,MAAM;AAAA,EAC5F,kBAAkB,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,MAAM;AAAA,EAC5F,iBAAiB,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,MAAO;AAAA,EAC5F,iBAAiB,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,MAAM;AAAA,EAC3F,sCAAsC,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,IAAQ;AAAA,EAClH,kBAAkB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,IAAQ;AAAA,EAC/F,mBAAmB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,IAAQ;AAAA,EAChG,iBAAiB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,MAAM;AAAA,EAC5F,iBAAiB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,MAAM;AAAA;AAAA,EAG5F,+BAA+B,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,QAAQ;AAAA,EAC3G,4BAA4B,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EACvG,4BAA4B,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA;AAAA,EAGvG,mCAAmC,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,MAAM;AAAA,EAC7G,mCAAmC,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EAC9G,wBAAwB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EACpG,wBAAwB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EACpG,wBAAwB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EACpG,sBAAsB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,KAAK;AAAA;AAAA,EAGhG,wBAAwB,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EACnG,wBAAwB,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EACnG,6BAA6B,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EACxG,0BAA0B,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EACtG,4BAA4B,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,OAAO;AAAA;AAAA,EAGxG,iBAAiB,EAAE,gBAAgB,MAAM,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EAC3F,iBAAiB,EAAE,gBAAgB,MAAM,mBAAmB,MAAM,kBAAkB,MAAM;AAAA,EAC1F,gBAAgB,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EAC3F,oBAAoB,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EAC/F,gBAAgB,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EAC3F,kBAAkB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,MAAO;AAAA,EAC9F,8BAA8B,EAAE,gBAAgB,MAAM,mBAAmB,MAAM,kBAAkB,MAAM;AAAA,EACvG,aAAa,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,MAAM;AAAA;AAAA,EAGxF,2BAA2B,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EACtG,+BAA+B,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EAC1G,2BAA2B,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EACvG,sBAAsB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,OAAO;AAAA;AAAA,EAGlG,gCAAgC,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EAC3G,gCAAgC,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EAC3G,8BAA8B,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EACzG,+BAA+B,EAAE,gBAAgB,MAAM,mBAAm
B,OAAO,kBAAkB,OAAO;AAAA,EAC1G,+BAA+B,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EAC1G,gCAAgC,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EAC3G,+BAA+B,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EAC1G,+BAA+B,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EAC1G,2BAA2B,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EACtG,yBAAyB,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,MAAM;AAAA,EACnG,qBAAqB,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EAChG,sBAAsB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EAClG,uBAAuB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,MAAO;AAAA,EACnG,2BAA2B,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EACvG,2BAA2B,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,MAAM;AAAA,EACtG,0BAA0B,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EACtG,qBAAqB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,MAAM;AAAA,EAChG,wBAAwB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,MAAM;AAAA;AAAA,EAGnG,sBAAsB,EAAE,gBAAgB,MAAM,mBAAmB,MAAM,kBAAkB,IAAQ;AAAA,EACjG,oBAAoB,EAAE,gBAAgB,MAAM,mBAAmB,MAAM,kBAAkB,IAAQ;AAAA,EAC/F,eAAe,EAAE,gBAAgB,MAAM,mBAAmB,MAAM,kBAAkB,MAAO;AAAA,EACzF,uBAAuB,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,MAAO;AAAA,EAClG,oBAAoB,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EAC/F,eAAe,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,OAAO;AAAA;AAAA,EAG3F,oBAAoB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,MAAO;AAAA,EAChG,oBAAoB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,MAAO;AAAA;AAAA,EAGhG,kBAAkB,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EAC7F,mBAAmB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,MAAM;AAAA,EAC9F,kBAAkB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,KAAK;AAAA;AAAA,EAG5F,8BAA8B,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EACzG,6BAA6B,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,MAAM;AAAA,EACvG,mBAAmB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,MAAM;AAAA,EAC9F,mBAAmB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,MAAO;AAAA;AAAA,EAG/F,uBAAuB,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,MAAM;AAAA,EACjG,uBAAuB,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,MAAM;AAAA;AAAA,EAGjG,oBAAoB,EAAE,gBAAgB,MAAM,mBAAmB,MAAM,kBAAkB,MAAM;AAAA,EAC7F,oBAAoB,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EAC/F,oBAAoB,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,MAAM;AAAA;AAAA,EAG9F,kCAAkC,EAAE,gBAAgB,MAAM,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EAC5G,8BAA8B,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EACzG,mCAAmC,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EAC9G,mCAAmC,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EAC9G,6BAA6B,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,OAAO;AAAA;AAAA,EAGzG,sBAAsB,EAAE,gBAAgB,MAAM,mBAAmB,MAAM,kBAAkB,IAAQ;AAAA,EACjG,uBAAuB,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,IAAQ;AAAA,EACnG,mBAAmB,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,IAAO;AAAA,EAC9F,oBAAoB,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,IAAO;AAAA,EAC/F,qBAAqB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,MAAO;AAAA;AAAA,EAGjG,sBAAsB,EAAE,gBAAgB,MAAM,mBAAmB,MAAM,kBAAkB,MAAO;AAAA,EAChG,4BAA4B,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EACvG,mBAAmB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,MAAO;AAAA;AAAA,EAG/F,qBAAqB,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,MAAO;AAAA;AAAA,EAGhG,sBAAsB,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,QAAQ;AAAA,EAClG,qBAAqB,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,IAAQ;AAAA;AAAA,EAGjG,mBAAmB,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA;AAAA,EAG9F,+BAA+B,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA;AAAA,EAG1G,mBAAmB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,MAAM;AAChG;AAMO,IAAM,qBAAqC;AAAA,EAChD,gBAAgB;AAAA,EAChB,mBAAmB;AAAA,EACnB,kBAAkB;AACpB;AASO,SAAS,aAAa,OAA+B;AAE1D,QAAM,iBAAiB,OAAO,KAAK,oBAAoB,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,SAAS,EAAE,MAAM;AAE3F,aAAW,UAAU,gBAAgB;AACnC,QAAI,MAAM,WAAW,MAAM,GAAG;AAC5B,aAAO,qBAAqB,MAAM;AAAA,IACpC;AAAA,EACF;AAEA,SAAO;AACT;AAQO,SAAS,oBAAoB,OAAwB;AAC1D,SAAO,aAAa,KAAK,EA
AE;AAC7B;AAQO,SAAS,uBAAuB,OAAwB;AAC7D,SAAO,aAAa,KAAK,EAAE;AAC7B;;;ADtMA,SAAS,uBAAuB,OAAuB;AACrD,QAAM,aAAa,MAAM,QAAQ,GAAG;AACpC,SAAO,aAAa,IAAI,MAAM,UAAU,GAAG,UAAU,IAAI;AAC3D;AA8BO,IAAM,oBAAN,MAA+C;AAAA,EAC3C;AAAA,EACA;AAAA,EAED;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAER,YAAY,QAAwB;AAClC,SAAK,SAAS;AACd,SAAK,YAAY,aAAa,OAAO,KAAK;AAE1C,UAAM,SAAS,uBAAuB,OAAO,KAAK;AAClD,SAAK,OAAO,GAAG,MAAM,IAAI,OAAO,KAAK;AACrC,SAAK,aAAa,IAAI,iBAAiB;AAEvC,SAAK,eAAe;AAAA,MAClB,0BAA0B;AAAA;AAAA,MAC1B,mBAAmB;AAAA;AAAA,MACnB,gBAAgB,KAAK,UAAU;AAAA,MAC/B,cAAc,KAAK,UAAU;AAAA;AAAA,MAC7B,aAAa;AAAA,MACb,YAAY;AAAA,MACZ,kBAAkB,KAAK,UAAU,oBAAoB;AAAA,IACvD;AAGA,SAAK,SAAS;AAAA,MACZ,GAAG;AAAA,MACH,GAAI,OAAO,UAAU,CAAC;AAAA,IACxB;AAAA,EACF;AAAA,EAEA,MAAM,aAAgB,QAWM;AAC1B,UAAM,YAAY,KAAK,IAAI;AAG3B,QAAI;AACJ,QAAI,OAAO,OAAO;AAChB,iBAAW,OAAO;AAAA,IACpB,WAAW,OAAO,QAAQ;AACxB,UAAI,OAAO,OAAO,WAAW,UAAU;AACrC,mBAAW,EAAE,MAAM,OAAO,OAAO;AAAA,MACnC,OAAO;AACL,mBAAW,OAAO;AAAA,MACpB;AAAA,IACF,OAAO;AACL,iBAAW,EAAE,MAAM,GAAG;AAAA,IACxB;AAGA,UAAM,OAAO,OAAO,SAAS,OAAO,SAAS,WAAW;AAGxD,QAAI,SAAS,YAAY,CAAC,OAAO,QAAQ;AACvC,YAAM,IAAI,MAAM,0CAA0C;AAAA,IAC5D;AAGA,UAAM,iBAAiB,MAAM,KAAK,gBAAgB,QAAQ;AAG1D,UAAM,oBAAoB,OAAO,wBAAwB,SAAS,OAAO;AACzE,QAAI,gBAAgB;AAEpB,QAAI,mBAAmB;AACrB,YAAM,aAAa,KAAK,WAAW,mBAAmB,OAAO,MAAO;AACpE,YAAM,eAAe;AAAA,QACnB;AAAA,QACA,eAAe,QAAQ;AAAA,MACzB;AACA,sBAAgB;AAAA,QACd,GAAG;AAAA,QACH,MAAM;AAAA,MACR;AAAA,IACF,WAAW,SAAS,WAAW;AAI7B,YAAM,OAAO,eAAe,QAAQ;AACpC,YAAM,gBAAgB,CAAC,KAAK,YAAY,EAAE,SAAS,MAAM;AACzD,UAAI,eAAe;AACjB,wBAAgB;AAAA,UACd,GAAG;AAAA,UACH,MAAM,GAAG,IAAI;AAAA;AAAA;AAAA,QACf;AAAA,MACF;AAAA,IACF;AAGA,UAAM,WAAW,MAAM,KAAK,cAAc,aAAa;AAGvD,UAAM,cAAuC;AAAA,MAC3C,OAAO,KAAK,OAAO;AAAA,MACnB;AAAA,MACA,YAAY,OAAO,cAAc;AAAA,MACjC,QAAQ;AAAA;AAAA,MAER,OAAO,EAAE,SAAS,KAAK;AAAA;AAAA,MAEvB,SAAS,CAAC,EAAE,IAAI,mBAAmB,CAAC;AAAA,IACtC;AAEA,QAAI,SAAS,WAAW;AAEtB,kBAAY,kBAAkB,EAAE,MAAM,cAAc;AAEpD,UAAI,QAAQ,IAAI,iBAAiB;AAC/B,gBAAQ,IAAI,2DAA2D;AAAA,MACzE;AAAA,IACF,OAAO;AAKL,YAAM,SAAS,KAAK,WAAW,eAAe,OAAO,MAAO;AAG5D,WAAK,mBAAmB,MAAiC;AAEzD,UAAI,QAAQ,IAAI,iBAAiB;AAC/B,gBAAQ,IAAI,0DAA0D;AAAA,MACxE;AAEA,kBAAY,kBAAkB;AAAA,QAC5B,MAAM;AAAA,QACN,aAAa;AAAA,UACX,MAAM;AAAA,UACN;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAGA,QAAI,OAAO,aAAa,KAAK,UAAU,mBAAmB;AACxD,kBAAY,YAAY,KAAK,qBAAqB,OAAO,SAAS;AAAA,IACpE;AAGA,UAAM,UAAkC;AAAA,MACtC,gBAAgB;AAAA,MAChB,iBAAiB,UAAU,KAAK,OAAO,MAAM;AAAA,MAC7C,gBAAgB;AAAA,MAChB,WAAW;AAAA,IACb;AAEA,UAAM,WAAW,MAAM;AAAA,MACrB;AAAA,MACA;AAAA,QACE,QAAQ;AAAA,QACR;AAAA,QACA,MAAM,KAAK,UAAU,WAAW;AAAA,MAClC;AAAA,MACA,KAAK,OAAO;AAAA,IACd;AAEA,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,QAAQ,MAAM,SAAS,KAAK;AAClC,YAAM,IAAI,MAAM,yBAAyB,SAAS,MAAM,MAAM,KAAK,EAAE;AAAA,IACvE;AAEA,UAAM,OAAO,MAAM,SAAS,KAAK;AAiBjC,UAAM,YAAY,KAAK,IAAI,IAAI;AAG/B,UAAM,UAAU,KAAK,UAAU,CAAC,GAAG;AACnC,UAAM,UAAU,SAAS,WAAW;AACpC,UAAM,SAAS,cAAc,OAAO;AAGpC,UAAM,YAAY,SAAS;AAC3B,UAAM,oBAAoB,SAAS;AAGnC,UAAM,UAAU,KAAK,OAAO,cAAc,KAAK,OAAO;AAEtD,UAAM,SAAS,uBAAuB,KAAK,OAAO,KAAK;AAEvD,WAAO;AAAA,MACL,MAAM;AAAA,MACN,SAAS;AAAA,MACT,SAAS;AAAA,QACP;AAAA,QACA,aAAa,KAAK,OAAO;AAAA,QACzB,cAAc,KAAK,OAAO;AAAA,QAC1B;AAAA,QACA,eAAe;AAAA,QACf,UAAU;AAAA,QACV,OAAO,KAAK,OAAO;AAAA,QACnB,YAAY,KAAK;AAAA,QACjB,WAAW,KAAK;AAAA,MAClB;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,gBAAgB,OAAkD;AAE9E,QAAI,CAAC,OAAO;AACV,aAAO,EAAE,MAAM,GAAG;AAAA,IACpB;AAEA,QAAI,CAAC,MAAM,MAAM,UAAU,CAAC,KAAK,UAAU,gBAAgB;AACzD,aAAO;AAAA,IACT;AAGA,UAAM,YAAY,MAAM,KAAK,oBAAoB,MAAM,IAAI;AAE3D,WAAO;AAAA,MACL,GAAG;AAAA,MACH,QAAQ,CAAC,GAAI,MAAM,UAAU,CAAC,GAAI,GAAG,SAAS;AAAA,MAC9C,MAAM;AAAA;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,oBAAoB,MAAyC;
AACzE,UAAM,SAAuB,CAAC;AAG9B,UAAM,EAAE,IAAI,IAAI,MAAM,OAAO,YAAY;AAEzC,eAAW,YAAY,MAAM;AAC3B,UAAI;AAEJ,UAAI,SAAS,QAAQ;AAEnB,cAAM,aAAa,KAAK,cAAc,SAAS,MAAM;AACrD,oBAAY,OAAO,KAAK,YAAY,QAAQ;AAAA,MAC9C,WAAW,SAAS,KAAK;AAEvB,cAAM,WAAW,MAAM,iBAAiB,SAAS,KAAK,CAAC,GAAG,KAAK,OAAO,eAAe;AACrF,YAAI,CAAC,SAAS,IAAI;AAChB,gBAAM,IAAI,MAAM,4BAA4B,SAAS,GAAG,KAAK,SAAS,MAAM,EAAE;AAAA,QAChF;AACA,cAAM,cAAc,MAAM,SAAS,YAAY;AAC/C,oBAAY,OAAO,KAAK,WAAW;AAAA,MACrC,OAAO;AACL;AAAA,MACF;AAGA,YAAM,QAAQ,MAAM,IAAI,WAAW,EAAE,OAAO,EAAE,CAAC;AAE/C,uBAAiB,QAAQ,OAAO;AAC9B,eAAO,KAAK;AAAA,UACV,QAAQ,KAAK,SAAS,QAAQ;AAAA,UAC9B,UAAU;AAAA,QACZ,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,cAAc,OAAwI;AAClK,UAAM,UAA+E,CAAC;AAGtF,QAAI,MAAM,MAAM;AACd,cAAQ,KAAK,EAAE,MAAM,QAAQ,MAAM,MAAM,KAAK,CAAC;AAAA,IACjD;AAGA,QAAI,MAAM,UAAU,MAAM,OAAO,SAAS,GAAG;AAC3C,iBAAW,SAAS,MAAM,QAAQ;AAChC,YAAI,MAAM,KAAK;AACb,kBAAQ,KAAK;AAAA,YACX,MAAM;AAAA,YACN,WAAW,EAAE,KAAK,MAAM,IAAI;AAAA,UAC9B,CAAC;AAAA,QACH,WAAW,MAAM,QAAQ;AACvB,kBAAQ,KAAK;AAAA,YACX,MAAM;AAAA,YACN,WAAW;AAAA,cACT,KAAK,QAAQ,MAAM,QAAQ,WAAW,KAAK,cAAc,MAAM,MAAM,CAAC;AAAA,YACxE;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAEA,WAAO,CAAC,EAAE,MAAM,QAAQ,QAAQ,CAAC;AAAA,EACnC;AAAA;AAAA;AAAA;AAAA,EAKQ,qBAAqB,WAAqD;AAChF,UAAM,SAAkC,CAAC;AAGzC,QAAI,UAAU,QAAQ;AACpB,aAAO,SAAS,UAAU;AAAA,IAC5B,WAAW,UAAU,SAAS;AAC5B,aAAO,SAAS;AAAA,IAClB;AAGA,QAAI,UAAU,YAAY,QAAW;AACnC,aAAO,UAAU,UAAU;AAAA,IAC7B;AAEA,WAAO,OAAO,KAAK,MAAM,EAAE,SAAS,IAAI,SAAS,CAAC;AAAA,EACpD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOQ,mBAAmB,KAAoC;AAC7D,QAAI,OAAO,OAAO,QAAQ,UAAU;AAClC,UAAI,IAAI,SAAS,YAAY,IAAI,YAAY;AAC3C,cAAM,aAAa,IAAI;AACvB,cAAM,WAAW,OAAO,KAAK,UAAU;AACvC,YAAI,WAAW;AACf,YAAI,uBAAuB;AAG3B,mBAAW,OAAO,UAAU;AAC1B,eAAK,mBAAmB,WAAW,GAAG,CAA4B;AAAA,QACpE;AAAA,MACF,WAAW,IAAI,SAAS,WAAW,IAAI,OAAO;AAC5C,aAAK,mBAAmB,IAAI,KAAgC;AAAA,MAC9D;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,cAAc,OAAuB;AAC3C,QAAI,MAAM,WAAW,OAAO,GAAG;AAC7B,YAAM,aAAa,MAAM,MAAM,GAAG,EAAE,CAAC;AACrC,UAAI,CAAC,YAAY;AACf,cAAM,IAAI,MAAM,4BAA4B,MAAM,UAAU,GAAG,EAAE,CAAC,EAAE;AAAA,MACtE;AACA,aAAO;AAAA,IACT;AACA,WAAO;AAAA,EACT;AACF;;;AExaA,SAAS,wBAAwB;AAGjC,iBAAiB,cAAc,CAAC,WAAW,IAAI,kBAAkB,MAAM,CAAC;","names":[]}
+
{"version":3,"sources":["../src/generic-or.ts","../src/known-models.ts","../src/index.ts"],"sourcesContent":["import type {\n LLMProvider,\n ProviderConfig,\n MultimodalInput,\n UnifiedSchema,\n LLMResponse,\n ProviderCapabilities,\n JsonMode,\n ReasoningConfig,\n ImageInput,\n PDFInput,\n ReasoningDetail\n} from \"@doclo/providers-llm\";\nimport { SchemaTranslator, combineSchemaAndUserPrompt, calculateCacheSavings } from \"@doclo/providers-llm\";\nimport { fetchWithTimeout, DEFAULT_LIMITS, safeJsonParse } from \"@doclo/core/security\";\nimport { getModelInfo, type KnownModelInfo } from \"./known-models.js\";\n\n/**\n * Extract vendor name from OpenRouter model identifier.\n * Example: \"qwen/qwen3-235b-a22b\" -> \"qwen\"\n */\nfunction extractVendorFromModel(model: string): string {\n const slashIndex = model.indexOf('/');\n return slashIndex > 0 ? model.substring(0, slashIndex) : 'generic';\n}\n\n/**\n * GenericORProvider - A universal provider for any OpenRouter model.\n *\n * Works with all OpenRouter models using OpenAI-compatible API format.\n * Supports Qwen, Llama, DeepSeek, GLM, Kimi, Mistral, and any other model\n * available through OpenRouter.\n *\n * Features:\n * - Automatic capability detection for known model families\n * - Response healing plugin for better JSON reliability\n * - PDF-to-image conversion for VLM models\n * - Reasoning token extraction for supported models\n *\n * @example\n * ```typescript\n * const provider = new GenericORProvider({\n * provider: 'generic-or',\n * model: 'qwen/qwen3-235b-a22b',\n * apiKey: process.env.OPENROUTER_API_KEY!,\n * });\n *\n * const result = await provider.completeJson({\n * input: { text: \"Extract the main topics from this text...\" },\n * schema: TopicsSchema,\n * mode: 'strict',\n * });\n * ```\n */\nexport class GenericORProvider implements LLMProvider {\n readonly name: string;\n readonly capabilities: ProviderCapabilities;\n\n private config: ProviderConfig;\n private translator: SchemaTranslator;\n private limits: typeof DEFAULT_LIMITS;\n private modelInfo: KnownModelInfo;\n\n constructor(config: ProviderConfig) {\n this.config = config;\n this.modelInfo = getModelInfo(config.model);\n\n const vendor = extractVendorFromModel(config.model);\n this.name = `${vendor}:${config.model}`;\n this.translator = new SchemaTranslator();\n\n this.capabilities = {\n supportsStructuredOutput: true, // All OpenRouter models support this via response_format\n supportsStreaming: false, // Not with structured outputs\n supportsImages: this.modelInfo.supportsVision,\n supportsPDFs: this.modelInfo.supportsVision, // Via image conversion\n maxPDFPages: undefined,\n maxPDFSize: undefined,\n maxContextTokens: this.modelInfo.maxContextTokens || 32768\n };\n\n // Merge custom limits with defaults\n this.limits = {\n ...DEFAULT_LIMITS,\n ...(config.limits || {})\n };\n }\n\n async completeJson<T>(params: {\n // Support both interfaces:\n // - Internal: input: MultimodalInput\n // - Node/CoreVLM: prompt: string | MultimodalInput\n input?: MultimodalInput;\n prompt?: string | MultimodalInput;\n schema?: UnifiedSchema<T>;\n mode?: JsonMode;\n max_tokens?: number;\n reasoning?: ReasoningConfig;\n embedSchemaInPrompt?: boolean;\n }): Promise<LLMResponse<T>> {\n const startTime = Date.now();\n\n // Normalize input: handle both 'input' and 'prompt' parameters\n let rawInput: MultimodalInput;\n if (params.input) {\n rawInput = params.input;\n } else if (params.prompt) {\n if (typeof params.prompt === 'string') {\n rawInput = { text: params.prompt };\n } 
else {\n rawInput = params.prompt as MultimodalInput;\n }\n } else {\n rawInput = { text: '' };\n }\n\n // Determine mode: default to 'strict', auto-relaxed if schema omitted\n const mode = params.mode || (params.schema ? 'strict' : 'relaxed');\n\n // Validate: strict mode requires schema\n if (mode === 'strict' && !params.schema) {\n throw new Error('schema is required when mode is \"strict\"');\n }\n\n // Convert PDFs to images if model supports vision\n const processedInput = await this.preprocessInput(rawInput);\n\n // Embed schema in prompt if enabled (default: true) and schema exists\n const shouldEmbedSchema = params.embedSchemaInPrompt !== false && params.schema;\n let enhancedInput = processedInput;\n\n if (shouldEmbedSchema) {\n const jsonSchema = this.translator.convertZodIfNeeded(params.schema!);\n const enhancedText = combineSchemaAndUserPrompt(\n jsonSchema,\n processedInput.text || ''\n );\n enhancedInput = {\n ...processedInput,\n text: enhancedText\n };\n } else if (mode === 'relaxed') {\n // In relaxed mode without schema, we still need to mention \"JSON\" in the prompt\n // because some providers (e.g., Alibaba/Qwen) require the word \"json\" in messages\n // when using response_format: { type: \"json_object\" }\n const text = processedInput.text || '';\n const needsJsonHint = !text.toLowerCase().includes('json');\n if (needsJsonHint) {\n enhancedInput = {\n ...processedInput,\n text: `${text}\\n\\nRespond with valid JSON.`\n };\n }\n }\n\n // Build messages with multimodal content\n const messages = await this.buildMessages(enhancedInput);\n\n // Build request body\n const requestBody: Record<string, unknown> = {\n model: this.config.model,\n messages,\n max_tokens: params.max_tokens || 4096,\n stream: false,\n // Enable usage tracking for OpenRouter cost info\n usage: { include: true },\n // Enable response healing plugin for better JSON reliability\n plugins: [{ id: 'response-healing' }]\n };\n\n if (mode === 'relaxed') {\n // Relaxed mode: just request valid JSON without strict schema\n requestBody.response_format = { type: \"json_object\" };\n\n if (process.env.DEBUG_PROVIDERS) {\n console.log('[GenericORProvider] Using relaxed JSON mode (json_object)');\n }\n } else {\n // Strict mode: use json_schema with strict validation\n // Note: Not all models support json_schema natively, but response-healing\n // plugin will extract JSON from models that only support instruction-following.\n // We don't use require_parameters: true to allow broader model compatibility.\n const schema = this.translator.toOpenAISchema(params.schema!);\n\n // Recursively fix schema for strict mode requirements\n this.fixSchemaRecursive(schema as Record<string, unknown>);\n\n if (process.env.DEBUG_PROVIDERS) {\n console.log('[GenericORProvider] Using strict JSON mode (json_schema)');\n }\n\n requestBody.response_format = {\n type: \"json_schema\",\n json_schema: {\n name: \"extraction\",\n schema\n }\n };\n }\n\n // Add reasoning configuration if provided and model supports it\n if (params.reasoning && this.modelInfo.supportsReasoning) {\n requestBody.reasoning = this.buildReasoningConfig(params.reasoning);\n }\n\n // Make API call to OpenRouter\n const headers: Record<string, string> = {\n \"Content-Type\": \"application/json\",\n \"Authorization\": `Bearer ${this.config.apiKey}`,\n \"HTTP-Referer\": \"https://github.com/docloai/sdk\",\n \"X-Title\": \"Doclo SDK\"\n };\n\n const response = await fetchWithTimeout(\n \"https://openrouter.ai/api/v1/chat/completions\",\n {\n method: \"POST\",\n 
headers,\n body: JSON.stringify(requestBody)\n },\n this.limits.REQUEST_TIMEOUT\n );\n\n if (!response.ok) {\n const error = await response.text();\n throw new Error(`OpenRouter API error (${response.status}): ${error}`);\n }\n\n const data = await response.json() as {\n choices?: Array<{\n message?: {\n content?: string;\n reasoning?: string;\n reasoning_details?: Array<{ type: string; summary?: string; text?: string; data?: string; id: string | null; format: string; index?: number }>;\n };\n }>;\n usage?: {\n prompt_tokens?: number;\n completion_tokens?: number;\n total_cost?: number;\n cost?: number;\n // Cache metrics - different providers use different field names\n prompt_tokens_details?: {\n cached_tokens?: number;\n cache_write_tokens?: number;\n };\n cached_tokens?: number; // Google\n cache_read_input_tokens?: number; // Anthropic native\n cache_creation_input_tokens?: number; // Anthropic native\n };\n id?: string;\n model?: string;\n };\n const latencyMs = Date.now() - startTime;\n\n // Parse response\n const message = data.choices?.[0]?.message;\n const content = message?.content ?? \"{}\";\n const parsed = safeJsonParse(content) as T;\n\n // Extract reasoning fields if present\n const reasoning = message?.reasoning;\n const reasoning_details = message?.reasoning_details as ReasoningDetail[] | undefined;\n\n // Get cost from OpenRouter response\n const costUSD = data.usage?.total_cost ?? data.usage?.cost;\n\n const vendor = extractVendorFromModel(this.config.model);\n\n // Extract cache metrics (OpenRouter returns these for providers with caching)\n // Different providers use different field names:\n // - OpenAI/XAI: prompt_tokens_details.cached_tokens\n // - Anthropic: prompt_tokens_details.cached_tokens (via OR) or cache_read_input_tokens (native)\n // - Google: cached_tokens\n const cacheReadInputTokens =\n data.usage?.prompt_tokens_details?.cached_tokens ??\n data.usage?.cached_tokens ??\n data.usage?.cache_read_input_tokens;\n const cacheCreationInputTokens =\n data.usage?.prompt_tokens_details?.cache_write_tokens ??\n data.usage?.cache_creation_input_tokens;\n const inputTokens = data.usage?.prompt_tokens;\n const cacheSavingsPercent = calculateCacheSavings(vendor, inputTokens, cacheReadInputTokens);\n\n return {\n json: parsed as T,\n rawText: content,\n metrics: {\n costUSD,\n inputTokens,\n outputTokens: data.usage?.completion_tokens,\n latencyMs,\n attemptNumber: 1,\n provider: vendor,\n model: this.config.model,\n responseId: data.id,\n modelUsed: data.model,\n cacheCreationInputTokens,\n cacheReadInputTokens,\n cacheSavingsPercent\n },\n reasoning,\n reasoning_details\n };\n }\n\n /**\n * Preprocess input to convert PDFs to images for VLM models.\n */\n private async preprocessInput(input: MultimodalInput): Promise<MultimodalInput> {\n // Handle undefined/null input\n if (!input) {\n return { text: '' };\n }\n\n if (!input.pdfs?.length || !this.modelInfo.supportsVision) {\n return input;\n }\n\n // Convert PDFs to images\n const pdfImages = await this.convertPDFsToImages(input.pdfs);\n\n return {\n ...input,\n images: [...(input.images || []), ...pdfImages],\n pdfs: undefined // Consumed\n };\n }\n\n /**\n * Convert PDFs to images using pdf-to-img.\n */\n private async convertPDFsToImages(pdfs: PDFInput[]): Promise<ImageInput[]> {\n const images: ImageInput[] = [];\n\n // Dynamically import pdf-to-img to avoid bundling issues\n const { pdf } = await import('pdf-to-img');\n\n for (const pdfInput of pdfs) {\n let pdfBuffer: Buffer;\n\n if (pdfInput.base64) {\n // 
Extract raw base64 from data URL if needed\n const base64Data = this.extractBase64(pdfInput.base64);\n pdfBuffer = Buffer.from(base64Data, 'base64');\n } else if (pdfInput.url) {\n // Fetch PDF from URL\n const response = await fetchWithTimeout(pdfInput.url, {}, this.limits.REQUEST_TIMEOUT);\n if (!response.ok) {\n throw new Error(`Failed to fetch PDF from ${pdfInput.url}: ${response.status}`);\n }\n const arrayBuffer = await response.arrayBuffer();\n pdfBuffer = Buffer.from(arrayBuffer);\n } else {\n continue; // Skip invalid entries\n }\n\n // Convert PDF pages to images\n const pages = await pdf(pdfBuffer, { scale: 2 });\n\n for await (const page of pages) {\n images.push({\n base64: page.toString('base64'),\n mimeType: 'image/png'\n });\n }\n }\n\n return images;\n }\n\n /**\n * Build OpenAI-compatible message format.\n */\n private async buildMessages(input: MultimodalInput): Promise<Array<{ role: string; content: string | Array<{ type: string; text?: string; image_url?: { url: string } }> }>> {\n const messages: Array<{ role: string; content: string | Array<{ type: string; text?: string; image_url?: { url: string } }> }> = [];\n\n // Add system message if provided\n if (input.systemPrompt) {\n messages.push({ role: \"system\", content: input.systemPrompt });\n }\n\n // Build user message content array\n const content: Array<{ type: string; text?: string; image_url?: { url: string } }> = [];\n\n // Add text\n if (input.text) {\n content.push({ type: \"text\", text: input.text });\n }\n\n // Add images\n if (input.images && input.images.length > 0) {\n for (const image of input.images) {\n if (image.url) {\n content.push({\n type: \"image_url\",\n image_url: { url: image.url }\n });\n } else if (image.base64) {\n content.push({\n type: \"image_url\",\n image_url: {\n url: `data:${image.mimeType};base64,${this.extractBase64(image.base64)}`\n }\n });\n }\n }\n }\n\n messages.push({ role: \"user\", content });\n return messages;\n }\n\n /**\n * Build reasoning configuration for models that support it.\n */\n private buildReasoningConfig(reasoning: ReasoningConfig): Record<string, unknown> {\n const config: Record<string, unknown> = {};\n\n // Most open source models use effort-based reasoning\n if (reasoning.effort) {\n config.effort = reasoning.effort;\n } else if (reasoning.enabled) {\n config.effort = 'medium'; // Default to medium\n }\n\n // Add exclude flag if specified\n if (reasoning.exclude !== undefined) {\n config.exclude = reasoning.exclude;\n }\n\n return Object.keys(config).length > 0 ? 
config : {};\n }\n\n /**\n * Recursively fix schema for strict mode requirements.\n * - All properties must be required\n * - additionalProperties must be false\n */\n private fixSchemaRecursive(obj: Record<string, unknown>): void {\n if (obj && typeof obj === 'object') {\n if (obj.type === 'object' && obj.properties) {\n const properties = obj.properties as Record<string, unknown>;\n const allProps = Object.keys(properties);\n obj.required = allProps;\n obj.additionalProperties = false;\n\n // Recursively fix nested properties\n for (const key of allProps) {\n this.fixSchemaRecursive(properties[key] as Record<string, unknown>);\n }\n } else if (obj.type === 'array' && obj.items) {\n this.fixSchemaRecursive(obj.items as Record<string, unknown>);\n }\n }\n }\n\n /**\n * Extract base64 data from a data URL or return as-is if already raw base64.\n */\n private extractBase64(input: string): string {\n if (input.startsWith('data:')) {\n const base64Part = input.split(',')[1];\n if (!base64Part) {\n throw new Error(`Invalid data URL format: ${input.substring(0, 50)}`);\n }\n return base64Part;\n }\n return input;\n }\n}\n","/**\n * Known model capabilities for accurate reporting.\n * Unknown models get sensible defaults and work with OpenRouter.\n */\n\nexport interface KnownModelInfo {\n supportsVision: boolean;\n supportsReasoning: boolean;\n maxContextTokens?: number;\n}\n\n/**\n * Prefix-based matching for model families.\n * More specific prefixes should come before less specific ones.\n * Data sourced from OpenRouter API: https://openrouter.ai/api/v1/models\n */\nexport const KNOWN_MODEL_PREFIXES: Record<string, KnownModelInfo> = {\n // Qwen models - VL variants support vision, thinking variants support reasoning\n 'qwen/qwen3-vl-235b-a22b-thinking': { supportsVision: true, supportsReasoning: true, maxContextTokens: 262144 },\n 'qwen/qwen3-vl-235b-a22b': { supportsVision: true, supportsReasoning: false, maxContextTokens: 262144 },\n 'qwen/qwen3-vl-30b-a3b-thinking': { supportsVision: true, supportsReasoning: true, maxContextTokens: 131072 },\n 'qwen/qwen3-vl-30b-a3b': { supportsVision: true, supportsReasoning: false, maxContextTokens: 262144 },\n 'qwen/qwen3-vl-8b-thinking': { supportsVision: true, supportsReasoning: true, maxContextTokens: 256000 },\n 'qwen/qwen3-vl-8b': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },\n 'qwen/qwen3-vl': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },\n 'qwen/qwen-vl-max': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },\n 'qwen/qwen-vl-plus': { supportsVision: true, supportsReasoning: false, maxContextTokens: 7500 },\n 'qwen/qwen2.5-vl': { supportsVision: true, supportsReasoning: false, maxContextTokens: 32768 },\n 'qwen/qwq': { supportsVision: false, supportsReasoning: true, maxContextTokens: 32768 },\n 'qwen/qwen3-235b-a22b-thinking': { supportsVision: false, supportsReasoning: true, maxContextTokens: 262144 },\n 'qwen/qwen3-235b-a22b': { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },\n 'qwen/qwen3-30b-a3b-thinking': { supportsVision: false, supportsReasoning: true, maxContextTokens: 32768 },\n 'qwen/qwen3-30b-a3b': { supportsVision: false, supportsReasoning: true, maxContextTokens: 262144 },\n 'qwen/qwen3-next-80b-a3b-thinking': { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },\n 'qwen/qwen3-next-80b-a3b': { supportsVision: false, supportsReasoning: false, maxContextTokens: 262144 },\n 'qwen/qwen3-coder': { 
supportsVision: false, supportsReasoning: true, maxContextTokens: 262144 },\n 'qwen/qwen3-max': { supportsVision: false, supportsReasoning: false, maxContextTokens: 256000 },\n 'qwen/qwen3-32b': { supportsVision: false, supportsReasoning: true, maxContextTokens: 40960 },\n 'qwen/qwen3-14b': { supportsVision: false, supportsReasoning: true, maxContextTokens: 40960 },\n 'qwen/qwen3-8b': { supportsVision: false, supportsReasoning: true, maxContextTokens: 128000 },\n 'qwen/qwen3-4b': { supportsVision: false, supportsReasoning: true, maxContextTokens: 40960 },\n 'qwen/qwen-plus-2025-07-28:thinking': { supportsVision: false, supportsReasoning: true, maxContextTokens: 1000000 },\n 'qwen/qwen-plus': { supportsVision: false, supportsReasoning: false, maxContextTokens: 1000000 },\n 'qwen/qwen-turbo': { supportsVision: false, supportsReasoning: false, maxContextTokens: 1000000 },\n 'qwen/qwen-max': { supportsVision: false, supportsReasoning: false, maxContextTokens: 32768 },\n 'qwen/qwen-2.5': { supportsVision: false, supportsReasoning: false, maxContextTokens: 32768 },\n\n // Llama 4 models (vision capable)\n 'meta-llama/llama-4-maverick': { supportsVision: true, supportsReasoning: false, maxContextTokens: 1048576 },\n 'meta-llama/llama-4-scout': { supportsVision: true, supportsReasoning: false, maxContextTokens: 327680 },\n 'meta-llama/llama-guard-4': { supportsVision: true, supportsReasoning: false, maxContextTokens: 163840 },\n\n // Llama 3.x models - 3.2 vision variants support images\n 'meta-llama/llama-3.2-90b-vision': { supportsVision: true, supportsReasoning: false, maxContextTokens: 32768 },\n 'meta-llama/llama-3.2-11b-vision': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },\n 'meta-llama/llama-3.3': { supportsVision: false, supportsReasoning: false, maxContextTokens: 131072 },\n 'meta-llama/llama-3.2': { supportsVision: false, supportsReasoning: false, maxContextTokens: 131072 },\n 'meta-llama/llama-3.1': { supportsVision: false, supportsReasoning: false, maxContextTokens: 131072 },\n 'meta-llama/llama-3': { supportsVision: false, supportsReasoning: false, maxContextTokens: 8192 },\n\n // DeepSeek models - all support reasoning except base chat\n 'deepseek/deepseek-r1': { supportsVision: false, supportsReasoning: true, maxContextTokens: 163840 },\n 'deepseek/deepseek-v3': { supportsVision: false, supportsReasoning: true, maxContextTokens: 163840 },\n 'deepseek/deepseek-chat-v3': { supportsVision: false, supportsReasoning: true, maxContextTokens: 163840 },\n 'deepseek/deepseek-chat': { supportsVision: false, supportsReasoning: false, maxContextTokens: 163840 },\n 'deepseek/deepseek-prover': { supportsVision: false, supportsReasoning: false, maxContextTokens: 163840 },\n\n // GLM models (Zhipu AI / Z.AI)\n 'z-ai/glm-4.6v': { supportsVision: true, supportsReasoning: true, maxContextTokens: 131072 },\n 'z-ai/glm-4.5v': { supportsVision: true, supportsReasoning: true, maxContextTokens: 65536 },\n 'z-ai/glm-4.6': { supportsVision: false, supportsReasoning: true, maxContextTokens: 202752 },\n 'z-ai/glm-4.5-air': { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },\n 'z-ai/glm-4.5': { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },\n 'z-ai/glm-4-32b': { supportsVision: false, supportsReasoning: false, maxContextTokens: 128000 },\n 'thudm/glm-4.1v-9b-thinking': { supportsVision: true, supportsReasoning: true, maxContextTokens: 65536 },\n 'thudm/glm': { supportsVision: false, supportsReasoning: false, 
maxContextTokens: 65536 },\n\n // Kimi / Moonshot models\n 'moonshotai/kimi-dev-72b': { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },\n 'moonshotai/kimi-k2-thinking': { supportsVision: false, supportsReasoning: true, maxContextTokens: 262144 },\n 'moonshotai/kimi-k2-0905': { supportsVision: false, supportsReasoning: false, maxContextTokens: 262144 },\n 'moonshotai/kimi-k2': { supportsVision: false, supportsReasoning: false, maxContextTokens: 131072 },\n\n // Mistral models - Mistral 3.x family supports vision\n 'mistralai/mistral-large-2512': { supportsVision: true, supportsReasoning: false, maxContextTokens: 262144 },\n 'mistralai/mistral-medium-3.1': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },\n 'mistralai/mistral-medium-3': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },\n 'mistralai/mistral-small-3.2': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },\n 'mistralai/mistral-small-3.1': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },\n 'mistralai/ministral-14b-2512': { supportsVision: true, supportsReasoning: false, maxContextTokens: 262144 },\n 'mistralai/ministral-8b-2512': { supportsVision: true, supportsReasoning: false, maxContextTokens: 262144 },\n 'mistralai/ministral-3b-2512': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },\n 'mistralai/pixtral-large': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },\n 'mistralai/pixtral-12b': { supportsVision: true, supportsReasoning: false, maxContextTokens: 32768 },\n 'mistralai/pixtral': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },\n 'mistralai/devstral': { supportsVision: false, supportsReasoning: false, maxContextTokens: 262144 },\n 'mistralai/codestral': { supportsVision: false, supportsReasoning: false, maxContextTokens: 256000 },\n // Magistral reasoning models\n 'mistralai/magistral-medium-2506': { supportsVision: false, supportsReasoning: true, maxContextTokens: 40960 },\n 'mistralai/magistral-small-2506': { supportsVision: false, supportsReasoning: true, maxContextTokens: 40960 },\n 'mistralai/magistral-medium': { supportsVision: false, supportsReasoning: true, maxContextTokens: 40960 },\n 'mistralai/magistral-small': { supportsVision: false, supportsReasoning: true, maxContextTokens: 40960 },\n 'mistralai/mistral-large': { supportsVision: false, supportsReasoning: false, maxContextTokens: 131072 },\n 'mistralai/mistral-small': { supportsVision: false, supportsReasoning: false, maxContextTokens: 32768 },\n 'mistralai/mistral-nemo': { supportsVision: false, supportsReasoning: false, maxContextTokens: 131072 },\n 'mistralai/mixtral': { supportsVision: false, supportsReasoning: false, maxContextTokens: 65536 },\n 'mistralai/mistral-7b': { supportsVision: false, supportsReasoning: false, maxContextTokens: 32768 },\n\n // xAI Grok models - Grok 4 supports vision and reasoning\n 'x-ai/grok-4.1-fast': { supportsVision: true, supportsReasoning: true, maxContextTokens: 2000000 },\n 'x-ai/grok-4-fast': { supportsVision: true, supportsReasoning: true, maxContextTokens: 2000000 },\n 'x-ai/grok-4': { supportsVision: true, supportsReasoning: true, maxContextTokens: 256000 },\n 'x-ai/grok-code-fast': { supportsVision: false, supportsReasoning: true, maxContextTokens: 256000 },\n 'x-ai/grok-3-mini': { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },\n 'x-ai/grok-3': { supportsVision: 
false, supportsReasoning: false, maxContextTokens: 131072 },\n\n // Cohere Command models\n 'cohere/command-a': { supportsVision: false, supportsReasoning: false, maxContextTokens: 256000 },\n 'cohere/command-r': { supportsVision: false, supportsReasoning: false, maxContextTokens: 128000 },\n\n // Gemma models (Google open source) - Gemma 3 supports vision\n 'google/gemma-3': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },\n 'google/gemma-3n': { supportsVision: false, supportsReasoning: false, maxContextTokens: 32768 },\n 'google/gemma-2': { supportsVision: false, supportsReasoning: false, maxContextTokens: 8192 },\n\n // Phi models (Microsoft) - phi-4-multimodal supports vision, phi-4-reasoning supports reasoning\n 'microsoft/phi-4-multimodal': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },\n 'microsoft/phi-4-reasoning': { supportsVision: false, supportsReasoning: true, maxContextTokens: 32768 },\n 'microsoft/phi-4': { supportsVision: false, supportsReasoning: false, maxContextTokens: 16384 },\n 'microsoft/phi-3': { supportsVision: false, supportsReasoning: false, maxContextTokens: 128000 },\n\n // OpenGVLab InternVL\n 'opengvlab/internvl3': { supportsVision: true, supportsReasoning: false, maxContextTokens: 32768 },\n 'opengvlab/internvl2': { supportsVision: true, supportsReasoning: false, maxContextTokens: 32768 },\n\n // StepFun AI - step3 supports both vision and reasoning\n 'stepfun-ai/step3': { supportsVision: true, supportsReasoning: true, maxContextTokens: 65536 },\n 'stepfun-ai/step2': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },\n 'stepfun-ai/step1': { supportsVision: true, supportsReasoning: false, maxContextTokens: 32768 },\n\n // NVIDIA Nemotron - VL and ultra models support reasoning\n 'nvidia/nemotron-nano-12b-v2-vl': { supportsVision: true, supportsReasoning: true, maxContextTokens: 131072 },\n 'nvidia/nemotron-nano-9b-v2': { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },\n 'nvidia/llama-3.3-nemotron-super': { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },\n 'nvidia/llama-3.1-nemotron-ultra': { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },\n 'nvidia/llama-3.1-nemotron': { supportsVision: false, supportsReasoning: false, maxContextTokens: 131072 },\n\n // Amazon Nova models - vision capable\n 'amazon/nova-2-lite': { supportsVision: true, supportsReasoning: true, maxContextTokens: 1000000 },\n 'amazon/nova-premier': { supportsVision: true, supportsReasoning: false, maxContextTokens: 1000000 },\n 'amazon/nova-pro': { supportsVision: true, supportsReasoning: false, maxContextTokens: 300000 },\n 'amazon/nova-lite': { supportsVision: true, supportsReasoning: false, maxContextTokens: 300000 },\n 'amazon/nova-micro': { supportsVision: false, supportsReasoning: false, maxContextTokens: 128000 },\n\n // Baidu ERNIE models - VL variants support vision\n 'baidu/ernie-4.5-vl': { supportsVision: true, supportsReasoning: true, maxContextTokens: 123000 },\n 'baidu/ernie-4.5-thinking': { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },\n 'baidu/ernie-4.5': { supportsVision: false, supportsReasoning: false, maxContextTokens: 123000 },\n\n // ByteDance models\n 'bytedance/ui-tars': { supportsVision: true, supportsReasoning: false, maxContextTokens: 128000 },\n\n // MiniMax models\n 'minimax/minimax-01': { supportsVision: true, supportsReasoning: false, maxContextTokens: 1000192 },\n 
'minimax/minimax-m': { supportsVision: false, supportsReasoning: true, maxContextTokens: 1000000 },\n\n // Tencent Hunyuan\n 'tencent/hunyuan': { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },\n\n // Alibaba Tongyi\n 'alibaba/tongyi-deepresearch': { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },\n\n // Other notable models\n 'databricks/dbrx': { supportsVision: false, supportsReasoning: false, maxContextTokens: 32768 },\n};\n\n/**\n * Default capabilities for unknown models.\n * Assumes a basic text-only model with reasonable defaults.\n */\nexport const DEFAULT_MODEL_INFO: KnownModelInfo = {\n supportsVision: false,\n supportsReasoning: false,\n maxContextTokens: 32768,\n};\n\n/**\n * Get model capabilities from registry or return defaults.\n * Uses prefix matching - more specific prefixes should be listed first.\n *\n * @param model - Full model ID (e.g., \"qwen/qwen3-235b-a22b\")\n * @returns Model capabilities (known or default)\n */\nexport function getModelInfo(model: string): KnownModelInfo {\n // Sort prefixes by length (longest first) for most specific match\n const sortedPrefixes = Object.keys(KNOWN_MODEL_PREFIXES).sort((a, b) => b.length - a.length);\n\n for (const prefix of sortedPrefixes) {\n if (model.startsWith(prefix)) {\n return KNOWN_MODEL_PREFIXES[prefix];\n }\n }\n\n return DEFAULT_MODEL_INFO;\n}\n\n/**\n * Check if a model is known to support vision.\n *\n * @param model - Full model ID\n * @returns true if model supports vision input\n */\nexport function modelSupportsVision(model: string): boolean {\n return getModelInfo(model).supportsVision;\n}\n\n/**\n * Check if a model is known to support reasoning tokens.\n *\n * @param model - Full model ID\n * @returns true if model supports reasoning/thinking mode\n */\nexport function modelSupportsReasoning(model: string): boolean {\n return getModelInfo(model).supportsReasoning;\n}\n","export { GenericORProvider } from './generic-or.js';\nexport {\n getModelInfo,\n modelSupportsVision,\n modelSupportsReasoning,\n KNOWN_MODEL_PREFIXES,\n DEFAULT_MODEL_INFO,\n type KnownModelInfo\n} from './known-models.js';\n\nimport { GenericORProvider } from './generic-or.js';\nimport { registerProvider } from '@doclo/providers-llm';\n\n// Auto-register the provider when this package is imported\nregisterProvider('generic-or', (config) => new 
GenericORProvider(config));\n"],"mappings":";AAaA,SAAS,kBAAkB,4BAA4B,6BAA6B;AACpF,SAAS,kBAAkB,gBAAgB,qBAAqB;;;ACEzD,IAAM,uBAAuD;AAAA;AAAA,EAElE,oCAAoC,EAAE,gBAAgB,MAAM,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EAC9G,2BAA2B,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EACtG,kCAAkC,EAAE,gBAAgB,MAAM,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EAC5G,yBAAyB,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EACpG,6BAA6B,EAAE,gBAAgB,MAAM,mBAAmB,MAAM,kBAAkB,MAAO;AAAA,EACvG,oBAAoB,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EAC/F,iBAAiB,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EAC5F,oBAAoB,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EAC/F,qBAAqB,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,KAAK;AAAA,EAC9F,mBAAmB,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,MAAM;AAAA,EAC7F,YAAY,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,MAAM;AAAA,EACtF,iCAAiC,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EAC5G,wBAAwB,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EACnG,+BAA+B,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,MAAM;AAAA,EACzG,sBAAsB,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EACjG,oCAAoC,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EAC/G,2BAA2B,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EACvG,oBAAoB,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EAC/F,kBAAkB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,MAAO;AAAA,EAC9F,kBAAkB,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,MAAM;AAAA,EAC5F,kBAAkB,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,MAAM;AAAA,EAC5F,iBAAiB,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,MAAO;AAAA,EAC5F,iBAAiB,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,MAAM;AAAA,EAC3F,sCAAsC,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,IAAQ;AAAA,EAClH,kBAAkB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,IAAQ;AAAA,EAC/F,mBAAmB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,IAAQ;AAAA,EAChG,iBAAiB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,MAAM;AAAA,EAC5F,iBAAiB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,MAAM;AAAA;AAAA,EAG5F,+BAA+B,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,QAAQ;AAAA,EAC3G,4BAA4B,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EACvG,4BAA4B,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA;AAAA,EAGvG,mCAAmC,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,MAAM;AAAA,EAC7G,mCAAmC,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EAC9G,wBAAwB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EACpG,wBAAwB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EACpG,wBAAwB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EACpG,sBAAsB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,KAAK;AAAA;AAAA,EAGhG,wBAAwB,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EACnG,wBAAwB,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EACnG,6BAA6B,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EACxG,0BAA0B,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EACtG,4BAA4B,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,OAAO;AAAA;AAAA,EAGxG,iBAAiB,EAAE,gBAAgB,MAAM,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EAC3F,iBAAiB,EAAE,gBAAgB,MAAM,mBAAmB,MAAM,kBAAkB,MAAM;AAAA,EAC1F,gBAAgB,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EAC3F,oBAAoB,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EAC/F,gBAAgB,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EAC3F,kBAAkB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,MAAO;AAAA,EAC9F,8BAA8B,EAAE,gBAAgB,MAAM,mBAAmB,MAAM,kBAAkB,MAAM;AAAA,EACvG,aAAa,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,MAAM;AAAA;AAAA,EAGxF,2BAA2B,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EACtG,+BAA+B,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EAC1G,2BAA2B,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EACvG,sBAAsB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,OAAO;AAAA;AAAA,EAGlG,gCAAgC,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EAC3G,gCAAgC,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EAC3G,8BAA8B,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EACzG,+BAA+B,EAAE,gBAAgB,MAA
M,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EAC1G,+BAA+B,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EAC1G,gCAAgC,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EAC3G,+BAA+B,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EAC1G,+BAA+B,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EAC1G,2BAA2B,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EACtG,yBAAyB,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,MAAM;AAAA,EACnG,qBAAqB,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EAChG,sBAAsB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EAClG,uBAAuB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,MAAO;AAAA;AAAA,EAEnG,mCAAmC,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,MAAM;AAAA,EAC7G,kCAAkC,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,MAAM;AAAA,EAC5G,8BAA8B,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,MAAM;AAAA,EACxG,6BAA6B,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,MAAM;AAAA,EACvG,2BAA2B,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EACvG,2BAA2B,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,MAAM;AAAA,EACtG,0BAA0B,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EACtG,qBAAqB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,MAAM;AAAA,EAChG,wBAAwB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,MAAM;AAAA;AAAA,EAGnG,sBAAsB,EAAE,gBAAgB,MAAM,mBAAmB,MAAM,kBAAkB,IAAQ;AAAA,EACjG,oBAAoB,EAAE,gBAAgB,MAAM,mBAAmB,MAAM,kBAAkB,IAAQ;AAAA,EAC/F,eAAe,EAAE,gBAAgB,MAAM,mBAAmB,MAAM,kBAAkB,MAAO;AAAA,EACzF,uBAAuB,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,MAAO;AAAA,EAClG,oBAAoB,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EAC/F,eAAe,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,OAAO;AAAA;AAAA,EAG3F,oBAAoB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,MAAO;AAAA,EAChG,oBAAoB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,MAAO;AAAA;AAAA,EAGhG,kBAAkB,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EAC7F,mBAAmB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,MAAM;AAAA,EAC9F,kBAAkB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,KAAK;AAAA;AAAA,EAG5F,8BAA8B,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EACzG,6BAA6B,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,MAAM;AAAA,EACvG,mBAAmB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,MAAM;AAAA,EAC9F,mBAAmB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,MAAO;AAAA;AAAA,EAG/F,uBAAuB,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,MAAM;AAAA,EACjG,uBAAuB,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,MAAM;AAAA;AAAA,EAGjG,oBAAoB,EAAE,gBAAgB,MAAM,mBAAmB,MAAM,kBAAkB,MAAM;AAAA,EAC7F,oBAAoB,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,OAAO;AAAA,EAC/F,oBAAoB,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,MAAM;AAAA;AAAA,EAG9F,kCAAkC,EAAE,gBAAgB,MAAM,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EAC5G,8BAA8B,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EACzG,mCAAmC,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EAC9G,mCAAmC,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EAC9G,6BAA6B,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,OAAO;AAAA;AAAA,EAGzG,sBAAsB,EAAE,gBAAgB,MAAM,mBAAmB,MAAM,kBAAkB,IAAQ;AAAA,EACjG,uBAAuB,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,IAAQ;AAAA,EACnG,mBAAmB,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,IAAO;AAAA,EAC9F,oBAAoB,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,IAAO;AAAA,EAC/F,qBAAqB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,MAAO;AAAA;AAAA,EAGjG,sBAAsB,EAAE,gBAAgB,MAAM,mBAAmB,MAAM,kBAAkB,MAAO;AAAA,EAChG,4BAA4B,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA,EACvG,mBAAmB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,MAAO;AAAA;AAAA,EAG/F,qBAAqB,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,MAAO;AAAA;AAAA,EAGhG,sBAAsB,EAAE,gBAAgB,MAAM,mBAAmB,OAAO,kBAAkB,QAAQ;AAAA,EAClG,qBAAqB,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,IAAQ;AAAA;AAAA,EAGjG,mBAAmB,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA;AAAA,EAG9F,+BAA+B,EAAE,gBAAgB,OAAO,mBAAmB,MAAM,kBAAkB,OAAO;AAAA;AAAA,EAG1G,mBAAmB,EAAE,gBAAgB,OAAO,mBAAmB,OAAO,kBAAkB,MAAM;AAChG;AAMO,IAAM,qBAAqC;AAAA,EAChD,gBAAgB;AAAA,EAChB,mBAAmB;AAAA,EACnB,kBAAkB;AACpB;AASO,SAAS,aA
Aa,OAA+B;AAE1D,QAAM,iBAAiB,OAAO,KAAK,oBAAoB,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,SAAS,EAAE,MAAM;AAE3F,aAAW,UAAU,gBAAgB;AACnC,QAAI,MAAM,WAAW,MAAM,GAAG;AAC5B,aAAO,qBAAqB,MAAM;AAAA,IACpC;AAAA,EACF;AAEA,SAAO;AACT;AAQO,SAAS,oBAAoB,OAAwB;AAC1D,SAAO,aAAa,KAAK,EAAE;AAC7B;AAQO,SAAS,uBAAuB,OAAwB;AAC7D,SAAO,aAAa,KAAK,EAAE;AAC7B;;;AD3MA,SAAS,uBAAuB,OAAuB;AACrD,QAAM,aAAa,MAAM,QAAQ,GAAG;AACpC,SAAO,aAAa,IAAI,MAAM,UAAU,GAAG,UAAU,IAAI;AAC3D;AA8BO,IAAM,oBAAN,MAA+C;AAAA,EAC3C;AAAA,EACA;AAAA,EAED;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAER,YAAY,QAAwB;AAClC,SAAK,SAAS;AACd,SAAK,YAAY,aAAa,OAAO,KAAK;AAE1C,UAAM,SAAS,uBAAuB,OAAO,KAAK;AAClD,SAAK,OAAO,GAAG,MAAM,IAAI,OAAO,KAAK;AACrC,SAAK,aAAa,IAAI,iBAAiB;AAEvC,SAAK,eAAe;AAAA,MAClB,0BAA0B;AAAA;AAAA,MAC1B,mBAAmB;AAAA;AAAA,MACnB,gBAAgB,KAAK,UAAU;AAAA,MAC/B,cAAc,KAAK,UAAU;AAAA;AAAA,MAC7B,aAAa;AAAA,MACb,YAAY;AAAA,MACZ,kBAAkB,KAAK,UAAU,oBAAoB;AAAA,IACvD;AAGA,SAAK,SAAS;AAAA,MACZ,GAAG;AAAA,MACH,GAAI,OAAO,UAAU,CAAC;AAAA,IACxB;AAAA,EACF;AAAA,EAEA,MAAM,aAAgB,QAWM;AAC1B,UAAM,YAAY,KAAK,IAAI;AAG3B,QAAI;AACJ,QAAI,OAAO,OAAO;AAChB,iBAAW,OAAO;AAAA,IACpB,WAAW,OAAO,QAAQ;AACxB,UAAI,OAAO,OAAO,WAAW,UAAU;AACrC,mBAAW,EAAE,MAAM,OAAO,OAAO;AAAA,MACnC,OAAO;AACL,mBAAW,OAAO;AAAA,MACpB;AAAA,IACF,OAAO;AACL,iBAAW,EAAE,MAAM,GAAG;AAAA,IACxB;AAGA,UAAM,OAAO,OAAO,SAAS,OAAO,SAAS,WAAW;AAGxD,QAAI,SAAS,YAAY,CAAC,OAAO,QAAQ;AACvC,YAAM,IAAI,MAAM,0CAA0C;AAAA,IAC5D;AAGA,UAAM,iBAAiB,MAAM,KAAK,gBAAgB,QAAQ;AAG1D,UAAM,oBAAoB,OAAO,wBAAwB,SAAS,OAAO;AACzE,QAAI,gBAAgB;AAEpB,QAAI,mBAAmB;AACrB,YAAM,aAAa,KAAK,WAAW,mBAAmB,OAAO,MAAO;AACpE,YAAM,eAAe;AAAA,QACnB;AAAA,QACA,eAAe,QAAQ;AAAA,MACzB;AACA,sBAAgB;AAAA,QACd,GAAG;AAAA,QACH,MAAM;AAAA,MACR;AAAA,IACF,WAAW,SAAS,WAAW;AAI7B,YAAM,OAAO,eAAe,QAAQ;AACpC,YAAM,gBAAgB,CAAC,KAAK,YAAY,EAAE,SAAS,MAAM;AACzD,UAAI,eAAe;AACjB,wBAAgB;AAAA,UACd,GAAG;AAAA,UACH,MAAM,GAAG,IAAI;AAAA;AAAA;AAAA,QACf;AAAA,MACF;AAAA,IACF;AAGA,UAAM,WAAW,MAAM,KAAK,cAAc,aAAa;AAGvD,UAAM,cAAuC;AAAA,MAC3C,OAAO,KAAK,OAAO;AAAA,MACnB;AAAA,MACA,YAAY,OAAO,cAAc;AAAA,MACjC,QAAQ;AAAA;AAAA,MAER,OAAO,EAAE,SAAS,KAAK;AAAA;AAAA,MAEvB,SAAS,CAAC,EAAE,IAAI,mBAAmB,CAAC;AAAA,IACtC;AAEA,QAAI,SAAS,WAAW;AAEtB,kBAAY,kBAAkB,EAAE,MAAM,cAAc;AAEpD,UAAI,QAAQ,IAAI,iBAAiB;AAC/B,gBAAQ,IAAI,2DAA2D;AAAA,MACzE;AAAA,IACF,OAAO;AAKL,YAAM,SAAS,KAAK,WAAW,eAAe,OAAO,MAAO;AAG5D,WAAK,mBAAmB,MAAiC;AAEzD,UAAI,QAAQ,IAAI,iBAAiB;AAC/B,gBAAQ,IAAI,0DAA0D;AAAA,MACxE;AAEA,kBAAY,kBAAkB;AAAA,QAC5B,MAAM;AAAA,QACN,aAAa;AAAA,UACX,MAAM;AAAA,UACN;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAGA,QAAI,OAAO,aAAa,KAAK,UAAU,mBAAmB;AACxD,kBAAY,YAAY,KAAK,qBAAqB,OAAO,SAAS;AAAA,IACpE;AAGA,UAAM,UAAkC;AAAA,MACtC,gBAAgB;AAAA,MAChB,iBAAiB,UAAU,KAAK,OAAO,MAAM;AAAA,MAC7C,gBAAgB;AAAA,MAChB,WAAW;AAAA,IACb;AAEA,UAAM,WAAW,MAAM;AAAA,MACrB;AAAA,MACA;AAAA,QACE,QAAQ;AAAA,QACR;AAAA,QACA,MAAM,KAAK,UAAU,WAAW;AAAA,MAClC;AAAA,MACA,KAAK,OAAO;AAAA,IACd;AAEA,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,QAAQ,MAAM,SAAS,KAAK;AAClC,YAAM,IAAI,MAAM,yBAAyB,SAAS,MAAM,MAAM,KAAK,EAAE;AAAA,IACvE;AAEA,UAAM,OAAO,MAAM,SAAS,KAAK;AAyBjC,UAAM,YAAY,KAAK,IAAI,IAAI;AAG/B,UAAM,UAAU,KAAK,UAAU,CAAC,GAAG;AACnC,UAAM,UAAU,SAAS,WAAW;AACpC,UAAM,SAAS,cAAc,OAAO;AAGpC,UAAM,YAAY,SAAS;AAC3B,UAAM,oBAAoB,SAAS;AAGnC,UAAM,UAAU,KAAK,OAAO,cAAc,KAAK,OAAO;AAEtD,UAAM,SAAS,uBAAuB,KAAK,OAAO,KAAK;AAOvD,UAAM,uBACJ,KAAK,OAAO,uBAAuB,iBACnC,KAAK,OAAO,iBACZ,KAAK,OAAO;AACd,UAAM,2BACJ,KAAK,OAAO,uBAAuB,sBACnC,KAAK,OAAO;AACd,UAAM,cAAc,KAAK,OAAO;AAChC,UAAM,sBAAsB,sBAAsB,QAAQ,aAAa,oBAAoB;AAE3F,WAAO;AAAA,MACL,MAAM;AAAA,MACN,SAAS;AAAA,MACT,SAAS;AAAA,QACP;AAAA,QACA;AAAA,QACA,cAAc,KAAK,OAAO;AAAA,QAC1B;AAAA,QACA,eAAe;AAAA,QACf,UAAU;AAAA,QACV,OAAO,KAAK,OAAO;AAAA,QACnB,YAAY,KAAK;AAAA,QACjB,WAAW,KAAK;AAAA,QAChB;AAAA
,QACA;AAAA,QACA;AAAA,MACF;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,gBAAgB,OAAkD;AAE9E,QAAI,CAAC,OAAO;AACV,aAAO,EAAE,MAAM,GAAG;AAAA,IACpB;AAEA,QAAI,CAAC,MAAM,MAAM,UAAU,CAAC,KAAK,UAAU,gBAAgB;AACzD,aAAO;AAAA,IACT;AAGA,UAAM,YAAY,MAAM,KAAK,oBAAoB,MAAM,IAAI;AAE3D,WAAO;AAAA,MACL,GAAG;AAAA,MACH,QAAQ,CAAC,GAAI,MAAM,UAAU,CAAC,GAAI,GAAG,SAAS;AAAA,MAC9C,MAAM;AAAA;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,oBAAoB,MAAyC;AACzE,UAAM,SAAuB,CAAC;AAG9B,UAAM,EAAE,IAAI,IAAI,MAAM,OAAO,YAAY;AAEzC,eAAW,YAAY,MAAM;AAC3B,UAAI;AAEJ,UAAI,SAAS,QAAQ;AAEnB,cAAM,aAAa,KAAK,cAAc,SAAS,MAAM;AACrD,oBAAY,OAAO,KAAK,YAAY,QAAQ;AAAA,MAC9C,WAAW,SAAS,KAAK;AAEvB,cAAM,WAAW,MAAM,iBAAiB,SAAS,KAAK,CAAC,GAAG,KAAK,OAAO,eAAe;AACrF,YAAI,CAAC,SAAS,IAAI;AAChB,gBAAM,IAAI,MAAM,4BAA4B,SAAS,GAAG,KAAK,SAAS,MAAM,EAAE;AAAA,QAChF;AACA,cAAM,cAAc,MAAM,SAAS,YAAY;AAC/C,oBAAY,OAAO,KAAK,WAAW;AAAA,MACrC,OAAO;AACL;AAAA,MACF;AAGA,YAAM,QAAQ,MAAM,IAAI,WAAW,EAAE,OAAO,EAAE,CAAC;AAE/C,uBAAiB,QAAQ,OAAO;AAC9B,eAAO,KAAK;AAAA,UACV,QAAQ,KAAK,SAAS,QAAQ;AAAA,UAC9B,UAAU;AAAA,QACZ,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,cAAc,OAAiJ;AAC3K,UAAM,WAA2H,CAAC;AAGlI,QAAI,MAAM,cAAc;AACtB,eAAS,KAAK,EAAE,MAAM,UAAU,SAAS,MAAM,aAAa,CAAC;AAAA,IAC/D;AAGA,UAAM,UAA+E,CAAC;AAGtF,QAAI,MAAM,MAAM;AACd,cAAQ,KAAK,EAAE,MAAM,QAAQ,MAAM,MAAM,KAAK,CAAC;AAAA,IACjD;AAGA,QAAI,MAAM,UAAU,MAAM,OAAO,SAAS,GAAG;AAC3C,iBAAW,SAAS,MAAM,QAAQ;AAChC,YAAI,MAAM,KAAK;AACb,kBAAQ,KAAK;AAAA,YACX,MAAM;AAAA,YACN,WAAW,EAAE,KAAK,MAAM,IAAI;AAAA,UAC9B,CAAC;AAAA,QACH,WAAW,MAAM,QAAQ;AACvB,kBAAQ,KAAK;AAAA,YACX,MAAM;AAAA,YACN,WAAW;AAAA,cACT,KAAK,QAAQ,MAAM,QAAQ,WAAW,KAAK,cAAc,MAAM,MAAM,CAAC;AAAA,YACxE;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAEA,aAAS,KAAK,EAAE,MAAM,QAAQ,QAAQ,CAAC;AACvC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,qBAAqB,WAAqD;AAChF,UAAM,SAAkC,CAAC;AAGzC,QAAI,UAAU,QAAQ;AACpB,aAAO,SAAS,UAAU;AAAA,IAC5B,WAAW,UAAU,SAAS;AAC5B,aAAO,SAAS;AAAA,IAClB;AAGA,QAAI,UAAU,YAAY,QAAW;AACnC,aAAO,UAAU,UAAU;AAAA,IAC7B;AAEA,WAAO,OAAO,KAAK,MAAM,EAAE,SAAS,IAAI,SAAS,CAAC;AAAA,EACpD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOQ,mBAAmB,KAAoC;AAC7D,QAAI,OAAO,OAAO,QAAQ,UAAU;AAClC,UAAI,IAAI,SAAS,YAAY,IAAI,YAAY;AAC3C,cAAM,aAAa,IAAI;AACvB,cAAM,WAAW,OAAO,KAAK,UAAU;AACvC,YAAI,WAAW;AACf,YAAI,uBAAuB;AAG3B,mBAAW,OAAO,UAAU;AAC1B,eAAK,mBAAmB,WAAW,GAAG,CAA4B;AAAA,QACpE;AAAA,MACF,WAAW,IAAI,SAAS,WAAW,IAAI,OAAO;AAC5C,aAAK,mBAAmB,IAAI,KAAgC;AAAA,MAC9D;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,cAAc,OAAuB;AAC3C,QAAI,MAAM,WAAW,OAAO,GAAG;AAC7B,YAAM,aAAa,MAAM,MAAM,GAAG,EAAE,CAAC;AACrC,UAAI,CAAC,YAAY;AACf,cAAM,IAAI,MAAM,4BAA4B,MAAM,UAAU,GAAG,EAAE,CAAC,EAAE;AAAA,MACtE;AACA,aAAO;AAAA,IACT;AACA,WAAO;AAAA,EACT;AACF;;;AE3cA,SAAS,wBAAwB;AAGjC,iBAAiB,cAAc,CAAC,WAAW,IAAI,kBAAkB,MAAM,CAAC;","names":[]}
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@doclo/providers-generic-or",
-  "version": "0.1.0",
+  "version": "0.1.2",
   "description": "Generic OpenRouter provider for open-source models (Qwen, Llama, DeepSeek, GLM, Kimi, etc.)",
   "license": "MIT",
   "repository": {
@@ -24,8 +24,8 @@
   ],
   "dependencies": {
     "pdf-to-img": "^4.2.0",
-    "@doclo/core": "^0.
-    "@doclo/providers-llm": "^0.1.
+    "@doclo/core": "^0.2.2",
+    "@doclo/providers-llm": "^0.1.11"
   },
   "devDependencies": {
     "@types/node": "^20",