@doclo/providers-generic-or 0.1.0
This diff represents the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
- package/LICENSE +21 -0
- package/dist/index.d.ts +119 -0
- package/dist/index.js +455 -0
- package/dist/index.js.map +1 -0
- package/package.json +39 -0
package/LICENSE
ADDED
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2025 Doclo AI
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
package/dist/index.d.ts
ADDED
@@ -0,0 +1,119 @@
+import { LLMProvider, ProviderCapabilities, ProviderConfig, MultimodalInput, UnifiedSchema, JsonMode, ReasoningConfig, LLMResponse } from '@doclo/providers-llm';
+
+/**
+ * GenericORProvider - A universal provider for any OpenRouter model.
+ *
+ * Works with all OpenRouter models using OpenAI-compatible API format.
+ * Supports Qwen, Llama, DeepSeek, GLM, Kimi, Mistral, and any other model
+ * available through OpenRouter.
+ *
+ * Features:
+ * - Automatic capability detection for known model families
+ * - Response healing plugin for better JSON reliability
+ * - PDF-to-image conversion for VLM models
+ * - Reasoning token extraction for supported models
+ *
+ * @example
+ * ```typescript
+ * const provider = new GenericORProvider({
+ *   provider: 'generic-or',
+ *   model: 'qwen/qwen3-235b-a22b',
+ *   apiKey: process.env.OPENROUTER_API_KEY!,
+ * });
+ *
+ * const result = await provider.completeJson({
+ *   input: { text: "Extract the main topics from this text..." },
+ *   schema: TopicsSchema,
+ *   mode: 'strict',
+ * });
+ * ```
+ */
+declare class GenericORProvider implements LLMProvider {
+    readonly name: string;
+    readonly capabilities: ProviderCapabilities;
+    private config;
+    private translator;
+    private limits;
+    private modelInfo;
+    constructor(config: ProviderConfig);
+    completeJson<T>(params: {
+        input?: MultimodalInput;
+        prompt?: string | MultimodalInput;
+        schema?: UnifiedSchema<T>;
+        mode?: JsonMode;
+        max_tokens?: number;
+        reasoning?: ReasoningConfig;
+        embedSchemaInPrompt?: boolean;
+    }): Promise<LLMResponse<T>>;
+    /**
+     * Preprocess input to convert PDFs to images for VLM models.
+     */
+    private preprocessInput;
+    /**
+     * Convert PDFs to images using pdf-to-img.
+     */
+    private convertPDFsToImages;
+    /**
+     * Build OpenAI-compatible message format.
+     */
+    private buildMessages;
+    /**
+     * Build reasoning configuration for models that support it.
+     */
+    private buildReasoningConfig;
+    /**
+     * Recursively fix schema for strict mode requirements.
+     * - All properties must be required
+     * - additionalProperties must be false
+     */
+    private fixSchemaRecursive;
+    /**
+     * Extract base64 data from a data URL or return as-is if already raw base64.
+     */
+    private extractBase64;
+}
+
+/**
+ * Known model capabilities for accurate reporting.
+ * Unknown models get sensible defaults and work with OpenRouter.
+ */
+interface KnownModelInfo {
+    supportsVision: boolean;
+    supportsReasoning: boolean;
+    maxContextTokens?: number;
+}
+/**
+ * Prefix-based matching for model families.
+ * More specific prefixes should come before less specific ones.
+ * Data sourced from OpenRouter API: https://openrouter.ai/api/v1/models
+ */
+declare const KNOWN_MODEL_PREFIXES: Record<string, KnownModelInfo>;
+/**
+ * Default capabilities for unknown models.
+ * Assumes a basic text-only model with reasonable defaults.
+ */
+declare const DEFAULT_MODEL_INFO: KnownModelInfo;
+/**
+ * Get model capabilities from registry or return defaults.
+ * Uses prefix matching - more specific prefixes should be listed first.
+ *
+ * @param model - Full model ID (e.g., "qwen/qwen3-235b-a22b")
+ * @returns Model capabilities (known or default)
+ */
+declare function getModelInfo(model: string): KnownModelInfo;
+/**
+ * Check if a model is known to support vision.
+ *
+ * @param model - Full model ID
+ * @returns true if model supports vision input
+ */
+declare function modelSupportsVision(model: string): boolean;
+/**
+ * Check if a model is known to support reasoning tokens.
+ *
+ * @param model - Full model ID
+ * @returns true if model supports reasoning/thinking mode
+ */
+declare function modelSupportsReasoning(model: string): boolean;
+
+export { DEFAULT_MODEL_INFO, GenericORProvider, KNOWN_MODEL_PREFIXES, type KnownModelInfo, getModelInfo, modelSupportsReasoning, modelSupportsVision };
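For orientation, a minimal usage sketch assembled from the @example and declarations above; `TopicsSchema` is a placeholder for any `UnifiedSchema<T>`, and the result fields read at the end are the ones this provider populates (see dist/index.js below).

```typescript
import { GenericORProvider } from '@doclo/providers-generic-or';

const provider = new GenericORProvider({
  provider: 'generic-or',
  model: 'qwen/qwen3-235b-a22b',
  apiKey: process.env.OPENROUTER_API_KEY!,
});

// Strict mode sends the schema via response_format (json_schema);
// omitting both schema and mode falls back to relaxed json_object mode.
const result = await provider.completeJson({
  input: { text: 'Extract the main topics from this text...' },
  schema: TopicsSchema, // placeholder: any UnifiedSchema<T>
  mode: 'strict',
});

console.log(result.json);            // parsed, schema-shaped output
console.log(result.metrics.costUSD); // OpenRouter-reported cost, when present
```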
package/dist/index.js
ADDED
@@ -0,0 +1,455 @@
+// src/generic-or.ts
+import { SchemaTranslator, combineSchemaAndUserPrompt } from "@doclo/providers-llm";
+import { fetchWithTimeout, DEFAULT_LIMITS, safeJsonParse } from "@doclo/core/security";
+
+// src/known-models.ts
+var KNOWN_MODEL_PREFIXES = {
+  // Qwen models - VL variants support vision, thinking variants support reasoning
+  "qwen/qwen3-vl-235b-a22b-thinking": { supportsVision: true, supportsReasoning: true, maxContextTokens: 262144 },
+  "qwen/qwen3-vl-235b-a22b": { supportsVision: true, supportsReasoning: false, maxContextTokens: 262144 },
+  "qwen/qwen3-vl-30b-a3b-thinking": { supportsVision: true, supportsReasoning: true, maxContextTokens: 131072 },
+  "qwen/qwen3-vl-30b-a3b": { supportsVision: true, supportsReasoning: false, maxContextTokens: 262144 },
+  "qwen/qwen3-vl-8b-thinking": { supportsVision: true, supportsReasoning: true, maxContextTokens: 256e3 },
+  "qwen/qwen3-vl-8b": { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },
+  "qwen/qwen3-vl": { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },
+  "qwen/qwen-vl-max": { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },
+  "qwen/qwen-vl-plus": { supportsVision: true, supportsReasoning: false, maxContextTokens: 7500 },
+  "qwen/qwen2.5-vl": { supportsVision: true, supportsReasoning: false, maxContextTokens: 32768 },
+  "qwen/qwq": { supportsVision: false, supportsReasoning: true, maxContextTokens: 32768 },
+  "qwen/qwen3-235b-a22b-thinking": { supportsVision: false, supportsReasoning: true, maxContextTokens: 262144 },
+  "qwen/qwen3-235b-a22b": { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },
+  "qwen/qwen3-30b-a3b-thinking": { supportsVision: false, supportsReasoning: true, maxContextTokens: 32768 },
+  "qwen/qwen3-30b-a3b": { supportsVision: false, supportsReasoning: true, maxContextTokens: 262144 },
+  "qwen/qwen3-next-80b-a3b-thinking": { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },
+  "qwen/qwen3-next-80b-a3b": { supportsVision: false, supportsReasoning: false, maxContextTokens: 262144 },
+  "qwen/qwen3-coder": { supportsVision: false, supportsReasoning: true, maxContextTokens: 262144 },
+  "qwen/qwen3-max": { supportsVision: false, supportsReasoning: false, maxContextTokens: 256e3 },
+  "qwen/qwen3-32b": { supportsVision: false, supportsReasoning: true, maxContextTokens: 40960 },
+  "qwen/qwen3-14b": { supportsVision: false, supportsReasoning: true, maxContextTokens: 40960 },
+  "qwen/qwen3-8b": { supportsVision: false, supportsReasoning: true, maxContextTokens: 128e3 },
+  "qwen/qwen3-4b": { supportsVision: false, supportsReasoning: true, maxContextTokens: 40960 },
+  "qwen/qwen-plus-2025-07-28:thinking": { supportsVision: false, supportsReasoning: true, maxContextTokens: 1e6 },
+  "qwen/qwen-plus": { supportsVision: false, supportsReasoning: false, maxContextTokens: 1e6 },
+  "qwen/qwen-turbo": { supportsVision: false, supportsReasoning: false, maxContextTokens: 1e6 },
+  "qwen/qwen-max": { supportsVision: false, supportsReasoning: false, maxContextTokens: 32768 },
+  "qwen/qwen-2.5": { supportsVision: false, supportsReasoning: false, maxContextTokens: 32768 },
+  // Llama 4 models (vision capable)
+  "meta-llama/llama-4-maverick": { supportsVision: true, supportsReasoning: false, maxContextTokens: 1048576 },
+  "meta-llama/llama-4-scout": { supportsVision: true, supportsReasoning: false, maxContextTokens: 327680 },
+  "meta-llama/llama-guard-4": { supportsVision: true, supportsReasoning: false, maxContextTokens: 163840 },
+  // Llama 3.x models - 3.2 vision variants support images
+  "meta-llama/llama-3.2-90b-vision": { supportsVision: true, supportsReasoning: false, maxContextTokens: 32768 },
+  "meta-llama/llama-3.2-11b-vision": { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },
+  "meta-llama/llama-3.3": { supportsVision: false, supportsReasoning: false, maxContextTokens: 131072 },
+  "meta-llama/llama-3.2": { supportsVision: false, supportsReasoning: false, maxContextTokens: 131072 },
+  "meta-llama/llama-3.1": { supportsVision: false, supportsReasoning: false, maxContextTokens: 131072 },
+  "meta-llama/llama-3": { supportsVision: false, supportsReasoning: false, maxContextTokens: 8192 },
+  // DeepSeek models - all support reasoning except base chat
+  "deepseek/deepseek-r1": { supportsVision: false, supportsReasoning: true, maxContextTokens: 163840 },
+  "deepseek/deepseek-v3": { supportsVision: false, supportsReasoning: true, maxContextTokens: 163840 },
+  "deepseek/deepseek-chat-v3": { supportsVision: false, supportsReasoning: true, maxContextTokens: 163840 },
+  "deepseek/deepseek-chat": { supportsVision: false, supportsReasoning: false, maxContextTokens: 163840 },
+  "deepseek/deepseek-prover": { supportsVision: false, supportsReasoning: false, maxContextTokens: 163840 },
+  // GLM models (Zhipu AI / Z.AI)
+  "z-ai/glm-4.6v": { supportsVision: true, supportsReasoning: true, maxContextTokens: 131072 },
+  "z-ai/glm-4.5v": { supportsVision: true, supportsReasoning: true, maxContextTokens: 65536 },
+  "z-ai/glm-4.6": { supportsVision: false, supportsReasoning: true, maxContextTokens: 202752 },
+  "z-ai/glm-4.5-air": { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },
+  "z-ai/glm-4.5": { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },
+  "z-ai/glm-4-32b": { supportsVision: false, supportsReasoning: false, maxContextTokens: 128e3 },
+  "thudm/glm-4.1v-9b-thinking": { supportsVision: true, supportsReasoning: true, maxContextTokens: 65536 },
+  "thudm/glm": { supportsVision: false, supportsReasoning: false, maxContextTokens: 65536 },
+  // Kimi / Moonshot models
+  "moonshotai/kimi-dev-72b": { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },
+  "moonshotai/kimi-k2-thinking": { supportsVision: false, supportsReasoning: true, maxContextTokens: 262144 },
+  "moonshotai/kimi-k2-0905": { supportsVision: false, supportsReasoning: false, maxContextTokens: 262144 },
+  "moonshotai/kimi-k2": { supportsVision: false, supportsReasoning: false, maxContextTokens: 131072 },
+  // Mistral models - Mistral 3.x family supports vision
+  "mistralai/mistral-large-2512": { supportsVision: true, supportsReasoning: false, maxContextTokens: 262144 },
+  "mistralai/mistral-medium-3.1": { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },
+  "mistralai/mistral-medium-3": { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },
+  "mistralai/mistral-small-3.2": { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },
+  "mistralai/mistral-small-3.1": { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },
+  "mistralai/ministral-14b-2512": { supportsVision: true, supportsReasoning: false, maxContextTokens: 262144 },
+  "mistralai/ministral-8b-2512": { supportsVision: true, supportsReasoning: false, maxContextTokens: 262144 },
+  "mistralai/ministral-3b-2512": { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },
+  "mistralai/pixtral-large": { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },
+  "mistralai/pixtral-12b": { supportsVision: true, supportsReasoning: false, maxContextTokens: 32768 },
+  "mistralai/pixtral": { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },
+  "mistralai/devstral": { supportsVision: false, supportsReasoning: false, maxContextTokens: 262144 },
+  "mistralai/codestral": { supportsVision: false, supportsReasoning: false, maxContextTokens: 256e3 },
+  "mistralai/mistral-large": { supportsVision: false, supportsReasoning: false, maxContextTokens: 131072 },
+  "mistralai/mistral-small": { supportsVision: false, supportsReasoning: false, maxContextTokens: 32768 },
+  "mistralai/mistral-nemo": { supportsVision: false, supportsReasoning: false, maxContextTokens: 131072 },
+  "mistralai/mixtral": { supportsVision: false, supportsReasoning: false, maxContextTokens: 65536 },
+  "mistralai/mistral-7b": { supportsVision: false, supportsReasoning: false, maxContextTokens: 32768 },
+  // xAI Grok models - Grok 4 supports vision and reasoning
+  "x-ai/grok-4.1-fast": { supportsVision: true, supportsReasoning: true, maxContextTokens: 2e6 },
+  "x-ai/grok-4-fast": { supportsVision: true, supportsReasoning: true, maxContextTokens: 2e6 },
+  "x-ai/grok-4": { supportsVision: true, supportsReasoning: true, maxContextTokens: 256e3 },
+  "x-ai/grok-code-fast": { supportsVision: false, supportsReasoning: true, maxContextTokens: 256e3 },
+  "x-ai/grok-3-mini": { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },
+  "x-ai/grok-3": { supportsVision: false, supportsReasoning: false, maxContextTokens: 131072 },
+  // Cohere Command models
+  "cohere/command-a": { supportsVision: false, supportsReasoning: false, maxContextTokens: 256e3 },
+  "cohere/command-r": { supportsVision: false, supportsReasoning: false, maxContextTokens: 128e3 },
+  // Gemma models (Google open source) - Gemma 3 supports vision
+  "google/gemma-3": { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },
+  "google/gemma-3n": { supportsVision: false, supportsReasoning: false, maxContextTokens: 32768 },
+  "google/gemma-2": { supportsVision: false, supportsReasoning: false, maxContextTokens: 8192 },
+  // Phi models (Microsoft) - phi-4-multimodal supports vision, phi-4-reasoning supports reasoning
+  "microsoft/phi-4-multimodal": { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },
+  "microsoft/phi-4-reasoning": { supportsVision: false, supportsReasoning: true, maxContextTokens: 32768 },
+  "microsoft/phi-4": { supportsVision: false, supportsReasoning: false, maxContextTokens: 16384 },
+  "microsoft/phi-3": { supportsVision: false, supportsReasoning: false, maxContextTokens: 128e3 },
+  // OpenGVLab InternVL
+  "opengvlab/internvl3": { supportsVision: true, supportsReasoning: false, maxContextTokens: 32768 },
+  "opengvlab/internvl2": { supportsVision: true, supportsReasoning: false, maxContextTokens: 32768 },
+  // StepFun AI - step3 supports both vision and reasoning
+  "stepfun-ai/step3": { supportsVision: true, supportsReasoning: true, maxContextTokens: 65536 },
+  "stepfun-ai/step2": { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },
+  "stepfun-ai/step1": { supportsVision: true, supportsReasoning: false, maxContextTokens: 32768 },
+  // NVIDIA Nemotron - VL and ultra models support reasoning
+  "nvidia/nemotron-nano-12b-v2-vl": { supportsVision: true, supportsReasoning: true, maxContextTokens: 131072 },
+  "nvidia/nemotron-nano-9b-v2": { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },
+  "nvidia/llama-3.3-nemotron-super": { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },
+  "nvidia/llama-3.1-nemotron-ultra": { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },
+  "nvidia/llama-3.1-nemotron": { supportsVision: false, supportsReasoning: false, maxContextTokens: 131072 },
+  // Amazon Nova models - vision capable
+  "amazon/nova-2-lite": { supportsVision: true, supportsReasoning: true, maxContextTokens: 1e6 },
+  "amazon/nova-premier": { supportsVision: true, supportsReasoning: false, maxContextTokens: 1e6 },
+  "amazon/nova-pro": { supportsVision: true, supportsReasoning: false, maxContextTokens: 3e5 },
+  "amazon/nova-lite": { supportsVision: true, supportsReasoning: false, maxContextTokens: 3e5 },
+  "amazon/nova-micro": { supportsVision: false, supportsReasoning: false, maxContextTokens: 128e3 },
+  // Baidu ERNIE models - VL variants support vision
+  "baidu/ernie-4.5-vl": { supportsVision: true, supportsReasoning: true, maxContextTokens: 123e3 },
+  "baidu/ernie-4.5-thinking": { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },
+  "baidu/ernie-4.5": { supportsVision: false, supportsReasoning: false, maxContextTokens: 123e3 },
+  // ByteDance models
+  "bytedance/ui-tars": { supportsVision: true, supportsReasoning: false, maxContextTokens: 128e3 },
+  // MiniMax models
+  "minimax/minimax-01": { supportsVision: true, supportsReasoning: false, maxContextTokens: 1000192 },
+  "minimax/minimax-m": { supportsVision: false, supportsReasoning: true, maxContextTokens: 1e6 },
+  // Tencent Hunyuan
+  "tencent/hunyuan": { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },
+  // Alibaba Tongyi
+  "alibaba/tongyi-deepresearch": { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },
+  // Other notable models
+  "databricks/dbrx": { supportsVision: false, supportsReasoning: false, maxContextTokens: 32768 }
+};
+var DEFAULT_MODEL_INFO = {
+  supportsVision: false,
+  supportsReasoning: false,
+  maxContextTokens: 32768
+};
+function getModelInfo(model) {
+  const sortedPrefixes = Object.keys(KNOWN_MODEL_PREFIXES).sort((a, b) => b.length - a.length);
+  for (const prefix of sortedPrefixes) {
+    if (model.startsWith(prefix)) {
+      return KNOWN_MODEL_PREFIXES[prefix];
+    }
+  }
+  return DEFAULT_MODEL_INFO;
+}
+function modelSupportsVision(model) {
+  return getModelInfo(model).supportsVision;
+}
+function modelSupportsReasoning(model) {
+  return getModelInfo(model).supportsReasoning;
+}
+
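As an aside, a quick sketch of how the registry above resolves model IDs: getModelInfo sorts the prefixes longest-first, so a "-thinking" variant matches its own entry before the shorter base prefix, and unregistered IDs fall back to DEFAULT_MODEL_INFO. The model IDs below are illustrative.

```typescript
// Longest prefix wins: resolves to "qwen/qwen3-vl-30b-a3b-thinking",
// not the shorter "qwen/qwen3-vl-30b-a3b" entry.
getModelInfo('qwen/qwen3-vl-30b-a3b-thinking');
// => { supportsVision: true, supportsReasoning: true, maxContextTokens: 131072 }

// Versioned or suffixed releases still match their family prefix.
getModelInfo('deepseek/deepseek-r1-0528');
// => { supportsVision: false, supportsReasoning: true, maxContextTokens: 163840 }

// Unknown models get the conservative text-only defaults.
getModelInfo('acme/brand-new-model');
// => { supportsVision: false, supportsReasoning: false, maxContextTokens: 32768 }
```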
+// src/generic-or.ts
+function extractVendorFromModel(model) {
+  const slashIndex = model.indexOf("/");
+  return slashIndex > 0 ? model.substring(0, slashIndex) : "generic";
+}
+var GenericORProvider = class {
+  name;
+  capabilities;
+  config;
+  translator;
+  limits;
+  modelInfo;
+  constructor(config) {
+    this.config = config;
+    this.modelInfo = getModelInfo(config.model);
+    const vendor = extractVendorFromModel(config.model);
+    this.name = `${vendor}:${config.model}`;
+    this.translator = new SchemaTranslator();
+    this.capabilities = {
+      supportsStructuredOutput: true,
+      // All OpenRouter models support this via response_format
+      supportsStreaming: false,
+      // Not with structured outputs
+      supportsImages: this.modelInfo.supportsVision,
+      supportsPDFs: this.modelInfo.supportsVision,
+      // Via image conversion
+      maxPDFPages: void 0,
+      maxPDFSize: void 0,
+      maxContextTokens: this.modelInfo.maxContextTokens || 32768
+    };
+    this.limits = {
+      ...DEFAULT_LIMITS,
+      ...config.limits || {}
+    };
+  }
+  async completeJson(params) {
+    const startTime = Date.now();
+    let rawInput;
+    if (params.input) {
+      rawInput = params.input;
+    } else if (params.prompt) {
+      if (typeof params.prompt === "string") {
+        rawInput = { text: params.prompt };
+      } else {
+        rawInput = params.prompt;
+      }
+    } else {
+      rawInput = { text: "" };
+    }
+    const mode = params.mode || (params.schema ? "strict" : "relaxed");
+    if (mode === "strict" && !params.schema) {
+      throw new Error('schema is required when mode is "strict"');
+    }
+    const processedInput = await this.preprocessInput(rawInput);
+    const shouldEmbedSchema = params.embedSchemaInPrompt !== false && params.schema;
+    let enhancedInput = processedInput;
+    if (shouldEmbedSchema) {
+      const jsonSchema = this.translator.convertZodIfNeeded(params.schema);
+      const enhancedText = combineSchemaAndUserPrompt(
+        jsonSchema,
+        processedInput.text || ""
+      );
+      enhancedInput = {
+        ...processedInput,
+        text: enhancedText
+      };
+    } else if (mode === "relaxed") {
+      const text = processedInput.text || "";
+      const needsJsonHint = !text.toLowerCase().includes("json");
+      if (needsJsonHint) {
+        enhancedInput = {
+          ...processedInput,
+          text: `${text}
+
+Respond with valid JSON.`
+        };
+      }
+    }
+    const messages = await this.buildMessages(enhancedInput);
+    const requestBody = {
+      model: this.config.model,
+      messages,
+      max_tokens: params.max_tokens || 4096,
+      stream: false,
+      // Enable usage tracking for OpenRouter cost info
+      usage: { include: true },
+      // Enable response healing plugin for better JSON reliability
+      plugins: [{ id: "response-healing" }]
+    };
+    if (mode === "relaxed") {
+      requestBody.response_format = { type: "json_object" };
+      if (process.env.DEBUG_PROVIDERS) {
+        console.log("[GenericORProvider] Using relaxed JSON mode (json_object)");
+      }
+    } else {
+      const schema = this.translator.toOpenAISchema(params.schema);
+      this.fixSchemaRecursive(schema);
+      if (process.env.DEBUG_PROVIDERS) {
+        console.log("[GenericORProvider] Using strict JSON mode (json_schema)");
+      }
+      requestBody.response_format = {
+        type: "json_schema",
+        json_schema: {
+          name: "extraction",
+          schema
+        }
+      };
+    }
+    if (params.reasoning && this.modelInfo.supportsReasoning) {
+      requestBody.reasoning = this.buildReasoningConfig(params.reasoning);
+    }
+    const headers = {
+      "Content-Type": "application/json",
+      "Authorization": `Bearer ${this.config.apiKey}`,
+      "HTTP-Referer": "https://github.com/docloai/sdk",
+      "X-Title": "Doclo SDK"
+    };
+    const response = await fetchWithTimeout(
+      "https://openrouter.ai/api/v1/chat/completions",
+      {
+        method: "POST",
+        headers,
+        body: JSON.stringify(requestBody)
+      },
+      this.limits.REQUEST_TIMEOUT
+    );
+    if (!response.ok) {
+      const error = await response.text();
+      throw new Error(`OpenRouter API error (${response.status}): ${error}`);
+    }
+    const data = await response.json();
+    const latencyMs = Date.now() - startTime;
+    const message = data.choices?.[0]?.message;
+    const content = message?.content ?? "{}";
+    const parsed = safeJsonParse(content);
+    const reasoning = message?.reasoning;
+    const reasoning_details = message?.reasoning_details;
+    const costUSD = data.usage?.total_cost ?? data.usage?.cost;
+    const vendor = extractVendorFromModel(this.config.model);
+    return {
+      json: parsed,
+      rawText: content,
+      metrics: {
+        costUSD,
+        inputTokens: data.usage?.prompt_tokens,
+        outputTokens: data.usage?.completion_tokens,
+        latencyMs,
+        attemptNumber: 1,
+        provider: vendor,
+        model: this.config.model,
+        responseId: data.id,
+        modelUsed: data.model
+      },
+      reasoning,
+      reasoning_details
+    };
+  }
+  /**
+   * Preprocess input to convert PDFs to images for VLM models.
+   */
+  async preprocessInput(input) {
+    if (!input) {
+      return { text: "" };
+    }
+    if (!input.pdfs?.length || !this.modelInfo.supportsVision) {
+      return input;
+    }
+    const pdfImages = await this.convertPDFsToImages(input.pdfs);
+    return {
+      ...input,
+      images: [...input.images || [], ...pdfImages],
+      pdfs: void 0
+      // Consumed
+    };
+  }
+  /**
+   * Convert PDFs to images using pdf-to-img.
+   */
+  async convertPDFsToImages(pdfs) {
+    const images = [];
+    const { pdf } = await import("pdf-to-img");
+    for (const pdfInput of pdfs) {
+      let pdfBuffer;
+      if (pdfInput.base64) {
+        const base64Data = this.extractBase64(pdfInput.base64);
+        pdfBuffer = Buffer.from(base64Data, "base64");
+      } else if (pdfInput.url) {
+        const response = await fetchWithTimeout(pdfInput.url, {}, this.limits.REQUEST_TIMEOUT);
+        if (!response.ok) {
+          throw new Error(`Failed to fetch PDF from ${pdfInput.url}: ${response.status}`);
+        }
+        const arrayBuffer = await response.arrayBuffer();
+        pdfBuffer = Buffer.from(arrayBuffer);
+      } else {
+        continue;
+      }
+      const pages = await pdf(pdfBuffer, { scale: 2 });
+      for await (const page of pages) {
+        images.push({
+          base64: page.toString("base64"),
+          mimeType: "image/png"
+        });
+      }
+    }
+    return images;
+  }
+  /**
+   * Build OpenAI-compatible message format.
+   */
+  async buildMessages(input) {
+    const content = [];
+    if (input.text) {
+      content.push({ type: "text", text: input.text });
+    }
+    if (input.images && input.images.length > 0) {
+      for (const image of input.images) {
+        if (image.url) {
+          content.push({
+            type: "image_url",
+            image_url: { url: image.url }
+          });
+        } else if (image.base64) {
+          content.push({
+            type: "image_url",
+            image_url: {
+              url: `data:${image.mimeType};base64,${this.extractBase64(image.base64)}`
+            }
+          });
+        }
+      }
+    }
+    return [{ role: "user", content }];
+  }
+  /**
+   * Build reasoning configuration for models that support it.
+   */
+  buildReasoningConfig(reasoning) {
+    const config = {};
+    if (reasoning.effort) {
+      config.effort = reasoning.effort;
+    } else if (reasoning.enabled) {
+      config.effort = "medium";
+    }
+    if (reasoning.exclude !== void 0) {
+      config.exclude = reasoning.exclude;
+    }
+    return Object.keys(config).length > 0 ? config : {};
+  }
+  /**
+   * Recursively fix schema for strict mode requirements.
+   * - All properties must be required
+   * - additionalProperties must be false
+   */
+  fixSchemaRecursive(obj) {
+    if (obj && typeof obj === "object") {
+      if (obj.type === "object" && obj.properties) {
+        const properties = obj.properties;
+        const allProps = Object.keys(properties);
+        obj.required = allProps;
+        obj.additionalProperties = false;
+        for (const key of allProps) {
+          this.fixSchemaRecursive(properties[key]);
+        }
+      } else if (obj.type === "array" && obj.items) {
+        this.fixSchemaRecursive(obj.items);
+      }
+    }
+  }
+  /**
+   * Extract base64 data from a data URL or return as-is if already raw base64.
+   */
+  extractBase64(input) {
+    if (input.startsWith("data:")) {
+      const base64Part = input.split(",")[1];
+      if (!base64Part) {
+        throw new Error(`Invalid data URL format: ${input.substring(0, 50)}`);
+      }
+      return base64Part;
+    }
+    return input;
+  }
+};
+
+// src/index.ts
+import { registerProvider } from "@doclo/providers-llm";
+registerProvider("generic-or", (config) => new GenericORProvider(config));
+export {
+  DEFAULT_MODEL_INFO,
+  GenericORProvider,
+  KNOWN_MODEL_PREFIXES,
+  getModelInfo,
+  modelSupportsReasoning,
+  modelSupportsVision
+};
+//# sourceMappingURL=index.js.map
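To make the strict-mode schema handling concrete, here is a sketch of the transformation fixSchemaRecursive applies before the schema is attached to response_format; the input schema is illustrative. Every object node gets all of its properties marked required and additionalProperties: false, recursing into nested properties and array items.

```typescript
const schema = {
  type: 'object',
  properties: {
    title: { type: 'string' },
    tags: {
      type: 'array',
      items: { type: 'object', properties: { name: { type: 'string' } } },
    },
  },
};

// After fixSchemaRecursive(schema), the object is mutated in place to:
// {
//   type: 'object',
//   properties: {
//     title: { type: 'string' },
//     tags: {
//       type: 'array',
//       items: {
//         type: 'object',
//         properties: { name: { type: 'string' } },
//         required: ['name'],
//         additionalProperties: false
//       }
//     }
//   },
//   required: ['title', 'tags'],
//   additionalProperties: false
// }
```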
package/dist/index.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../src/generic-or.ts","../src/known-models.ts","../src/index.ts"],"sourcesContent":["import type {\n LLMProvider,\n ProviderConfig,\n MultimodalInput,\n UnifiedSchema,\n LLMResponse,\n ProviderCapabilities,\n JsonMode,\n ReasoningConfig,\n ImageInput,\n PDFInput,\n ReasoningDetail\n} from \"@doclo/providers-llm\";\nimport { SchemaTranslator, combineSchemaAndUserPrompt } from \"@doclo/providers-llm\";\nimport { fetchWithTimeout, DEFAULT_LIMITS, safeJsonParse } from \"@doclo/core/security\";\nimport { getModelInfo, type KnownModelInfo } from \"./known-models.js\";\n\n/**\n * Extract vendor name from OpenRouter model identifier.\n * Example: \"qwen/qwen3-235b-a22b\" -> \"qwen\"\n */\nfunction extractVendorFromModel(model: string): string {\n const slashIndex = model.indexOf('/');\n return slashIndex > 0 ? model.substring(0, slashIndex) : 'generic';\n}\n\n/**\n * GenericORProvider - A universal provider for any OpenRouter model.\n *\n * Works with all OpenRouter models using OpenAI-compatible API format.\n * Supports Qwen, Llama, DeepSeek, GLM, Kimi, Mistral, and any other model\n * available through OpenRouter.\n *\n * Features:\n * - Automatic capability detection for known model families\n * - Response healing plugin for better JSON reliability\n * - PDF-to-image conversion for VLM models\n * - Reasoning token extraction for supported models\n *\n * @example\n * ```typescript\n * const provider = new GenericORProvider({\n * provider: 'generic-or',\n * model: 'qwen/qwen3-235b-a22b',\n * apiKey: process.env.OPENROUTER_API_KEY!,\n * });\n *\n * const result = await provider.completeJson({\n * input: { text: \"Extract the main topics from this text...\" },\n * schema: TopicsSchema,\n * mode: 'strict',\n * });\n * ```\n */\nexport class GenericORProvider implements LLMProvider {\n readonly name: string;\n readonly capabilities: ProviderCapabilities;\n\n private config: ProviderConfig;\n private translator: SchemaTranslator;\n private limits: typeof DEFAULT_LIMITS;\n private modelInfo: KnownModelInfo;\n\n constructor(config: ProviderConfig) {\n this.config = config;\n this.modelInfo = getModelInfo(config.model);\n\n const vendor = extractVendorFromModel(config.model);\n this.name = `${vendor}:${config.model}`;\n this.translator = new SchemaTranslator();\n\n this.capabilities = {\n supportsStructuredOutput: true, // All OpenRouter models support this via response_format\n supportsStreaming: false, // Not with structured outputs\n supportsImages: this.modelInfo.supportsVision,\n supportsPDFs: this.modelInfo.supportsVision, // Via image conversion\n maxPDFPages: undefined,\n maxPDFSize: undefined,\n maxContextTokens: this.modelInfo.maxContextTokens || 32768\n };\n\n // Merge custom limits with defaults\n this.limits = {\n ...DEFAULT_LIMITS,\n ...(config.limits || {})\n };\n }\n\n async completeJson<T>(params: {\n // Support both interfaces:\n // - Internal: input: MultimodalInput\n // - Node/CoreVLM: prompt: string | MultimodalInput\n input?: MultimodalInput;\n prompt?: string | MultimodalInput;\n schema?: UnifiedSchema<T>;\n mode?: JsonMode;\n max_tokens?: number;\n reasoning?: ReasoningConfig;\n embedSchemaInPrompt?: boolean;\n }): Promise<LLMResponse<T>> {\n const startTime = Date.now();\n\n // Normalize input: handle both 'input' and 'prompt' parameters\n let rawInput: MultimodalInput;\n if (params.input) {\n rawInput = params.input;\n } else if (params.prompt) {\n if (typeof params.prompt === 'string') {\n rawInput = { text: params.prompt };\n } else {\n rawInput = 
params.prompt as MultimodalInput;\n }\n } else {\n rawInput = { text: '' };\n }\n\n // Determine mode: default to 'strict', auto-relaxed if schema omitted\n const mode = params.mode || (params.schema ? 'strict' : 'relaxed');\n\n // Validate: strict mode requires schema\n if (mode === 'strict' && !params.schema) {\n throw new Error('schema is required when mode is \"strict\"');\n }\n\n // Convert PDFs to images if model supports vision\n const processedInput = await this.preprocessInput(rawInput);\n\n // Embed schema in prompt if enabled (default: true) and schema exists\n const shouldEmbedSchema = params.embedSchemaInPrompt !== false && params.schema;\n let enhancedInput = processedInput;\n\n if (shouldEmbedSchema) {\n const jsonSchema = this.translator.convertZodIfNeeded(params.schema!);\n const enhancedText = combineSchemaAndUserPrompt(\n jsonSchema,\n processedInput.text || ''\n );\n enhancedInput = {\n ...processedInput,\n text: enhancedText\n };\n } else if (mode === 'relaxed') {\n // In relaxed mode without schema, we still need to mention \"JSON\" in the prompt\n // because some providers (e.g., Alibaba/Qwen) require the word \"json\" in messages\n // when using response_format: { type: \"json_object\" }\n const text = processedInput.text || '';\n const needsJsonHint = !text.toLowerCase().includes('json');\n if (needsJsonHint) {\n enhancedInput = {\n ...processedInput,\n text: `${text}\\n\\nRespond with valid JSON.`\n };\n }\n }\n\n // Build messages with multimodal content\n const messages = await this.buildMessages(enhancedInput);\n\n // Build request body\n const requestBody: Record<string, unknown> = {\n model: this.config.model,\n messages,\n max_tokens: params.max_tokens || 4096,\n stream: false,\n // Enable usage tracking for OpenRouter cost info\n usage: { include: true },\n // Enable response healing plugin for better JSON reliability\n plugins: [{ id: 'response-healing' }]\n };\n\n if (mode === 'relaxed') {\n // Relaxed mode: just request valid JSON without strict schema\n requestBody.response_format = { type: \"json_object\" };\n\n if (process.env.DEBUG_PROVIDERS) {\n console.log('[GenericORProvider] Using relaxed JSON mode (json_object)');\n }\n } else {\n // Strict mode: use json_schema with strict validation\n // Note: Not all models support json_schema natively, but response-healing\n // plugin will extract JSON from models that only support instruction-following.\n // We don't use require_parameters: true to allow broader model compatibility.\n const schema = this.translator.toOpenAISchema(params.schema!);\n\n // Recursively fix schema for strict mode requirements\n this.fixSchemaRecursive(schema as Record<string, unknown>);\n\n if (process.env.DEBUG_PROVIDERS) {\n console.log('[GenericORProvider] Using strict JSON mode (json_schema)');\n }\n\n requestBody.response_format = {\n type: \"json_schema\",\n json_schema: {\n name: \"extraction\",\n schema\n }\n };\n }\n\n // Add reasoning configuration if provided and model supports it\n if (params.reasoning && this.modelInfo.supportsReasoning) {\n requestBody.reasoning = this.buildReasoningConfig(params.reasoning);\n }\n\n // Make API call to OpenRouter\n const headers: Record<string, string> = {\n \"Content-Type\": \"application/json\",\n \"Authorization\": `Bearer ${this.config.apiKey}`,\n \"HTTP-Referer\": \"https://github.com/docloai/sdk\",\n \"X-Title\": \"Doclo SDK\"\n };\n\n const response = await fetchWithTimeout(\n \"https://openrouter.ai/api/v1/chat/completions\",\n {\n method: \"POST\",\n headers,\n body: 
JSON.stringify(requestBody)\n },\n this.limits.REQUEST_TIMEOUT\n );\n\n if (!response.ok) {\n const error = await response.text();\n throw new Error(`OpenRouter API error (${response.status}): ${error}`);\n }\n\n const data = await response.json() as {\n choices?: Array<{\n message?: {\n content?: string;\n reasoning?: string;\n reasoning_details?: Array<{ type: string; summary?: string; text?: string; data?: string; id: string | null; format: string; index?: number }>;\n };\n }>;\n usage?: {\n prompt_tokens?: number;\n completion_tokens?: number;\n total_cost?: number;\n cost?: number;\n };\n id?: string;\n model?: string;\n };\n const latencyMs = Date.now() - startTime;\n\n // Parse response\n const message = data.choices?.[0]?.message;\n const content = message?.content ?? \"{}\";\n const parsed = safeJsonParse(content) as T;\n\n // Extract reasoning fields if present\n const reasoning = message?.reasoning;\n const reasoning_details = message?.reasoning_details as ReasoningDetail[] | undefined;\n\n // Get cost from OpenRouter response\n const costUSD = data.usage?.total_cost ?? data.usage?.cost;\n\n const vendor = extractVendorFromModel(this.config.model);\n\n return {\n json: parsed as T,\n rawText: content,\n metrics: {\n costUSD,\n inputTokens: data.usage?.prompt_tokens,\n outputTokens: data.usage?.completion_tokens,\n latencyMs,\n attemptNumber: 1,\n provider: vendor,\n model: this.config.model,\n responseId: data.id,\n modelUsed: data.model\n },\n reasoning,\n reasoning_details\n };\n }\n\n /**\n * Preprocess input to convert PDFs to images for VLM models.\n */\n private async preprocessInput(input: MultimodalInput): Promise<MultimodalInput> {\n // Handle undefined/null input\n if (!input) {\n return { text: '' };\n }\n\n if (!input.pdfs?.length || !this.modelInfo.supportsVision) {\n return input;\n }\n\n // Convert PDFs to images\n const pdfImages = await this.convertPDFsToImages(input.pdfs);\n\n return {\n ...input,\n images: [...(input.images || []), ...pdfImages],\n pdfs: undefined // Consumed\n };\n }\n\n /**\n * Convert PDFs to images using pdf-to-img.\n */\n private async convertPDFsToImages(pdfs: PDFInput[]): Promise<ImageInput[]> {\n const images: ImageInput[] = [];\n\n // Dynamically import pdf-to-img to avoid bundling issues\n const { pdf } = await import('pdf-to-img');\n\n for (const pdfInput of pdfs) {\n let pdfBuffer: Buffer;\n\n if (pdfInput.base64) {\n // Extract raw base64 from data URL if needed\n const base64Data = this.extractBase64(pdfInput.base64);\n pdfBuffer = Buffer.from(base64Data, 'base64');\n } else if (pdfInput.url) {\n // Fetch PDF from URL\n const response = await fetchWithTimeout(pdfInput.url, {}, this.limits.REQUEST_TIMEOUT);\n if (!response.ok) {\n throw new Error(`Failed to fetch PDF from ${pdfInput.url}: ${response.status}`);\n }\n const arrayBuffer = await response.arrayBuffer();\n pdfBuffer = Buffer.from(arrayBuffer);\n } else {\n continue; // Skip invalid entries\n }\n\n // Convert PDF pages to images\n const pages = await pdf(pdfBuffer, { scale: 2 });\n\n for await (const page of pages) {\n images.push({\n base64: page.toString('base64'),\n mimeType: 'image/png'\n });\n }\n }\n\n return images;\n }\n\n /**\n * Build OpenAI-compatible message format.\n */\n private async buildMessages(input: MultimodalInput): Promise<Array<{ role: string; content: Array<{ type: string; text?: string; image_url?: { url: string } }> }>> {\n const content: Array<{ type: string; text?: string; image_url?: { url: string } }> = [];\n\n // Add text\n if (input.text) 
{\n content.push({ type: \"text\", text: input.text });\n }\n\n // Add images\n if (input.images && input.images.length > 0) {\n for (const image of input.images) {\n if (image.url) {\n content.push({\n type: \"image_url\",\n image_url: { url: image.url }\n });\n } else if (image.base64) {\n content.push({\n type: \"image_url\",\n image_url: {\n url: `data:${image.mimeType};base64,${this.extractBase64(image.base64)}`\n }\n });\n }\n }\n }\n\n return [{ role: \"user\", content }];\n }\n\n /**\n * Build reasoning configuration for models that support it.\n */\n private buildReasoningConfig(reasoning: ReasoningConfig): Record<string, unknown> {\n const config: Record<string, unknown> = {};\n\n // Most open source models use effort-based reasoning\n if (reasoning.effort) {\n config.effort = reasoning.effort;\n } else if (reasoning.enabled) {\n config.effort = 'medium'; // Default to medium\n }\n\n // Add exclude flag if specified\n if (reasoning.exclude !== undefined) {\n config.exclude = reasoning.exclude;\n }\n\n return Object.keys(config).length > 0 ? config : {};\n }\n\n /**\n * Recursively fix schema for strict mode requirements.\n * - All properties must be required\n * - additionalProperties must be false\n */\n private fixSchemaRecursive(obj: Record<string, unknown>): void {\n if (obj && typeof obj === 'object') {\n if (obj.type === 'object' && obj.properties) {\n const properties = obj.properties as Record<string, unknown>;\n const allProps = Object.keys(properties);\n obj.required = allProps;\n obj.additionalProperties = false;\n\n // Recursively fix nested properties\n for (const key of allProps) {\n this.fixSchemaRecursive(properties[key] as Record<string, unknown>);\n }\n } else if (obj.type === 'array' && obj.items) {\n this.fixSchemaRecursive(obj.items as Record<string, unknown>);\n }\n }\n }\n\n /**\n * Extract base64 data from a data URL or return as-is if already raw base64.\n */\n private extractBase64(input: string): string {\n if (input.startsWith('data:')) {\n const base64Part = input.split(',')[1];\n if (!base64Part) {\n throw new Error(`Invalid data URL format: ${input.substring(0, 50)}`);\n }\n return base64Part;\n }\n return input;\n }\n}\n","/**\n * Known model capabilities for accurate reporting.\n * Unknown models get sensible defaults and work with OpenRouter.\n */\n\nexport interface KnownModelInfo {\n supportsVision: boolean;\n supportsReasoning: boolean;\n maxContextTokens?: number;\n}\n\n/**\n * Prefix-based matching for model families.\n * More specific prefixes should come before less specific ones.\n * Data sourced from OpenRouter API: https://openrouter.ai/api/v1/models\n */\nexport const KNOWN_MODEL_PREFIXES: Record<string, KnownModelInfo> = {\n // Qwen models - VL variants support vision, thinking variants support reasoning\n 'qwen/qwen3-vl-235b-a22b-thinking': { supportsVision: true, supportsReasoning: true, maxContextTokens: 262144 },\n 'qwen/qwen3-vl-235b-a22b': { supportsVision: true, supportsReasoning: false, maxContextTokens: 262144 },\n 'qwen/qwen3-vl-30b-a3b-thinking': { supportsVision: true, supportsReasoning: true, maxContextTokens: 131072 },\n 'qwen/qwen3-vl-30b-a3b': { supportsVision: true, supportsReasoning: false, maxContextTokens: 262144 },\n 'qwen/qwen3-vl-8b-thinking': { supportsVision: true, supportsReasoning: true, maxContextTokens: 256000 },\n 'qwen/qwen3-vl-8b': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },\n 'qwen/qwen3-vl': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 
},\n 'qwen/qwen-vl-max': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },\n 'qwen/qwen-vl-plus': { supportsVision: true, supportsReasoning: false, maxContextTokens: 7500 },\n 'qwen/qwen2.5-vl': { supportsVision: true, supportsReasoning: false, maxContextTokens: 32768 },\n 'qwen/qwq': { supportsVision: false, supportsReasoning: true, maxContextTokens: 32768 },\n 'qwen/qwen3-235b-a22b-thinking': { supportsVision: false, supportsReasoning: true, maxContextTokens: 262144 },\n 'qwen/qwen3-235b-a22b': { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },\n 'qwen/qwen3-30b-a3b-thinking': { supportsVision: false, supportsReasoning: true, maxContextTokens: 32768 },\n 'qwen/qwen3-30b-a3b': { supportsVision: false, supportsReasoning: true, maxContextTokens: 262144 },\n 'qwen/qwen3-next-80b-a3b-thinking': { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },\n 'qwen/qwen3-next-80b-a3b': { supportsVision: false, supportsReasoning: false, maxContextTokens: 262144 },\n 'qwen/qwen3-coder': { supportsVision: false, supportsReasoning: true, maxContextTokens: 262144 },\n 'qwen/qwen3-max': { supportsVision: false, supportsReasoning: false, maxContextTokens: 256000 },\n 'qwen/qwen3-32b': { supportsVision: false, supportsReasoning: true, maxContextTokens: 40960 },\n 'qwen/qwen3-14b': { supportsVision: false, supportsReasoning: true, maxContextTokens: 40960 },\n 'qwen/qwen3-8b': { supportsVision: false, supportsReasoning: true, maxContextTokens: 128000 },\n 'qwen/qwen3-4b': { supportsVision: false, supportsReasoning: true, maxContextTokens: 40960 },\n 'qwen/qwen-plus-2025-07-28:thinking': { supportsVision: false, supportsReasoning: true, maxContextTokens: 1000000 },\n 'qwen/qwen-plus': { supportsVision: false, supportsReasoning: false, maxContextTokens: 1000000 },\n 'qwen/qwen-turbo': { supportsVision: false, supportsReasoning: false, maxContextTokens: 1000000 },\n 'qwen/qwen-max': { supportsVision: false, supportsReasoning: false, maxContextTokens: 32768 },\n 'qwen/qwen-2.5': { supportsVision: false, supportsReasoning: false, maxContextTokens: 32768 },\n\n // Llama 4 models (vision capable)\n 'meta-llama/llama-4-maverick': { supportsVision: true, supportsReasoning: false, maxContextTokens: 1048576 },\n 'meta-llama/llama-4-scout': { supportsVision: true, supportsReasoning: false, maxContextTokens: 327680 },\n 'meta-llama/llama-guard-4': { supportsVision: true, supportsReasoning: false, maxContextTokens: 163840 },\n\n // Llama 3.x models - 3.2 vision variants support images\n 'meta-llama/llama-3.2-90b-vision': { supportsVision: true, supportsReasoning: false, maxContextTokens: 32768 },\n 'meta-llama/llama-3.2-11b-vision': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },\n 'meta-llama/llama-3.3': { supportsVision: false, supportsReasoning: false, maxContextTokens: 131072 },\n 'meta-llama/llama-3.2': { supportsVision: false, supportsReasoning: false, maxContextTokens: 131072 },\n 'meta-llama/llama-3.1': { supportsVision: false, supportsReasoning: false, maxContextTokens: 131072 },\n 'meta-llama/llama-3': { supportsVision: false, supportsReasoning: false, maxContextTokens: 8192 },\n\n // DeepSeek models - all support reasoning except base chat\n 'deepseek/deepseek-r1': { supportsVision: false, supportsReasoning: true, maxContextTokens: 163840 },\n 'deepseek/deepseek-v3': { supportsVision: false, supportsReasoning: true, maxContextTokens: 163840 },\n 'deepseek/deepseek-chat-v3': { supportsVision: false, 
supportsReasoning: true, maxContextTokens: 163840 },
  'deepseek/deepseek-chat': { supportsVision: false, supportsReasoning: false, maxContextTokens: 163840 },
  'deepseek/deepseek-prover': { supportsVision: false, supportsReasoning: false, maxContextTokens: 163840 },

  // GLM models (Zhipu AI / Z.AI)
  'z-ai/glm-4.6v': { supportsVision: true, supportsReasoning: true, maxContextTokens: 131072 },
  'z-ai/glm-4.5v': { supportsVision: true, supportsReasoning: true, maxContextTokens: 65536 },
  'z-ai/glm-4.6': { supportsVision: false, supportsReasoning: true, maxContextTokens: 202752 },
  'z-ai/glm-4.5-air': { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },
  'z-ai/glm-4.5': { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },
  'z-ai/glm-4-32b': { supportsVision: false, supportsReasoning: false, maxContextTokens: 128000 },
  'thudm/glm-4.1v-9b-thinking': { supportsVision: true, supportsReasoning: true, maxContextTokens: 65536 },
  'thudm/glm': { supportsVision: false, supportsReasoning: false, maxContextTokens: 65536 },

  // Kimi / Moonshot models
  'moonshotai/kimi-dev-72b': { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },
  'moonshotai/kimi-k2-thinking': { supportsVision: false, supportsReasoning: true, maxContextTokens: 262144 },
  'moonshotai/kimi-k2-0905': { supportsVision: false, supportsReasoning: false, maxContextTokens: 262144 },
  'moonshotai/kimi-k2': { supportsVision: false, supportsReasoning: false, maxContextTokens: 131072 },

  // Mistral models - Mistral 3.x family supports vision
  'mistralai/mistral-large-2512': { supportsVision: true, supportsReasoning: false, maxContextTokens: 262144 },
  'mistralai/mistral-medium-3.1': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },
  'mistralai/mistral-medium-3': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },
  'mistralai/mistral-small-3.2': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },
  'mistralai/mistral-small-3.1': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },
  'mistralai/ministral-14b-2512': { supportsVision: true, supportsReasoning: false, maxContextTokens: 262144 },
  'mistralai/ministral-8b-2512': { supportsVision: true, supportsReasoning: false, maxContextTokens: 262144 },
  'mistralai/ministral-3b-2512': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },
  'mistralai/pixtral-large': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },
  'mistralai/pixtral-12b': { supportsVision: true, supportsReasoning: false, maxContextTokens: 32768 },
  'mistralai/pixtral': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },
  'mistralai/devstral': { supportsVision: false, supportsReasoning: false, maxContextTokens: 262144 },
  'mistralai/codestral': { supportsVision: false, supportsReasoning: false, maxContextTokens: 256000 },
  'mistralai/mistral-large': { supportsVision: false, supportsReasoning: false, maxContextTokens: 131072 },
  'mistralai/mistral-small': { supportsVision: false, supportsReasoning: false, maxContextTokens: 32768 },
  'mistralai/mistral-nemo': { supportsVision: false, supportsReasoning: false, maxContextTokens: 131072 },
  'mistralai/mixtral': { supportsVision: false, supportsReasoning: false, maxContextTokens: 65536 },
  'mistralai/mistral-7b': { supportsVision: false, supportsReasoning: false, maxContextTokens: 32768 },

  // xAI Grok models - Grok 4 supports vision and reasoning
  'x-ai/grok-4.1-fast': { supportsVision: true, supportsReasoning: true, maxContextTokens: 2000000 },
  'x-ai/grok-4-fast': { supportsVision: true, supportsReasoning: true, maxContextTokens: 2000000 },
  'x-ai/grok-4': { supportsVision: true, supportsReasoning: true, maxContextTokens: 256000 },
  'x-ai/grok-code-fast': { supportsVision: false, supportsReasoning: true, maxContextTokens: 256000 },
  'x-ai/grok-3-mini': { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },
  'x-ai/grok-3': { supportsVision: false, supportsReasoning: false, maxContextTokens: 131072 },

  // Cohere Command models
  'cohere/command-a': { supportsVision: false, supportsReasoning: false, maxContextTokens: 256000 },
  'cohere/command-r': { supportsVision: false, supportsReasoning: false, maxContextTokens: 128000 },

  // Gemma models (Google open source) - Gemma 3 supports vision
  'google/gemma-3': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },
  'google/gemma-3n': { supportsVision: false, supportsReasoning: false, maxContextTokens: 32768 },
  'google/gemma-2': { supportsVision: false, supportsReasoning: false, maxContextTokens: 8192 },

  // Phi models (Microsoft) - phi-4-multimodal supports vision, phi-4-reasoning supports reasoning
  'microsoft/phi-4-multimodal': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },
  'microsoft/phi-4-reasoning': { supportsVision: false, supportsReasoning: true, maxContextTokens: 32768 },
  'microsoft/phi-4': { supportsVision: false, supportsReasoning: false, maxContextTokens: 16384 },
  'microsoft/phi-3': { supportsVision: false, supportsReasoning: false, maxContextTokens: 128000 },

  // OpenGVLab InternVL
  'opengvlab/internvl3': { supportsVision: true, supportsReasoning: false, maxContextTokens: 32768 },
  'opengvlab/internvl2': { supportsVision: true, supportsReasoning: false, maxContextTokens: 32768 },

  // StepFun AI - step3 supports both vision and reasoning
  'stepfun-ai/step3': { supportsVision: true, supportsReasoning: true, maxContextTokens: 65536 },
  'stepfun-ai/step2': { supportsVision: true, supportsReasoning: false, maxContextTokens: 131072 },
  'stepfun-ai/step1': { supportsVision: true, supportsReasoning: false, maxContextTokens: 32768 },

  // NVIDIA Nemotron - VL and ultra models support reasoning
  'nvidia/nemotron-nano-12b-v2-vl': { supportsVision: true, supportsReasoning: true, maxContextTokens: 131072 },
  'nvidia/nemotron-nano-9b-v2': { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },
  'nvidia/llama-3.3-nemotron-super': { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },
  'nvidia/llama-3.1-nemotron-ultra': { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },
  'nvidia/llama-3.1-nemotron': { supportsVision: false, supportsReasoning: false, maxContextTokens: 131072 },

  // Amazon Nova models - vision capable
  'amazon/nova-2-lite': { supportsVision: true, supportsReasoning: true, maxContextTokens: 1000000 },
  'amazon/nova-premier': { supportsVision: true, supportsReasoning: false, maxContextTokens: 1000000 },
  'amazon/nova-pro': { supportsVision: true, supportsReasoning: false, maxContextTokens: 300000 },
  'amazon/nova-lite': { supportsVision: true, supportsReasoning: false, maxContextTokens: 300000 },
  'amazon/nova-micro': { supportsVision: false, supportsReasoning: false, maxContextTokens: 128000 },

  // Baidu ERNIE models - VL variants support vision
  'baidu/ernie-4.5-vl': { supportsVision: true, supportsReasoning: true, maxContextTokens: 123000 },
  'baidu/ernie-4.5-thinking': { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },
  'baidu/ernie-4.5': { supportsVision: false, supportsReasoning: false, maxContextTokens: 123000 },

  // ByteDance models
  'bytedance/ui-tars': { supportsVision: true, supportsReasoning: false, maxContextTokens: 128000 },

  // MiniMax models
  'minimax/minimax-01': { supportsVision: true, supportsReasoning: false, maxContextTokens: 1000192 },
  'minimax/minimax-m': { supportsVision: false, supportsReasoning: true, maxContextTokens: 1000000 },

  // Tencent Hunyuan
  'tencent/hunyuan': { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },

  // Alibaba Tongyi
  'alibaba/tongyi-deepresearch': { supportsVision: false, supportsReasoning: true, maxContextTokens: 131072 },

  // Other notable models
  'databricks/dbrx': { supportsVision: false, supportsReasoning: false, maxContextTokens: 32768 },
};

/**
 * Default capabilities for unknown models.
 * Assumes a basic text-only model with reasonable defaults.
 */
export const DEFAULT_MODEL_INFO: KnownModelInfo = {
  supportsVision: false,
  supportsReasoning: false,
  maxContextTokens: 32768,
};

/**
 * Get model capabilities from registry or return defaults.
 * Uses prefix matching - the longest matching prefix wins, so entry order
 * in the registry does not matter.
 *
 * @param model - Full model ID (e.g., "qwen/qwen3-235b-a22b")
 * @returns Model capabilities (known or default)
 */
export function getModelInfo(model: string): KnownModelInfo {
  // Sort prefixes by length (longest first) for most specific match
  const sortedPrefixes = Object.keys(KNOWN_MODEL_PREFIXES).sort((a, b) => b.length - a.length);

  for (const prefix of sortedPrefixes) {
    if (model.startsWith(prefix)) {
      return KNOWN_MODEL_PREFIXES[prefix];
    }
  }

  return DEFAULT_MODEL_INFO;
}

/**
 * Check if a model is known to support vision.
 *
 * @param model - Full model ID
 * @returns true if model supports vision input
 */
export function modelSupportsVision(model: string): boolean {
  return getModelInfo(model).supportsVision;
}

/**
 * Check if a model is known to support reasoning tokens.
 *
 * @param model - Full model ID
 * @returns true if model supports reasoning/thinking mode
 */
export function modelSupportsReasoning(model: string): boolean {
  return getModelInfo(model).supportsReasoning;
}

// (next source file embedded in the map: the package entry module)
export { GenericORProvider } from './generic-or.js';
export {
  getModelInfo,
  modelSupportsVision,
  modelSupportsReasoning,
  KNOWN_MODEL_PREFIXES,
  DEFAULT_MODEL_INFO,
  type KnownModelInfo
} from './known-models.js';

import { GenericORProvider } from './generic-or.js';
import { registerProvider } from '@doclo/providers-llm';

// Auto-register the provider when this package is imported
registerProvider('generic-or', (config) => new GenericORProvider(config));

[remainder of dist/index.js.map - the base64-VLQ "mappings" field and empty "names" array - elided: machine-generated, not human-readable]
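Because getModelInfo sorts the registry keys longest-first, suffixed or dated model IDs resolve to their closest family entry. A minimal sketch of the lookup behavior, using entries from the registry above; the 'acme/new-model' ID and the '-exacto' suffix are made up for illustration:

```typescript
import { getModelInfo, modelSupportsVision, DEFAULT_MODEL_INFO } from '@doclo/providers-generic-or';

// Longest prefix wins: 'moonshotai/kimi-k2-thinking' shadows the shorter 'moonshotai/kimi-k2'.
getModelInfo('moonshotai/kimi-k2-thinking').supportsReasoning; // true
getModelInfo('moonshotai/kimi-k2').supportsReasoning;          // false

// A suffixed ID still resolves to its family prefix ('-exacto' is hypothetical).
getModelInfo('moonshotai/kimi-k2-0905-exacto').maxContextTokens; // 262144

// Unrecognized models fall back to DEFAULT_MODEL_INFO (text-only, 32768-token context).
modelSupportsVision('acme/new-model');                 // false
getModelInfo('acme/new-model') === DEFAULT_MODEL_INFO; // true
```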
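The entry module registers its factory as an import side effect, so a bare import of the package is enough to make the 'generic-or' name resolvable through @doclo/providers-llm. The same registerProvider hook lets consumers add aliases of their own; a sketch, where the 'openrouter' alias is illustrative rather than something the package defines:

```typescript
import { registerProvider } from '@doclo/providers-llm';
// Importing the package runs its registerProvider('generic-or', ...) side effect.
import { GenericORProvider } from '@doclo/providers-generic-or';

// Illustrative extra alias, mirroring the package's own auto-registration line.
registerProvider('openrouter', (config) => new GenericORProvider(config));
```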
package/package.json
ADDED
@@ -0,0 +1,39 @@
+{
+  "name": "@doclo/providers-generic-or",
+  "version": "0.1.0",
+  "description": "Generic OpenRouter provider for open-source models (Qwen, Llama, DeepSeek, GLM, Kimi, etc.)",
+  "license": "MIT",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/docloai/sdk.git",
+    "directory": "packages/providers-generic-or"
+  },
+  "publishConfig": {
+    "access": "public"
+  },
+  "type": "module",
+  "main": "dist/index.js",
+  "exports": {
+    ".": {
+      "types": "./dist/index.d.ts",
+      "default": "./dist/index.js"
+    }
+  },
+  "files": [
+    "dist"
+  ],
+  "dependencies": {
+    "pdf-to-img": "^4.2.0",
+    "@doclo/core": "^0.1.9",
+    "@doclo/providers-llm": "^0.1.6"
+  },
+  "devDependencies": {
+    "@types/node": "^20",
+    "tsup": "^8.0.0",
+    "typescript": "^5.6.0"
+  },
+  "scripts": {
+    "build": "tsup",
+    "type-check": "tsc --noEmit"
+  }
+}
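With "type": "module" and a single "." export carrying only "types" and "default" conditions, the package is ESM-only: Node resolves ./dist/index.js at runtime and TypeScript picks up ./dist/index.d.ts, while CommonJS consumers would need a dynamic import(). A small consumption sketch using only names the entry module re-exports:

```typescript
import { modelSupportsReasoning, modelSupportsVision } from '@doclo/providers-generic-or';

console.log(modelSupportsReasoning('z-ai/glm-4.6')); // true  - listed as a reasoning model
console.log(modelSupportsVision('z-ai/glm-4.6'));    // false - text-only per the registry
```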