ohlcv-ai 1.1.0 → 1.1.1

This diff shows the changes between publicly available package versions as published to one of the supported registries. The information in this diff is provided for informational purposes only and reflects the differences between package versions as they appear in their respective public registries.
package/dist/index.d.ts CHANGED
@@ -123,7 +123,6 @@ export declare interface AliYunChatOptions {
123
123
  maxTokens?: number;
124
124
  stream?: boolean;
125
125
  systemPrompt?: string;
126
- modelType?: AliYunModelType;
127
126
  }
128
127
 
129
128
  export declare interface AliyunConfig {
@@ -339,7 +338,6 @@ export declare interface DeepSeekChatOptions {
339
338
  maxTokens?: number;
340
339
  stream?: boolean;
341
340
  systemPrompt?: string;
342
- modelType?: DeepSeekModelType;
343
341
  topP?: number;
344
342
  frequencyPenalty?: number;
345
343
  presencePenalty?: number;
@@ -666,7 +664,6 @@ export declare interface OpenAIChatOptions {
666
664
  maxTokens?: number;
667
665
  stream?: boolean;
668
666
  systemPrompt?: string;
669
- modelType?: OpenAIModelType;
670
667
  topP?: number;
671
668
  frequencyPenalty?: number;
672
669
  presencePenalty?: number;
package/dist/index.js CHANGED
@@ -1,5 +1,5 @@
1
- "use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});var N=(n=>(n.QWEN_TURBO="qwen-turbo",n.QWEN_PLUS="qwen-plus",n.QWEN_MAX="qwen-max",n.QWEN_MAX_LONGCONTEXT="qwen-max-longcontext",n.QWEN_2_5B="qwen2.5-0.5b",n.QWEN_2_5B_INSTRUCT="qwen2.5-0.5b-instruct",n.QWEN_2_5B_7B="qwen2.5-7b",n.QWEN_2_5B_7B_INSTRUCT="qwen2.5-7b-instruct",n.QWEN_2_5B_14B="qwen2.5-14b",n.QWEN_2_5B_14B_INSTRUCT="qwen2.5-14b-instruct",n.QWEN_2_5B_32B="qwen2.5-32b",n.QWEN_2_5B_32B_INSTRUCT="qwen2.5-32b-instruct",n.QWEN_2_5B_72B="qwen2.5-72b",n.QWEN_2_5B_72B_INSTRUCT="qwen2.5-72b-instruct",n.QWEN_2_5B_CODER="qwen2.5-coder",n.QWEN_2_5B_CODER_7B="qwen2.5-coder-7b",n.QWEN_2_5B_CODER_14B="qwen2.5-coder-14b",n.QWEN_2_5B_CODER_32B="qwen2.5-coder-32b",n.QWEN_VL_LITE="qwen-vl-lite",n.QWEN_VL_PLUS="qwen-vl-plus",n.QWEN_VL_MAX="qwen-vl-max",n.QWEN_AUDIO_TURBO="qwen-audio-turbo",n.QWEN_AUDIO_CHAT="qwen-audio-chat",n.QWEN_MATH_7B="qwen-math-7b",n.LLAMA2_7B_CHAT_V2="llama2-7b-chat-v2",n.BAICHUAN2_7B_CHAT_V1="baichuan2-7b-chat-v1",n.QWEN_FINANCIAL="qwen-financial",n.QWEN_FINANCIAL_14B="qwen-financial-14b",n.QWEN_FINANCIAL_32B="qwen-financial-32b",n.QWEN_MEDICAL="qwen-medical",n.QWEN_MEDICAL_14B="qwen-medical-14b",n.QWEN_MEDICAL_32B="qwen-medical-32b",n.QWEN_OMNI="qwen-omni",n.QWEN_OMNI_PRO="qwen-omni-pro",n))(N||{});const k=new Map([["qwen-turbo",{name:"qwen-turbo",displayName:"Qwen-Turbo",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",endpoints:["https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions","https://dashscope.aliyuncs.com/api/v1/services/aigc/text-generation/generation"],format:"openai",description:"Lightweight version, fast response speed, suitable for general conversation 
scenarios",maxTokens:2e3,contextLength:8e3,capabilities:["text-generation","chat"]}],["qwen-plus",{name:"qwen-plus",displayName:"Qwen-Plus",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",endpoints:["https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions"],format:"openai",description:"Enhanced version, suitable for complex tasks and long text processing",maxTokens:6e3,contextLength:32e3,capabilities:["text-generation","chat","reasoning"]}],["qwen-max",{name:"qwen-max",displayName:"Qwen-Max",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",endpoints:["https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions"],format:"openai",description:"Maximum version, strongest capabilities, suitable for high-demand professional tasks",maxTokens:8e3,contextLength:32e3,capabilities:["text-generation","chat","reasoning","coding","analysis"]}],["qwen-max-longcontext",{name:"qwen-max-longcontext",displayName:"Qwen-Max-LongContext",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",endpoints:["https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions"],format:"openai",description:"Supports 128K long context, suitable for long document processing",maxTokens:8e3,contextLength:128e3,capabilities:["text-generation","chat","document-analysis"]}],["qwen2.5-0.5b",{name:"qwen2.5-0.5b",displayName:"Qwen2.5-0.5B",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Ultra-lightweight 0.5B parameter model for edge devices",maxTokens:4e3,contextLength:32e3,capabilities:["text-generation","chat"]}],["qwen2.5-0.5b-instruct",{name:"qwen2.5-0.5b-instruct",displayName:"Qwen2.5-0.5B-Instruct",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Instruction-tuned 0.5B model for specific 
tasks",maxTokens:4e3,contextLength:32e3,capabilities:["instruction-following","chat"]}],["qwen2.5-7b",{name:"qwen2.5-7b",displayName:"Qwen2.5-7B",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"7B parameter base model, balanced performance and efficiency",maxTokens:6e3,contextLength:32e3,capabilities:["text-generation","reasoning"]}],["qwen2.5-7b-instruct",{name:"qwen2.5-7b-instruct",displayName:"Qwen2.5-7B-Instruct",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Instruction-tuned 7B model for chat and tasks",maxTokens:6e3,contextLength:32e3,capabilities:["chat","instruction-following","coding"]}],["qwen2.5-14b",{name:"qwen2.5-14b",displayName:"Qwen2.5-14B",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"14B parameter model with enhanced capabilities",maxTokens:8e3,contextLength:32e3,capabilities:["text-generation","analysis","reasoning"]}],["qwen2.5-32b",{name:"qwen2.5-32b",displayName:"Qwen2.5-32B",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"32B parameter high-performance model",maxTokens:8e3,contextLength:32e3,capabilities:["text-generation","complex-reasoning","analysis"]}],["qwen2.5-72b",{name:"qwen2.5-72b",displayName:"Qwen2.5-72B",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"72B parameter state-of-the-art model",maxTokens:8e3,contextLength:32e3,capabilities:["text-generation","expert-analysis","research"]}],["qwen2.5-coder",{name:"qwen2.5-coder",displayName:"Qwen2.5-Coder",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Specialized code generation 
model",maxTokens:8e3,contextLength:32e3,capabilities:["code-generation","code-explanation","debugging"]}],["qwen2.5-coder-7b",{name:"qwen2.5-coder-7b",displayName:"Qwen2.5-Coder-7B",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"7B parameter code generation model",maxTokens:8e3,contextLength:32e3,capabilities:["code-generation","programming"]}],["qwen2.5-coder-14b",{name:"qwen2.5-coder-14b",displayName:"Qwen2.5-Coder-14B",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"14B parameter advanced code generation model",maxTokens:8e3,contextLength:32e3,capabilities:["code-generation","code-review","optimization"]}],["qwen-vl-lite",{name:"qwen-vl-lite",displayName:"Qwen-VL-Lite",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Lightweight vision-language model for basic image understanding",maxTokens:2e3,contextLength:8e3,capabilities:["image-understanding","visual-qa"]}],["qwen-vl-plus",{name:"qwen-vl-plus",displayName:"Qwen-VL-Plus",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Vision-language model supporting image understanding",maxTokens:4e3,contextLength:32e3,capabilities:["image-understanding","document-analysis","visual-reasoning"]}],["qwen-vl-max",{name:"qwen-vl-max",displayName:"Qwen-VL-Max",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Most powerful vision-language model",maxTokens:8e3,contextLength:32e3,capabilities:["image-understanding","video-analysis","multimodal-reasoning"]}],["qwen-audio-turbo",{name:"qwen-audio-turbo",displayName:"Qwen-Audio-Turbo",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Fast audio processing and speech-to-text 
model",maxTokens:2e3,contextLength:8e3,capabilities:["speech-recognition","audio-analysis"]}],["qwen-audio-chat",{name:"qwen-audio-chat",displayName:"Qwen-Audio-Chat",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Audio conversation and processing model",maxTokens:4e3,contextLength:32e3,capabilities:["audio-chat","voice-assistant","speech-synthesis"]}],["qwen-math-7b",{name:"qwen-math-7b",displayName:"Qwen-Math-7B",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Specialized for mathematical reasoning and problem solving",maxTokens:4e3,contextLength:32e3,capabilities:["mathematical-reasoning","problem-solving"]}],["llama2-7b-chat-v2",{name:"llama2-7b-chat-v2",displayName:"LLaMA2-7B-Chat",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Meta's LLaMA2-7B model",maxTokens:2e3,contextLength:8e3,capabilities:["chat","text-generation"]}],["baichuan2-7b-chat-v1",{name:"baichuan2-7b-chat-v1",displayName:"Baichuan2-7B-Chat",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Baichuan AI's Baichuan2-7B model",maxTokens:2e3,contextLength:8e3,capabilities:["chat","chinese-nlp"]}],["qwen-financial",{name:"qwen-financial",displayName:"Qwen-Financial",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Specialized for financial analysis and market insights",maxTokens:6e3,contextLength:32e3,capabilities:["financial-analysis","market-prediction","risk-assessment"]}],["qwen-medical",{name:"qwen-medical",displayName:"Qwen-Medical",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Specialized for medical consultation and health 
analysis",maxTokens:6e3,contextLength:32e3,capabilities:["medical-consultation","health-analysis","diagnostic-support"]}],["qwen-omni",{name:"qwen-omni",displayName:"Qwen-Omni",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Omnidirectional multimodal model supporting text, image, audio",maxTokens:8e3,contextLength:64e3,capabilities:["text-generation","image-understanding","audio-processing","multimodal"]}],["qwen-omni-pro",{name:"qwen-omni-pro",displayName:"Qwen-Omni-Pro",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Professional omnidirectional multimodal model with advanced capabilities",maxTokens:16e3,contextLength:128e3,capabilities:["text-generation","multimodal","complex-reasoning","expert-analysis"]}]]);function V(n){return k.get(n)}function A(){return Array.from(k.values())}function I(n){for(const e of k.values())if(e.name===n)return e}function R(){return Array.from(k.keys())}function D(n){const e=Object.values(N);for(const t of e)if(t===n)return t;return null}class S{constructor(e){if(this.apiKey=e.apiKey,this.modelType=e.modelType||N.QWEN_TURBO,this.timeout=e.timeout||3e4,!this.apiKey)throw new Error("API Key cannot be empty");if(!k.get(this.modelType))throw new Error(`Unsupported model type: ${this.modelType}`)}async chat(e,t,s){const a=[],r=t==="en"?"Please respond in English only.":t==="cn"?"请使用中文回答。":"";if(s?.systemPrompt||r){const i=[s?.systemPrompt,r].filter(Boolean).join(`
2
- `);a.push({role:"system",content:i})}a.push({role:"user",content:e});const o=await this.chatCompletion(a,t,{temperature:s?.temperature,maxTokens:s?.maxTokens,stream:!1,modelType:s?.modelType});return this.extractContent(o)}async chatCompletion(e,t,s){const a=s?.modelType||this.modelType,r=k.get(a);if(!r)throw new Error(`Unsupported model type: ${a}`);const o=s?.temperature??.7,i=s?.maxTokens??1e3,d=s?.stream??!1,l=t==="en"?"Please respond in English only.":t==="cn"?"请使用中文回答。":"";l&&(e.some(g=>g.role==="system")?e=e.map(g=>g.role==="system"?{...g,content:`${g.content}
1
+ "use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});var _=(n=>(n.QWEN_TURBO="qwen-turbo",n.QWEN_PLUS="qwen-plus",n.QWEN_MAX="qwen-max",n.QWEN_MAX_LONGCONTEXT="qwen-max-longcontext",n.QWEN_2_5B="qwen2.5-0.5b",n.QWEN_2_5B_INSTRUCT="qwen2.5-0.5b-instruct",n.QWEN_2_5B_7B="qwen2.5-7b",n.QWEN_2_5B_7B_INSTRUCT="qwen2.5-7b-instruct",n.QWEN_2_5B_14B="qwen2.5-14b",n.QWEN_2_5B_14B_INSTRUCT="qwen2.5-14b-instruct",n.QWEN_2_5B_32B="qwen2.5-32b",n.QWEN_2_5B_32B_INSTRUCT="qwen2.5-32b-instruct",n.QWEN_2_5B_72B="qwen2.5-72b",n.QWEN_2_5B_72B_INSTRUCT="qwen2.5-72b-instruct",n.QWEN_2_5B_CODER="qwen2.5-coder",n.QWEN_2_5B_CODER_7B="qwen2.5-coder-7b",n.QWEN_2_5B_CODER_14B="qwen2.5-coder-14b",n.QWEN_2_5B_CODER_32B="qwen2.5-coder-32b",n.QWEN_VL_LITE="qwen-vl-lite",n.QWEN_VL_PLUS="qwen-vl-plus",n.QWEN_VL_MAX="qwen-vl-max",n.QWEN_AUDIO_TURBO="qwen-audio-turbo",n.QWEN_AUDIO_CHAT="qwen-audio-chat",n.QWEN_MATH_7B="qwen-math-7b",n.LLAMA2_7B_CHAT_V2="llama2-7b-chat-v2",n.BAICHUAN2_7B_CHAT_V1="baichuan2-7b-chat-v1",n.QWEN_FINANCIAL="qwen-financial",n.QWEN_FINANCIAL_14B="qwen-financial-14b",n.QWEN_FINANCIAL_32B="qwen-financial-32b",n.QWEN_MEDICAL="qwen-medical",n.QWEN_MEDICAL_14B="qwen-medical-14b",n.QWEN_MEDICAL_32B="qwen-medical-32b",n.QWEN_OMNI="qwen-omni",n.QWEN_OMNI_PRO="qwen-omni-pro",n))(_||{});const k=new Map([["qwen-turbo",{name:"qwen-turbo",displayName:"Qwen-Turbo",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",endpoints:["https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions","https://dashscope.aliyuncs.com/api/v1/services/aigc/text-generation/generation"],format:"openai",description:"Lightweight version, fast response speed, suitable for general conversation 
scenarios",maxTokens:2e3,contextLength:8e3,capabilities:["text-generation","chat"]}],["qwen-plus",{name:"qwen-plus",displayName:"Qwen-Plus",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",endpoints:["https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions"],format:"openai",description:"Enhanced version, suitable for complex tasks and long text processing",maxTokens:6e3,contextLength:32e3,capabilities:["text-generation","chat","reasoning"]}],["qwen-max",{name:"qwen-max",displayName:"Qwen-Max",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",endpoints:["https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions"],format:"openai",description:"Maximum version, strongest capabilities, suitable for high-demand professional tasks",maxTokens:8e3,contextLength:32e3,capabilities:["text-generation","chat","reasoning","coding","analysis"]}],["qwen-max-longcontext",{name:"qwen-max-longcontext",displayName:"Qwen-Max-LongContext",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",endpoints:["https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions"],format:"openai",description:"Supports 128K long context, suitable for long document processing",maxTokens:8e3,contextLength:128e3,capabilities:["text-generation","chat","document-analysis"]}],["qwen2.5-0.5b",{name:"qwen2.5-0.5b",displayName:"Qwen2.5-0.5B",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Ultra-lightweight 0.5B parameter model for edge devices",maxTokens:4e3,contextLength:32e3,capabilities:["text-generation","chat"]}],["qwen2.5-0.5b-instruct",{name:"qwen2.5-0.5b-instruct",displayName:"Qwen2.5-0.5B-Instruct",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Instruction-tuned 0.5B model for specific 
tasks",maxTokens:4e3,contextLength:32e3,capabilities:["instruction-following","chat"]}],["qwen2.5-7b",{name:"qwen2.5-7b",displayName:"Qwen2.5-7B",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"7B parameter base model, balanced performance and efficiency",maxTokens:6e3,contextLength:32e3,capabilities:["text-generation","reasoning"]}],["qwen2.5-7b-instruct",{name:"qwen2.5-7b-instruct",displayName:"Qwen2.5-7B-Instruct",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Instruction-tuned 7B model for chat and tasks",maxTokens:6e3,contextLength:32e3,capabilities:["chat","instruction-following","coding"]}],["qwen2.5-14b",{name:"qwen2.5-14b",displayName:"Qwen2.5-14B",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"14B parameter model with enhanced capabilities",maxTokens:8e3,contextLength:32e3,capabilities:["text-generation","analysis","reasoning"]}],["qwen2.5-32b",{name:"qwen2.5-32b",displayName:"Qwen2.5-32B",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"32B parameter high-performance model",maxTokens:8e3,contextLength:32e3,capabilities:["text-generation","complex-reasoning","analysis"]}],["qwen2.5-72b",{name:"qwen2.5-72b",displayName:"Qwen2.5-72B",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"72B parameter state-of-the-art model",maxTokens:8e3,contextLength:32e3,capabilities:["text-generation","expert-analysis","research"]}],["qwen2.5-coder",{name:"qwen2.5-coder",displayName:"Qwen2.5-Coder",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Specialized code generation 
model",maxTokens:8e3,contextLength:32e3,capabilities:["code-generation","code-explanation","debugging"]}],["qwen2.5-coder-7b",{name:"qwen2.5-coder-7b",displayName:"Qwen2.5-Coder-7B",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"7B parameter code generation model",maxTokens:8e3,contextLength:32e3,capabilities:["code-generation","programming"]}],["qwen2.5-coder-14b",{name:"qwen2.5-coder-14b",displayName:"Qwen2.5-Coder-14B",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"14B parameter advanced code generation model",maxTokens:8e3,contextLength:32e3,capabilities:["code-generation","code-review","optimization"]}],["qwen-vl-lite",{name:"qwen-vl-lite",displayName:"Qwen-VL-Lite",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Lightweight vision-language model for basic image understanding",maxTokens:2e3,contextLength:8e3,capabilities:["image-understanding","visual-qa"]}],["qwen-vl-plus",{name:"qwen-vl-plus",displayName:"Qwen-VL-Plus",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Vision-language model supporting image understanding",maxTokens:4e3,contextLength:32e3,capabilities:["image-understanding","document-analysis","visual-reasoning"]}],["qwen-vl-max",{name:"qwen-vl-max",displayName:"Qwen-VL-Max",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Most powerful vision-language model",maxTokens:8e3,contextLength:32e3,capabilities:["image-understanding","video-analysis","multimodal-reasoning"]}],["qwen-audio-turbo",{name:"qwen-audio-turbo",displayName:"Qwen-Audio-Turbo",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Fast audio processing and speech-to-text 
model",maxTokens:2e3,contextLength:8e3,capabilities:["speech-recognition","audio-analysis"]}],["qwen-audio-chat",{name:"qwen-audio-chat",displayName:"Qwen-Audio-Chat",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Audio conversation and processing model",maxTokens:4e3,contextLength:32e3,capabilities:["audio-chat","voice-assistant","speech-synthesis"]}],["qwen-math-7b",{name:"qwen-math-7b",displayName:"Qwen-Math-7B",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Specialized for mathematical reasoning and problem solving",maxTokens:4e3,contextLength:32e3,capabilities:["mathematical-reasoning","problem-solving"]}],["llama2-7b-chat-v2",{name:"llama2-7b-chat-v2",displayName:"LLaMA2-7B-Chat",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Meta's LLaMA2-7B model",maxTokens:2e3,contextLength:8e3,capabilities:["chat","text-generation"]}],["baichuan2-7b-chat-v1",{name:"baichuan2-7b-chat-v1",displayName:"Baichuan2-7B-Chat",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Baichuan AI's Baichuan2-7B model",maxTokens:2e3,contextLength:8e3,capabilities:["chat","chinese-nlp"]}],["qwen-financial",{name:"qwen-financial",displayName:"Qwen-Financial",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Specialized for financial analysis and market insights",maxTokens:6e3,contextLength:32e3,capabilities:["financial-analysis","market-prediction","risk-assessment"]}],["qwen-medical",{name:"qwen-medical",displayName:"Qwen-Medical",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Specialized for medical consultation and health 
analysis",maxTokens:6e3,contextLength:32e3,capabilities:["medical-consultation","health-analysis","diagnostic-support"]}],["qwen-omni",{name:"qwen-omni",displayName:"Qwen-Omni",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Omnidirectional multimodal model supporting text, image, audio",maxTokens:8e3,contextLength:64e3,capabilities:["text-generation","image-understanding","audio-processing","multimodal"]}],["qwen-omni-pro",{name:"qwen-omni-pro",displayName:"Qwen-Omni-Pro",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Professional omnidirectional multimodal model with advanced capabilities",maxTokens:16e3,contextLength:128e3,capabilities:["text-generation","multimodal","complex-reasoning","expert-analysis"]}]]);function V(n){return k.get(n)}function A(){return Array.from(k.values())}function I(n){for(const e of k.values())if(e.name===n)return e}function R(){return Array.from(k.keys())}function D(n){const e=Object.values(_);for(const t of e)if(t===n)return t;return null}class S{constructor(e){if(this.apiKey=e.apiKey,this.modelType=e.modelType||_.QWEN_TURBO,this.timeout=e.timeout||3e4,!this.apiKey)throw new Error("API Key cannot be empty");if(!k.get(this.modelType))throw new Error(`Unsupported model type: ${this.modelType}`)}async chat(e,t,s){const a=[],r=t==="en"?"Please respond in English only.":t==="cn"?"请使用中文回答。":"";if(s?.systemPrompt||r){const i=[s?.systemPrompt,r].filter(Boolean).join(`
2
+ `);a.push({role:"system",content:i})}a.push({role:"user",content:e});const o=await this.chatCompletion(a,t,{temperature:s?.temperature,maxTokens:s?.maxTokens,stream:!1,modelType:this.modelType});return this.extractContent(o)}async chatCompletion(e,t,s){const a=s?.modelType||this.modelType,r=k.get(a);if(!r)throw new Error(`Unsupported model type: ${a}`);const o=s?.temperature??.7,i=s?.maxTokens??1e3,d=s?.stream??!1,l=t==="en"?"Please respond in English only.":t==="cn"?"请使用中文回答。":"";l&&(e.some(g=>g.role==="system")?e=e.map(g=>g.role==="system"?{...g,content:`${g.content}
3
3
  ${l}`}:g):e.unshift({role:"system",content:l}));const u=r.endpoint,c=r.format==="openai"?this.buildOpenAIRequest(r.name,e,o,i,d):this.buildDashScopeRequest(r.name,e,o,i);try{return await this.makeRequest(u,c,d)}catch(p){throw new Error(`Aliyun AI request failed: ${p.message}`)}}async chatStream(e,t,s,a){const r=a?.modelType||this.modelType,o=k.get(r);if(!o)throw new Error(`Unsupported model type: ${r}`);if(o.format!=="openai")throw new Error("Streaming conversation only supports OpenAI format models");const i=a?.temperature??.7,d=a?.maxTokens??1e3,l=s==="en"?"Please respond in English only.":s==="cn"?"请使用中文回答。":"";l&&(e.some(c=>c.role==="system")?e=e.map(c=>c.role==="system"?{...c,content:`${c.content}
4
4
  ${l}`}:c):e.unshift({role:"system",content:l}));const u=this.buildOpenAIRequest(o.name,e,i,d,!0);try{await this.makeStreamRequest(o.endpoint,u,t)}catch(m){throw new Error(`Streaming request failed: ${m.message}`)}}async analyzeOHLCV(e,t,s,a,r){const o=s||"comprehensive",i=e.length,l={trend:t==="en"?"Provide a detailed trend analysis of this OHLCV data, including price direction, support/resistance levels, and trend strength.":"提供详细的OHLCV数据趋势分析,包括价格方向、支撑/阻力位和趋势强度。",volume:t==="en"?"Analyze the volume patterns in this OHLCV data, including volume trends, unusual volume spikes, and volume-price relationships.":"分析OHLCV数据中的成交量模式,包括成交量趋势、异常成交量波动和量价关系。",technical:t==="en"?"Perform technical analysis on this OHLCV data, identifying potential technical indicators, patterns, and trading signals.":"对OHLCV数据进行技术分析,识别潜在的技术指标、图表形态和交易信号。",comprehensive:t==="en"?"Provide a comprehensive analysis of this OHLCV data, covering trends, volume, technical aspects, and potential market implications.":"提供全面的OHLCV数据分析,涵盖趋势、成交量、技术面和潜在市场影响。"}[o],u=t==="en"?"Please provide your analysis in English.":"请用中文进行分析。";let m="";if(i>0){const h=e[0],f=e[i-1].close-h.close,w=f/h.close*100;let C=h.high,v=h.low,O=0;for(const E of e)E.high>C&&(C=E.high),E.low<v&&(v=E.low),O+=E.volume;const P=O/i;m=t==="en"?`This dataset contains ${i} periods of OHLCV data.
5
5
  Price range: ${v.toFixed(2)} - ${C.toFixed(2)}
@@ -41,7 +41,7 @@ ${p}
41
41
  ${p}
42
42
  Please analyze this data as requested.`:`这是OHLCV数据(${i}个周期):
43
43
  ${p}
44
- 请按要求分析这些数据。`;const y=[{role:"system",content:c},{role:"user",content:g}];try{const h=await this.chatCompletion(y,t,{temperature:r?.temperature||.5,maxTokens:r?.maxTokens||1500,stream:!1,modelType:r?.modelType});return this.extractContent(h)}catch(h){throw new Error(`OHLCV analysis failed: ${h.message}`)}}async analyzeOHLCVEnhanced(e,t,s,a,r=!1,o){if(r){const i=t==="en"?`You are a professional financial data analyst. Analyze the OHLCV data and provide a structured response with:
44
+ 请按要求分析这些数据。`;const y=[{role:"system",content:c},{role:"user",content:g}];try{const h=await this.chatCompletion(y,t,{temperature:r?.temperature||.5,maxTokens:r?.maxTokens||1500,stream:!1,modelType:this.modelType});return this.extractContent(h)}catch(h){throw new Error(`OHLCV analysis failed: ${h.message}`)}}async analyzeOHLCVEnhanced(e,t,s,a,r=!1,o){if(r){const i=t==="en"?`You are a professional financial data analyst. Analyze the OHLCV data and provide a structured response with:
45
45
  1. Summary (brief overview)
46
46
  2. Details (key observations, 3-5 points)
47
47
  3. Recommendations (actionable insights, 2-3 points)
@@ -55,7 +55,7 @@ ${d}`;a&&(l+=t==="en"?`
55
55
 
56
56
  Additional request: ${a}`:`
57
57
 
58
- 附加要求:${a}`);const u=[{role:"system",content:i},{role:"user",content:l}];try{const m=await this.chatCompletion(u,t,{temperature:o?.temperature||.4,maxTokens:o?.maxTokens||1200,stream:!1,modelType:o?.modelType}),c=this.extractContent(m);try{const p=JSON.parse(c);if(p.summary&&Array.isArray(p.details)&&Array.isArray(p.recommendations))return p}catch{}return c}catch(m){throw new Error(`Structured OHLCV analysis failed: ${m.message}`)}}return this.analyzeOHLCV(e,t,s,a,o)}async predictingOHLCV(e,t,s,a,r){const o=s||"Based on these OHLCV data, predict the next period",i=a||1;if(!Number.isInteger(i)||i<=0)throw new Error(`Invalid count parameter: ${i}. Must be a positive integer.`);const d=50;if(i>d)throw new Error(`Count parameter too large: ${i}. Maximum allowed is ${d}. Please reduce the count or split your request.`);const l=i===1?"Return EXACTLY 1 OHLCV object for the next period.":`Return EXACTLY ${i} consecutive OHLCV objects for the next ${i} periods.`,u=t==="en"?"Please respond in English only.":t==="cn"?"请使用中文回答。":"";let m=`You are a professional financial data analysis AI. The user will give you an array of OHLCV (Open, High, Low, Close, Volume) data.
58
+ 附加要求:${a}`);const u=[{role:"system",content:i},{role:"user",content:l}];try{const m=await this.chatCompletion(u,t,{temperature:o?.temperature||.4,maxTokens:o?.maxTokens||1200,stream:!1,modelType:this.modelType}),c=this.extractContent(m);try{const p=JSON.parse(c);if(p.summary&&Array.isArray(p.details)&&Array.isArray(p.recommendations))return p}catch{}return c}catch(m){throw new Error(`Structured OHLCV analysis failed: ${m.message}`)}}return this.analyzeOHLCV(e,t,s,a,o)}async predictingOHLCV(e,t,s,a,r){const o=s||"Based on these OHLCV data, predict the next period",i=a||1;if(!Number.isInteger(i)||i<=0)throw new Error(`Invalid count parameter: ${i}. Must be a positive integer.`);const d=50;if(i>d)throw new Error(`Count parameter too large: ${i}. Maximum allowed is ${d}. Please reduce the count or split your request.`);const l=i===1?"Return EXACTLY 1 OHLCV object for the next period.":`Return EXACTLY ${i} consecutive OHLCV objects for the next ${i} periods.`,u=t==="en"?"Please respond in English only.":t==="cn"?"请使用中文回答。":"";let m=`You are a professional financial data analysis AI. The user will give you an array of OHLCV (Open, High, Low, Close, Volume) data.
59
59
  Your task: ${o}
60
60
  CRITICAL RULES:
61
61
  1. ${l}
@@ -77,10 +77,10 @@ ${i===1?`Example of valid response for 1 period:
77
77
 
78
78
  ${u}`);const c=JSON.stringify(e,null,2),p=`Here is the historical OHLCV data (${e.length} periods):
79
79
  ${c}
80
- Please process this data according to the system instructions. Remember to return EXACTLY ${i} OHLCV object(s) in a JSON array with no additional text.`,g=[{role:"system",content:m},{role:"user",content:p}];try{const y=i*50+100,h=Math.max(r?.maxTokens||1e3,y),b=await this.chatCompletion(g,t,{temperature:r?.temperature||.3,maxTokens:h,stream:!1,modelType:r?.modelType}),f=this.extractContent(b),w=this.parseOHLCVResponse(f);if(w.length!==i)throw new Error(`AI returned ${w.length} OHLCV objects, but expected ${i}.`);return w}catch(y){throw new Error(`OHLCV analysis failed: ${y.message}`)}}parseOHLCVResponse(e){try{const t=JSON.parse(e);if(!Array.isArray(t))throw new Error("Response is not in array format");return t.map((a,r)=>{if(typeof a!="object"||a===null)throw new Error(`Element ${r} is not a valid object`);const{open:o,high:i,low:d,close:l,volume:u}=a,m=["open","high","low","close","volume"];for(const c of m)if(typeof a[c]!="number"||isNaN(a[c]))throw new Error(`Element ${r} field ${c} is not a valid number`);if(i<d)throw new Error(`Element ${r}: high cannot be lower than low`);if(l<d||l>i)throw new Error(`Element ${r}: close must be between low and high`);return{open:Number(o),high:Number(i),low:Number(d),close:Number(l),volume:Number(u)}})}catch(t){const s=e.match(/\[[\s\S]*\]/);if(s)return this.parseOHLCVResponse(s[0]);throw new Error(`Unable to parse AI returned OHLCV data: ${t}
80
+ Please process this data according to the system instructions. Remember to return EXACTLY ${i} OHLCV object(s) in a JSON array with no additional text.`,g=[{role:"system",content:m},{role:"user",content:p}];try{const y=i*50+100,h=Math.max(r?.maxTokens||1e3,y),b=await this.chatCompletion(g,t,{temperature:r?.temperature||.3,maxTokens:h,stream:!1,modelType:this.modelType}),f=this.extractContent(b),w=this.parseOHLCVResponse(f);if(w.length!==i)throw new Error(`AI returned ${w.length} OHLCV objects, but expected ${i}.`);return w}catch(y){throw new Error(`OHLCV analysis failed: ${y.message}`)}}parseOHLCVResponse(e){try{const t=JSON.parse(e);if(!Array.isArray(t))throw new Error("Response is not in array format");return t.map((a,r)=>{if(typeof a!="object"||a===null)throw new Error(`Element ${r} is not a valid object`);const{open:o,high:i,low:d,close:l,volume:u}=a,m=["open","high","low","close","volume"];for(const c of m)if(typeof a[c]!="number"||isNaN(a[c]))throw new Error(`Element ${r} field ${c} is not a valid number`);if(i<d)throw new Error(`Element ${r}: high cannot be lower than low`);if(l<d||l>i)throw new Error(`Element ${r}: close must be between low and high`);return{open:Number(o),high:Number(i),low:Number(d),close:Number(l),volume:Number(u)}})}catch(t){const s=e.match(/\[[\s\S]*\]/);if(s)return this.parseOHLCVResponse(s[0]);throw new Error(`Unable to parse AI returned OHLCV data: ${t}
81
81
  Original content: ${e.substring(0,200)}...`)}}setModel(e){if(!k.get(e))throw new Error(`Unsupported model type: ${e}`);this.modelType=e}getCurrentModel(){const e=k.get(this.modelType);if(!e)throw new Error(`Model configuration does not exist: ${this.modelType}`);return{name:e.name,displayName:e.displayName,description:e.description}}async testConnection(){try{const e=await this.chat('Hello, respond with "OK" if you can hear me.',"en");return{success:!0,model:this.modelType,response:e}}catch(e){return{success:!1,model:this.modelType,error:e.message}}}buildOpenAIRequest(e,t,s,a,r){return{model:e,messages:t,temperature:s,max_tokens:a,stream:r}}buildDashScopeRequest(e,t,s,a){return{model:e,input:{messages:t},parameters:{temperature:s,max_tokens:a,result_format:"message"}}}async makeRequest(e,t,s){const a=new AbortController,r=setTimeout(()=>a.abort(),this.timeout);try{const o=await fetch(e,{method:"POST",headers:{Authorization:`Bearer ${this.apiKey}`,"Content-Type":"application/json; charset=utf-8",Accept:"application/json"},body:JSON.stringify(t),signal:a.signal});if(clearTimeout(r),!o.ok){const i=await o.text();throw new Error(`HTTP ${o.status}: ${i}`)}return s?o.body:await o.json()}catch(o){throw clearTimeout(r),o.name==="AbortError"?new Error(`Request timeout (${this.timeout}ms)`):o}}async makeStreamRequest(e,t,s){const a=await this.makeRequest(e,t,!0);if(!a)throw new Error("Failed to get streaming response");const r=a.getReader(),o=new TextDecoder("utf-8");let i="";try{for(;;){const{done:d,value:l}=await r.read();if(d){s("",!0);break}i+=o.decode(l,{stream:!0});const u=i.split(`
82
- `);i=u.pop()||"";for(const m of u)if(m.startsWith("data: ")){const c=m.slice(6);if(c==="[DONE]"){s("",!0);return}try{const p=JSON.parse(c);p.choices?.[0]?.delta?.content&&s(p.choices[0].delta.content,!1)}catch{}}}}finally{r.releaseLock()}}extractContent(e){if(e.choices?.[0]?.message?.content)return e.choices[0].message.content;if(e.output?.choices?.[0]?.message?.content)return e.output.choices[0].message.content;if(e.output?.text)return e.output.text;throw new Error("Unable to parse response content")}}function B(n,e){return new S({apiKey:n,modelType:e})}var _=(n=>(n.DEEPSEEK_CHAT="deepseek-chat",n.DEEPSEEK_CHAT_LITE="deepseek-chat-lite",n.DEEPSEEK_CHAT_PRO="deepseek-chat-pro",n.DEEPSEEK_CHAT_MAX="deepseek-chat-max",n.DEEPSEEK_CODER="deepseek-coder",n.DEEPSEEK_CODER_LITE="deepseek-coder-lite",n.DEEPSEEK_CODER_PRO="deepseek-coder-pro",n.DEEPSEEK_MATH="deepseek-math",n.DEEPSEEK_MATH_PRO="deepseek-math-pro",n.DEEPSEEK_REASONER="deepseek-reasoner",n.DEEPSEEK_REASONER_PRO="deepseek-reasoner-pro",n.DEEPSEEK_VISION="deepseek-vision",n.DEEPSEEK_VISION_PRO="deepseek-vision-pro",n.DEEPSEEK_FINANCE="deepseek-finance",n.DEEPSEEK_LAW="deepseek-law",n.DEEPSEEK_MEDICAL="deepseek-medical",n.DEEPSEEK_RESEARCH="deepseek-research",n.DEEPSEEK_OMNI="deepseek-omni",n.DEEPSEEK_OMNI_PRO="deepseek-omni-pro",n.DEEPSEEK_LLM="deepseek-llm",n.DEEPSEEK_LLM_67B="deepseek-llm-67b",n.DEEPSEEK_LLM_131B="deepseek-llm-131b",n))(_||{});const $=new Map([["deepseek-chat",{name:"deepseek-chat",displayName:"DeepSeek Chat",endpoint:"https://api.deepseek.com/v1/chat/completions",endpoints:["https://api.deepseek.com/v1/chat/completions"],format:"openai",description:"General purpose chat model for everyday conversations and tasks",maxTokens:4096,contextLength:16e3,capabilities:["chat","text-generation","reasoning"],version:"2025-01"}],["deepseek-chat-lite",{name:"deepseek-chat-lite",displayName:"DeepSeek Chat 
Lite",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Lightweight chat model optimized for speed and efficiency",maxTokens:2048,contextLength:8e3,capabilities:["chat","text-generation"],version:"2025-01"}],["deepseek-chat-pro",{name:"deepseek-chat-pro",displayName:"DeepSeek Chat Pro",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Professional chat model with enhanced reasoning capabilities",maxTokens:8192,contextLength:32e3,capabilities:["chat","text-generation","complex-reasoning","analysis"],version:"2025-01"}],["deepseek-chat-max",{name:"deepseek-chat-max",displayName:"DeepSeek Chat Max",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Maximum capability chat model for most demanding tasks",maxTokens:16384,contextLength:64e3,capabilities:["chat","text-generation","expert-analysis","research"],version:"2025-01"}],["deepseek-coder",{name:"deepseek-coder",displayName:"DeepSeek Coder",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Specialized model for code generation and programming tasks",maxTokens:16384,contextLength:64e3,capabilities:["code-generation","programming","debugging","code-review"],version:"2025-01"}],["deepseek-coder-lite",{name:"deepseek-coder-lite",displayName:"DeepSeek Coder Lite",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Lightweight code generation model",maxTokens:4096,contextLength:16e3,capabilities:["code-generation","programming"],version:"2025-01"}],["deepseek-coder-pro",{name:"deepseek-coder-pro",displayName:"DeepSeek Coder Pro",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Professional code generation model with advanced 
features",maxTokens:32768,contextLength:128e3,capabilities:["code-generation","programming","system-design","architecture"],version:"2025-01"}],["deepseek-math",{name:"deepseek-math",displayName:"DeepSeek Math",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Specialized model for mathematical reasoning and problem solving",maxTokens:8192,contextLength:32e3,capabilities:["mathematical-reasoning","problem-solving","calculations"],version:"2025-01"}],["deepseek-math-pro",{name:"deepseek-math-pro",displayName:"DeepSeek Math Pro",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Advanced mathematical reasoning model for complex problems",maxTokens:16384,contextLength:64e3,capabilities:["mathematical-reasoning","advanced-calculus","statistics"],version:"2025-01"}],["deepseek-reasoner",{name:"deepseek-reasoner",displayName:"DeepSeek Reasoner",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Dedicated reasoning model for logical analysis",maxTokens:8192,contextLength:32e3,capabilities:["logical-reasoning","analysis","decision-making"],version:"2025-01"}],["deepseek-reasoner-pro",{name:"deepseek-reasoner-pro",displayName:"DeepSeek Reasoner Pro",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Advanced reasoning model for complex logical problems",maxTokens:16384,contextLength:64e3,capabilities:["complex-reasoning","scientific-analysis","research"],version:"2025-01"}],["deepseek-vision",{name:"deepseek-vision",displayName:"DeepSeek Vision",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Vision model for image understanding and analysis",maxTokens:4096,contextLength:16e3,capabilities:["image-understanding","visual-qa","document-analysis"],version:"2025-01"}],["deepseek-vision-pro",{name:"deepseek-vision-pro",displayName:"DeepSeek Vision 
Pro",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Advanced vision model for complex visual tasks",maxTokens:8192,contextLength:32e3,capabilities:["image-understanding","video-analysis","visual-reasoning"],version:"2025-01"}],["deepseek-finance",{name:"deepseek-finance",displayName:"DeepSeek Finance",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Specialized for financial analysis, market prediction, and investment insights",maxTokens:8192,contextLength:32e3,capabilities:["financial-analysis","market-prediction","risk-assessment","investment-advice"],version:"2025-01"}],["deepseek-law",{name:"deepseek-law",displayName:"DeepSeek Law",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Specialized for legal analysis, contract review, and legal research",maxTokens:16384,contextLength:64e3,capabilities:["legal-analysis","contract-review","legal-research"],version:"2025-01"}],["deepseek-medical",{name:"deepseek-medical",displayName:"DeepSeek Medical",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Specialized for medical consultation, diagnosis support, and health analysis",maxTokens:8192,contextLength:32e3,capabilities:["medical-consultation","diagnostic-support","health-analysis"],version:"2025-01"}],["deepseek-research",{name:"deepseek-research",displayName:"DeepSeek Research",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Specialized for academic research and scientific analysis",maxTokens:32768,contextLength:128e3,capabilities:["academic-research","scientific-analysis","paper-writing"],version:"2025-01"}],["deepseek-omni",{name:"deepseek-omni",displayName:"DeepSeek Omni",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Multimodal model supporting text, image, and 
audio",maxTokens:16384,contextLength:64e3,capabilities:["text-generation","image-understanding","audio-processing","multimodal"],version:"2025-01"}],["deepseek-omni-pro",{name:"deepseek-omni-pro",displayName:"DeepSeek Omni Pro",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Professional multimodal model with advanced capabilities",maxTokens:32768,contextLength:128e3,capabilities:["text-generation","multimodal","complex-reasoning","expert-analysis"],version:"2025-01"}],["deepseek-llm",{name:"deepseek-llm",displayName:"DeepSeek LLM",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Base large language model",maxTokens:4096,contextLength:16e3,capabilities:["text-generation"],version:"2024-12"}]]);function K(n){return $.get(n)}function M(){return Array.from($.values())}function F(n){for(const e of $.values())if(e.name===n)return e}function U(){return Array.from($.keys())}function Q(n){const e=Object.values(_);for(const t of e)if(t===n)return t;return null}class q{constructor(e){if(this.apiKey=e.apiKey,this.modelType=e.modelType||_.DEEPSEEK_CHAT,this.timeout=e.timeout||3e4,this.baseURL=e.baseURL||"https://api.deepseek.com",!this.apiKey)throw new Error("API Key cannot be empty");if(!$.get(this.modelType))throw new Error(`Unsupported model type: ${this.modelType}`)}async chat(e,t,s){const a=[],r=t==="en"?"Please respond in English only.":t==="cn"?"请使用中文回答。":"";if(s?.systemPrompt||r){const i=[s?.systemPrompt,r].filter(Boolean).join(`
83
- `);a.push({role:"system",content:i})}a.push({role:"user",content:e});const o=await this.chatCompletion(a,t,{temperature:s?.temperature,maxTokens:s?.maxTokens,stream:!1,modelType:s?.modelType,topP:s?.topP,frequencyPenalty:s?.frequencyPenalty,presencePenalty:s?.presencePenalty,stop:s?.stop,tools:s?.tools,toolChoice:s?.toolChoice});return this.extractContent(o)}async chatCompletion(e,t,s){const a=s?.modelType||this.modelType,r=$.get(a);if(!r)throw new Error(`Unsupported model type: ${a}`);const o=s?.temperature??.7,i=s?.maxTokens??2e3,d=s?.stream??!1,l=s?.topP??1,u=s?.frequencyPenalty??0,m=s?.presencePenalty??0,c=s?.stop,p=s?.tools,g=s?.toolChoice,y=t==="en"?"Please respond in English only.":t==="cn"?"请使用中文回答。":"";y&&(e.some(w=>w.role==="system")?e=e.map(w=>w.role==="system"?{...w,content:`${w.content}
82
+ `);i=u.pop()||"";for(const m of u)if(m.startsWith("data: ")){const c=m.slice(6);if(c==="[DONE]"){s("",!0);return}try{const p=JSON.parse(c);p.choices?.[0]?.delta?.content&&s(p.choices[0].delta.content,!1)}catch{}}}}finally{r.releaseLock()}}extractContent(e){if(e.choices?.[0]?.message?.content)return e.choices[0].message.content;if(e.output?.choices?.[0]?.message?.content)return e.output.choices[0].message.content;if(e.output?.text)return e.output.text;throw new Error("Unable to parse response content")}}function B(n,e){return new S({apiKey:n,modelType:e})}var N=(n=>(n.DEEPSEEK_CHAT="deepseek-chat",n.DEEPSEEK_CHAT_LITE="deepseek-chat-lite",n.DEEPSEEK_CHAT_PRO="deepseek-chat-pro",n.DEEPSEEK_CHAT_MAX="deepseek-chat-max",n.DEEPSEEK_CODER="deepseek-coder",n.DEEPSEEK_CODER_LITE="deepseek-coder-lite",n.DEEPSEEK_CODER_PRO="deepseek-coder-pro",n.DEEPSEEK_MATH="deepseek-math",n.DEEPSEEK_MATH_PRO="deepseek-math-pro",n.DEEPSEEK_REASONER="deepseek-reasoner",n.DEEPSEEK_REASONER_PRO="deepseek-reasoner-pro",n.DEEPSEEK_VISION="deepseek-vision",n.DEEPSEEK_VISION_PRO="deepseek-vision-pro",n.DEEPSEEK_FINANCE="deepseek-finance",n.DEEPSEEK_LAW="deepseek-law",n.DEEPSEEK_MEDICAL="deepseek-medical",n.DEEPSEEK_RESEARCH="deepseek-research",n.DEEPSEEK_OMNI="deepseek-omni",n.DEEPSEEK_OMNI_PRO="deepseek-omni-pro",n.DEEPSEEK_LLM="deepseek-llm",n.DEEPSEEK_LLM_67B="deepseek-llm-67b",n.DEEPSEEK_LLM_131B="deepseek-llm-131b",n))(N||{});const $=new Map([["deepseek-chat",{name:"deepseek-chat",displayName:"DeepSeek Chat",endpoint:"https://api.deepseek.com/v1/chat/completions",endpoints:["https://api.deepseek.com/v1/chat/completions"],format:"openai",description:"General purpose chat model for everyday conversations and tasks",maxTokens:4096,contextLength:16e3,capabilities:["chat","text-generation","reasoning"],version:"2025-01"}],["deepseek-chat-lite",{name:"deepseek-chat-lite",displayName:"DeepSeek Chat 
Lite",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Lightweight chat model optimized for speed and efficiency",maxTokens:2048,contextLength:8e3,capabilities:["chat","text-generation"],version:"2025-01"}],["deepseek-chat-pro",{name:"deepseek-chat-pro",displayName:"DeepSeek Chat Pro",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Professional chat model with enhanced reasoning capabilities",maxTokens:8192,contextLength:32e3,capabilities:["chat","text-generation","complex-reasoning","analysis"],version:"2025-01"}],["deepseek-chat-max",{name:"deepseek-chat-max",displayName:"DeepSeek Chat Max",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Maximum capability chat model for most demanding tasks",maxTokens:16384,contextLength:64e3,capabilities:["chat","text-generation","expert-analysis","research"],version:"2025-01"}],["deepseek-coder",{name:"deepseek-coder",displayName:"DeepSeek Coder",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Specialized model for code generation and programming tasks",maxTokens:16384,contextLength:64e3,capabilities:["code-generation","programming","debugging","code-review"],version:"2025-01"}],["deepseek-coder-lite",{name:"deepseek-coder-lite",displayName:"DeepSeek Coder Lite",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Lightweight code generation model",maxTokens:4096,contextLength:16e3,capabilities:["code-generation","programming"],version:"2025-01"}],["deepseek-coder-pro",{name:"deepseek-coder-pro",displayName:"DeepSeek Coder Pro",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Professional code generation model with advanced 
features",maxTokens:32768,contextLength:128e3,capabilities:["code-generation","programming","system-design","architecture"],version:"2025-01"}],["deepseek-math",{name:"deepseek-math",displayName:"DeepSeek Math",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Specialized model for mathematical reasoning and problem solving",maxTokens:8192,contextLength:32e3,capabilities:["mathematical-reasoning","problem-solving","calculations"],version:"2025-01"}],["deepseek-math-pro",{name:"deepseek-math-pro",displayName:"DeepSeek Math Pro",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Advanced mathematical reasoning model for complex problems",maxTokens:16384,contextLength:64e3,capabilities:["mathematical-reasoning","advanced-calculus","statistics"],version:"2025-01"}],["deepseek-reasoner",{name:"deepseek-reasoner",displayName:"DeepSeek Reasoner",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Dedicated reasoning model for logical analysis",maxTokens:8192,contextLength:32e3,capabilities:["logical-reasoning","analysis","decision-making"],version:"2025-01"}],["deepseek-reasoner-pro",{name:"deepseek-reasoner-pro",displayName:"DeepSeek Reasoner Pro",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Advanced reasoning model for complex logical problems",maxTokens:16384,contextLength:64e3,capabilities:["complex-reasoning","scientific-analysis","research"],version:"2025-01"}],["deepseek-vision",{name:"deepseek-vision",displayName:"DeepSeek Vision",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Vision model for image understanding and analysis",maxTokens:4096,contextLength:16e3,capabilities:["image-understanding","visual-qa","document-analysis"],version:"2025-01"}],["deepseek-vision-pro",{name:"deepseek-vision-pro",displayName:"DeepSeek Vision 
Pro",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Advanced vision model for complex visual tasks",maxTokens:8192,contextLength:32e3,capabilities:["image-understanding","video-analysis","visual-reasoning"],version:"2025-01"}],["deepseek-finance",{name:"deepseek-finance",displayName:"DeepSeek Finance",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Specialized for financial analysis, market prediction, and investment insights",maxTokens:8192,contextLength:32e3,capabilities:["financial-analysis","market-prediction","risk-assessment","investment-advice"],version:"2025-01"}],["deepseek-law",{name:"deepseek-law",displayName:"DeepSeek Law",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Specialized for legal analysis, contract review, and legal research",maxTokens:16384,contextLength:64e3,capabilities:["legal-analysis","contract-review","legal-research"],version:"2025-01"}],["deepseek-medical",{name:"deepseek-medical",displayName:"DeepSeek Medical",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Specialized for medical consultation, diagnosis support, and health analysis",maxTokens:8192,contextLength:32e3,capabilities:["medical-consultation","diagnostic-support","health-analysis"],version:"2025-01"}],["deepseek-research",{name:"deepseek-research",displayName:"DeepSeek Research",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Specialized for academic research and scientific analysis",maxTokens:32768,contextLength:128e3,capabilities:["academic-research","scientific-analysis","paper-writing"],version:"2025-01"}],["deepseek-omni",{name:"deepseek-omni",displayName:"DeepSeek Omni",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Multimodal model supporting text, image, and 
audio",maxTokens:16384,contextLength:64e3,capabilities:["text-generation","image-understanding","audio-processing","multimodal"],version:"2025-01"}],["deepseek-omni-pro",{name:"deepseek-omni-pro",displayName:"DeepSeek Omni Pro",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Professional multimodal model with advanced capabilities",maxTokens:32768,contextLength:128e3,capabilities:["text-generation","multimodal","complex-reasoning","expert-analysis"],version:"2025-01"}],["deepseek-llm",{name:"deepseek-llm",displayName:"DeepSeek LLM",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Base large language model",maxTokens:4096,contextLength:16e3,capabilities:["text-generation"],version:"2024-12"}]]);function M(n){return $.get(n)}function K(){return Array.from($.values())}function F(n){for(const e of $.values())if(e.name===n)return e}function U(){return Array.from($.keys())}function Q(n){const e=Object.values(N);for(const t of e)if(t===n)return t;return null}class q{constructor(e){if(this.apiKey=e.apiKey,this.modelType=e.modelType||N.DEEPSEEK_CHAT,this.timeout=e.timeout||3e4,this.baseURL=e.baseURL||"https://api.deepseek.com",!this.apiKey)throw new Error("API Key cannot be empty");if(!$.get(this.modelType))throw new Error(`Unsupported model type: ${this.modelType}`)}async chat(e,t,s){const a=[],r=t==="en"?"Please respond in English only.":t==="cn"?"请使用中文回答。":"";if(s?.systemPrompt||r){const i=[s?.systemPrompt,r].filter(Boolean).join(`
83
+ `);a.push({role:"system",content:i})}a.push({role:"user",content:e});const o=await this.chatCompletion(a,t,{temperature:s?.temperature,maxTokens:s?.maxTokens,stream:!1,modelType:this.modelType,topP:s?.topP,frequencyPenalty:s?.frequencyPenalty,presencePenalty:s?.presencePenalty,stop:s?.stop,tools:s?.tools,toolChoice:s?.toolChoice});return this.extractContent(o)}async chatCompletion(e,t,s){const a=s?.modelType||this.modelType,r=$.get(a);if(!r)throw new Error(`Unsupported model type: ${a}`);const o=s?.temperature??.7,i=s?.maxTokens??2e3,d=s?.stream??!1,l=s?.topP??1,u=s?.frequencyPenalty??0,m=s?.presencePenalty??0,c=s?.stop,p=s?.tools,g=s?.toolChoice,y=t==="en"?"Please respond in English only.":t==="cn"?"请使用中文回答。":"";y&&(e.some(w=>w.role==="system")?e=e.map(w=>w.role==="system"?{...w,content:`${w.content}
84
84
  ${y}`}:w):e.unshift({role:"system",content:y}));const h=r.endpoint,b=this.buildOpenAIRequest(r.name,e,o,i,d,l,u,m,c,p,g);try{return await this.makeRequest(h,b,d)}catch(f){throw new Error(`DeepSeek AI request failed: ${f.message}`)}}async chatStream(e,t,s,a){const r=a?.modelType||this.modelType,o=$.get(r);if(!o)throw new Error(`Unsupported model type: ${r}`);const i=a?.temperature??.7,d=a?.maxTokens??2e3,l=a?.topP??1,u=a?.frequencyPenalty??0,m=a?.presencePenalty??0,c=s==="en"?"Please respond in English only.":s==="cn"?"请使用中文回答。":"";c&&(e.some(y=>y.role==="system")?e=e.map(y=>y.role==="system"?{...y,content:`${y.content}
85
85
  ${c}`}:y):e.unshift({role:"system",content:c}));const p=this.buildOpenAIRequest(o.name,e,i,d,!0,l,u,m,a?.stop,a?.tools,a?.toolChoice);try{await this.makeStreamRequest(o.endpoint,p,t)}catch(g){throw new Error(`Streaming request failed: ${g.message}`)}}async analyzeOHLCV(e,t,s,a,r){const o=s||"comprehensive",i=e.length,l={trend:{en:"Provide a detailed trend analysis of this OHLCV data, including price direction, support/resistance levels, and trend strength.",cn:"提供详细的OHLCV数据趋势分析,包括价格方向、支撑/阻力位和趋势强度。"},volume:{en:"Analyze the volume patterns in this OHLCV data, including volume trends, unusual volume spikes, and volume-price relationships.",cn:"分析OHLCV数据中的成交量模式,包括成交量趋势、异常成交量波动和量价关系。"},technical:{en:"Perform technical analysis on this OHLCV data, identifying potential technical indicators, patterns, and trading signals.",cn:"对OHLCV数据进行技术分析,识别潜在的技术指标、图表形态和交易信号。"},comprehensive:{en:"Provide a comprehensive analysis of this OHLCV data, covering trends, volume, technical aspects, and potential market implications.",cn:"提供全面的OHLCV数据分析,涵盖趋势、成交量、技术面和潜在市场影响。"}}[o][t],u=t==="en"?"Please provide your analysis in English.":"请用中文进行分析。";let m="";if(i>0){const h=e[0],f=e[i-1].close-h.close,w=f/h.close*100;let C=h.high,v=h.low,O=0;for(const E of e)E.high>C&&(C=E.high),E.low<v&&(v=E.low),O+=E.volume;const P=O/i;m=t==="en"?`This dataset contains ${i} periods of OHLCV data.
86
86
  Price range: ${v.toFixed(2)} - ${C.toFixed(2)}
@@ -122,7 +122,7 @@ ${p}
122
122
  ${p}
123
123
  Please analyze this data as requested.`:`这是OHLCV数据(${i}个周期):
124
124
  ${p}
125
- 请按要求分析这些数据。`;const y=[{role:"system",content:c},{role:"user",content:g}];try{const h=await this.chatCompletion(y,t,{temperature:r?.temperature||.5,maxTokens:r?.maxTokens||1500,stream:!1,modelType:r?.modelType||_.DEEPSEEK_FINANCE,topP:r?.topP,frequencyPenalty:r?.frequencyPenalty,presencePenalty:r?.presencePenalty});return this.extractContent(h)}catch(h){throw new Error(`OHLCV analysis failed: ${h.message}`)}}async analyzeOHLCVEnhanced(e,t,s,a,r=!1,o){if(r){const i=t==="en"?`You are a professional financial data analyst. Analyze the OHLCV data and provide a structured response with:
125
+ 请按要求分析这些数据。`;const y=[{role:"system",content:c},{role:"user",content:g}];try{const h=await this.chatCompletion(y,t,{temperature:r?.temperature||.5,maxTokens:r?.maxTokens||1500,stream:!1,modelType:this.modelType,topP:r?.topP,frequencyPenalty:r?.frequencyPenalty,presencePenalty:r?.presencePenalty});return this.extractContent(h)}catch(h){throw new Error(`OHLCV analysis failed: ${h.message}`)}}async analyzeOHLCVEnhanced(e,t,s,a,r=!1,o){if(r){const i=t==="en"?`You are a professional financial data analyst. Analyze the OHLCV data and provide a structured response with:
126
126
  1. Summary (brief overview)
127
127
  2. Details (key observations, 3-5 points)
128
128
  3. Recommendations (actionable insights, 2-3 points)
@@ -136,7 +136,7 @@ ${d}`;a&&(l+=t==="en"?`
136
136
 
137
137
  Additional request: ${a}`:`
138
138
 
139
- 附加要求:${a}`);const u=[{role:"system",content:i},{role:"user",content:l}];try{const m=await this.chatCompletion(u,t,{temperature:o?.temperature||.4,maxTokens:o?.maxTokens||1200,stream:!1,modelType:o?.modelType||_.DEEPSEEK_FINANCE}),c=this.extractContent(m);try{const p=JSON.parse(c);if(p.summary&&Array.isArray(p.details)&&Array.isArray(p.recommendations))return p}catch{}return c}catch(m){throw new Error(`Structured OHLCV analysis failed: ${m.message}`)}}return this.analyzeOHLCV(e,t,s,a,o)}async predictingOHLCV(e,t,s,a){const r=t||"Based on these OHLCV data, predict the next period",o=s||1;if(!Number.isInteger(o)||o<=0)throw new Error(`Invalid count parameter: ${o}. Must be a positive integer.`);const i=50;if(o>i)throw new Error(`Count parameter too large: ${o}. Maximum allowed is ${i}.`);const d=o===1?"Return EXACTLY 1 OHLCV object for the next period.":`Return EXACTLY ${o} consecutive OHLCV objects for the next ${o} periods.`,l=`You are a professional financial data analysis AI. The user will give you an array of OHLCV (Open, High, Low, Close, Volume) data.
139
+ 附加要求:${a}`);const u=[{role:"system",content:i},{role:"user",content:l}];try{const m=await this.chatCompletion(u,t,{temperature:o?.temperature||.4,maxTokens:o?.maxTokens||1200,stream:!1,modelType:this.modelType}),c=this.extractContent(m);try{const p=JSON.parse(c);if(p.summary&&Array.isArray(p.details)&&Array.isArray(p.recommendations))return p}catch{}return c}catch(m){throw new Error(`Structured OHLCV analysis failed: ${m.message}`)}}return this.analyzeOHLCV(e,t,s,a,o)}async predictingOHLCV(e,t,s,a){const r=t||"Based on these OHLCV data, predict the next period",o=s||1;if(!Number.isInteger(o)||o<=0)throw new Error(`Invalid count parameter: ${o}. Must be a positive integer.`);const i=50;if(o>i)throw new Error(`Count parameter too large: ${o}. Maximum allowed is ${i}.`);const d=o===1?"Return EXACTLY 1 OHLCV object for the next period.":`Return EXACTLY ${o} consecutive OHLCV objects for the next ${o} periods.`,l=`You are a professional financial data analysis AI. The user will give you an array of OHLCV (Open, High, Low, Close, Volume) data.
140
140
  Your task: ${r}
141
141
  CRITICAL RULES:
142
142
  1. ${d}
@@ -157,7 +157,7 @@ ${o===1?`Example of valid response for 1 period:
157
157
  ... ${o-2} more OHLCV objects following the same pattern`:""}
158
158
  ]`}`,u=JSON.stringify(e,null,2),m=`Here is the historical OHLCV data (${e.length} periods):
159
159
  ${u}
160
- Please process this data according to the system instructions. Remember to return EXACTLY ${o} OHLCV object(s) in a JSON array with no additional text.`,c=[{role:"system",content:l},{role:"user",content:m}];try{const p=o*50+100,g=Math.max(a?.maxTokens||2e3,p),y=await this.chatCompletion(c,"en",{temperature:a?.temperature||.3,maxTokens:g,stream:!1,modelType:a?.modelType||_.DEEPSEEK_FINANCE,topP:a?.topP,frequencyPenalty:a?.frequencyPenalty,presencePenalty:a?.presencePenalty}),h=this.extractContent(y),b=this.parseOHLCVResponse(h);if(b.length!==o)throw new Error(`AI returned ${b.length} OHLCV objects, but expected ${o}.`);return b}catch(p){throw new Error(`OHLCV analysis failed: ${p.message}`)}}setModel(e){if(!$.get(e))throw new Error(`Unsupported model type: ${e}`);this.modelType=e}getCurrentModel(){const e=$.get(this.modelType);if(!e)throw new Error(`Model configuration does not exist: ${this.modelType}`);return{name:e.name,displayName:e.displayName,description:e.description}}async testConnection(){try{const e=await this.chat('Hello, respond with "OK" if you can hear me.',"en");return{success:!0,model:this.modelType,response:e}}catch(e){return{success:!1,model:this.modelType,error:e.message}}}buildOpenAIRequest(e,t,s,a,r,o,i,d,l,u,m){const c={model:e,messages:t,temperature:s,max_tokens:a,stream:r};return o!==void 0&&(c.top_p=o),i!==void 0&&(c.frequency_penalty=i),d!==void 0&&(c.presence_penalty=d),l&&(c.stop=l),u&&(c.tools=u),m&&(c.tool_choice=m),c}async makeRequest(e,t,s){const a=new AbortController,r=setTimeout(()=>a.abort(),this.timeout);try{const o=await fetch(e,{method:"POST",headers:{Authorization:`Bearer ${this.apiKey}`,"Content-Type":"application/json; charset=utf-8",Accept:"application/json"},body:JSON.stringify(t),signal:a.signal});if(clearTimeout(r),!o.ok){const i=await o.text();throw new Error(`HTTP ${o.status}: ${i}`)}return s?o.body:await o.json()}catch(o){throw clearTimeout(r),o.name==="AbortError"?new Error(`Request timeout 
(${this.timeout}ms)`):o}}async makeStreamRequest(e,t,s){const a=await this.makeRequest(e,t,!0);if(!a)throw new Error("Failed to get streaming response");const r=a.getReader(),o=new TextDecoder("utf-8");let i="";try{for(;;){const{done:d,value:l}=await r.read();if(d){s("",!0);break}i+=o.decode(l,{stream:!0});const u=i.split(`
160
+ Please process this data according to the system instructions. Remember to return EXACTLY ${o} OHLCV object(s) in a JSON array with no additional text.`,c=[{role:"system",content:l},{role:"user",content:m}];try{const p=o*50+100,g=Math.max(a?.maxTokens||2e3,p),y=await this.chatCompletion(c,"en",{temperature:a?.temperature||.3,maxTokens:g,stream:!1,modelType:this.modelType,topP:a?.topP,frequencyPenalty:a?.frequencyPenalty,presencePenalty:a?.presencePenalty}),h=this.extractContent(y),b=this.parseOHLCVResponse(h);if(b.length!==o)throw new Error(`AI returned ${b.length} OHLCV objects, but expected ${o}.`);return b}catch(p){throw new Error(`OHLCV analysis failed: ${p.message}`)}}setModel(e){if(!$.get(e))throw new Error(`Unsupported model type: ${e}`);this.modelType=e}getCurrentModel(){const e=$.get(this.modelType);if(!e)throw new Error(`Model configuration does not exist: ${this.modelType}`);return{name:e.name,displayName:e.displayName,description:e.description}}async testConnection(){try{const e=await this.chat('Hello, respond with "OK" if you can hear me.',"en");return{success:!0,model:this.modelType,response:e}}catch(e){return{success:!1,model:this.modelType,error:e.message}}}buildOpenAIRequest(e,t,s,a,r,o,i,d,l,u,m){const c={model:e,messages:t,temperature:s,max_tokens:a,stream:r};return o!==void 0&&(c.top_p=o),i!==void 0&&(c.frequency_penalty=i),d!==void 0&&(c.presence_penalty=d),l&&(c.stop=l),u&&(c.tools=u),m&&(c.tool_choice=m),c}async makeRequest(e,t,s){const a=new AbortController,r=setTimeout(()=>a.abort(),this.timeout);try{const o=await fetch(e,{method:"POST",headers:{Authorization:`Bearer ${this.apiKey}`,"Content-Type":"application/json; charset=utf-8",Accept:"application/json"},body:JSON.stringify(t),signal:a.signal});if(clearTimeout(r),!o.ok){const i=await o.text();throw new Error(`HTTP ${o.status}: ${i}`)}return s?o.body:await o.json()}catch(o){throw clearTimeout(r),o.name==="AbortError"?new Error(`Request timeout (${this.timeout}ms)`):o}}async 
makeStreamRequest(e,t,s){const a=await this.makeRequest(e,t,!0);if(!a)throw new Error("Failed to get streaming response");const r=a.getReader(),o=new TextDecoder("utf-8");let i="";try{for(;;){const{done:d,value:l}=await r.read();if(d){s("",!0);break}i+=o.decode(l,{stream:!0});const u=i.split(`
161
161
  `);i=u.pop()||"";for(const m of u)if(m.startsWith("data: ")){const c=m.slice(6);if(c==="[DONE]"){s("",!0);return}try{const p=JSON.parse(c);p.choices?.[0]?.delta?.content&&s(p.choices[0].delta.content,!1)}catch{}}}}finally{r.releaseLock()}}extractContent(e){if(e.choices?.[0]?.message?.content)return e.choices[0].message.content;if(e.output?.choices?.[0]?.message?.content)return e.output.choices[0].message.content;if(e.output?.text)return e.output.text;if(e.choices?.[0]?.delta?.content)return e.choices[0].delta.content;throw new Error("Unable to parse response content")}parseOHLCVResponse(e){try{const t=JSON.parse(e);if(!Array.isArray(t))throw new Error("Response is not in array format");return t.map((a,r)=>{if(typeof a!="object"||a===null)throw new Error(`Element ${r} is not a valid object`);const{open:o,high:i,low:d,close:l,volume:u}=a,m=["open","high","low","close","volume"];for(const c of m)if(typeof a[c]!="number"||isNaN(a[c]))throw new Error(`Element ${r} field ${c} is not a valid number`);if(i<d)throw new Error(`Element ${r}: high cannot be lower than low`);if(l<d||l>i)throw new Error(`Element ${r}: close must be between low and high`);return{open:Number(o),high:Number(i),low:Number(d),close:Number(l),volume:Number(u)}})}catch(t){const s=e.match(/\[[\s\S]*\]/);if(s)return this.parseOHLCVResponse(s[0]);throw new Error(`Unable to parse AI returned OHLCV data: ${t}
162
162
  Original content: ${e.substring(0,200)}...`)}}}function z(n,e){return new q({apiKey:n,modelType:e})}var T=(n=>(n.GPT4="gpt-4",n.GPT4_0314="gpt-4-0314",n.GPT4_0613="gpt-4-0613",n.GPT4_32K="gpt-4-32k",n.GPT4_32K_0314="gpt-4-32k-0314",n.GPT4_32K_0613="gpt-4-32k-0613",n.GPT4_TURBO="gpt-4-turbo",n.GPT4_TURBO_PREVIEW="gpt-4-turbo-preview",n.GPT4_TURBO_2024_04_09="gpt-4-turbo-2024-04-09",n.GPT4_OMNI="gpt-4o",n.GPT4_OMNI_2024_05_13="gpt-4o-2024-05-13",n.GPT4_OMNI_MINI="gpt-4o-mini",n.GPT4_OMNI_MINI_2024_07_18="gpt-4o-mini-2024-07-18",n.GPT3_5_TURBO="gpt-3.5-turbo",n.GPT3_5_TURBO_0125="gpt-3.5-turbo-0125",n.GPT3_5_TURBO_1106="gpt-3.5-turbo-1106",n.GPT3_5_TURBO_INSTRUCT="gpt-3.5-turbo-instruct",n.GPT3_5_TURBO_16K="gpt-3.5-turbo-16k",n.GPT3_5_TURBO_16K_0613="gpt-3.5-turbo-16k-0613",n.DAVINCI_002="davinci-002",n.BABBAGE_002="babbage-002",n.TEXT_DAVINCI_003="text-davinci-003",n.TEXT_DAVINCI_002="text-davinci-002",n.TEXT_DAVINCI_001="text-davinci-001",n.TEXT_CURIE_001="text-curie-001",n.TEXT_BABBAGE_001="text-babbage-001",n.TEXT_ADA_001="text-ada-001",n.TEXT_EMBEDDING_ADA_002="text-embedding-ada-002",n.TEXT_EMBEDDING_3_SMALL="text-embedding-3-small",n.TEXT_EMBEDDING_3_LARGE="text-embedding-3-large",n.DALL_E_2="dall-e-2",n.DALL_E_3="dall-e-3",n.WHISPER_1="whisper-1",n.TTS_1="tts-1",n.TTS_1_HD="tts-1-hd",n.MODERATION_LATEST="text-moderation-latest",n.MODERATION_STABLE="text-moderation-stable",n.GPT3_5_TURBO_FINETUNED="ft:gpt-3.5-turbo-0125:personal:",n.GPT4_FINETUNED="ft:gpt-4-0125-preview:personal:",n.GPT4_VISION_PREVIEW="gpt-4-vision-preview",n))(T||{});const x=new Map([["gpt-4",{name:"gpt-4",displayName:"GPT-4",endpoint:"https://api.openai.com/v1/chat/completions",format:"openai",description:"Powerful multi-purpose model for complex 
tasks",maxTokens:8192,contextLength:8192,capabilities:["chat","text-generation","reasoning","analysis"],inputCostPer1KTokens:.03,outputCostPer1KTokens:.06,supportedFeatures:["chat","function-calling"]}],["gpt-4-turbo",{name:"gpt-4-turbo",displayName:"GPT-4 Turbo",endpoint:"https://api.openai.com/v1/chat/completions",format:"openai",description:"Enhanced GPT-4 with 128K context, knowledge cutoff April 2023",maxTokens:4096,contextLength:128e3,capabilities:["chat","text-generation","reasoning","analysis","vision"],inputCostPer1KTokens:.01,outputCostPer1KTokens:.03,supportedFeatures:["chat","function-calling","vision","json-mode"]}],["gpt-4o",{name:"gpt-4o",displayName:"GPT-4o",endpoint:"https://api.openai.com/v1/chat/completions",format:"openai",description:"Versatile model supporting text, images, audio with fast response",maxTokens:4096,contextLength:128e3,capabilities:["chat","text-generation","vision","audio-processing","multimodal"],inputCostPer1KTokens:.005,outputCostPer1KTokens:.015,supportedFeatures:["chat","function-calling","vision","audio","json-mode"]}],["gpt-4o-mini",{name:"gpt-4o-mini",displayName:"GPT-4o Mini",endpoint:"https://api.openai.com/v1/chat/completions",format:"openai",description:"Compact and efficient version of GPT-4o with lower cost",maxTokens:16384,contextLength:128e3,capabilities:["chat","text-generation","vision"],inputCostPer1KTokens:15e-5,outputCostPer1KTokens:6e-4,supportedFeatures:["chat","function-calling","vision","json-mode"]}],["gpt-3.5-turbo",{name:"gpt-3.5-turbo",displayName:"GPT-3.5 Turbo",endpoint:"https://api.openai.com/v1/chat/completions",format:"openai",description:"Fast and cost-effective, suitable for most conversational tasks",maxTokens:4096,contextLength:16385,capabilities:["chat","text-generation","code-generation"],inputCostPer1KTokens:5e-4,outputCostPer1KTokens:.0015,supportedFeatures:["chat","function-calling"]}],["gpt-3.5-turbo-instruct",{name:"gpt-3.5-turbo-instruct",displayName:"GPT-3.5 Turbo 
Instruct",endpoint:"https://api.openai.com/v1/completions",format:"openai",description:"Instruction-tuned version for text completion tasks",maxTokens:4096,contextLength:4097,capabilities:["text-completion","instruction-following"],inputCostPer1KTokens:.0015,outputCostPer1KTokens:.002,supportedFeatures:["completions"]}],["text-embedding-ada-002",{name:"text-embedding-ada-002",displayName:"Text Embedding Ada 002",endpoint:"https://api.openai.com/v1/embeddings",format:"openai",description:"Text embedding model, 1536 dimensions, suitable for retrieval and similarity",contextLength:8191,capabilities:["embeddings","semantic-search"],inputCostPer1KTokens:1e-4,supportedFeatures:["embeddings"]}],["text-embedding-3-small",{name:"text-embedding-3-small",displayName:"Text Embedding 3 Small",endpoint:"https://api.openai.com/v1/embeddings",format:"openai",description:"Small text embedding model, 1536 dimensions, balance of performance and cost",contextLength:8191,capabilities:["embeddings","semantic-search"],inputCostPer1KTokens:2e-5,supportedFeatures:["embeddings"]}],["dall-e-3",{name:"dall-e-3",displayName:"DALL-E 3",endpoint:"https://api.openai.com/v1/images/generations",format:"openai",description:"Advanced image generation model producing high-quality, high-resolution images",capabilities:["image-generation","creative-design"],inputCostPer1KTokens:.04,supportedFeatures:["image-generation","variations","edits"]}],["whisper-1",{name:"whisper-1",displayName:"Whisper",endpoint:"https://api.openai.com/v1/audio/transcriptions",format:"openai",description:"Speech recognition model supporting multilingual transcription and translation",capabilities:["speech-recognition","audio-transcription","translation"],inputCostPer1KTokens:.006,supportedFeatures:["transcriptions","translations"]}],["tts-1-hd",{name:"tts-1-hd",displayName:"TTS-1 HD",endpoint:"https://api.openai.com/v1/audio/speech",format:"openai",description:"High-quality text-to-speech with multiple voice 
options",capabilities:["speech-synthesis","text-to-speech"],inputCostPer1KTokens:.015,supportedFeatures:["speech","voice-selection"]}],["text-moderation-latest",{name:"text-moderation-latest",displayName:"Moderation Latest",endpoint:"https://api.openai.com/v1/moderations",format:"openai",description:"Content moderation model for detecting harmful content",capabilities:["content-moderation","safety"],inputCostPer1KTokens:1e-4,supportedFeatures:["moderation"]}]]);function j(n){return x.get(n)}function L(){return Array.from(x.values())}function W(n){for(const e of x.values())if(e.name===n)return e}function G(){return Array.from(x.keys())}function J(){return L().filter(n=>n.capabilities.includes("chat"))}function X(){return L().filter(n=>n.capabilities.includes("text-completion"))}function Y(){return L().filter(n=>n.capabilities.includes("embeddings"))}function Z(){return L().filter(n=>n.capabilities.includes("vision")||n.capabilities.includes("image-generation"))}function ee(){return L().filter(n=>n.capabilities.includes("audio-processing")||n.capabilities.includes("speech-recognition")||n.capabilities.includes("speech-synthesis"))}function te(){return L().filter(n=>n.capabilities.includes("multimodal"))}function ne(){const n=["gpt-4o","gpt-4o-mini","gpt-4-turbo","gpt-3.5-turbo","text-embedding-3-small","dall-e-3"];return L().filter(e=>n.includes(e.name))}function oe(){return L().filter(n=>n.inputCostPer1KTokens&&n.inputCostPer1KTokens<.001).sort((n,e)=>(n.inputCostPer1KTokens||0)-(e.inputCostPer1KTokens||0))}function ae(){return L().filter(n=>n.contextLength&&n.contextLength>=128e3).sort((n,e)=>(e.contextLength||0)-(n.contextLength||0))}function se(n,e,t=0){const s=(n.inputCostPer1KTokens||0)/1e3*e,a=(n.outputCostPer1KTokens||0)/1e3*t;return{inputTokens:e,outputTokens:t,inputCost:s,outputCost:a,totalCost:s+a}}function ie(n){let 
e=L();switch(n.taskType){case"chat":e=e.filter(t=>t.capabilities.includes("chat"));break;case"completion":e=e.filter(t=>t.capabilities.includes("text-completion"));break;case"embedding":e=e.filter(t=>t.capabilities.includes("embeddings"));break;case"image":e=e.filter(t=>t.capabilities.includes("image-generation")||t.capabilities.includes("vision"));break;case"audio":e=e.filter(t=>t.capabilities.includes("speech-recognition")||t.capabilities.includes("speech-synthesis"));break}return n.contextLength&&(e=e.filter(t=>t.contextLength&&t.contextLength>=n.contextLength)),n.features&&n.features.length>0&&(e=e.filter(t=>n.features.every(s=>t.supportedFeatures?.includes(s)||t.capabilities.includes(s)))),n.budget&&e.sort((t,s)=>(t.inputCostPer1KTokens||0)-(s.inputCostPer1KTokens||0)),e.slice(0,5)}function re(n){const e=Object.values(T);for(const t of e)if(t===n)return t;return null}class H{constructor(e){if(this.apiKey=e.apiKey,this.modelType=e.modelType||T.GPT3_5_TURBO,this.timeout=e.timeout||3e4,this.organization=e.organization,this.baseURL=e.baseURL||"https://api.openai.com/v1",!this.apiKey)throw new Error("API Key cannot be empty");if(!x.get(this.modelType))throw new Error(`Unsupported model type: ${this.modelType}`)}async chat(e,t,s){const a=[],r=t==="en"?"Please respond in English only.":t==="cn"?"请使用中文回答。":"";if(s?.systemPrompt||r){const i=[s?.systemPrompt,r].filter(Boolean).join(`
163
163
  `);a.push({role:"system",content:i})}a.push({role:"user",content:e});const o=await this.chatCompletion(a,t,{temperature:s?.temperature,maxTokens:s?.maxTokens,stream:!1,topP:s?.topP,frequencyPenalty:s?.frequencyPenalty,presencePenalty:s?.presencePenalty,stop:s?.stop});return this.extractContent(o)}async chatCompletion(e,t,s){const a=s?.modelType||this.modelType,r=x.get(a);if(!r)throw new Error(`Unsupported model type: ${a}`);const o=s?.temperature??.7,i=s?.maxTokens??1e3,d=s?.stream??!1,l=r.endpoint,u=t==="en"?"Please respond in English only.":t==="cn"?"请使用中文回答。":"";u&&(e.some(p=>p.role==="system")?e=e.map(p=>p.role==="system"?{...p,content:`${p.content}
@@ -202,7 +202,7 @@ ${p}
202
202
  ${p}
203
203
  Please analyze this data as requested.`:`这是OHLCV数据(${i}个周期):
204
204
  ${p}
205
- 请按要求分析这些数据。`;const y=[{role:"system",content:c},{role:"user",content:g}];try{const h=await this.chatCompletion(y,t,{temperature:r?.temperature||.5,maxTokens:r?.maxTokens||1500,stream:!1,modelType:r?.modelType,topP:r?.topP,frequencyPenalty:r?.frequencyPenalty,presencePenalty:r?.presencePenalty,stop:r?.stop});return this.extractContent(h)}catch(h){throw new Error(`OHLCV analysis failed: ${h.message}`)}}async analyzeOHLCVEnhanced(e,t,s,a,r=!1,o){if(r){const i=t==="en"?`You are a professional financial data analyst. Analyze the OHLCV data and provide a structured response with:
205
+ 请按要求分析这些数据。`;const y=[{role:"system",content:c},{role:"user",content:g}];try{const h=await this.chatCompletion(y,t,{temperature:r?.temperature||.5,maxTokens:r?.maxTokens||1500,stream:!1,modelType:this.modelType,topP:r?.topP,frequencyPenalty:r?.frequencyPenalty,presencePenalty:r?.presencePenalty,stop:r?.stop});return this.extractContent(h)}catch(h){throw new Error(`OHLCV analysis failed: ${h.message}`)}}async analyzeOHLCVEnhanced(e,t,s,a,r=!1,o){if(r){const i=t==="en"?`You are a professional financial data analyst. Analyze the OHLCV data and provide a structured response with:
206
206
  1. Summary (brief overview)
207
207
  2. Details (key observations, 3-5 points)
208
208
  3. Recommendations (actionable insights, 2-3 points)
@@ -216,7 +216,7 @@ ${d}`;a&&(l+=t==="en"?`
216
216
 
217
217
  Additional request: ${a}`:`
218
218
 
219
- 附加要求:${a}`);const u=[{role:"system",content:i},{role:"user",content:l}];try{const m=await this.chatCompletion(u,t,{temperature:o?.temperature||.4,maxTokens:o?.maxTokens||1200,stream:!1,modelType:o?.modelType}),c=this.extractContent(m);try{const p=JSON.parse(c);if(p.summary&&Array.isArray(p.details)&&Array.isArray(p.recommendations))return p}catch{}return c}catch(m){throw new Error(`Structured OHLCV analysis failed: ${m.message}`)}}return this.analyzeOHLCV(e,t,s,a,o)}async predictingOHLCV(e,t,s,a){const r=t||"Based on these OHLCV data, predict the next period",o=s||1;if(!Number.isInteger(o)||o<=0)throw new Error(`Invalid count parameter: ${o}. Must be a positive integer.`);const i=50;if(o>i)throw new Error(`Count parameter too large: ${o}. Maximum allowed is ${i}. Please reduce the count or split your request.`);const d=o===1?"Return EXACTLY 1 OHLCV object for the next period.":`Return EXACTLY ${o} consecutive OHLCV objects for the next ${o} periods.`,l=`You are a professional financial data analysis AI. The user will give you an array of OHLCV (Open, High, Low, Close, Volume) data.
219
+ 附加要求:${a}`);const u=[{role:"system",content:i},{role:"user",content:l}];try{const m=await this.chatCompletion(u,t,{temperature:o?.temperature||.4,maxTokens:o?.maxTokens||1200,stream:!1,modelType:this.modelType}),c=this.extractContent(m);try{const p=JSON.parse(c);if(p.summary&&Array.isArray(p.details)&&Array.isArray(p.recommendations))return p}catch{}return c}catch(m){throw new Error(`Structured OHLCV analysis failed: ${m.message}`)}}return this.analyzeOHLCV(e,t,s,a,o)}async predictingOHLCV(e,t,s,a){const r=t||"Based on these OHLCV data, predict the next period",o=s||1;if(!Number.isInteger(o)||o<=0)throw new Error(`Invalid count parameter: ${o}. Must be a positive integer.`);const i=50;if(o>i)throw new Error(`Count parameter too large: ${o}. Maximum allowed is ${i}. Please reduce the count or split your request.`);const d=o===1?"Return EXACTLY 1 OHLCV object for the next period.":`Return EXACTLY ${o} consecutive OHLCV objects for the next ${o} periods.`,l=`You are a professional financial data analysis AI. The user will give you an array of OHLCV (Open, High, Low, Close, Volume) data.
220
220
  Your task: ${r}
221
221
  CRITICAL RULES:
222
222
  1. ${d}
@@ -236,6 +236,6 @@ ${o===1?`Example of valid response for 1 period:
236
236
  ... ${o-2} more OHLCV objects following the same pattern`:""}
237
237
  ]`}`,u=JSON.stringify(e,null,2),m=`Here is the historical OHLCV data (${e.length} periods):
238
238
  ${u}
239
- Please process this data according to the system instructions. Remember to return EXACTLY ${o} OHLCV object(s) in a JSON array with no additional text.`,c=[{role:"system",content:l},{role:"user",content:m}];try{const p=o*50+100,g=Math.max(a?.maxTokens||1e3,p),y=await this.chatCompletion(c,"en",{temperature:a?.temperature||.3,maxTokens:g,stream:!1,modelType:a?.modelType,topP:a?.topP,frequencyPenalty:a?.frequencyPenalty,presencePenalty:a?.presencePenalty,stop:a?.stop}),h=this.extractContent(y),b=this.parseOHLCVResponse(h);if(b.length!==o)throw new Error(`AI returned ${b.length} OHLCV objects, but expected ${o}.`);return b}catch(p){throw new Error(`OHLCV analysis failed: ${p.message}`)}}async generateImage(e,t){const s=t?.modelType||T.DALL_E_3;if(s!==T.DALL_E_2&&s!==T.DALL_E_3)throw new Error("Image generation only supports DALL-E models");const a=x.get(s);if(!a)throw new Error(`Unsupported model type: ${s}`);const r={model:a.name,prompt:e,n:t?.n||1,size:t?.size||"1024x1024",quality:t?.quality||"standard",style:t?.style||"vivid",response_format:t?.responseFormat||"url"};try{const o=await this.makeRequest(a.endpoint,r,!1);if(o.data&&Array.isArray(o.data))return o.data.map(i=>t?.responseFormat==="b64_json"?i.b64_json:i.url);throw new Error("Invalid response format from image generation")}catch(o){throw new Error(`Image generation failed: ${o.message}`)}}async createEmbeddings(e,t){const s=t?.modelType||T.TEXT_EMBEDDING_ADA_002,a=x.get(s);if(!a)throw new Error(`Unsupported model type: ${s}`);const r={model:a.name,input:e};t?.dimensions&&a.name===T.TEXT_EMBEDDING_3_SMALL&&(r.dimensions=t.dimensions);try{const o=await this.makeRequest(a.endpoint,r,!1);if(o.data&&Array.isArray(o.data))return o.data.map(i=>i.embedding);throw new Error("Invalid response format from embeddings")}catch(o){throw new Error(`Embedding creation failed: ${o.message}`)}}async transcribeAudio(e,t){const s=t?.modelType||T.WHISPER_1,a=x.get(s);if(!a)throw new Error(`Unsupported model type: 
${s}`);const r=new FormData;if(typeof e=="string")throw new Error("File path/Base64 support requires additional implementation");r.append("file",e),r.append("model",a.name),t?.language&&r.append("language",t.language),t?.prompt&&r.append("prompt",t.prompt),t?.responseFormat&&r.append("response_format",t.responseFormat),t?.temperature!==void 0&&r.append("temperature",t.temperature.toString());try{const o=await this.makeFormDataRequest(a.endpoint,r,!1);return o.text||o.transcription||""}catch(o){throw new Error(`Audio transcription failed: ${o.message}`)}}async textToSpeech(e,t){const s=t?.modelType||T.TTS_1_HD,a=x.get(s);if(!a)throw new Error(`Unsupported model type: ${s}`);const r={model:a.name,input:e,voice:t?.voice||"alloy",response_format:t?.responseFormat||"mp3",speed:t?.speed||1};try{return await this.makeRequest(a.endpoint,r,!1,!0)}catch(o){throw new Error(`Text-to-speech conversion failed: ${o.message}`)}}async moderateContent(e,t){const s=t?.modelType||T.MODERATION_LATEST,a=x.get(s);if(!a)throw new Error(`Unsupported model type: ${s}`);const r={model:a.name,input:e};try{return(await this.makeRequest(a.endpoint,r,!1)).results||[]}catch(o){throw new Error(`Content moderation failed: ${o.message}`)}}setModel(e){if(!x.get(e))throw new Error(`Unsupported model type: ${e}`);this.modelType=e}getCurrentModel(){const e=x.get(this.modelType);if(!e)throw new Error(`Model configuration does not exist: ${this.modelType}`);return{name:e.name,displayName:e.displayName,description:e.description}}async testConnection(){try{const e=await this.chat('Hello, respond with "OK" if you can hear me.',"en");return{success:!0,model:this.modelType,response:e}}catch(e){return{success:!1,model:this.modelType,error:e.message}}}estimateCost(e,t=0,s){const a=s||this.modelType,r=x.get(a);if(!r)throw new Error(`Unsupported model type: ${a}`);const 
o=(r.inputCostPer1KTokens||0)/1e3*e,i=(r.outputCostPer1KTokens||0)/1e3*t;return{inputCost:o,outputCost:i,totalCost:o+i}}buildOpenAIRequest(e,t,s,a,r,o){const i={model:e,messages:t,temperature:s,max_tokens:a,stream:r};return o?.topP!==void 0&&(i.top_p=o.topP),o?.frequencyPenalty!==void 0&&(i.frequency_penalty=o.frequencyPenalty),o?.presencePenalty!==void 0&&(i.presence_penalty=o.presencePenalty),o?.stop!==void 0&&(i.stop=o.stop),i}async makeRequest(e,t,s,a=!1){const r=new AbortController,o=setTimeout(()=>r.abort(),this.timeout);try{const i=e.startsWith("http")?e:`${this.baseURL}${e}`,d={Authorization:`Bearer ${this.apiKey}`,"Content-Type":"application/json"};this.organization&&(d["OpenAI-Organization"]=this.organization);const l=await fetch(i,{method:"POST",headers:d,body:JSON.stringify(t),signal:r.signal});if(clearTimeout(o),!l.ok){const u=await l.text();try{const m=JSON.parse(u);throw new Error(`HTTP ${l.status}: ${m.error?.message||u}`)}catch{throw new Error(`HTTP ${l.status}: ${u}`)}}return a?await l.arrayBuffer():s?l.body:await l.json()}catch(i){throw clearTimeout(o),i.name==="AbortError"?new Error(`Request timeout (${this.timeout}ms)`):i}}async makeFormDataRequest(e,t,s){const a=new AbortController,r=setTimeout(()=>a.abort(),this.timeout);try{const o=e.startsWith("http")?e:`${this.baseURL}${e}`,i={Authorization:`Bearer ${this.apiKey}`};this.organization&&(i["OpenAI-Organization"]=this.organization);const d=await fetch(o,{method:"POST",headers:i,body:t,signal:a.signal});if(clearTimeout(r),!d.ok){const l=await d.text();try{const u=JSON.parse(l);throw new Error(`HTTP ${d.status}: ${u.error?.message||l}`)}catch{throw new Error(`HTTP ${d.status}: ${l}`)}}return s?d.body:await d.json()}catch(o){throw clearTimeout(r),o.name==="AbortError"?new Error(`Request timeout (${this.timeout}ms)`):o}}async makeStreamRequest(e,t,s){const a=await this.makeRequest(e,t,!0);if(!a)throw new Error("Failed to get streaming response");const r=a.getReader(),o=new TextDecoder("utf-8");let 
i="";try{for(;;){const{done:d,value:l}=await r.read();if(d){s("",!0);break}i+=o.decode(l,{stream:!0});const u=i.split(`
239
+ Please process this data according to the system instructions. Remember to return EXACTLY ${o} OHLCV object(s) in a JSON array with no additional text.`,c=[{role:"system",content:l},{role:"user",content:m}];try{const p=o*50+100,g=Math.max(a?.maxTokens||1e3,p),y=await this.chatCompletion(c,"en",{temperature:a?.temperature||.3,maxTokens:g,stream:!1,modelType:this.modelType,topP:a?.topP,frequencyPenalty:a?.frequencyPenalty,presencePenalty:a?.presencePenalty,stop:a?.stop}),h=this.extractContent(y),b=this.parseOHLCVResponse(h);if(b.length!==o)throw new Error(`AI returned ${b.length} OHLCV objects, but expected ${o}.`);return b}catch(p){throw new Error(`OHLCV analysis failed: ${p.message}`)}}async generateImage(e,t){const s=t?.modelType||T.DALL_E_3;if(s!==T.DALL_E_2&&s!==T.DALL_E_3)throw new Error("Image generation only supports DALL-E models");const a=x.get(s);if(!a)throw new Error(`Unsupported model type: ${s}`);const r={model:a.name,prompt:e,n:t?.n||1,size:t?.size||"1024x1024",quality:t?.quality||"standard",style:t?.style||"vivid",response_format:t?.responseFormat||"url"};try{const o=await this.makeRequest(a.endpoint,r,!1);if(o.data&&Array.isArray(o.data))return o.data.map(i=>t?.responseFormat==="b64_json"?i.b64_json:i.url);throw new Error("Invalid response format from image generation")}catch(o){throw new Error(`Image generation failed: ${o.message}`)}}async createEmbeddings(e,t){const s=t?.modelType||T.TEXT_EMBEDDING_ADA_002,a=x.get(s);if(!a)throw new Error(`Unsupported model type: ${s}`);const r={model:a.name,input:e};t?.dimensions&&a.name===T.TEXT_EMBEDDING_3_SMALL&&(r.dimensions=t.dimensions);try{const o=await this.makeRequest(a.endpoint,r,!1);if(o.data&&Array.isArray(o.data))return o.data.map(i=>i.embedding);throw new Error("Invalid response format from embeddings")}catch(o){throw new Error(`Embedding creation failed: ${o.message}`)}}async transcribeAudio(e,t){const s=t?.modelType||T.WHISPER_1,a=x.get(s);if(!a)throw new Error(`Unsupported model type: 
${s}`);const r=new FormData;if(typeof e=="string")throw new Error("File path/Base64 support requires additional implementation");r.append("file",e),r.append("model",a.name),t?.language&&r.append("language",t.language),t?.prompt&&r.append("prompt",t.prompt),t?.responseFormat&&r.append("response_format",t.responseFormat),t?.temperature!==void 0&&r.append("temperature",t.temperature.toString());try{const o=await this.makeFormDataRequest(a.endpoint,r,!1);return o.text||o.transcription||""}catch(o){throw new Error(`Audio transcription failed: ${o.message}`)}}async textToSpeech(e,t){const s=t?.modelType||T.TTS_1_HD,a=x.get(s);if(!a)throw new Error(`Unsupported model type: ${s}`);const r={model:a.name,input:e,voice:t?.voice||"alloy",response_format:t?.responseFormat||"mp3",speed:t?.speed||1};try{return await this.makeRequest(a.endpoint,r,!1,!0)}catch(o){throw new Error(`Text-to-speech conversion failed: ${o.message}`)}}async moderateContent(e,t){const s=t?.modelType||T.MODERATION_LATEST,a=x.get(s);if(!a)throw new Error(`Unsupported model type: ${s}`);const r={model:a.name,input:e};try{return(await this.makeRequest(a.endpoint,r,!1)).results||[]}catch(o){throw new Error(`Content moderation failed: ${o.message}`)}}setModel(e){if(!x.get(e))throw new Error(`Unsupported model type: ${e}`);this.modelType=e}getCurrentModel(){const e=x.get(this.modelType);if(!e)throw new Error(`Model configuration does not exist: ${this.modelType}`);return{name:e.name,displayName:e.displayName,description:e.description}}async testConnection(){try{const e=await this.chat('Hello, respond with "OK" if you can hear me.',"en");return{success:!0,model:this.modelType,response:e}}catch(e){return{success:!1,model:this.modelType,error:e.message}}}estimateCost(e,t=0,s){const a=s||this.modelType,r=x.get(a);if(!r)throw new Error(`Unsupported model type: ${a}`);const 
o=(r.inputCostPer1KTokens||0)/1e3*e,i=(r.outputCostPer1KTokens||0)/1e3*t;return{inputCost:o,outputCost:i,totalCost:o+i}}buildOpenAIRequest(e,t,s,a,r,o){const i={model:e,messages:t,temperature:s,max_tokens:a,stream:r};return o?.topP!==void 0&&(i.top_p=o.topP),o?.frequencyPenalty!==void 0&&(i.frequency_penalty=o.frequencyPenalty),o?.presencePenalty!==void 0&&(i.presence_penalty=o.presencePenalty),o?.stop!==void 0&&(i.stop=o.stop),i}async makeRequest(e,t,s,a=!1){const r=new AbortController,o=setTimeout(()=>r.abort(),this.timeout);try{const i=e.startsWith("http")?e:`${this.baseURL}${e}`,d={Authorization:`Bearer ${this.apiKey}`,"Content-Type":"application/json"};this.organization&&(d["OpenAI-Organization"]=this.organization);const l=await fetch(i,{method:"POST",headers:d,body:JSON.stringify(t),signal:r.signal});if(clearTimeout(o),!l.ok){const u=await l.text();try{const m=JSON.parse(u);throw new Error(`HTTP ${l.status}: ${m.error?.message||u}`)}catch{throw new Error(`HTTP ${l.status}: ${u}`)}}return a?await l.arrayBuffer():s?l.body:await l.json()}catch(i){throw clearTimeout(o),i.name==="AbortError"?new Error(`Request timeout (${this.timeout}ms)`):i}}async makeFormDataRequest(e,t,s){const a=new AbortController,r=setTimeout(()=>a.abort(),this.timeout);try{const o=e.startsWith("http")?e:`${this.baseURL}${e}`,i={Authorization:`Bearer ${this.apiKey}`};this.organization&&(i["OpenAI-Organization"]=this.organization);const d=await fetch(o,{method:"POST",headers:i,body:t,signal:a.signal});if(clearTimeout(r),!d.ok){const l=await d.text();try{const u=JSON.parse(l);throw new Error(`HTTP ${d.status}: ${u.error?.message||l}`)}catch{throw new Error(`HTTP ${d.status}: ${l}`)}}return s?d.body:await d.json()}catch(o){throw clearTimeout(r),o.name==="AbortError"?new Error(`Request timeout (${this.timeout}ms)`):o}}async makeStreamRequest(e,t,s){const a=await this.makeRequest(e,t,!0);if(!a)throw new Error("Failed to get streaming response");const r=a.getReader(),o=new TextDecoder("utf-8");let 
i="";try{for(;;){const{done:d,value:l}=await r.read();if(d){s("",!0);break}i+=o.decode(l,{stream:!0});const u=i.split(`
240
240
  `);i=u.pop()||"";for(const m of u)if(m.startsWith("data: ")){const c=m.slice(6);if(c==="[DONE]"){s("",!0);return}try{const p=JSON.parse(c);p.choices?.[0]?.delta?.content&&s(p.choices[0].delta.content,!1)}catch{}}}}finally{r.releaseLock()}}extractContent(e){if(e.choices?.[0]?.message?.content)return e.choices[0].message.content;if(e.data?.[0]?.b64_json)return e.data[0].b64_json;if(e.data?.[0]?.url)return e.data[0].url;if(e.text)return e.text;throw new Error("Unable to parse response content")}parseOHLCVResponse(e){try{const t=JSON.parse(e);if(!Array.isArray(t))throw new Error("Response is not in array format");return t.map((a,r)=>{if(typeof a!="object"||a===null)throw new Error(`Element ${r} is not a valid object`);const{open:o,high:i,low:d,close:l,volume:u}=a,m=["open","high","low","close","volume"];for(const c of m)if(typeof a[c]!="number"||isNaN(a[c]))throw new Error(`Element ${r} field ${c} is not a valid number`);if(i<d)throw new Error(`Element ${r}: high cannot be lower than low`);if(l<d||l>i)throw new Error(`Element ${r}: close must be between low and high`);return{open:Number(o),high:Number(i),low:Number(d),close:Number(l),volume:Number(u)}})}catch(t){const s=e.match(/\[[\s\S]*\]/);if(s)return this.parseOHLCVResponse(s[0]);throw new Error(`Unable to parse AI returned OHLCV data: ${t}
241
- Original content: ${e.substring(0,200)}...`)}}}function ce(n,e){return new H({apiKey:n,modelType:e})}exports.ALIYUN_MODELS=k;exports.AliYunModelType=N;exports.AliyunAI=S;exports.DEEPSEEK_MODELS=$;exports.DeepSeekAI=q;exports.DeepSeekModelType=_;exports.OPENAI_MODELS=x;exports.OpenAI=H;exports.OpenAIModelType=T;exports.createAliyunAI=B;exports.createDeepSeekAI=z;exports.createOpenAI=ce;exports.estimateCost=se;exports.getAllDeepSeekModels=M;exports.getAllModels=A;exports.getAllOpenAIModels=L;exports.getAudioModelsOpenAI=ee;exports.getAvailableAliYunModelTypes=R;exports.getAvailableDeepSeekModelTypes=U;exports.getAvailableOpenAIModelTypes=G;exports.getChatModels=J;exports.getCompletionModels=X;exports.getCostEfficientModels=oe;exports.getDeepSeekModel=K;exports.getDeepSeekModelByName=F;exports.getEmbeddingModels=Y;exports.getHighContextModels=ae;exports.getLatestModels=ne;exports.getModel=V;exports.getModelByName=I;exports.getMultimodalModelsOpenAI=te;exports.getOpenAIModel=j;exports.getOpenAIModelByName=W;exports.getVisionModelsOpenAI=Z;exports.stringToAliYunModelType=D;exports.stringToDeepSeekModelType=Q;exports.stringToOpenAIModelType=re;exports.suggestModel=ie;
241
+ Original content: ${e.substring(0,200)}...`)}}}function ce(n,e){return new H({apiKey:n,modelType:e})}exports.ALIYUN_MODELS=k;exports.AliYunModelType=_;exports.AliyunAI=S;exports.DEEPSEEK_MODELS=$;exports.DeepSeekAI=q;exports.DeepSeekModelType=N;exports.OPENAI_MODELS=x;exports.OpenAI=H;exports.OpenAIModelType=T;exports.createAliyunAI=B;exports.createDeepSeekAI=z;exports.createOpenAI=ce;exports.estimateCost=se;exports.getAllDeepSeekModels=K;exports.getAllModels=A;exports.getAllOpenAIModels=L;exports.getAudioModelsOpenAI=ee;exports.getAvailableAliYunModelTypes=R;exports.getAvailableDeepSeekModelTypes=U;exports.getAvailableOpenAIModelTypes=G;exports.getChatModels=J;exports.getCompletionModels=X;exports.getCostEfficientModels=oe;exports.getDeepSeekModel=M;exports.getDeepSeekModelByName=F;exports.getEmbeddingModels=Y;exports.getHighContextModels=ae;exports.getLatestModels=ne;exports.getModel=V;exports.getModelByName=I;exports.getMultimodalModelsOpenAI=te;exports.getOpenAIModel=j;exports.getOpenAIModelByName=W;exports.getVisionModelsOpenAI=Z;exports.stringToAliYunModelType=D;exports.stringToDeepSeekModelType=Q;exports.stringToOpenAIModelType=re;exports.suggestModel=ie;
package/dist/index.mjs CHANGED
@@ -1,4 +1,4 @@
1
- var N = /* @__PURE__ */ ((n) => (n.QWEN_TURBO = "qwen-turbo", n.QWEN_PLUS = "qwen-plus", n.QWEN_MAX = "qwen-max", n.QWEN_MAX_LONGCONTEXT = "qwen-max-longcontext", n.QWEN_2_5B = "qwen2.5-0.5b", n.QWEN_2_5B_INSTRUCT = "qwen2.5-0.5b-instruct", n.QWEN_2_5B_7B = "qwen2.5-7b", n.QWEN_2_5B_7B_INSTRUCT = "qwen2.5-7b-instruct", n.QWEN_2_5B_14B = "qwen2.5-14b", n.QWEN_2_5B_14B_INSTRUCT = "qwen2.5-14b-instruct", n.QWEN_2_5B_32B = "qwen2.5-32b", n.QWEN_2_5B_32B_INSTRUCT = "qwen2.5-32b-instruct", n.QWEN_2_5B_72B = "qwen2.5-72b", n.QWEN_2_5B_72B_INSTRUCT = "qwen2.5-72b-instruct", n.QWEN_2_5B_CODER = "qwen2.5-coder", n.QWEN_2_5B_CODER_7B = "qwen2.5-coder-7b", n.QWEN_2_5B_CODER_14B = "qwen2.5-coder-14b", n.QWEN_2_5B_CODER_32B = "qwen2.5-coder-32b", n.QWEN_VL_LITE = "qwen-vl-lite", n.QWEN_VL_PLUS = "qwen-vl-plus", n.QWEN_VL_MAX = "qwen-vl-max", n.QWEN_AUDIO_TURBO = "qwen-audio-turbo", n.QWEN_AUDIO_CHAT = "qwen-audio-chat", n.QWEN_MATH_7B = "qwen-math-7b", n.LLAMA2_7B_CHAT_V2 = "llama2-7b-chat-v2", n.BAICHUAN2_7B_CHAT_V1 = "baichuan2-7b-chat-v1", n.QWEN_FINANCIAL = "qwen-financial", n.QWEN_FINANCIAL_14B = "qwen-financial-14b", n.QWEN_FINANCIAL_32B = "qwen-financial-32b", n.QWEN_MEDICAL = "qwen-medical", n.QWEN_MEDICAL_14B = "qwen-medical-14b", n.QWEN_MEDICAL_32B = "qwen-medical-32b", n.QWEN_OMNI = "qwen-omni", n.QWEN_OMNI_PRO = "qwen-omni-pro", n))(N || {});
1
+ var O = /* @__PURE__ */ ((n) => (n.QWEN_TURBO = "qwen-turbo", n.QWEN_PLUS = "qwen-plus", n.QWEN_MAX = "qwen-max", n.QWEN_MAX_LONGCONTEXT = "qwen-max-longcontext", n.QWEN_2_5B = "qwen2.5-0.5b", n.QWEN_2_5B_INSTRUCT = "qwen2.5-0.5b-instruct", n.QWEN_2_5B_7B = "qwen2.5-7b", n.QWEN_2_5B_7B_INSTRUCT = "qwen2.5-7b-instruct", n.QWEN_2_5B_14B = "qwen2.5-14b", n.QWEN_2_5B_14B_INSTRUCT = "qwen2.5-14b-instruct", n.QWEN_2_5B_32B = "qwen2.5-32b", n.QWEN_2_5B_32B_INSTRUCT = "qwen2.5-32b-instruct", n.QWEN_2_5B_72B = "qwen2.5-72b", n.QWEN_2_5B_72B_INSTRUCT = "qwen2.5-72b-instruct", n.QWEN_2_5B_CODER = "qwen2.5-coder", n.QWEN_2_5B_CODER_7B = "qwen2.5-coder-7b", n.QWEN_2_5B_CODER_14B = "qwen2.5-coder-14b", n.QWEN_2_5B_CODER_32B = "qwen2.5-coder-32b", n.QWEN_VL_LITE = "qwen-vl-lite", n.QWEN_VL_PLUS = "qwen-vl-plus", n.QWEN_VL_MAX = "qwen-vl-max", n.QWEN_AUDIO_TURBO = "qwen-audio-turbo", n.QWEN_AUDIO_CHAT = "qwen-audio-chat", n.QWEN_MATH_7B = "qwen-math-7b", n.LLAMA2_7B_CHAT_V2 = "llama2-7b-chat-v2", n.BAICHUAN2_7B_CHAT_V1 = "baichuan2-7b-chat-v1", n.QWEN_FINANCIAL = "qwen-financial", n.QWEN_FINANCIAL_14B = "qwen-financial-14b", n.QWEN_FINANCIAL_32B = "qwen-financial-32b", n.QWEN_MEDICAL = "qwen-medical", n.QWEN_MEDICAL_14B = "qwen-medical-14b", n.QWEN_MEDICAL_32B = "qwen-medical-32b", n.QWEN_OMNI = "qwen-omni", n.QWEN_OMNI_PRO = "qwen-omni-pro", n))(O || {});
2
2
  const k = /* @__PURE__ */ new Map([
3
3
  [
4
4
  "qwen-turbo",
@@ -373,7 +373,7 @@ function I() {
373
373
  return Array.from(k.keys());
374
374
  }
375
375
  function D(n) {
376
- const e = Object.values(N);
376
+ const e = Object.values(O);
377
377
  for (const t of e)
378
378
  if (t === n)
379
379
  return t;
@@ -387,7 +387,7 @@ class q {
387
387
  * @param config.timeout - Timeout, default 30 seconds
388
388
  */
389
389
  constructor(e) {
390
- if (this.apiKey = e.apiKey, this.modelType = e.modelType || N.QWEN_TURBO, this.timeout = e.timeout || 3e4, !this.apiKey)
390
+ if (this.apiKey = e.apiKey, this.modelType = e.modelType || O.QWEN_TURBO, this.timeout = e.timeout || 3e4, !this.apiKey)
391
391
  throw new Error("API Key cannot be empty");
392
392
  if (!k.get(this.modelType))
393
393
  throw new Error(`Unsupported model type: ${this.modelType}`);
@@ -411,7 +411,7 @@ class q {
411
411
  temperature: s?.temperature,
412
412
  maxTokens: s?.maxTokens,
413
413
  stream: !1,
414
- modelType: s?.modelType
414
+ modelType: this.modelType
415
415
  });
416
416
  return this.extractContent(o);
417
417
  }
@@ -554,7 +554,7 @@ ${p}
554
554
  temperature: r?.temperature || 0.5,
555
555
  maxTokens: r?.maxTokens || 1500,
556
556
  stream: !1,
557
- modelType: r?.modelType
557
+ modelType: this.modelType
558
558
  });
559
559
  return this.extractContent(h);
560
560
  } catch (h) {
@@ -599,7 +599,7 @@ Additional request: ${a}` : `
599
599
  temperature: o?.temperature || 0.4,
600
600
  maxTokens: o?.maxTokens || 1200,
601
601
  stream: !1,
602
- modelType: o?.modelType
602
+ modelType: this.modelType
603
603
  }), c = this.extractContent(m);
604
604
  try {
605
605
  const p = JSON.parse(c);
@@ -664,7 +664,7 @@ Please process this data according to the system instructions. Remember to retur
664
664
  temperature: r?.temperature || 0.3,
665
665
  maxTokens: h,
666
666
  stream: !1,
667
- modelType: r?.modelType
667
+ modelType: this.modelType
668
668
  }), f = this.extractContent(b), w = this.parseOHLCVResponse(f);
669
669
  if (w.length !== i)
670
670
  throw new Error(`AI returned ${w.length} OHLCV objects, but expected ${i}.`);
@@ -841,7 +841,7 @@ Original content: ${e.substring(0, 200)}...`);
841
841
  function B(n, e) {
842
842
  return new q({ apiKey: n, modelType: e });
843
843
  }
844
- var O = /* @__PURE__ */ ((n) => (n.DEEPSEEK_CHAT = "deepseek-chat", n.DEEPSEEK_CHAT_LITE = "deepseek-chat-lite", n.DEEPSEEK_CHAT_PRO = "deepseek-chat-pro", n.DEEPSEEK_CHAT_MAX = "deepseek-chat-max", n.DEEPSEEK_CODER = "deepseek-coder", n.DEEPSEEK_CODER_LITE = "deepseek-coder-lite", n.DEEPSEEK_CODER_PRO = "deepseek-coder-pro", n.DEEPSEEK_MATH = "deepseek-math", n.DEEPSEEK_MATH_PRO = "deepseek-math-pro", n.DEEPSEEK_REASONER = "deepseek-reasoner", n.DEEPSEEK_REASONER_PRO = "deepseek-reasoner-pro", n.DEEPSEEK_VISION = "deepseek-vision", n.DEEPSEEK_VISION_PRO = "deepseek-vision-pro", n.DEEPSEEK_FINANCE = "deepseek-finance", n.DEEPSEEK_LAW = "deepseek-law", n.DEEPSEEK_MEDICAL = "deepseek-medical", n.DEEPSEEK_RESEARCH = "deepseek-research", n.DEEPSEEK_OMNI = "deepseek-omni", n.DEEPSEEK_OMNI_PRO = "deepseek-omni-pro", n.DEEPSEEK_LLM = "deepseek-llm", n.DEEPSEEK_LLM_67B = "deepseek-llm-67b", n.DEEPSEEK_LLM_131B = "deepseek-llm-131b", n))(O || {});
844
+ var N = /* @__PURE__ */ ((n) => (n.DEEPSEEK_CHAT = "deepseek-chat", n.DEEPSEEK_CHAT_LITE = "deepseek-chat-lite", n.DEEPSEEK_CHAT_PRO = "deepseek-chat-pro", n.DEEPSEEK_CHAT_MAX = "deepseek-chat-max", n.DEEPSEEK_CODER = "deepseek-coder", n.DEEPSEEK_CODER_LITE = "deepseek-coder-lite", n.DEEPSEEK_CODER_PRO = "deepseek-coder-pro", n.DEEPSEEK_MATH = "deepseek-math", n.DEEPSEEK_MATH_PRO = "deepseek-math-pro", n.DEEPSEEK_REASONER = "deepseek-reasoner", n.DEEPSEEK_REASONER_PRO = "deepseek-reasoner-pro", n.DEEPSEEK_VISION = "deepseek-vision", n.DEEPSEEK_VISION_PRO = "deepseek-vision-pro", n.DEEPSEEK_FINANCE = "deepseek-finance", n.DEEPSEEK_LAW = "deepseek-law", n.DEEPSEEK_MEDICAL = "deepseek-medical", n.DEEPSEEK_RESEARCH = "deepseek-research", n.DEEPSEEK_OMNI = "deepseek-omni", n.DEEPSEEK_OMNI_PRO = "deepseek-omni-pro", n.DEEPSEEK_LLM = "deepseek-llm", n.DEEPSEEK_LLM_67B = "deepseek-llm-67b", n.DEEPSEEK_LLM_131B = "deepseek-llm-131b", n))(N || {});
845
845
  const $ = /* @__PURE__ */ new Map([
846
846
  // Chat models
847
847
  [
@@ -1150,7 +1150,7 @@ function Q() {
1150
1150
  return Array.from($.keys());
1151
1151
  }
1152
1152
  function z(n) {
1153
- const e = Object.values(O);
1153
+ const e = Object.values(N);
1154
1154
  for (const t of e)
1155
1155
  if (t === n)
1156
1156
  return t;
@@ -1165,7 +1165,7 @@ class H {
1165
1165
  * @param config.baseURL - Base URL for API, default official endpoint
1166
1166
  */
1167
1167
  constructor(e) {
1168
- if (this.apiKey = e.apiKey, this.modelType = e.modelType || O.DEEPSEEK_CHAT, this.timeout = e.timeout || 3e4, this.baseURL = e.baseURL || "https://api.deepseek.com", !this.apiKey)
1168
+ if (this.apiKey = e.apiKey, this.modelType = e.modelType || N.DEEPSEEK_CHAT, this.timeout = e.timeout || 3e4, this.baseURL = e.baseURL || "https://api.deepseek.com", !this.apiKey)
1169
1169
  throw new Error("API Key cannot be empty");
1170
1170
  if (!$.get(this.modelType))
1171
1171
  throw new Error(`Unsupported model type: ${this.modelType}`);
@@ -1189,7 +1189,7 @@ class H {
1189
1189
  temperature: s?.temperature,
1190
1190
  maxTokens: s?.maxTokens,
1191
1191
  stream: !1,
1192
- modelType: s?.modelType,
1192
+ modelType: this.modelType,
1193
1193
  topP: s?.topP,
1194
1194
  frequencyPenalty: s?.frequencyPenalty,
1195
1195
  presencePenalty: s?.presencePenalty,
@@ -1366,7 +1366,7 @@ ${p}
1366
1366
  temperature: r?.temperature || 0.5,
1367
1367
  maxTokens: r?.maxTokens || 1500,
1368
1368
  stream: !1,
1369
- modelType: r?.modelType || O.DEEPSEEK_FINANCE,
1369
+ modelType: this.modelType,
1370
1370
  topP: r?.topP,
1371
1371
  frequencyPenalty: r?.frequencyPenalty,
1372
1372
  presencePenalty: r?.presencePenalty
@@ -1414,7 +1414,7 @@ Additional request: ${a}` : `
1414
1414
  temperature: o?.temperature || 0.4,
1415
1415
  maxTokens: o?.maxTokens || 1200,
1416
1416
  stream: !1,
1417
- modelType: o?.modelType || O.DEEPSEEK_FINANCE
1417
+ modelType: this.modelType
1418
1418
  }), c = this.extractContent(m);
1419
1419
  try {
1420
1420
  const p = JSON.parse(c);
@@ -1474,7 +1474,7 @@ Please process this data according to the system instructions. Remember to retur
1474
1474
  temperature: a?.temperature || 0.3,
1475
1475
  maxTokens: g,
1476
1476
  stream: !1,
1477
- modelType: a?.modelType || O.DEEPSEEK_FINANCE,
1477
+ modelType: this.modelType,
1478
1478
  topP: a?.topP,
1479
1479
  frequencyPenalty: a?.frequencyPenalty,
1480
1480
  presencePenalty: a?.presencePenalty
@@ -2143,7 +2143,7 @@ ${p}
2143
2143
  temperature: r?.temperature || 0.5,
2144
2144
  maxTokens: r?.maxTokens || 1500,
2145
2145
  stream: !1,
2146
- modelType: r?.modelType,
2146
+ modelType: this.modelType,
2147
2147
  topP: r?.topP,
2148
2148
  frequencyPenalty: r?.frequencyPenalty,
2149
2149
  presencePenalty: r?.presencePenalty,
@@ -2192,7 +2192,7 @@ Additional request: ${a}` : `
2192
2192
  temperature: o?.temperature || 0.4,
2193
2193
  maxTokens: o?.maxTokens || 1200,
2194
2194
  stream: !1,
2195
- modelType: o?.modelType
2195
+ modelType: this.modelType
2196
2196
  }), c = this.extractContent(m);
2197
2197
  try {
2198
2198
  const p = JSON.parse(c);
@@ -2251,7 +2251,7 @@ Please process this data according to the system instructions. Remember to retur
2251
2251
  temperature: a?.temperature || 0.3,
2252
2252
  maxTokens: g,
2253
2253
  stream: !1,
2254
- modelType: a?.modelType,
2254
+ modelType: this.modelType,
2255
2255
  topP: a?.topP,
2256
2256
  frequencyPenalty: a?.frequencyPenalty,
2257
2257
  presencePenalty: a?.presencePenalty,
@@ -2598,11 +2598,11 @@ function ce(n, e) {
2598
2598
  }
2599
2599
  export {
2600
2600
  k as ALIYUN_MODELS,
2601
- N as AliYunModelType,
2601
+ O as AliYunModelType,
2602
2602
  q as AliyunAI,
2603
2603
  $ as DEEPSEEK_MODELS,
2604
2604
  H as DeepSeekAI,
2605
- O as DeepSeekModelType,
2605
+ N as DeepSeekModelType,
2606
2606
  x as OPENAI_MODELS,
2607
2607
  S as OpenAI,
2608
2608
  T as OpenAIModelType,
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "ohlcv-ai",
3
- "version": "1.1.0",
3
+ "version": "1.1.1",
4
4
  "description": "AI agents focused on processing ohlcv data structures.",
5
5
  "author": "happyboy",
6
6
  "license": "Apache-2.0",