ohlcv-ai 1.0.9 → 1.1.1

This diff reflects the publicly available contents of the two package versions as published to their registry and is provided for informational purposes only.
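Judging from the minified bundle below, version 1.1.x threads a language argument ("en" or "cn") through chat, chatCompletion, chatStream, and predictingOHLCV on both the Aliyun (Qwen) and DeepSeek clients, injecting a matching system prompt, and adds analyzeOHLCV and analyzeOHLCVEnhanced helpers (the latter can return a structured {summary, details, recommendations} object). The sketch that follows shows how the updated Aliyun client appears to be called; the import names are placeholders, since the package's public export names are not visible in this diff.

// Minimal usage sketch based on the method signatures visible in the minified
// 1.1.1 bundle. The import names are placeholders: the real export names are
// not shown in this diff.
import { AliyunAI, AliyunModelType } from "ohlcv-ai";

async function main() {
  const client = new AliyunAI({
    apiKey: process.env.DASHSCOPE_API_KEY ?? "",
    modelType: AliyunModelType.QWEN_FINANCIAL, // "qwen-financial"
  });

  const candles = [
    { open: 115.5, high: 118.0, low: 114.0, close: 117.0, volume: 1350000 },
    { open: 117.5, high: 120.0, low: 116.0, close: 119.0, volume: 1400000 },
  ];

  // 1.1.x adds a language argument ("en" | "cn") that is injected into the
  // system prompt of chat/chatCompletion/chatStream.
  const reply = await client.chat("Summarize the last two candles.", "en");

  // New in 1.1.x: free-form analysis with a focus of
  // "trend" | "volume" | "technical" | "comprehensive".
  const analysis = await client.analyzeOHLCV(candles, "en", "trend");

  // predictingOHLCV now also takes the language argument; count must be a
  // positive integer no larger than 50.
  const forecast = await client.predictingOHLCV(candles, "en", undefined, 3);

  console.log({ reply, analysis, forecast });
}

main().catch(console.error);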
package/dist/index.js CHANGED
@@ -1,8 +1,64 @@
- "use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});var k=(t=>(t.QWEN_TURBO="qwen-turbo",t.QWEN_PLUS="qwen-plus",t.QWEN_MAX="qwen-max",t.QWEN_MAX_LONGCONTEXT="qwen-max-longcontext",t.QWEN_2_5B="qwen2.5-0.5b",t.QWEN_2_5B_INSTRUCT="qwen2.5-0.5b-instruct",t.QWEN_2_5B_7B="qwen2.5-7b",t.QWEN_2_5B_7B_INSTRUCT="qwen2.5-7b-instruct",t.QWEN_2_5B_14B="qwen2.5-14b",t.QWEN_2_5B_14B_INSTRUCT="qwen2.5-14b-instruct",t.QWEN_2_5B_32B="qwen2.5-32b",t.QWEN_2_5B_32B_INSTRUCT="qwen2.5-32b-instruct",t.QWEN_2_5B_72B="qwen2.5-72b",t.QWEN_2_5B_72B_INSTRUCT="qwen2.5-72b-instruct",t.QWEN_2_5B_CODER="qwen2.5-coder",t.QWEN_2_5B_CODER_7B="qwen2.5-coder-7b",t.QWEN_2_5B_CODER_14B="qwen2.5-coder-14b",t.QWEN_2_5B_CODER_32B="qwen2.5-coder-32b",t.QWEN_VL_LITE="qwen-vl-lite",t.QWEN_VL_PLUS="qwen-vl-plus",t.QWEN_VL_MAX="qwen-vl-max",t.QWEN_AUDIO_TURBO="qwen-audio-turbo",t.QWEN_AUDIO_CHAT="qwen-audio-chat",t.QWEN_MATH_7B="qwen-math-7b",t.LLAMA2_7B_CHAT_V2="llama2-7b-chat-v2",t.BAICHUAN2_7B_CHAT_V1="baichuan2-7b-chat-v1",t.QWEN_FINANCIAL="qwen-financial",t.QWEN_FINANCIAL_14B="qwen-financial-14b",t.QWEN_FINANCIAL_32B="qwen-financial-32b",t.QWEN_MEDICAL="qwen-medical",t.QWEN_MEDICAL_14B="qwen-medical-14b",t.QWEN_MEDICAL_32B="qwen-medical-32b",t.QWEN_OMNI="qwen-omni",t.QWEN_OMNI_PRO="qwen-omni-pro",t))(k||{});const b=new Map([["qwen-turbo",{name:"qwen-turbo",displayName:"Qwen-Turbo",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",endpoints:["https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions","https://dashscope.aliyuncs.com/api/v1/services/aigc/text-generation/generation"],format:"openai",description:"Lightweight version, fast response speed, suitable for general conversation scenarios",maxTokens:2e3,contextLength:8e3,capabilities:["text-generation","chat"]}],["qwen-plus",{name:"qwen-plus",displayName:"Qwen-Plus",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",endpoints:["https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions"],format:"openai",description:"Enhanced version, suitable for complex tasks and long text processing",maxTokens:6e3,contextLength:32e3,capabilities:["text-generation","chat","reasoning"]}],["qwen-max",{name:"qwen-max",displayName:"Qwen-Max",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",endpoints:["https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions"],format:"openai",description:"Maximum version, strongest capabilities, suitable for high-demand professional tasks",maxTokens:8e3,contextLength:32e3,capabilities:["text-generation","chat","reasoning","coding","analysis"]}],["qwen-max-longcontext",{name:"qwen-max-longcontext",displayName:"Qwen-Max-LongContext",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",endpoints:["https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions"],format:"openai",description:"Supports 128K long context, suitable for long document processing",maxTokens:8e3,contextLength:128e3,capabilities:["text-generation","chat","document-analysis"]}],["qwen2.5-0.5b",{name:"qwen2.5-0.5b",displayName:"Qwen2.5-0.5B",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Ultra-lightweight 0.5B parameter model for edge 
devices",maxTokens:4e3,contextLength:32e3,capabilities:["text-generation","chat"]}],["qwen2.5-0.5b-instruct",{name:"qwen2.5-0.5b-instruct",displayName:"Qwen2.5-0.5B-Instruct",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Instruction-tuned 0.5B model for specific tasks",maxTokens:4e3,contextLength:32e3,capabilities:["instruction-following","chat"]}],["qwen2.5-7b",{name:"qwen2.5-7b",displayName:"Qwen2.5-7B",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"7B parameter base model, balanced performance and efficiency",maxTokens:6e3,contextLength:32e3,capabilities:["text-generation","reasoning"]}],["qwen2.5-7b-instruct",{name:"qwen2.5-7b-instruct",displayName:"Qwen2.5-7B-Instruct",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Instruction-tuned 7B model for chat and tasks",maxTokens:6e3,contextLength:32e3,capabilities:["chat","instruction-following","coding"]}],["qwen2.5-14b",{name:"qwen2.5-14b",displayName:"Qwen2.5-14B",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"14B parameter model with enhanced capabilities",maxTokens:8e3,contextLength:32e3,capabilities:["text-generation","analysis","reasoning"]}],["qwen2.5-32b",{name:"qwen2.5-32b",displayName:"Qwen2.5-32B",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"32B parameter high-performance model",maxTokens:8e3,contextLength:32e3,capabilities:["text-generation","complex-reasoning","analysis"]}],["qwen2.5-72b",{name:"qwen2.5-72b",displayName:"Qwen2.5-72B",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"72B parameter state-of-the-art model",maxTokens:8e3,contextLength:32e3,capabilities:["text-generation","expert-analysis","research"]}],["qwen2.5-coder",{name:"qwen2.5-coder",displayName:"Qwen2.5-Coder",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Specialized code generation model",maxTokens:8e3,contextLength:32e3,capabilities:["code-generation","code-explanation","debugging"]}],["qwen2.5-coder-7b",{name:"qwen2.5-coder-7b",displayName:"Qwen2.5-Coder-7B",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"7B parameter code generation model",maxTokens:8e3,contextLength:32e3,capabilities:["code-generation","programming"]}],["qwen2.5-coder-14b",{name:"qwen2.5-coder-14b",displayName:"Qwen2.5-Coder-14B",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"14B parameter advanced code generation model",maxTokens:8e3,contextLength:32e3,capabilities:["code-generation","code-review","optimization"]}],["qwen-vl-lite",{name:"qwen-vl-lite",displayName:"Qwen-VL-Lite",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Lightweight vision-language model for basic image understanding",maxTokens:2e3,contextLength:8e3,capabilities:["image-understanding","visual-qa"]}],["qwen-vl-plus",{name:"qwen-vl-plus",displayName:"Qwen-VL-Plus",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Vision-language model supporting image 
understanding",maxTokens:4e3,contextLength:32e3,capabilities:["image-understanding","document-analysis","visual-reasoning"]}],["qwen-vl-max",{name:"qwen-vl-max",displayName:"Qwen-VL-Max",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Most powerful vision-language model",maxTokens:8e3,contextLength:32e3,capabilities:["image-understanding","video-analysis","multimodal-reasoning"]}],["qwen-audio-turbo",{name:"qwen-audio-turbo",displayName:"Qwen-Audio-Turbo",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Fast audio processing and speech-to-text model",maxTokens:2e3,contextLength:8e3,capabilities:["speech-recognition","audio-analysis"]}],["qwen-audio-chat",{name:"qwen-audio-chat",displayName:"Qwen-Audio-Chat",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Audio conversation and processing model",maxTokens:4e3,contextLength:32e3,capabilities:["audio-chat","voice-assistant","speech-synthesis"]}],["qwen-math-7b",{name:"qwen-math-7b",displayName:"Qwen-Math-7B",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Specialized for mathematical reasoning and problem solving",maxTokens:4e3,contextLength:32e3,capabilities:["mathematical-reasoning","problem-solving"]}],["llama2-7b-chat-v2",{name:"llama2-7b-chat-v2",displayName:"LLaMA2-7B-Chat",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Meta's LLaMA2-7B model",maxTokens:2e3,contextLength:8e3,capabilities:["chat","text-generation"]}],["baichuan2-7b-chat-v1",{name:"baichuan2-7b-chat-v1",displayName:"Baichuan2-7B-Chat",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Baichuan AI's Baichuan2-7B model",maxTokens:2e3,contextLength:8e3,capabilities:["chat","chinese-nlp"]}],["qwen-financial",{name:"qwen-financial",displayName:"Qwen-Financial",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Specialized for financial analysis and market insights",maxTokens:6e3,contextLength:32e3,capabilities:["financial-analysis","market-prediction","risk-assessment"]}],["qwen-medical",{name:"qwen-medical",displayName:"Qwen-Medical",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Specialized for medical consultation and health analysis",maxTokens:6e3,contextLength:32e3,capabilities:["medical-consultation","health-analysis","diagnostic-support"]}],["qwen-omni",{name:"qwen-omni",displayName:"Qwen-Omni",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Omnidirectional multimodal model supporting text, image, audio",maxTokens:8e3,contextLength:64e3,capabilities:["text-generation","image-understanding","audio-processing","multimodal"]}],["qwen-omni-pro",{name:"qwen-omni-pro",displayName:"Qwen-Omni-Pro",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Professional omnidirectional multimodal model with advanced capabilities",maxTokens:16e3,contextLength:128e3,capabilities:["text-generation","multimodal","complex-reasoning","expert-analysis"]}]]);function N(t){return b.get(t)}function P(){return Array.from(b.values())}function O(t){for(const e of b.values())if(e.name===t)return e}function q(){return 
Array.from(b.keys())}function S(t){const e=Object.values(k);for(const o of e)if(o===t)return o;return null}class _{constructor(e){if(this.apiKey=e.apiKey,this.modelType=e.modelType||k.QWEN_TURBO,this.timeout=e.timeout||3e4,!this.apiKey)throw new Error("API Key cannot be empty");if(!b.get(this.modelType))throw new Error(`Unsupported model type: ${this.modelType}`)}async chat(e,o){const s=[];o?.systemPrompt&&s.push({role:"system",content:o.systemPrompt}),s.push({role:"user",content:e});const a=await this.chatCompletion(s,{temperature:o?.temperature,maxTokens:o?.maxTokens,stream:!1});return this.extractContent(a)}async chatCompletion(e,o){const s=o?.modelType||this.modelType,a=b.get(s);if(!a)throw new Error(`Unsupported model type: ${s}`);const i=o?.temperature??.7,n=o?.maxTokens??1e3,r=o?.stream??!1,p=a.endpoint,m=a.format==="openai"?this.buildOpenAIRequest(a.name,e,i,n,r):this.buildDashScopeRequest(a.name,e,i,n);try{return await this.makeRequest(p,m,r)}catch(d){throw new Error(`Aliyun AI request failed: ${d.message}`)}}async chatStream(e,o,s){const a=s?.modelType||this.modelType,i=b.get(a);if(!i)throw new Error(`Unsupported model type: ${a}`);if(i.format!=="openai")throw new Error("Streaming conversation only supports OpenAI format models");const n=s?.temperature??.7,r=s?.maxTokens??1e3,p=this.buildOpenAIRequest(i.name,e,n,r,!0);try{await this.makeStreamRequest(i.endpoint,p,o)}catch(c){throw new Error(`Streaming request failed: ${c.message}`)}}setModel(e){if(!b.get(e))throw new Error(`Unsupported model type: ${e}`);this.modelType=e}getCurrentModel(){const e=b.get(this.modelType);if(!e)throw new Error(`Model configuration does not exist: ${this.modelType}`);return{name:e.name,displayName:e.displayName,description:e.description}}async testConnection(){try{const e=await this.chat('Hello, respond with "OK" if you can hear me.');return{success:!0,model:this.modelType,response:e}}catch(e){return{success:!1,model:this.modelType,error:e.message}}}buildOpenAIRequest(e,o,s,a,i){return{model:e,messages:o,temperature:s,max_tokens:a,stream:i}}buildDashScopeRequest(e,o,s,a){return{model:e,input:{messages:o},parameters:{temperature:s,max_tokens:a,result_format:"message"}}}async makeRequest(e,o,s){const a=new AbortController,i=setTimeout(()=>a.abort(),this.timeout);try{const n=await fetch(e,{method:"POST",headers:{Authorization:`Bearer ${this.apiKey}`,"Content-Type":"application/json; charset=utf-8",Accept:"application/json"},body:JSON.stringify(o),signal:a.signal});if(clearTimeout(i),!n.ok){const r=await n.text();throw new Error(`HTTP ${n.status}: ${r}`)}return s?n.body:await n.json()}catch(n){throw clearTimeout(i),n.name==="AbortError"?new Error(`Request timeout (${this.timeout}ms)`):n}}async makeStreamRequest(e,o,s){const a=await this.makeRequest(e,o,!0);if(!a)throw new Error("Failed to get streaming response");const i=a.getReader(),n=new TextDecoder("utf-8");let r="";try{for(;;){const{done:p,value:c}=await i.read();if(p){s("",!0);break}r+=n.decode(c,{stream:!0});const m=r.split(`
- `);r=m.pop()||"";for(const d of m)if(d.startsWith("data: ")){const l=d.slice(6);if(l==="[DONE]"){s("",!0);return}try{const u=JSON.parse(l);u.choices?.[0]?.delta?.content&&s(u.choices[0].delta.content,!1)}catch{}}}}finally{i.releaseLock()}}extractContent(e){if(e.choices?.[0]?.message?.content)return e.choices[0].message.content;if(e.output?.choices?.[0]?.message?.content)return e.output.choices[0].message.content;if(e.output?.text)return e.output.text;throw new Error("Unable to parse response content")}async predictingOHLCV(e,o,s,a){const i=o||"Based on these OHLCV data, predict the next period",n=s||1;if(!Number.isInteger(n)||n<=0)throw new Error(`Invalid count parameter: ${n}. Must be a positive integer.`);const r=50;if(n>r)throw new Error(`Count parameter too large: ${n}. Maximum allowed is ${r}. Please reduce the count or split your request.`);const p=n===1?"Return EXACTLY 1 OHLCV object for the next period.":`Return EXACTLY ${n} consecutive OHLCV objects for the next ${n} periods.`,c=`You are a professional financial data analysis AI. The user will give you an array of OHLCV (Open, High, Low, Close, Volume) data.
- Your task: ${i}
+ "use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});var _=(n=>(n.QWEN_TURBO="qwen-turbo",n.QWEN_PLUS="qwen-plus",n.QWEN_MAX="qwen-max",n.QWEN_MAX_LONGCONTEXT="qwen-max-longcontext",n.QWEN_2_5B="qwen2.5-0.5b",n.QWEN_2_5B_INSTRUCT="qwen2.5-0.5b-instruct",n.QWEN_2_5B_7B="qwen2.5-7b",n.QWEN_2_5B_7B_INSTRUCT="qwen2.5-7b-instruct",n.QWEN_2_5B_14B="qwen2.5-14b",n.QWEN_2_5B_14B_INSTRUCT="qwen2.5-14b-instruct",n.QWEN_2_5B_32B="qwen2.5-32b",n.QWEN_2_5B_32B_INSTRUCT="qwen2.5-32b-instruct",n.QWEN_2_5B_72B="qwen2.5-72b",n.QWEN_2_5B_72B_INSTRUCT="qwen2.5-72b-instruct",n.QWEN_2_5B_CODER="qwen2.5-coder",n.QWEN_2_5B_CODER_7B="qwen2.5-coder-7b",n.QWEN_2_5B_CODER_14B="qwen2.5-coder-14b",n.QWEN_2_5B_CODER_32B="qwen2.5-coder-32b",n.QWEN_VL_LITE="qwen-vl-lite",n.QWEN_VL_PLUS="qwen-vl-plus",n.QWEN_VL_MAX="qwen-vl-max",n.QWEN_AUDIO_TURBO="qwen-audio-turbo",n.QWEN_AUDIO_CHAT="qwen-audio-chat",n.QWEN_MATH_7B="qwen-math-7b",n.LLAMA2_7B_CHAT_V2="llama2-7b-chat-v2",n.BAICHUAN2_7B_CHAT_V1="baichuan2-7b-chat-v1",n.QWEN_FINANCIAL="qwen-financial",n.QWEN_FINANCIAL_14B="qwen-financial-14b",n.QWEN_FINANCIAL_32B="qwen-financial-32b",n.QWEN_MEDICAL="qwen-medical",n.QWEN_MEDICAL_14B="qwen-medical-14b",n.QWEN_MEDICAL_32B="qwen-medical-32b",n.QWEN_OMNI="qwen-omni",n.QWEN_OMNI_PRO="qwen-omni-pro",n))(_||{});const k=new Map([["qwen-turbo",{name:"qwen-turbo",displayName:"Qwen-Turbo",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",endpoints:["https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions","https://dashscope.aliyuncs.com/api/v1/services/aigc/text-generation/generation"],format:"openai",description:"Lightweight version, fast response speed, suitable for general conversation scenarios",maxTokens:2e3,contextLength:8e3,capabilities:["text-generation","chat"]}],["qwen-plus",{name:"qwen-plus",displayName:"Qwen-Plus",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",endpoints:["https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions"],format:"openai",description:"Enhanced version, suitable for complex tasks and long text processing",maxTokens:6e3,contextLength:32e3,capabilities:["text-generation","chat","reasoning"]}],["qwen-max",{name:"qwen-max",displayName:"Qwen-Max",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",endpoints:["https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions"],format:"openai",description:"Maximum version, strongest capabilities, suitable for high-demand professional tasks",maxTokens:8e3,contextLength:32e3,capabilities:["text-generation","chat","reasoning","coding","analysis"]}],["qwen-max-longcontext",{name:"qwen-max-longcontext",displayName:"Qwen-Max-LongContext",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",endpoints:["https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions"],format:"openai",description:"Supports 128K long context, suitable for long document processing",maxTokens:8e3,contextLength:128e3,capabilities:["text-generation","chat","document-analysis"]}],["qwen2.5-0.5b",{name:"qwen2.5-0.5b",displayName:"Qwen2.5-0.5B",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Ultra-lightweight 0.5B parameter model for edge 
devices",maxTokens:4e3,contextLength:32e3,capabilities:["text-generation","chat"]}],["qwen2.5-0.5b-instruct",{name:"qwen2.5-0.5b-instruct",displayName:"Qwen2.5-0.5B-Instruct",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Instruction-tuned 0.5B model for specific tasks",maxTokens:4e3,contextLength:32e3,capabilities:["instruction-following","chat"]}],["qwen2.5-7b",{name:"qwen2.5-7b",displayName:"Qwen2.5-7B",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"7B parameter base model, balanced performance and efficiency",maxTokens:6e3,contextLength:32e3,capabilities:["text-generation","reasoning"]}],["qwen2.5-7b-instruct",{name:"qwen2.5-7b-instruct",displayName:"Qwen2.5-7B-Instruct",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Instruction-tuned 7B model for chat and tasks",maxTokens:6e3,contextLength:32e3,capabilities:["chat","instruction-following","coding"]}],["qwen2.5-14b",{name:"qwen2.5-14b",displayName:"Qwen2.5-14B",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"14B parameter model with enhanced capabilities",maxTokens:8e3,contextLength:32e3,capabilities:["text-generation","analysis","reasoning"]}],["qwen2.5-32b",{name:"qwen2.5-32b",displayName:"Qwen2.5-32B",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"32B parameter high-performance model",maxTokens:8e3,contextLength:32e3,capabilities:["text-generation","complex-reasoning","analysis"]}],["qwen2.5-72b",{name:"qwen2.5-72b",displayName:"Qwen2.5-72B",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"72B parameter state-of-the-art model",maxTokens:8e3,contextLength:32e3,capabilities:["text-generation","expert-analysis","research"]}],["qwen2.5-coder",{name:"qwen2.5-coder",displayName:"Qwen2.5-Coder",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Specialized code generation model",maxTokens:8e3,contextLength:32e3,capabilities:["code-generation","code-explanation","debugging"]}],["qwen2.5-coder-7b",{name:"qwen2.5-coder-7b",displayName:"Qwen2.5-Coder-7B",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"7B parameter code generation model",maxTokens:8e3,contextLength:32e3,capabilities:["code-generation","programming"]}],["qwen2.5-coder-14b",{name:"qwen2.5-coder-14b",displayName:"Qwen2.5-Coder-14B",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"14B parameter advanced code generation model",maxTokens:8e3,contextLength:32e3,capabilities:["code-generation","code-review","optimization"]}],["qwen-vl-lite",{name:"qwen-vl-lite",displayName:"Qwen-VL-Lite",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Lightweight vision-language model for basic image understanding",maxTokens:2e3,contextLength:8e3,capabilities:["image-understanding","visual-qa"]}],["qwen-vl-plus",{name:"qwen-vl-plus",displayName:"Qwen-VL-Plus",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Vision-language model supporting image 
understanding",maxTokens:4e3,contextLength:32e3,capabilities:["image-understanding","document-analysis","visual-reasoning"]}],["qwen-vl-max",{name:"qwen-vl-max",displayName:"Qwen-VL-Max",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Most powerful vision-language model",maxTokens:8e3,contextLength:32e3,capabilities:["image-understanding","video-analysis","multimodal-reasoning"]}],["qwen-audio-turbo",{name:"qwen-audio-turbo",displayName:"Qwen-Audio-Turbo",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Fast audio processing and speech-to-text model",maxTokens:2e3,contextLength:8e3,capabilities:["speech-recognition","audio-analysis"]}],["qwen-audio-chat",{name:"qwen-audio-chat",displayName:"Qwen-Audio-Chat",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Audio conversation and processing model",maxTokens:4e3,contextLength:32e3,capabilities:["audio-chat","voice-assistant","speech-synthesis"]}],["qwen-math-7b",{name:"qwen-math-7b",displayName:"Qwen-Math-7B",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Specialized for mathematical reasoning and problem solving",maxTokens:4e3,contextLength:32e3,capabilities:["mathematical-reasoning","problem-solving"]}],["llama2-7b-chat-v2",{name:"llama2-7b-chat-v2",displayName:"LLaMA2-7B-Chat",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Meta's LLaMA2-7B model",maxTokens:2e3,contextLength:8e3,capabilities:["chat","text-generation"]}],["baichuan2-7b-chat-v1",{name:"baichuan2-7b-chat-v1",displayName:"Baichuan2-7B-Chat",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Baichuan AI's Baichuan2-7B model",maxTokens:2e3,contextLength:8e3,capabilities:["chat","chinese-nlp"]}],["qwen-financial",{name:"qwen-financial",displayName:"Qwen-Financial",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Specialized for financial analysis and market insights",maxTokens:6e3,contextLength:32e3,capabilities:["financial-analysis","market-prediction","risk-assessment"]}],["qwen-medical",{name:"qwen-medical",displayName:"Qwen-Medical",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Specialized for medical consultation and health analysis",maxTokens:6e3,contextLength:32e3,capabilities:["medical-consultation","health-analysis","diagnostic-support"]}],["qwen-omni",{name:"qwen-omni",displayName:"Qwen-Omni",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Omnidirectional multimodal model supporting text, image, audio",maxTokens:8e3,contextLength:64e3,capabilities:["text-generation","image-understanding","audio-processing","multimodal"]}],["qwen-omni-pro",{name:"qwen-omni-pro",displayName:"Qwen-Omni-Pro",endpoint:"https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",format:"openai",description:"Professional omnidirectional multimodal model with advanced capabilities",maxTokens:16e3,contextLength:128e3,capabilities:["text-generation","multimodal","complex-reasoning","expert-analysis"]}]]);function V(n){return k.get(n)}function A(){return Array.from(k.values())}function I(n){for(const e of k.values())if(e.name===n)return e}function R(){return 
Array.from(k.keys())}function D(n){const e=Object.values(_);for(const t of e)if(t===n)return t;return null}class S{constructor(e){if(this.apiKey=e.apiKey,this.modelType=e.modelType||_.QWEN_TURBO,this.timeout=e.timeout||3e4,!this.apiKey)throw new Error("API Key cannot be empty");if(!k.get(this.modelType))throw new Error(`Unsupported model type: ${this.modelType}`)}async chat(e,t,s){const a=[],r=t==="en"?"Please respond in English only.":t==="cn"?"请使用中文回答。":"";if(s?.systemPrompt||r){const i=[s?.systemPrompt,r].filter(Boolean).join(`
+ `);a.push({role:"system",content:i})}a.push({role:"user",content:e});const o=await this.chatCompletion(a,t,{temperature:s?.temperature,maxTokens:s?.maxTokens,stream:!1,modelType:this.modelType});return this.extractContent(o)}async chatCompletion(e,t,s){const a=s?.modelType||this.modelType,r=k.get(a);if(!r)throw new Error(`Unsupported model type: ${a}`);const o=s?.temperature??.7,i=s?.maxTokens??1e3,d=s?.stream??!1,l=t==="en"?"Please respond in English only.":t==="cn"?"请使用中文回答。":"";l&&(e.some(g=>g.role==="system")?e=e.map(g=>g.role==="system"?{...g,content:`${g.content}
+ ${l}`}:g):e.unshift({role:"system",content:l}));const u=r.endpoint,c=r.format==="openai"?this.buildOpenAIRequest(r.name,e,o,i,d):this.buildDashScopeRequest(r.name,e,o,i);try{return await this.makeRequest(u,c,d)}catch(p){throw new Error(`Aliyun AI request failed: ${p.message}`)}}async chatStream(e,t,s,a){const r=a?.modelType||this.modelType,o=k.get(r);if(!o)throw new Error(`Unsupported model type: ${r}`);if(o.format!=="openai")throw new Error("Streaming conversation only supports OpenAI format models");const i=a?.temperature??.7,d=a?.maxTokens??1e3,l=s==="en"?"Please respond in English only.":s==="cn"?"请使用中文回答。":"";l&&(e.some(c=>c.role==="system")?e=e.map(c=>c.role==="system"?{...c,content:`${c.content}
+ ${l}`}:c):e.unshift({role:"system",content:l}));const u=this.buildOpenAIRequest(o.name,e,i,d,!0);try{await this.makeStreamRequest(o.endpoint,u,t)}catch(m){throw new Error(`Streaming request failed: ${m.message}`)}}async analyzeOHLCV(e,t,s,a,r){const o=s||"comprehensive",i=e.length,l={trend:t==="en"?"Provide a detailed trend analysis of this OHLCV data, including price direction, support/resistance levels, and trend strength.":"提供详细的OHLCV数据趋势分析,包括价格方向、支撑/阻力位和趋势强度。",volume:t==="en"?"Analyze the volume patterns in this OHLCV data, including volume trends, unusual volume spikes, and volume-price relationships.":"分析OHLCV数据中的成交量模式,包括成交量趋势、异常成交量波动和量价关系。",technical:t==="en"?"Perform technical analysis on this OHLCV data, identifying potential technical indicators, patterns, and trading signals.":"对OHLCV数据进行技术分析,识别潜在的技术指标、图表形态和交易信号。",comprehensive:t==="en"?"Provide a comprehensive analysis of this OHLCV data, covering trends, volume, technical aspects, and potential market implications.":"提供全面的OHLCV数据分析,涵盖趋势、成交量、技术面和潜在市场影响。"}[o],u=t==="en"?"Please provide your analysis in English.":"请用中文进行分析。";let m="";if(i>0){const h=e[0],f=e[i-1].close-h.close,w=f/h.close*100;let C=h.high,v=h.low,O=0;for(const E of e)E.high>C&&(C=E.high),E.low<v&&(v=E.low),O+=E.volume;const P=O/i;m=t==="en"?`This dataset contains ${i} periods of OHLCV data.
+ Price range: ${v.toFixed(2)} - ${C.toFixed(2)}
+ Overall price change: ${f>=0?"+":""}${f.toFixed(2)} (${f>=0?"+":""}${w.toFixed(2)}%)
+ Average volume: ${P.toFixed(0)}`:`该数据集包含 ${i} 个周期的OHLCV数据。
+ 价格范围:${v.toFixed(2)} - ${C.toFixed(2)}
+ 总体价格变化:${f>=0?"+":""}${f.toFixed(2)} (${f>=0?"+":""}${w.toFixed(2)}%)
+ 平均成交量:${P.toFixed(0)}`}let c=t==="en"?`You are a professional financial data analyst. Your task is to analyze OHLCV (Open, High, Low, Close, Volume) data and provide insights.
+ Analysis focus: ${l}
+ ${m?`Data characteristics:
+ ${m}
+
+ `:""}
+ Please provide:
+ 1. Clear and structured analysis
+ 2. Key observations from the data
+ 3. Potential implications or insights
+ 4. Recommendations or considerations (if applicable)
+ Format your response as a well-organized text analysis.`:`您是一位专业的金融数据分析师。您的任务是分析OHLCV(开盘价、最高价、最低价、收盘价、成交量)数据并提供见解。
+ 分析重点:${l}
+ ${m?`数据特征:
+ ${m}
+
+ `:""}
+ 请提供:
+ 1. 清晰且有结构的分析
+ 2. 数据的关键观察结果
+ 3. 潜在的启示或见解
+ 4. 建议或注意事项(如适用)
+ 请以组织良好的文本分析形式回复。`;c+=`
+
+ ${u}`;const p=JSON.stringify(e,null,2);let g="";a?g=t==="en"?`Here is the OHLCV data (${i} periods):
+ ${p}
+ My specific question or request: ${a}
+ Please analyze this data considering my request above.`:`这是OHLCV数据(${i}个周期):
+ ${p}
+ 我的具体问题或需求:${a}
+ 请根据我的上述需求分析这些数据。`:g=t==="en"?`Here is the OHLCV data (${i} periods):
+ ${p}
+ Please analyze this data as requested.`:`这是OHLCV数据(${i}个周期):
+ ${p}
+ 请按要求分析这些数据。`;const y=[{role:"system",content:c},{role:"user",content:g}];try{const h=await this.chatCompletion(y,t,{temperature:r?.temperature||.5,maxTokens:r?.maxTokens||1500,stream:!1,modelType:this.modelType});return this.extractContent(h)}catch(h){throw new Error(`OHLCV analysis failed: ${h.message}`)}}async analyzeOHLCVEnhanced(e,t,s,a,r=!1,o){if(r){const i=t==="en"?`You are a professional financial data analyst. Analyze the OHLCV data and provide a structured response with:
+ 1. Summary (brief overview)
+ 2. Details (key observations, 3-5 points)
+ 3. Recommendations (actionable insights, 2-3 points)
+ Format as JSON: {"summary": "...", "details": ["...", "..."], "recommendations": ["...", "..."]}`:`您是一位专业的金融数据分析师。分析OHLCV数据并提供结构化响应:
+ 1. 总结(简要概述)
+ 2. 详情(关键观察结果,3-5点)
+ 3. 建议(可操作的见解,2-3点)
+ 格式化为JSON:{"summary": "...", "details": ["...", "..."], "recommendations": ["...", "..."]}`,d=JSON.stringify(e,null,2);let l=t==="en"?`Analyze this OHLCV data (${e.length} periods):
+ ${d}`:`分析此OHLCV数据(${e.length}个周期):
+ ${d}`;a&&(l+=t==="en"?`
+
+ Additional request: ${a}`:`
+
+ 附加要求:${a}`);const u=[{role:"system",content:i},{role:"user",content:l}];try{const m=await this.chatCompletion(u,t,{temperature:o?.temperature||.4,maxTokens:o?.maxTokens||1200,stream:!1,modelType:this.modelType}),c=this.extractContent(m);try{const p=JSON.parse(c);if(p.summary&&Array.isArray(p.details)&&Array.isArray(p.recommendations))return p}catch{}return c}catch(m){throw new Error(`Structured OHLCV analysis failed: ${m.message}`)}}return this.analyzeOHLCV(e,t,s,a,o)}async predictingOHLCV(e,t,s,a,r){const o=s||"Based on these OHLCV data, predict the next period",i=a||1;if(!Number.isInteger(i)||i<=0)throw new Error(`Invalid count parameter: ${i}. Must be a positive integer.`);const d=50;if(i>d)throw new Error(`Count parameter too large: ${i}. Maximum allowed is ${d}. Please reduce the count or split your request.`);const l=i===1?"Return EXACTLY 1 OHLCV object for the next period.":`Return EXACTLY ${i} consecutive OHLCV objects for the next ${i} periods.`,u=t==="en"?"Please respond in English only.":t==="cn"?"请使用中文回答。":"";let m=`You are a professional financial data analysis AI. The user will give you an array of OHLCV (Open, High, Low, Close, Volume) data.
+ Your task: ${o}
  CRITICAL RULES:
- 1. ${p}
+ 1. ${l}
  2. Return ONLY a JSON array of OHLCV objects, NO explanations, comments, or other text
  3. The OHLCV array format must match: [{open, high, low, close, volume}, ...]
  4. All numbers must be valid numbers
@@ -10,20 +66,80 @@ CRITICAL RULES:
  6. Maintain consistency with historical trends and patterns
  7. For technical analysis, provide reasonable values based on typical patterns
  8. Do not include markdown formatting, only pure JSON
- ${n===1?`Example of valid response for 1 period:
- [{"open": 115.5, "high": 118.0, "low": 114.0, "close": 117.0, "volume": 1350000}]`:`Example of valid response for ${n} periods:
+ ${i===1?`Example of valid response for 1 period:
+ [{"open": 115.5, "high": 118.0, "low": 114.0, "close": 117.0, "volume": 1350000}]`:`Example of valid response for ${i} periods:
  [
  {"open": 115.5, "high": 118.0, "low": 114.0, "close": 117.0, "volume": 1350000},
  {"open": 117.5, "high": 120.0, "low": 116.0, "close": 119.0, "volume": 1400000}
- ${n>2?`,
- ... ${n-2} more OHLCV objects following the same pattern`:""}
- ]`}`,m=JSON.stringify(e,null,2),d=`Here is the historical OHLCV data (${e.length} periods):
+ ${i>2?`,
+ ... ${i-2} more OHLCV objects following the same pattern`:""}
+ ]`}`;u&&(m=`${m}
+
+ ${u}`);const c=JSON.stringify(e,null,2),p=`Here is the historical OHLCV data (${e.length} periods):
+ ${c}
+ Please process this data according to the system instructions. Remember to return EXACTLY ${i} OHLCV object(s) in a JSON array with no additional text.`,g=[{role:"system",content:m},{role:"user",content:p}];try{const y=i*50+100,h=Math.max(r?.maxTokens||1e3,y),b=await this.chatCompletion(g,t,{temperature:r?.temperature||.3,maxTokens:h,stream:!1,modelType:this.modelType}),f=this.extractContent(b),w=this.parseOHLCVResponse(f);if(w.length!==i)throw new Error(`AI returned ${w.length} OHLCV objects, but expected ${i}.`);return w}catch(y){throw new Error(`OHLCV analysis failed: ${y.message}`)}}parseOHLCVResponse(e){try{const t=JSON.parse(e);if(!Array.isArray(t))throw new Error("Response is not in array format");return t.map((a,r)=>{if(typeof a!="object"||a===null)throw new Error(`Element ${r} is not a valid object`);const{open:o,high:i,low:d,close:l,volume:u}=a,m=["open","high","low","close","volume"];for(const c of m)if(typeof a[c]!="number"||isNaN(a[c]))throw new Error(`Element ${r} field ${c} is not a valid number`);if(i<d)throw new Error(`Element ${r}: high cannot be lower than low`);if(l<d||l>i)throw new Error(`Element ${r}: close must be between low and high`);return{open:Number(o),high:Number(i),low:Number(d),close:Number(l),volume:Number(u)}})}catch(t){const s=e.match(/\[[\s\S]*\]/);if(s)return this.parseOHLCVResponse(s[0]);throw new Error(`Unable to parse AI returned OHLCV data: ${t}
+ Original content: ${e.substring(0,200)}...`)}}setModel(e){if(!k.get(e))throw new Error(`Unsupported model type: ${e}`);this.modelType=e}getCurrentModel(){const e=k.get(this.modelType);if(!e)throw new Error(`Model configuration does not exist: ${this.modelType}`);return{name:e.name,displayName:e.displayName,description:e.description}}async testConnection(){try{const e=await this.chat('Hello, respond with "OK" if you can hear me.',"en");return{success:!0,model:this.modelType,response:e}}catch(e){return{success:!1,model:this.modelType,error:e.message}}}buildOpenAIRequest(e,t,s,a,r){return{model:e,messages:t,temperature:s,max_tokens:a,stream:r}}buildDashScopeRequest(e,t,s,a){return{model:e,input:{messages:t},parameters:{temperature:s,max_tokens:a,result_format:"message"}}}async makeRequest(e,t,s){const a=new AbortController,r=setTimeout(()=>a.abort(),this.timeout);try{const o=await fetch(e,{method:"POST",headers:{Authorization:`Bearer ${this.apiKey}`,"Content-Type":"application/json; charset=utf-8",Accept:"application/json"},body:JSON.stringify(t),signal:a.signal});if(clearTimeout(r),!o.ok){const i=await o.text();throw new Error(`HTTP ${o.status}: ${i}`)}return s?o.body:await o.json()}catch(o){throw clearTimeout(r),o.name==="AbortError"?new Error(`Request timeout (${this.timeout}ms)`):o}}async makeStreamRequest(e,t,s){const a=await this.makeRequest(e,t,!0);if(!a)throw new Error("Failed to get streaming response");const r=a.getReader(),o=new TextDecoder("utf-8");let i="";try{for(;;){const{done:d,value:l}=await r.read();if(d){s("",!0);break}i+=o.decode(l,{stream:!0});const u=i.split(`
+ `);i=u.pop()||"";for(const m of u)if(m.startsWith("data: ")){const c=m.slice(6);if(c==="[DONE]"){s("",!0);return}try{const p=JSON.parse(c);p.choices?.[0]?.delta?.content&&s(p.choices[0].delta.content,!1)}catch{}}}}finally{r.releaseLock()}}extractContent(e){if(e.choices?.[0]?.message?.content)return e.choices[0].message.content;if(e.output?.choices?.[0]?.message?.content)return e.output.choices[0].message.content;if(e.output?.text)return e.output.text;throw new Error("Unable to parse response content")}}function B(n,e){return new S({apiKey:n,modelType:e})}var N=(n=>(n.DEEPSEEK_CHAT="deepseek-chat",n.DEEPSEEK_CHAT_LITE="deepseek-chat-lite",n.DEEPSEEK_CHAT_PRO="deepseek-chat-pro",n.DEEPSEEK_CHAT_MAX="deepseek-chat-max",n.DEEPSEEK_CODER="deepseek-coder",n.DEEPSEEK_CODER_LITE="deepseek-coder-lite",n.DEEPSEEK_CODER_PRO="deepseek-coder-pro",n.DEEPSEEK_MATH="deepseek-math",n.DEEPSEEK_MATH_PRO="deepseek-math-pro",n.DEEPSEEK_REASONER="deepseek-reasoner",n.DEEPSEEK_REASONER_PRO="deepseek-reasoner-pro",n.DEEPSEEK_VISION="deepseek-vision",n.DEEPSEEK_VISION_PRO="deepseek-vision-pro",n.DEEPSEEK_FINANCE="deepseek-finance",n.DEEPSEEK_LAW="deepseek-law",n.DEEPSEEK_MEDICAL="deepseek-medical",n.DEEPSEEK_RESEARCH="deepseek-research",n.DEEPSEEK_OMNI="deepseek-omni",n.DEEPSEEK_OMNI_PRO="deepseek-omni-pro",n.DEEPSEEK_LLM="deepseek-llm",n.DEEPSEEK_LLM_67B="deepseek-llm-67b",n.DEEPSEEK_LLM_131B="deepseek-llm-131b",n))(N||{});const $=new Map([["deepseek-chat",{name:"deepseek-chat",displayName:"DeepSeek Chat",endpoint:"https://api.deepseek.com/v1/chat/completions",endpoints:["https://api.deepseek.com/v1/chat/completions"],format:"openai",description:"General purpose chat model for everyday conversations and tasks",maxTokens:4096,contextLength:16e3,capabilities:["chat","text-generation","reasoning"],version:"2025-01"}],["deepseek-chat-lite",{name:"deepseek-chat-lite",displayName:"DeepSeek Chat Lite",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Lightweight chat model optimized for speed and efficiency",maxTokens:2048,contextLength:8e3,capabilities:["chat","text-generation"],version:"2025-01"}],["deepseek-chat-pro",{name:"deepseek-chat-pro",displayName:"DeepSeek Chat Pro",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Professional chat model with enhanced reasoning capabilities",maxTokens:8192,contextLength:32e3,capabilities:["chat","text-generation","complex-reasoning","analysis"],version:"2025-01"}],["deepseek-chat-max",{name:"deepseek-chat-max",displayName:"DeepSeek Chat Max",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Maximum capability chat model for most demanding tasks",maxTokens:16384,contextLength:64e3,capabilities:["chat","text-generation","expert-analysis","research"],version:"2025-01"}],["deepseek-coder",{name:"deepseek-coder",displayName:"DeepSeek Coder",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Specialized model for code generation and programming tasks",maxTokens:16384,contextLength:64e3,capabilities:["code-generation","programming","debugging","code-review"],version:"2025-01"}],["deepseek-coder-lite",{name:"deepseek-coder-lite",displayName:"DeepSeek Coder Lite",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Lightweight code generation 
model",maxTokens:4096,contextLength:16e3,capabilities:["code-generation","programming"],version:"2025-01"}],["deepseek-coder-pro",{name:"deepseek-coder-pro",displayName:"DeepSeek Coder Pro",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Professional code generation model with advanced features",maxTokens:32768,contextLength:128e3,capabilities:["code-generation","programming","system-design","architecture"],version:"2025-01"}],["deepseek-math",{name:"deepseek-math",displayName:"DeepSeek Math",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Specialized model for mathematical reasoning and problem solving",maxTokens:8192,contextLength:32e3,capabilities:["mathematical-reasoning","problem-solving","calculations"],version:"2025-01"}],["deepseek-math-pro",{name:"deepseek-math-pro",displayName:"DeepSeek Math Pro",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Advanced mathematical reasoning model for complex problems",maxTokens:16384,contextLength:64e3,capabilities:["mathematical-reasoning","advanced-calculus","statistics"],version:"2025-01"}],["deepseek-reasoner",{name:"deepseek-reasoner",displayName:"DeepSeek Reasoner",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Dedicated reasoning model for logical analysis",maxTokens:8192,contextLength:32e3,capabilities:["logical-reasoning","analysis","decision-making"],version:"2025-01"}],["deepseek-reasoner-pro",{name:"deepseek-reasoner-pro",displayName:"DeepSeek Reasoner Pro",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Advanced reasoning model for complex logical problems",maxTokens:16384,contextLength:64e3,capabilities:["complex-reasoning","scientific-analysis","research"],version:"2025-01"}],["deepseek-vision",{name:"deepseek-vision",displayName:"DeepSeek Vision",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Vision model for image understanding and analysis",maxTokens:4096,contextLength:16e3,capabilities:["image-understanding","visual-qa","document-analysis"],version:"2025-01"}],["deepseek-vision-pro",{name:"deepseek-vision-pro",displayName:"DeepSeek Vision Pro",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Advanced vision model for complex visual tasks",maxTokens:8192,contextLength:32e3,capabilities:["image-understanding","video-analysis","visual-reasoning"],version:"2025-01"}],["deepseek-finance",{name:"deepseek-finance",displayName:"DeepSeek Finance",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Specialized for financial analysis, market prediction, and investment insights",maxTokens:8192,contextLength:32e3,capabilities:["financial-analysis","market-prediction","risk-assessment","investment-advice"],version:"2025-01"}],["deepseek-law",{name:"deepseek-law",displayName:"DeepSeek Law",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Specialized for legal analysis, contract review, and legal research",maxTokens:16384,contextLength:64e3,capabilities:["legal-analysis","contract-review","legal-research"],version:"2025-01"}],["deepseek-medical",{name:"deepseek-medical",displayName:"DeepSeek Medical",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Specialized for medical consultation, diagnosis support, and health 
analysis",maxTokens:8192,contextLength:32e3,capabilities:["medical-consultation","diagnostic-support","health-analysis"],version:"2025-01"}],["deepseek-research",{name:"deepseek-research",displayName:"DeepSeek Research",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Specialized for academic research and scientific analysis",maxTokens:32768,contextLength:128e3,capabilities:["academic-research","scientific-analysis","paper-writing"],version:"2025-01"}],["deepseek-omni",{name:"deepseek-omni",displayName:"DeepSeek Omni",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Multimodal model supporting text, image, and audio",maxTokens:16384,contextLength:64e3,capabilities:["text-generation","image-understanding","audio-processing","multimodal"],version:"2025-01"}],["deepseek-omni-pro",{name:"deepseek-omni-pro",displayName:"DeepSeek Omni Pro",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Professional multimodal model with advanced capabilities",maxTokens:32768,contextLength:128e3,capabilities:["text-generation","multimodal","complex-reasoning","expert-analysis"],version:"2025-01"}],["deepseek-llm",{name:"deepseek-llm",displayName:"DeepSeek LLM",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Base large language model",maxTokens:4096,contextLength:16e3,capabilities:["text-generation"],version:"2024-12"}]]);function M(n){return $.get(n)}function K(){return Array.from($.values())}function F(n){for(const e of $.values())if(e.name===n)return e}function U(){return Array.from($.keys())}function Q(n){const e=Object.values(N);for(const t of e)if(t===n)return t;return null}class q{constructor(e){if(this.apiKey=e.apiKey,this.modelType=e.modelType||N.DEEPSEEK_CHAT,this.timeout=e.timeout||3e4,this.baseURL=e.baseURL||"https://api.deepseek.com",!this.apiKey)throw new Error("API Key cannot be empty");if(!$.get(this.modelType))throw new Error(`Unsupported model type: ${this.modelType}`)}async chat(e,t,s){const a=[],r=t==="en"?"Please respond in English only.":t==="cn"?"请使用中文回答。":"";if(s?.systemPrompt||r){const i=[s?.systemPrompt,r].filter(Boolean).join(`
+ `);a.push({role:"system",content:i})}a.push({role:"user",content:e});const o=await this.chatCompletion(a,t,{temperature:s?.temperature,maxTokens:s?.maxTokens,stream:!1,modelType:this.modelType,topP:s?.topP,frequencyPenalty:s?.frequencyPenalty,presencePenalty:s?.presencePenalty,stop:s?.stop,tools:s?.tools,toolChoice:s?.toolChoice});return this.extractContent(o)}async chatCompletion(e,t,s){const a=s?.modelType||this.modelType,r=$.get(a);if(!r)throw new Error(`Unsupported model type: ${a}`);const o=s?.temperature??.7,i=s?.maxTokens??2e3,d=s?.stream??!1,l=s?.topP??1,u=s?.frequencyPenalty??0,m=s?.presencePenalty??0,c=s?.stop,p=s?.tools,g=s?.toolChoice,y=t==="en"?"Please respond in English only.":t==="cn"?"请使用中文回答。":"";y&&(e.some(w=>w.role==="system")?e=e.map(w=>w.role==="system"?{...w,content:`${w.content}
+ ${y}`}:w):e.unshift({role:"system",content:y}));const h=r.endpoint,b=this.buildOpenAIRequest(r.name,e,o,i,d,l,u,m,c,p,g);try{return await this.makeRequest(h,b,d)}catch(f){throw new Error(`DeepSeek AI request failed: ${f.message}`)}}async chatStream(e,t,s,a){const r=a?.modelType||this.modelType,o=$.get(r);if(!o)throw new Error(`Unsupported model type: ${r}`);const i=a?.temperature??.7,d=a?.maxTokens??2e3,l=a?.topP??1,u=a?.frequencyPenalty??0,m=a?.presencePenalty??0,c=s==="en"?"Please respond in English only.":s==="cn"?"请使用中文回答。":"";c&&(e.some(y=>y.role==="system")?e=e.map(y=>y.role==="system"?{...y,content:`${y.content}
+ ${c}`}:y):e.unshift({role:"system",content:c}));const p=this.buildOpenAIRequest(o.name,e,i,d,!0,l,u,m,a?.stop,a?.tools,a?.toolChoice);try{await this.makeStreamRequest(o.endpoint,p,t)}catch(g){throw new Error(`Streaming request failed: ${g.message}`)}}async analyzeOHLCV(e,t,s,a,r){const o=s||"comprehensive",i=e.length,l={trend:{en:"Provide a detailed trend analysis of this OHLCV data, including price direction, support/resistance levels, and trend strength.",cn:"提供详细的OHLCV数据趋势分析,包括价格方向、支撑/阻力位和趋势强度。"},volume:{en:"Analyze the volume patterns in this OHLCV data, including volume trends, unusual volume spikes, and volume-price relationships.",cn:"分析OHLCV数据中的成交量模式,包括成交量趋势、异常成交量波动和量价关系。"},technical:{en:"Perform technical analysis on this OHLCV data, identifying potential technical indicators, patterns, and trading signals.",cn:"对OHLCV数据进行技术分析,识别潜在的技术指标、图表形态和交易信号。"},comprehensive:{en:"Provide a comprehensive analysis of this OHLCV data, covering trends, volume, technical aspects, and potential market implications.",cn:"提供全面的OHLCV数据分析,涵盖趋势、成交量、技术面和潜在市场影响。"}}[o][t],u=t==="en"?"Please provide your analysis in English.":"请用中文进行分析。";let m="";if(i>0){const h=e[0],f=e[i-1].close-h.close,w=f/h.close*100;let C=h.high,v=h.low,O=0;for(const E of e)E.high>C&&(C=E.high),E.low<v&&(v=E.low),O+=E.volume;const P=O/i;m=t==="en"?`This dataset contains ${i} periods of OHLCV data.
+ Price range: ${v.toFixed(2)} - ${C.toFixed(2)}
+ Overall price change: ${f>=0?"+":""}${f.toFixed(2)} (${f>=0?"+":""}${w.toFixed(2)}%)
+ Average volume: ${P.toFixed(0)}`:`该数据集包含 ${i} 个周期的OHLCV数据。
+ 价格范围:${v.toFixed(2)} - ${C.toFixed(2)}
+ 总体价格变化:${f>=0?"+":""}${f.toFixed(2)} (${f>=0?"+":""}${w.toFixed(2)}%)
+ 平均成交量:${P.toFixed(0)}`}let c=t==="en"?`You are a professional financial data analyst. Your task is to analyze OHLCV (Open, High, Low, Close, Volume) data and provide insights.
+ Analysis focus: ${l}
+ ${m?`Data characteristics:
  ${m}
- Please process this data according to the system instructions. Remember to return EXACTLY ${n} OHLCV object(s) in a JSON array with no additional text.`,l=[{role:"system",content:c},{role:"user",content:d}];try{const u=n*50+100,x=Math.max(a?.maxTokens||1e3,u),T=await this.chatCompletion(l,{temperature:a?.temperature||.3,maxTokens:x,stream:!1,modelType:a?.modelType}),E=this.extractContent(T),f=this.parseOHLCVResponse(E);if(f.length!==n)throw new Error(`AI returned ${f.length} OHLCV objects, but expected ${n}.`);return f}catch(u){throw new Error(`OHLCV analysis failed: ${u.message}`)}}parseOHLCVResponse(e){try{const o=JSON.parse(e);if(!Array.isArray(o))throw new Error("Response is not in array format");return o.map((a,i)=>{if(typeof a!="object"||a===null)throw new Error(`Element ${i} is not a valid object`);const{open:n,high:r,low:p,close:c,volume:m}=a,d=["open","high","low","close","volume"];for(const l of d)if(typeof a[l]!="number"||isNaN(a[l]))throw new Error(`Element ${i} field ${l} is not a valid number`);if(r<p)throw new Error(`Element ${i}: high cannot be lower than low`);if(c<p||c>r)throw new Error(`Element ${i}: close must be between low and high`);return{open:Number(n),high:Number(r),low:Number(p),close:Number(c),volume:Number(m)}})}catch(o){const s=e.match(/\[[\s\S]*\]/);if(s)return this.parseOHLCVResponse(s[0]);throw new Error(`Unable to parse AI returned OHLCV data: ${o}
- Original content: ${e.substring(0,200)}...`)}}}function A(t,e){return new _({apiKey:t,modelType:e})}var v=(t=>(t.DEEPSEEK_CHAT="deepseek-chat",t.DEEPSEEK_CHAT_LITE="deepseek-chat-lite",t.DEEPSEEK_CHAT_PRO="deepseek-chat-pro",t.DEEPSEEK_CHAT_MAX="deepseek-chat-max",t.DEEPSEEK_CODER="deepseek-coder",t.DEEPSEEK_CODER_LITE="deepseek-coder-lite",t.DEEPSEEK_CODER_PRO="deepseek-coder-pro",t.DEEPSEEK_MATH="deepseek-math",t.DEEPSEEK_MATH_PRO="deepseek-math-pro",t.DEEPSEEK_REASONER="deepseek-reasoner",t.DEEPSEEK_REASONER_PRO="deepseek-reasoner-pro",t.DEEPSEEK_VISION="deepseek-vision",t.DEEPSEEK_VISION_PRO="deepseek-vision-pro",t.DEEPSEEK_FINANCE="deepseek-finance",t.DEEPSEEK_LAW="deepseek-law",t.DEEPSEEK_MEDICAL="deepseek-medical",t.DEEPSEEK_RESEARCH="deepseek-research",t.DEEPSEEK_OMNI="deepseek-omni",t.DEEPSEEK_OMNI_PRO="deepseek-omni-pro",t.DEEPSEEK_LLM="deepseek-llm",t.DEEPSEEK_LLM_67B="deepseek-llm-67b",t.DEEPSEEK_LLM_131B="deepseek-llm-131b",t))(v||{});const y=new Map([["deepseek-chat",{name:"deepseek-chat",displayName:"DeepSeek Chat",endpoint:"https://api.deepseek.com/v1/chat/completions",endpoints:["https://api.deepseek.com/v1/chat/completions"],format:"openai",description:"General purpose chat model for everyday conversations and tasks",maxTokens:4096,contextLength:16e3,capabilities:["chat","text-generation","reasoning"],version:"2025-01"}],["deepseek-chat-lite",{name:"deepseek-chat-lite",displayName:"DeepSeek Chat Lite",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Lightweight chat model optimized for speed and efficiency",maxTokens:2048,contextLength:8e3,capabilities:["chat","text-generation"],version:"2025-01"}],["deepseek-chat-pro",{name:"deepseek-chat-pro",displayName:"DeepSeek Chat Pro",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Professional chat model with enhanced reasoning capabilities",maxTokens:8192,contextLength:32e3,capabilities:["chat","text-generation","complex-reasoning","analysis"],version:"2025-01"}],["deepseek-chat-max",{name:"deepseek-chat-max",displayName:"DeepSeek Chat Max",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Maximum capability chat model for most demanding tasks",maxTokens:16384,contextLength:64e3,capabilities:["chat","text-generation","expert-analysis","research"],version:"2025-01"}],["deepseek-coder",{name:"deepseek-coder",displayName:"DeepSeek Coder",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Specialized model for code generation and programming tasks",maxTokens:16384,contextLength:64e3,capabilities:["code-generation","programming","debugging","code-review"],version:"2025-01"}],["deepseek-coder-lite",{name:"deepseek-coder-lite",displayName:"DeepSeek Coder Lite",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Lightweight code generation model",maxTokens:4096,contextLength:16e3,capabilities:["code-generation","programming"],version:"2025-01"}],["deepseek-coder-pro",{name:"deepseek-coder-pro",displayName:"DeepSeek Coder Pro",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Professional code generation model with advanced features",maxTokens:32768,contextLength:128e3,capabilities:["code-generation","programming","system-design","architecture"],version:"2025-01"}],["deepseek-math",{name:"deepseek-math",displayName:"DeepSeek Math",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Specialized 
model for mathematical reasoning and problem solving",maxTokens:8192,contextLength:32e3,capabilities:["mathematical-reasoning","problem-solving","calculations"],version:"2025-01"}],["deepseek-math-pro",{name:"deepseek-math-pro",displayName:"DeepSeek Math Pro",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Advanced mathematical reasoning model for complex problems",maxTokens:16384,contextLength:64e3,capabilities:["mathematical-reasoning","advanced-calculus","statistics"],version:"2025-01"}],["deepseek-reasoner",{name:"deepseek-reasoner",displayName:"DeepSeek Reasoner",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Dedicated reasoning model for logical analysis",maxTokens:8192,contextLength:32e3,capabilities:["logical-reasoning","analysis","decision-making"],version:"2025-01"}],["deepseek-reasoner-pro",{name:"deepseek-reasoner-pro",displayName:"DeepSeek Reasoner Pro",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Advanced reasoning model for complex logical problems",maxTokens:16384,contextLength:64e3,capabilities:["complex-reasoning","scientific-analysis","research"],version:"2025-01"}],["deepseek-vision",{name:"deepseek-vision",displayName:"DeepSeek Vision",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Vision model for image understanding and analysis",maxTokens:4096,contextLength:16e3,capabilities:["image-understanding","visual-qa","document-analysis"],version:"2025-01"}],["deepseek-vision-pro",{name:"deepseek-vision-pro",displayName:"DeepSeek Vision Pro",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Advanced vision model for complex visual tasks",maxTokens:8192,contextLength:32e3,capabilities:["image-understanding","video-analysis","visual-reasoning"],version:"2025-01"}],["deepseek-finance",{name:"deepseek-finance",displayName:"DeepSeek Finance",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Specialized for financial analysis, market prediction, and investment insights",maxTokens:8192,contextLength:32e3,capabilities:["financial-analysis","market-prediction","risk-assessment","investment-advice"],version:"2025-01"}],["deepseek-law",{name:"deepseek-law",displayName:"DeepSeek Law",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Specialized for legal analysis, contract review, and legal research",maxTokens:16384,contextLength:64e3,capabilities:["legal-analysis","contract-review","legal-research"],version:"2025-01"}],["deepseek-medical",{name:"deepseek-medical",displayName:"DeepSeek Medical",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Specialized for medical consultation, diagnosis support, and health analysis",maxTokens:8192,contextLength:32e3,capabilities:["medical-consultation","diagnostic-support","health-analysis"],version:"2025-01"}],["deepseek-research",{name:"deepseek-research",displayName:"DeepSeek Research",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Specialized for academic research and scientific analysis",maxTokens:32768,contextLength:128e3,capabilities:["academic-research","scientific-analysis","paper-writing"],version:"2025-01"}],["deepseek-omni",{name:"deepseek-omni",displayName:"DeepSeek Omni",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Multimodal model supporting text, image, and 
audio",maxTokens:16384,contextLength:64e3,capabilities:["text-generation","image-understanding","audio-processing","multimodal"],version:"2025-01"}],["deepseek-omni-pro",{name:"deepseek-omni-pro",displayName:"DeepSeek Omni Pro",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Professional multimodal model with advanced capabilities",maxTokens:32768,contextLength:128e3,capabilities:["text-generation","multimodal","complex-reasoning","expert-analysis"],version:"2025-01"}],["deepseek-llm",{name:"deepseek-llm",displayName:"DeepSeek LLM",endpoint:"https://api.deepseek.com/v1/chat/completions",format:"openai",description:"Base large language model",maxTokens:4096,contextLength:16e3,capabilities:["text-generation"],version:"2024-12"}]]);function $(t){return y.get(t)}function R(){return Array.from(y.values())}function D(t){for(const e of y.values())if(e.name===t)return e}function I(){return Array.from(y.keys())}function B(t){const e=Object.values(v);for(const o of e)if(o===t)return o;return null}class C{constructor(e){if(this.apiKey=e.apiKey,this.modelType=e.modelType||v.DEEPSEEK_CHAT,this.timeout=e.timeout||3e4,this.baseURL=e.baseURL||"https://api.deepseek.com",!this.apiKey)throw new Error("API Key cannot be empty");if(!y.get(this.modelType))throw new Error(`Unsupported model type: ${this.modelType}`)}async chat(e,o){const s=[];o?.systemPrompt&&s.push({role:"system",content:o.systemPrompt}),s.push({role:"user",content:e});const a=await this.chatCompletion(s,{temperature:o?.temperature,maxTokens:o?.maxTokens,stream:!1,modelType:o?.modelType,topP:o?.topP,frequencyPenalty:o?.frequencyPenalty,presencePenalty:o?.presencePenalty,stop:o?.stop,tools:o?.tools,toolChoice:o?.toolChoice});return this.extractContent(a)}async chatCompletion(e,o){const s=o?.modelType||this.modelType,a=y.get(s);if(!a)throw new Error(`Unsupported model type: ${s}`);const i=o?.temperature??.7,n=o?.maxTokens??2e3,r=o?.stream??!1,p=o?.topP??1,c=o?.frequencyPenalty??0,m=o?.presencePenalty??0,d=o?.stop,l=o?.tools,u=o?.toolChoice,x=a.endpoint,T=this.buildOpenAIRequest(a.name,e,i,n,r,p,c,m,d,l,u);try{return await this.makeRequest(x,T,r)}catch(E){throw new Error(`DeepSeek AI request failed: ${E.message}`)}}async chatStream(e,o,s){const a=s?.modelType||this.modelType,i=y.get(a);if(!i)throw new Error(`Unsupported model type: ${a}`);const n=s?.temperature??.7,r=s?.maxTokens??2e3,p=s?.topP??1,c=s?.frequencyPenalty??0,m=s?.presencePenalty??0,d=this.buildOpenAIRequest(i.name,e,n,r,!0,p,c,m,s?.stop,s?.tools,s?.toolChoice);try{await this.makeStreamRequest(i.endpoint,d,o)}catch(l){throw new Error(`Streaming request failed: ${l.message}`)}}async predictingOHLCV(e,o,s,a){const i=o||"Based on these OHLCV data, predict the next period",n=s||1;if(!Number.isInteger(n)||n<=0)throw new Error(`Invalid count parameter: ${n}. Must be a positive integer.`);const r=50;if(n>r)throw new Error(`Count parameter too large: ${n}. Maximum allowed is ${r}.`);const p=n===1?"Return EXACTLY 1 OHLCV object for the next period.":`Return EXACTLY ${n} consecutive OHLCV objects for the next ${n} periods.`,c=`You are a professional financial data analysis AI. The user will give you an array of OHLCV (Open, High, Low, Close, Volume) data.
24
- Your task: ${i}
95
+
96
+ `:""}
97
+ Please provide:
98
+ 1. Clear and structured analysis
99
+ 2. Key observations from the data
100
+ 3. Potential implications or insights
101
+ 4. Recommendations or considerations (if applicable)
102
+ Format your response as a well-organized text analysis.`:`您是一位专业的金融数据分析师。您的任务是分析OHLCV(开盘价、最高价、最低价、收盘价、成交量)数据并提供见解。
103
+ 分析重点:${l}
104
+ ${m?`数据特征:
105
+ ${m}
106
+
107
+ `:""}
108
+ 请提供:
109
+ 1. 清晰且有结构的分析
110
+ 2. 数据的关键观察结果
111
+ 3. 潜在的启示或见解
112
+ 4. 建议或注意事项(如适用)
113
+ 请以组织良好的文本分析形式回复。`;c+=`
114
+
115
+ ${u}`;const p=JSON.stringify(e,null,2);let g="";a?g=t==="en"?`Here is the OHLCV data (${i} periods):
116
+ ${p}
117
+ My specific question or request: ${a}
118
+ Please analyze this data considering my request above.`:`这是OHLCV数据(${i}个周期):
119
+ ${p}
120
+ 我的具体问题或需求:${a}
121
+ 请根据我的上述需求分析这些数据。`:g=t==="en"?`Here is the OHLCV data (${i} periods):
122
+ ${p}
123
+ Please analyze this data as requested.`:`这是OHLCV数据(${i}个周期):
124
+ ${p}
125
+ 请按要求分析这些数据。`;const y=[{role:"system",content:c},{role:"user",content:g}];try{const h=await this.chatCompletion(y,t,{temperature:r?.temperature||.5,maxTokens:r?.maxTokens||1500,stream:!1,modelType:this.modelType,topP:r?.topP,frequencyPenalty:r?.frequencyPenalty,presencePenalty:r?.presencePenalty});return this.extractContent(h)}catch(h){throw new Error(`OHLCV analysis failed: ${h.message}`)}}async analyzeOHLCVEnhanced(e,t,s,a,r=!1,o){if(r){const i=t==="en"?`You are a professional financial data analyst. Analyze the OHLCV data and provide a structured response with:
126
+ 1. Summary (brief overview)
127
+ 2. Details (key observations, 3-5 points)
128
+ 3. Recommendations (actionable insights, 2-3 points)
129
+ Format as JSON: {"summary": "...", "details": ["...", "..."], "recommendations": ["...", "..."]}`:`您是一位专业的金融数据分析师。分析OHLCV数据并提供结构化响应:
130
+ 1. 总结(简要概述)
131
+ 2. 详情(关键观察结果,3-5点)
132
+ 3. 建议(可操作的见解,2-3点)
133
+ 格式化为JSON:{"summary": "...", "details": ["...", "..."], "recommendations": ["...", "..."]}`,d=JSON.stringify(e,null,2);let l=t==="en"?`Analyze this OHLCV data (${e.length} periods):
134
+ ${d}`:`分析此OHLCV数据(${e.length}个周期):
135
+ ${d}`;a&&(l+=t==="en"?`
136
+
137
+ Additional request: ${a}`:`
138
+
139
+ 附加要求:${a}`);const u=[{role:"system",content:i},{role:"user",content:l}];try{const m=await this.chatCompletion(u,t,{temperature:o?.temperature||.4,maxTokens:o?.maxTokens||1200,stream:!1,modelType:this.modelType}),c=this.extractContent(m);try{const p=JSON.parse(c);if(p.summary&&Array.isArray(p.details)&&Array.isArray(p.recommendations))return p}catch{}return c}catch(m){throw new Error(`Structured OHLCV analysis failed: ${m.message}`)}}return this.analyzeOHLCV(e,t,s,a,o)}async predictingOHLCV(e,t,s,a){const r=t||"Based on these OHLCV data, predict the next period",o=s||1;if(!Number.isInteger(o)||o<=0)throw new Error(`Invalid count parameter: ${o}. Must be a positive integer.`);const i=50;if(o>i)throw new Error(`Count parameter too large: ${o}. Maximum allowed is ${i}.`);const d=o===1?"Return EXACTLY 1 OHLCV object for the next period.":`Return EXACTLY ${o} consecutive OHLCV objects for the next ${o} periods.`,l=`You are a professional financial data analysis AI. The user will give you an array of OHLCV (Open, High, Low, Close, Volume) data.
140
+ Your task: ${r}
25
141
  CRITICAL RULES:
26
- 1. ${p}
142
+ 1. ${d}
27
143
  2. Return ONLY a JSON array of OHLCV objects, NO explanations, comments, or other text
28
144
  3. The OHLCV array format must match: [{open, high, low, close, volume}, ...]
29
145
  4. All numbers must be valid numbers
@@ -32,22 +148,78 @@ CRITICAL RULES:
32
148
  7. For technical analysis, provide reasonable values based on typical patterns
33
149
  8. Do not include markdown formatting, only pure JSON
34
150
 
35
- ${n===1?`Example of valid response for 1 period:
36
- [{"open": 115.5, "high": 118.0, "low": 114.0, "close": 117.0, "volume": 1350000}]`:`Example of valid response for ${n} periods:
151
+ ${o===1?`Example of valid response for 1 period:
152
+ [{"open": 115.5, "high": 118.0, "low": 114.0, "close": 117.0, "volume": 1350000}]`:`Example of valid response for ${o} periods:
37
153
  [
38
154
  {"open": 115.5, "high": 118.0, "low": 114.0, "close": 117.0, "volume": 1350000},
39
155
  {"open": 117.5, "high": 120.0, "low": 116.0, "close": 119.0, "volume": 1400000}
40
- ${n>2?`,
41
- ... ${n-2} more OHLCV objects following the same pattern`:""}
42
- ]`}`,m=JSON.stringify(e,null,2),d=`Here is the historical OHLCV data (${e.length} periods):
156
+ ${o>2?`,
157
+ ... ${o-2} more OHLCV objects following the same pattern`:""}
158
+ ]`}`,u=JSON.stringify(e,null,2),m=`Here is the historical OHLCV data (${e.length} periods):
159
+ ${u}
160
+ Please process this data according to the system instructions. Remember to return EXACTLY ${o} OHLCV object(s) in a JSON array with no additional text.`,c=[{role:"system",content:l},{role:"user",content:m}];try{const p=o*50+100,g=Math.max(a?.maxTokens||2e3,p),y=await this.chatCompletion(c,"en",{temperature:a?.temperature||.3,maxTokens:g,stream:!1,modelType:this.modelType,topP:a?.topP,frequencyPenalty:a?.frequencyPenalty,presencePenalty:a?.presencePenalty}),h=this.extractContent(y),b=this.parseOHLCVResponse(h);if(b.length!==o)throw new Error(`AI returned ${b.length} OHLCV objects, but expected ${o}.`);return b}catch(p){throw new Error(`OHLCV analysis failed: ${p.message}`)}}setModel(e){if(!$.get(e))throw new Error(`Unsupported model type: ${e}`);this.modelType=e}getCurrentModel(){const e=$.get(this.modelType);if(!e)throw new Error(`Model configuration does not exist: ${this.modelType}`);return{name:e.name,displayName:e.displayName,description:e.description}}async testConnection(){try{const e=await this.chat('Hello, respond with "OK" if you can hear me.',"en");return{success:!0,model:this.modelType,response:e}}catch(e){return{success:!1,model:this.modelType,error:e.message}}}buildOpenAIRequest(e,t,s,a,r,o,i,d,l,u,m){const c={model:e,messages:t,temperature:s,max_tokens:a,stream:r};return o!==void 0&&(c.top_p=o),i!==void 0&&(c.frequency_penalty=i),d!==void 0&&(c.presence_penalty=d),l&&(c.stop=l),u&&(c.tools=u),m&&(c.tool_choice=m),c}async makeRequest(e,t,s){const a=new AbortController,r=setTimeout(()=>a.abort(),this.timeout);try{const o=await fetch(e,{method:"POST",headers:{Authorization:`Bearer ${this.apiKey}`,"Content-Type":"application/json; charset=utf-8",Accept:"application/json"},body:JSON.stringify(t),signal:a.signal});if(clearTimeout(r),!o.ok){const i=await o.text();throw new Error(`HTTP ${o.status}: ${i}`)}return s?o.body:await o.json()}catch(o){throw clearTimeout(r),o.name==="AbortError"?new Error(`Request timeout (${this.timeout}ms)`):o}}async makeStreamRequest(e,t,s){const a=await this.makeRequest(e,t,!0);if(!a)throw new Error("Failed to get streaming response");const r=a.getReader(),o=new TextDecoder("utf-8");let i="";try{for(;;){const{done:d,value:l}=await r.read();if(d){s("",!0);break}i+=o.decode(l,{stream:!0});const u=i.split(`
161
+ `);i=u.pop()||"";for(const m of u)if(m.startsWith("data: ")){const c=m.slice(6);if(c==="[DONE]"){s("",!0);return}try{const p=JSON.parse(c);p.choices?.[0]?.delta?.content&&s(p.choices[0].delta.content,!1)}catch{}}}}finally{r.releaseLock()}}extractContent(e){if(e.choices?.[0]?.message?.content)return e.choices[0].message.content;if(e.output?.choices?.[0]?.message?.content)return e.output.choices[0].message.content;if(e.output?.text)return e.output.text;if(e.choices?.[0]?.delta?.content)return e.choices[0].delta.content;throw new Error("Unable to parse response content")}parseOHLCVResponse(e){try{const t=JSON.parse(e);if(!Array.isArray(t))throw new Error("Response is not in array format");return t.map((a,r)=>{if(typeof a!="object"||a===null)throw new Error(`Element ${r} is not a valid object`);const{open:o,high:i,low:d,close:l,volume:u}=a,m=["open","high","low","close","volume"];for(const c of m)if(typeof a[c]!="number"||isNaN(a[c]))throw new Error(`Element ${r} field ${c} is not a valid number`);if(i<d)throw new Error(`Element ${r}: high cannot be lower than low`);if(l<d||l>i)throw new Error(`Element ${r}: close must be between low and high`);return{open:Number(o),high:Number(i),low:Number(d),close:Number(l),volume:Number(u)}})}catch(t){const s=e.match(/\[[\s\S]*\]/);if(s)return this.parseOHLCVResponse(s[0]);throw new Error(`Unable to parse AI returned OHLCV data: ${t}
162
+ Original content: ${e.substring(0,200)}...`)}}}function z(n,e){return new q({apiKey:n,modelType:e})}var T=(n=>(n.GPT4="gpt-4",n.GPT4_0314="gpt-4-0314",n.GPT4_0613="gpt-4-0613",n.GPT4_32K="gpt-4-32k",n.GPT4_32K_0314="gpt-4-32k-0314",n.GPT4_32K_0613="gpt-4-32k-0613",n.GPT4_TURBO="gpt-4-turbo",n.GPT4_TURBO_PREVIEW="gpt-4-turbo-preview",n.GPT4_TURBO_2024_04_09="gpt-4-turbo-2024-04-09",n.GPT4_OMNI="gpt-4o",n.GPT4_OMNI_2024_05_13="gpt-4o-2024-05-13",n.GPT4_OMNI_MINI="gpt-4o-mini",n.GPT4_OMNI_MINI_2024_07_18="gpt-4o-mini-2024-07-18",n.GPT3_5_TURBO="gpt-3.5-turbo",n.GPT3_5_TURBO_0125="gpt-3.5-turbo-0125",n.GPT3_5_TURBO_1106="gpt-3.5-turbo-1106",n.GPT3_5_TURBO_INSTRUCT="gpt-3.5-turbo-instruct",n.GPT3_5_TURBO_16K="gpt-3.5-turbo-16k",n.GPT3_5_TURBO_16K_0613="gpt-3.5-turbo-16k-0613",n.DAVINCI_002="davinci-002",n.BABBAGE_002="babbage-002",n.TEXT_DAVINCI_003="text-davinci-003",n.TEXT_DAVINCI_002="text-davinci-002",n.TEXT_DAVINCI_001="text-davinci-001",n.TEXT_CURIE_001="text-curie-001",n.TEXT_BABBAGE_001="text-babbage-001",n.TEXT_ADA_001="text-ada-001",n.TEXT_EMBEDDING_ADA_002="text-embedding-ada-002",n.TEXT_EMBEDDING_3_SMALL="text-embedding-3-small",n.TEXT_EMBEDDING_3_LARGE="text-embedding-3-large",n.DALL_E_2="dall-e-2",n.DALL_E_3="dall-e-3",n.WHISPER_1="whisper-1",n.TTS_1="tts-1",n.TTS_1_HD="tts-1-hd",n.MODERATION_LATEST="text-moderation-latest",n.MODERATION_STABLE="text-moderation-stable",n.GPT3_5_TURBO_FINETUNED="ft:gpt-3.5-turbo-0125:personal:",n.GPT4_FINETUNED="ft:gpt-4-0125-preview:personal:",n.GPT4_VISION_PREVIEW="gpt-4-vision-preview",n))(T||{});const x=new Map([["gpt-4",{name:"gpt-4",displayName:"GPT-4",endpoint:"https://api.openai.com/v1/chat/completions",format:"openai",description:"Powerful multi-purpose model for complex tasks",maxTokens:8192,contextLength:8192,capabilities:["chat","text-generation","reasoning","analysis"],inputCostPer1KTokens:.03,outputCostPer1KTokens:.06,supportedFeatures:["chat","function-calling"]}],["gpt-4-turbo",{name:"gpt-4-turbo",displayName:"GPT-4 Turbo",endpoint:"https://api.openai.com/v1/chat/completions",format:"openai",description:"Enhanced GPT-4 with 128K context, knowledge cutoff April 2023",maxTokens:4096,contextLength:128e3,capabilities:["chat","text-generation","reasoning","analysis","vision"],inputCostPer1KTokens:.01,outputCostPer1KTokens:.03,supportedFeatures:["chat","function-calling","vision","json-mode"]}],["gpt-4o",{name:"gpt-4o",displayName:"GPT-4o",endpoint:"https://api.openai.com/v1/chat/completions",format:"openai",description:"Versatile model supporting text, images, audio with fast response",maxTokens:4096,contextLength:128e3,capabilities:["chat","text-generation","vision","audio-processing","multimodal"],inputCostPer1KTokens:.005,outputCostPer1KTokens:.015,supportedFeatures:["chat","function-calling","vision","audio","json-mode"]}],["gpt-4o-mini",{name:"gpt-4o-mini",displayName:"GPT-4o Mini",endpoint:"https://api.openai.com/v1/chat/completions",format:"openai",description:"Compact and efficient version of GPT-4o with lower cost",maxTokens:16384,contextLength:128e3,capabilities:["chat","text-generation","vision"],inputCostPer1KTokens:15e-5,outputCostPer1KTokens:6e-4,supportedFeatures:["chat","function-calling","vision","json-mode"]}],["gpt-3.5-turbo",{name:"gpt-3.5-turbo",displayName:"GPT-3.5 Turbo",endpoint:"https://api.openai.com/v1/chat/completions",format:"openai",description:"Fast and cost-effective, suitable for most conversational 
tasks",maxTokens:4096,contextLength:16385,capabilities:["chat","text-generation","code-generation"],inputCostPer1KTokens:5e-4,outputCostPer1KTokens:.0015,supportedFeatures:["chat","function-calling"]}],["gpt-3.5-turbo-instruct",{name:"gpt-3.5-turbo-instruct",displayName:"GPT-3.5 Turbo Instruct",endpoint:"https://api.openai.com/v1/completions",format:"openai",description:"Instruction-tuned version for text completion tasks",maxTokens:4096,contextLength:4097,capabilities:["text-completion","instruction-following"],inputCostPer1KTokens:.0015,outputCostPer1KTokens:.002,supportedFeatures:["completions"]}],["text-embedding-ada-002",{name:"text-embedding-ada-002",displayName:"Text Embedding Ada 002",endpoint:"https://api.openai.com/v1/embeddings",format:"openai",description:"Text embedding model, 1536 dimensions, suitable for retrieval and similarity",contextLength:8191,capabilities:["embeddings","semantic-search"],inputCostPer1KTokens:1e-4,supportedFeatures:["embeddings"]}],["text-embedding-3-small",{name:"text-embedding-3-small",displayName:"Text Embedding 3 Small",endpoint:"https://api.openai.com/v1/embeddings",format:"openai",description:"Small text embedding model, 1536 dimensions, balance of performance and cost",contextLength:8191,capabilities:["embeddings","semantic-search"],inputCostPer1KTokens:2e-5,supportedFeatures:["embeddings"]}],["dall-e-3",{name:"dall-e-3",displayName:"DALL-E 3",endpoint:"https://api.openai.com/v1/images/generations",format:"openai",description:"Advanced image generation model producing high-quality, high-resolution images",capabilities:["image-generation","creative-design"],inputCostPer1KTokens:.04,supportedFeatures:["image-generation","variations","edits"]}],["whisper-1",{name:"whisper-1",displayName:"Whisper",endpoint:"https://api.openai.com/v1/audio/transcriptions",format:"openai",description:"Speech recognition model supporting multilingual transcription and translation",capabilities:["speech-recognition","audio-transcription","translation"],inputCostPer1KTokens:.006,supportedFeatures:["transcriptions","translations"]}],["tts-1-hd",{name:"tts-1-hd",displayName:"TTS-1 HD",endpoint:"https://api.openai.com/v1/audio/speech",format:"openai",description:"High-quality text-to-speech with multiple voice options",capabilities:["speech-synthesis","text-to-speech"],inputCostPer1KTokens:.015,supportedFeatures:["speech","voice-selection"]}],["text-moderation-latest",{name:"text-moderation-latest",displayName:"Moderation Latest",endpoint:"https://api.openai.com/v1/moderations",format:"openai",description:"Content moderation model for detecting harmful content",capabilities:["content-moderation","safety"],inputCostPer1KTokens:1e-4,supportedFeatures:["moderation"]}]]);function j(n){return x.get(n)}function L(){return Array.from(x.values())}function W(n){for(const e of x.values())if(e.name===n)return e}function G(){return Array.from(x.keys())}function J(){return L().filter(n=>n.capabilities.includes("chat"))}function X(){return L().filter(n=>n.capabilities.includes("text-completion"))}function Y(){return L().filter(n=>n.capabilities.includes("embeddings"))}function Z(){return L().filter(n=>n.capabilities.includes("vision")||n.capabilities.includes("image-generation"))}function ee(){return L().filter(n=>n.capabilities.includes("audio-processing")||n.capabilities.includes("speech-recognition")||n.capabilities.includes("speech-synthesis"))}function te(){return L().filter(n=>n.capabilities.includes("multimodal"))}function ne(){const 
n=["gpt-4o","gpt-4o-mini","gpt-4-turbo","gpt-3.5-turbo","text-embedding-3-small","dall-e-3"];return L().filter(e=>n.includes(e.name))}function oe(){return L().filter(n=>n.inputCostPer1KTokens&&n.inputCostPer1KTokens<.001).sort((n,e)=>(n.inputCostPer1KTokens||0)-(e.inputCostPer1KTokens||0))}function ae(){return L().filter(n=>n.contextLength&&n.contextLength>=128e3).sort((n,e)=>(e.contextLength||0)-(n.contextLength||0))}function se(n,e,t=0){const s=(n.inputCostPer1KTokens||0)/1e3*e,a=(n.outputCostPer1KTokens||0)/1e3*t;return{inputTokens:e,outputTokens:t,inputCost:s,outputCost:a,totalCost:s+a}}function ie(n){let e=L();switch(n.taskType){case"chat":e=e.filter(t=>t.capabilities.includes("chat"));break;case"completion":e=e.filter(t=>t.capabilities.includes("text-completion"));break;case"embedding":e=e.filter(t=>t.capabilities.includes("embeddings"));break;case"image":e=e.filter(t=>t.capabilities.includes("image-generation")||t.capabilities.includes("vision"));break;case"audio":e=e.filter(t=>t.capabilities.includes("speech-recognition")||t.capabilities.includes("speech-synthesis"));break}return n.contextLength&&(e=e.filter(t=>t.contextLength&&t.contextLength>=n.contextLength)),n.features&&n.features.length>0&&(e=e.filter(t=>n.features.every(s=>t.supportedFeatures?.includes(s)||t.capabilities.includes(s)))),n.budget&&e.sort((t,s)=>(t.inputCostPer1KTokens||0)-(s.inputCostPer1KTokens||0)),e.slice(0,5)}function re(n){const e=Object.values(T);for(const t of e)if(t===n)return t;return null}class H{constructor(e){if(this.apiKey=e.apiKey,this.modelType=e.modelType||T.GPT3_5_TURBO,this.timeout=e.timeout||3e4,this.organization=e.organization,this.baseURL=e.baseURL||"https://api.openai.com/v1",!this.apiKey)throw new Error("API Key cannot be empty");if(!x.get(this.modelType))throw new Error(`Unsupported model type: ${this.modelType}`)}async chat(e,t,s){const a=[],r=t==="en"?"Please respond in English only.":t==="cn"?"请使用中文回答。":"";if(s?.systemPrompt||r){const i=[s?.systemPrompt,r].filter(Boolean).join(`
163
+ `);a.push({role:"system",content:i})}a.push({role:"user",content:e});const o=await this.chatCompletion(a,t,{temperature:s?.temperature,maxTokens:s?.maxTokens,stream:!1,topP:s?.topP,frequencyPenalty:s?.frequencyPenalty,presencePenalty:s?.presencePenalty,stop:s?.stop});return this.extractContent(o)}async chatCompletion(e,t,s){const a=s?.modelType||this.modelType,r=x.get(a);if(!r)throw new Error(`Unsupported model type: ${a}`);const o=s?.temperature??.7,i=s?.maxTokens??1e3,d=s?.stream??!1,l=r.endpoint,u=t==="en"?"Please respond in English only.":t==="cn"?"请使用中文回答。":"";u&&(e.some(p=>p.role==="system")?e=e.map(p=>p.role==="system"?{...p,content:`${p.content}
164
+ ${u}`}:p):e.unshift({role:"system",content:u}));const m=this.buildOpenAIRequest(r.name,e,o,i,d,s);try{return await this.makeRequest(l,m,d)}catch(c){throw new Error(`OpenAI request failed: ${c.message}`)}}async chatStream(e,t,s,a){const r=a?.modelType||this.modelType,o=x.get(r);if(!o)throw new Error(`Unsupported model type: ${r}`);const i=a?.temperature??.7,d=a?.maxTokens??1e3,l=s==="en"?"Please respond in English only.":s==="cn"?"请使用中文回答。":"";l&&(e.some(c=>c.role==="system")?e=e.map(c=>c.role==="system"?{...c,content:`${c.content}
165
+ ${l}`}:c):e.unshift({role:"system",content:l}));const u=this.buildOpenAIRequest(o.name,e,i,d,!0,a);try{await this.makeStreamRequest(o.endpoint,u,t)}catch(m){throw new Error(`Streaming request failed: ${m.message}`)}}async analyzeOHLCV(e,t,s,a,r){const o=s||"comprehensive",i=e.length,l={trend:{en:"Provide a detailed trend analysis of this OHLCV data, including price direction, support/resistance levels, and trend strength.",cn:"提供详细的OHLCV数据趋势分析,包括价格方向、支撑/阻力位和趋势强度。"},volume:{en:"Analyze the volume patterns in this OHLCV data, including volume trends, unusual volume spikes, and volume-price relationships.",cn:"分析OHLCV数据中的成交量模式,包括成交量趋势、异常成交量波动和量价关系。"},technical:{en:"Perform technical analysis on this OHLCV data, identifying potential technical indicators, patterns, and trading signals.",cn:"对OHLCV数据进行技术分析,识别潜在的技术指标、图表形态和交易信号。"},comprehensive:{en:"Provide a comprehensive analysis of this OHLCV data, covering trends, volume, technical aspects, and potential market implications.",cn:"提供全面的OHLCV数据分析,涵盖趋势、成交量、技术面和潜在市场影响。"}}[o][t],u=t==="en"?"Please provide your analysis in English.":"请用中文进行分析。";let m="";if(i>0){const h=e[0],f=e[i-1].close-h.close,w=f/h.close*100;let C=h.high,v=h.low,O=0;for(const E of e)E.high>C&&(C=E.high),E.low<v&&(v=E.low),O+=E.volume;const P=O/i;m=t==="en"?`This dataset contains ${i} periods of OHLCV data.
166
+ Price range: ${v.toFixed(2)} - ${C.toFixed(2)}
167
+ Overall price change: ${f>=0?"+":""}${f.toFixed(2)} (${f>=0?"+":""}${w.toFixed(2)}%)
168
+ Average volume: ${P.toFixed(0)}`:`该数据集包含 ${i} 个周期的OHLCV数据。
169
+ 价格范围:${v.toFixed(2)} - ${C.toFixed(2)}
170
+ 总体价格变化:${f>=0?"+":""}${f.toFixed(2)} (${f>=0?"+":""}${w.toFixed(2)}%)
171
+ 平均成交量:${P.toFixed(0)}`}let c=t==="en"?`You are a professional financial data analyst. Your task is to analyze OHLCV (Open, High, Low, Close, Volume) data and provide insights.
172
+ Analysis focus: ${l}
173
+ ${m?`Data characteristics:
174
+ ${m}
175
+
176
+ `:""}
177
+ Please provide:
178
+ 1. Clear and structured analysis
179
+ 2. Key observations from the data
180
+ 3. Potential implications or insights
181
+ 4. Recommendations or considerations (if applicable)
182
+ Format your response as a well-organized text analysis.`:`您是一位专业的金融数据分析师。您的任务是分析OHLCV(开盘价、最高价、最低价、收盘价、成交量)数据并提供见解。
183
+ 分析重点:${l}
184
+ ${m?`数据特征:
43
185
  ${m}
44
- Please process this data according to the system instructions. Remember to return EXACTLY ${n} OHLCV object(s) in a JSON array with no additional text.`,l=[{role:"system",content:c},{role:"user",content:d}];try{const u=n*50+100,x=Math.max(a?.maxTokens||2e3,u),T=await this.chatCompletion(l,{temperature:a?.temperature||.3,maxTokens:x,stream:!1,modelType:a?.modelType||v.DEEPSEEK_FINANCE,topP:a?.topP,frequencyPenalty:a?.frequencyPenalty,presencePenalty:a?.presencePenalty}),E=this.extractContent(T),f=this.parseOHLCVResponse(E);if(f.length!==n)throw new Error(`AI returned ${f.length} OHLCV objects, but expected ${n}.`);return f}catch(u){throw new Error(`OHLCV analysis failed: ${u.message}`)}}setModel(e){if(!y.get(e))throw new Error(`Unsupported model type: ${e}`);this.modelType=e}getCurrentModel(){const e=y.get(this.modelType);if(!e)throw new Error(`Model configuration does not exist: ${this.modelType}`);return{name:e.name,displayName:e.displayName,description:e.description}}async testConnection(){try{const e=await this.chat('Hello, respond with "OK" if you can hear me.');return{success:!0,model:this.modelType,response:e}}catch(e){return{success:!1,model:this.modelType,error:e.message}}}buildOpenAIRequest(e,o,s,a,i,n,r,p,c,m,d){const l={model:e,messages:o,temperature:s,max_tokens:a,stream:i};return n!==void 0&&(l.top_p=n),r!==void 0&&(l.frequency_penalty=r),p!==void 0&&(l.presence_penalty=p),c&&(l.stop=c),m&&(l.tools=m),d&&(l.tool_choice=d),l}async makeRequest(e,o,s){const a=new AbortController,i=setTimeout(()=>a.abort(),this.timeout);try{const n=await fetch(e,{method:"POST",headers:{Authorization:`Bearer ${this.apiKey}`,"Content-Type":"application/json; charset=utf-8",Accept:"application/json"},body:JSON.stringify(o),signal:a.signal});if(clearTimeout(i),!n.ok){const r=await n.text();throw new Error(`HTTP ${n.status}: ${r}`)}return s?n.body:await n.json()}catch(n){throw clearTimeout(i),n.name==="AbortError"?new Error(`Request timeout (${this.timeout}ms)`):n}}async makeStreamRequest(e,o,s){const a=await this.makeRequest(e,o,!0);if(!a)throw new Error("Failed to get streaming response");const i=a.getReader(),n=new TextDecoder("utf-8");let r="";try{for(;;){const{done:p,value:c}=await i.read();if(p){s("",!0);break}r+=n.decode(c,{stream:!0});const m=r.split(`
45
- `);r=m.pop()||"";for(const d of m)if(d.startsWith("data: ")){const l=d.slice(6);if(l==="[DONE]"){s("",!0);return}try{const u=JSON.parse(l);u.choices?.[0]?.delta?.content&&s(u.choices[0].delta.content,!1)}catch{}}}}finally{i.releaseLock()}}extractContent(e){if(e.choices?.[0]?.message?.content)return e.choices[0].message.content;if(e.output?.choices?.[0]?.message?.content)return e.output.choices[0].message.content;if(e.output?.text)return e.output.text;if(e.choices?.[0]?.delta?.content)return e.choices[0].delta.content;throw new Error("Unable to parse response content")}parseOHLCVResponse(e){try{const o=JSON.parse(e);if(!Array.isArray(o))throw new Error("Response is not in array format");return o.map((a,i)=>{if(typeof a!="object"||a===null)throw new Error(`Element ${i} is not a valid object`);const{open:n,high:r,low:p,close:c,volume:m}=a,d=["open","high","low","close","volume"];for(const l of d)if(typeof a[l]!="number"||isNaN(a[l]))throw new Error(`Element ${i} field ${l} is not a valid number`);if(r<p)throw new Error(`Element ${i}: high cannot be lower than low`);if(c<p||c>r)throw new Error(`Element ${i}: close must be between low and high`);return{open:Number(n),high:Number(r),low:Number(p),close:Number(c),volume:Number(m)}})}catch(o){const s=e.match(/\[[\s\S]*\]/);if(s)return this.parseOHLCVResponse(s[0]);throw new Error(`Unable to parse AI returned OHLCV data: ${o}
46
- Original content: ${e.substring(0,200)}...`)}}}function H(t,e){return new C({apiKey:t,modelType:e})}var g=(t=>(t.GPT4="gpt-4",t.GPT4_0314="gpt-4-0314",t.GPT4_0613="gpt-4-0613",t.GPT4_32K="gpt-4-32k",t.GPT4_32K_0314="gpt-4-32k-0314",t.GPT4_32K_0613="gpt-4-32k-0613",t.GPT4_TURBO="gpt-4-turbo",t.GPT4_TURBO_PREVIEW="gpt-4-turbo-preview",t.GPT4_TURBO_2024_04_09="gpt-4-turbo-2024-04-09",t.GPT4_OMNI="gpt-4o",t.GPT4_OMNI_2024_05_13="gpt-4o-2024-05-13",t.GPT4_OMNI_MINI="gpt-4o-mini",t.GPT4_OMNI_MINI_2024_07_18="gpt-4o-mini-2024-07-18",t.GPT3_5_TURBO="gpt-3.5-turbo",t.GPT3_5_TURBO_0125="gpt-3.5-turbo-0125",t.GPT3_5_TURBO_1106="gpt-3.5-turbo-1106",t.GPT3_5_TURBO_INSTRUCT="gpt-3.5-turbo-instruct",t.GPT3_5_TURBO_16K="gpt-3.5-turbo-16k",t.GPT3_5_TURBO_16K_0613="gpt-3.5-turbo-16k-0613",t.DAVINCI_002="davinci-002",t.BABBAGE_002="babbage-002",t.TEXT_DAVINCI_003="text-davinci-003",t.TEXT_DAVINCI_002="text-davinci-002",t.TEXT_DAVINCI_001="text-davinci-001",t.TEXT_CURIE_001="text-curie-001",t.TEXT_BABBAGE_001="text-babbage-001",t.TEXT_ADA_001="text-ada-001",t.TEXT_EMBEDDING_ADA_002="text-embedding-ada-002",t.TEXT_EMBEDDING_3_SMALL="text-embedding-3-small",t.TEXT_EMBEDDING_3_LARGE="text-embedding-3-large",t.DALL_E_2="dall-e-2",t.DALL_E_3="dall-e-3",t.WHISPER_1="whisper-1",t.TTS_1="tts-1",t.TTS_1_HD="tts-1-hd",t.MODERATION_LATEST="text-moderation-latest",t.MODERATION_STABLE="text-moderation-stable",t.GPT3_5_TURBO_FINETUNED="ft:gpt-3.5-turbo-0125:personal:",t.GPT4_FINETUNED="ft:gpt-4-0125-preview:personal:",t.GPT4_VISION_PREVIEW="gpt-4-vision-preview",t))(g||{});const h=new Map([["gpt-4",{name:"gpt-4",displayName:"GPT-4",endpoint:"https://api.openai.com/v1/chat/completions",format:"openai",description:"Powerful multi-purpose model for complex tasks",maxTokens:8192,contextLength:8192,capabilities:["chat","text-generation","reasoning","analysis"],inputCostPer1KTokens:.03,outputCostPer1KTokens:.06,supportedFeatures:["chat","function-calling"]}],["gpt-4-turbo",{name:"gpt-4-turbo",displayName:"GPT-4 Turbo",endpoint:"https://api.openai.com/v1/chat/completions",format:"openai",description:"Enhanced GPT-4 with 128K context, knowledge cutoff April 2023",maxTokens:4096,contextLength:128e3,capabilities:["chat","text-generation","reasoning","analysis","vision"],inputCostPer1KTokens:.01,outputCostPer1KTokens:.03,supportedFeatures:["chat","function-calling","vision","json-mode"]}],["gpt-4o",{name:"gpt-4o",displayName:"GPT-4o",endpoint:"https://api.openai.com/v1/chat/completions",format:"openai",description:"Versatile model supporting text, images, audio with fast response",maxTokens:4096,contextLength:128e3,capabilities:["chat","text-generation","vision","audio-processing","multimodal"],inputCostPer1KTokens:.005,outputCostPer1KTokens:.015,supportedFeatures:["chat","function-calling","vision","audio","json-mode"]}],["gpt-4o-mini",{name:"gpt-4o-mini",displayName:"GPT-4o Mini",endpoint:"https://api.openai.com/v1/chat/completions",format:"openai",description:"Compact and efficient version of GPT-4o with lower cost",maxTokens:16384,contextLength:128e3,capabilities:["chat","text-generation","vision"],inputCostPer1KTokens:15e-5,outputCostPer1KTokens:6e-4,supportedFeatures:["chat","function-calling","vision","json-mode"]}],["gpt-3.5-turbo",{name:"gpt-3.5-turbo",displayName:"GPT-3.5 Turbo",endpoint:"https://api.openai.com/v1/chat/completions",format:"openai",description:"Fast and cost-effective, suitable for most conversational 
tasks",maxTokens:4096,contextLength:16385,capabilities:["chat","text-generation","code-generation"],inputCostPer1KTokens:5e-4,outputCostPer1KTokens:.0015,supportedFeatures:["chat","function-calling"]}],["gpt-3.5-turbo-instruct",{name:"gpt-3.5-turbo-instruct",displayName:"GPT-3.5 Turbo Instruct",endpoint:"https://api.openai.com/v1/completions",format:"openai",description:"Instruction-tuned version for text completion tasks",maxTokens:4096,contextLength:4097,capabilities:["text-completion","instruction-following"],inputCostPer1KTokens:.0015,outputCostPer1KTokens:.002,supportedFeatures:["completions"]}],["text-embedding-ada-002",{name:"text-embedding-ada-002",displayName:"Text Embedding Ada 002",endpoint:"https://api.openai.com/v1/embeddings",format:"openai",description:"Text embedding model, 1536 dimensions, suitable for retrieval and similarity",contextLength:8191,capabilities:["embeddings","semantic-search"],inputCostPer1KTokens:1e-4,supportedFeatures:["embeddings"]}],["text-embedding-3-small",{name:"text-embedding-3-small",displayName:"Text Embedding 3 Small",endpoint:"https://api.openai.com/v1/embeddings",format:"openai",description:"Small text embedding model, 1536 dimensions, balance of performance and cost",contextLength:8191,capabilities:["embeddings","semantic-search"],inputCostPer1KTokens:2e-5,supportedFeatures:["embeddings"]}],["dall-e-3",{name:"dall-e-3",displayName:"DALL-E 3",endpoint:"https://api.openai.com/v1/images/generations",format:"openai",description:"Advanced image generation model producing high-quality, high-resolution images",capabilities:["image-generation","creative-design"],inputCostPer1KTokens:.04,supportedFeatures:["image-generation","variations","edits"]}],["whisper-1",{name:"whisper-1",displayName:"Whisper",endpoint:"https://api.openai.com/v1/audio/transcriptions",format:"openai",description:"Speech recognition model supporting multilingual transcription and translation",capabilities:["speech-recognition","audio-transcription","translation"],inputCostPer1KTokens:.006,supportedFeatures:["transcriptions","translations"]}],["tts-1-hd",{name:"tts-1-hd",displayName:"TTS-1 HD",endpoint:"https://api.openai.com/v1/audio/speech",format:"openai",description:"High-quality text-to-speech with multiple voice options",capabilities:["speech-synthesis","text-to-speech"],inputCostPer1KTokens:.015,supportedFeatures:["speech","voice-selection"]}],["text-moderation-latest",{name:"text-moderation-latest",displayName:"Moderation Latest",endpoint:"https://api.openai.com/v1/moderations",format:"openai",description:"Content moderation model for detecting harmful content",capabilities:["content-moderation","safety"],inputCostPer1KTokens:1e-4,supportedFeatures:["moderation"]}]]);function K(t){return h.get(t)}function w(){return Array.from(h.values())}function V(t){for(const e of h.values())if(e.name===t)return e}function U(){return Array.from(h.keys())}function M(){return w().filter(t=>t.capabilities.includes("chat"))}function Q(){return w().filter(t=>t.capabilities.includes("text-completion"))}function j(){return w().filter(t=>t.capabilities.includes("embeddings"))}function W(){return w().filter(t=>t.capabilities.includes("vision")||t.capabilities.includes("image-generation"))}function F(){return w().filter(t=>t.capabilities.includes("audio-processing")||t.capabilities.includes("speech-recognition")||t.capabilities.includes("speech-synthesis"))}function G(){return w().filter(t=>t.capabilities.includes("multimodal"))}function X(){const 
t=["gpt-4o","gpt-4o-mini","gpt-4-turbo","gpt-3.5-turbo","text-embedding-3-small","dall-e-3"];return w().filter(e=>t.includes(e.name))}function z(){return w().filter(t=>t.inputCostPer1KTokens&&t.inputCostPer1KTokens<.001).sort((t,e)=>(t.inputCostPer1KTokens||0)-(e.inputCostPer1KTokens||0))}function J(){return w().filter(t=>t.contextLength&&t.contextLength>=128e3).sort((t,e)=>(e.contextLength||0)-(t.contextLength||0))}function Z(t,e,o=0){const s=(t.inputCostPer1KTokens||0)/1e3*e,a=(t.outputCostPer1KTokens||0)/1e3*o;return{inputTokens:e,outputTokens:o,inputCost:s,outputCost:a,totalCost:s+a}}function Y(t){let e=w();switch(t.taskType){case"chat":e=e.filter(o=>o.capabilities.includes("chat"));break;case"completion":e=e.filter(o=>o.capabilities.includes("text-completion"));break;case"embedding":e=e.filter(o=>o.capabilities.includes("embeddings"));break;case"image":e=e.filter(o=>o.capabilities.includes("image-generation")||o.capabilities.includes("vision"));break;case"audio":e=e.filter(o=>o.capabilities.includes("speech-recognition")||o.capabilities.includes("speech-synthesis"));break}return t.contextLength&&(e=e.filter(o=>o.contextLength&&o.contextLength>=t.contextLength)),t.features&&t.features.length>0&&(e=e.filter(o=>t.features.every(s=>o.supportedFeatures?.includes(s)||o.capabilities.includes(s)))),t.budget&&e.sort((o,s)=>(o.inputCostPer1KTokens||0)-(s.inputCostPer1KTokens||0)),e.slice(0,5)}function ee(t){const e=Object.values(g);for(const o of e)if(o===t)return o;return null}class L{constructor(e){if(this.apiKey=e.apiKey,this.modelType=e.modelType||g.GPT3_5_TURBO,this.timeout=e.timeout||3e4,this.organization=e.organization,this.baseURL=e.baseURL||"https://api.openai.com/v1",!this.apiKey)throw new Error("API Key cannot be empty");if(!h.get(this.modelType))throw new Error(`Unsupported model type: ${this.modelType}`)}async chat(e,o){const s=[];o?.systemPrompt&&s.push({role:"system",content:o.systemPrompt}),s.push({role:"user",content:e});const a=await this.chatCompletion(s,{temperature:o?.temperature,maxTokens:o?.maxTokens,stream:!1,topP:o?.topP,frequencyPenalty:o?.frequencyPenalty,presencePenalty:o?.presencePenalty,stop:o?.stop});return this.extractContent(a)}async chatCompletion(e,o){const s=o?.modelType||this.modelType,a=h.get(s);if(!a)throw new Error(`Unsupported model type: ${s}`);const i=o?.temperature??.7,n=o?.maxTokens??1e3,r=o?.stream??!1,p=a.endpoint,c=this.buildOpenAIRequest(a.name,e,i,n,r,o);try{return await this.makeRequest(p,c,r)}catch(m){throw new Error(`OpenAI request failed: ${m.message}`)}}async chatStream(e,o,s){const a=s?.modelType||this.modelType,i=h.get(a);if(!i)throw new Error(`Unsupported model type: ${a}`);const n=s?.temperature??.7,r=s?.maxTokens??1e3,p=this.buildOpenAIRequest(i.name,e,n,r,!0,s);try{await this.makeStreamRequest(i.endpoint,p,o)}catch(c){throw new Error(`Streaming request failed: ${c.message}`)}}async generateImage(e,o){const s=o?.modelType||g.DALL_E_3;if(s!==g.DALL_E_2&&s!==g.DALL_E_3)throw new Error("Image generation only supports DALL-E models");const a=h.get(s);if(!a)throw new Error(`Unsupported model type: ${s}`);const i={model:a.name,prompt:e,n:o?.n||1,size:o?.size||"1024x1024",quality:o?.quality||"standard",style:o?.style||"vivid",response_format:o?.responseFormat||"url"};try{const n=await this.makeRequest(a.endpoint,i,!1);if(n.data&&Array.isArray(n.data))return n.data.map(r=>o?.responseFormat==="b64_json"?r.b64_json:r.url);throw new Error("Invalid response format from image generation")}catch(n){throw new Error(`Image generation failed: 
${n.message}`)}}async createEmbeddings(e,o){const s=o?.modelType||g.TEXT_EMBEDDING_ADA_002,a=h.get(s);if(!a)throw new Error(`Unsupported model type: ${s}`);const i={model:a.name,input:e};o?.dimensions&&a.name===g.TEXT_EMBEDDING_3_SMALL&&(i.dimensions=o.dimensions);try{const n=await this.makeRequest(a.endpoint,i,!1);if(n.data&&Array.isArray(n.data))return n.data.map(r=>r.embedding);throw new Error("Invalid response format from embeddings")}catch(n){throw new Error(`Embedding creation failed: ${n.message}`)}}async transcribeAudio(e,o){const s=o?.modelType||g.WHISPER_1,a=h.get(s);if(!a)throw new Error(`Unsupported model type: ${s}`);const i=new FormData;if(typeof e=="string")throw new Error("File path/Base64 support requires additional implementation");i.append("file",e),i.append("model",a.name),o?.language&&i.append("language",o.language),o?.prompt&&i.append("prompt",o.prompt),o?.responseFormat&&i.append("response_format",o.responseFormat),o?.temperature!==void 0&&i.append("temperature",o.temperature.toString());try{const n=await this.makeFormDataRequest(a.endpoint,i,!1);return n.text||n.transcription||""}catch(n){throw new Error(`Audio transcription failed: ${n.message}`)}}async textToSpeech(e,o){const s=o?.modelType||g.TTS_1_HD,a=h.get(s);if(!a)throw new Error(`Unsupported model type: ${s}`);const i={model:a.name,input:e,voice:o?.voice||"alloy",response_format:o?.responseFormat||"mp3",speed:o?.speed||1};try{return await this.makeRequest(a.endpoint,i,!1,!0)}catch(n){throw new Error(`Text-to-speech conversion failed: ${n.message}`)}}async moderateContent(e,o){const s=o?.modelType||g.MODERATION_LATEST,a=h.get(s);if(!a)throw new Error(`Unsupported model type: ${s}`);const i={model:a.name,input:e};try{return(await this.makeRequest(a.endpoint,i,!1)).results||[]}catch(n){throw new Error(`Content moderation failed: ${n.message}`)}}setModel(e){if(!h.get(e))throw new Error(`Unsupported model type: ${e}`);this.modelType=e}getCurrentModel(){const e=h.get(this.modelType);if(!e)throw new Error(`Model configuration does not exist: ${this.modelType}`);return{name:e.name,displayName:e.displayName,description:e.description}}async testConnection(){try{const e=await this.chat('Hello, respond with "OK" if you can hear me.');return{success:!0,model:this.modelType,response:e}}catch(e){return{success:!1,model:this.modelType,error:e.message}}}estimateCost(e,o=0,s){const a=s||this.modelType,i=h.get(a);if(!i)throw new Error(`Unsupported model type: ${a}`);const n=(i.inputCostPer1KTokens||0)/1e3*e,r=(i.outputCostPer1KTokens||0)/1e3*o;return{inputCost:n,outputCost:r,totalCost:n+r}}buildOpenAIRequest(e,o,s,a,i,n){const r={model:e,messages:o,temperature:s,max_tokens:a,stream:i};return n?.topP!==void 0&&(r.top_p=n.topP),n?.frequencyPenalty!==void 0&&(r.frequency_penalty=n.frequencyPenalty),n?.presencePenalty!==void 0&&(r.presence_penalty=n.presencePenalty),n?.stop!==void 0&&(r.stop=n.stop),r}async makeRequest(e,o,s,a=!1){const i=new AbortController,n=setTimeout(()=>i.abort(),this.timeout);try{const r=e.startsWith("http")?e:`${this.baseURL}${e}`,p={Authorization:`Bearer ${this.apiKey}`,"Content-Type":"application/json"};this.organization&&(p["OpenAI-Organization"]=this.organization);const c=await fetch(r,{method:"POST",headers:p,body:JSON.stringify(o),signal:i.signal});if(clearTimeout(n),!c.ok){const m=await c.text();try{const d=JSON.parse(m);throw new Error(`HTTP ${c.status}: ${d.error?.message||m}`)}catch{throw new Error(`HTTP ${c.status}: ${m}`)}}return a?await c.arrayBuffer():s?c.body:await c.json()}catch(r){throw 
clearTimeout(n),r.name==="AbortError"?new Error(`Request timeout (${this.timeout}ms)`):r}}async makeFormDataRequest(e,o,s){const a=new AbortController,i=setTimeout(()=>a.abort(),this.timeout);try{const n=e.startsWith("http")?e:`${this.baseURL}${e}`,r={Authorization:`Bearer ${this.apiKey}`};this.organization&&(r["OpenAI-Organization"]=this.organization);const p=await fetch(n,{method:"POST",headers:r,body:o,signal:a.signal});if(clearTimeout(i),!p.ok){const c=await p.text();try{const m=JSON.parse(c);throw new Error(`HTTP ${p.status}: ${m.error?.message||c}`)}catch{throw new Error(`HTTP ${p.status}: ${c}`)}}return s?p.body:await p.json()}catch(n){throw clearTimeout(i),n.name==="AbortError"?new Error(`Request timeout (${this.timeout}ms)`):n}}async makeStreamRequest(e,o,s){const a=await this.makeRequest(e,o,!0);if(!a)throw new Error("Failed to get streaming response");const i=a.getReader(),n=new TextDecoder("utf-8");let r="";try{for(;;){const{done:p,value:c}=await i.read();if(p){s("",!0);break}r+=n.decode(c,{stream:!0});const m=r.split(`
47
- `);r=m.pop()||"";for(const d of m)if(d.startsWith("data: ")){const l=d.slice(6);if(l==="[DONE]"){s("",!0);return}try{const u=JSON.parse(l);u.choices?.[0]?.delta?.content&&s(u.choices[0].delta.content,!1)}catch{}}}}finally{i.releaseLock()}}extractContent(e){if(e.choices?.[0]?.message?.content)return e.choices[0].message.content;if(e.data?.[0]?.b64_json)return e.data[0].b64_json;if(e.data?.[0]?.url)return e.data[0].url;if(e.text)return e.text;throw new Error("Unable to parse response content")}async analyzeOHLCV(e,o,s,a){const i=o||"Based on these OHLCV data, predict the next period",n=s||1;if(!Number.isInteger(n)||n<=0)throw new Error(`Invalid count parameter: ${n}. Must be a positive integer.`);const r=50;if(n>r)throw new Error(`Count parameter too large: ${n}. Maximum allowed is ${r}. Please reduce the count or split your request.`);const p=n===1?"Return EXACTLY 1 OHLCV object for the next period.":`Return EXACTLY ${n} consecutive OHLCV objects for the next ${n} periods.`,c=`You are a professional financial data analysis AI. The user will give you an array of OHLCV (Open, High, Low, Close, Volume) data.
48
- Your task: ${i}
186
+
187
+ `:""}
188
+ 请提供:
189
+ 1. 清晰且有结构的分析
190
+ 2. 数据的关键观察结果
191
+ 3. 潜在的启示或见解
192
+ 4. 建议或注意事项(如适用)
193
+ 请以组织良好的文本分析形式回复。`;c+=`
194
+
195
+ ${u}`;const p=JSON.stringify(e,null,2);let g="";a?g=t==="en"?`Here is the OHLCV data (${i} periods):
196
+ ${p}
197
+ My specific question or request: ${a}
198
+ Please analyze this data considering my request above.`:`这是OHLCV数据(${i}个周期):
199
+ ${p}
200
+ 我的具体问题或需求:${a}
201
+ 请根据我的上述需求分析这些数据。`:g=t==="en"?`Here is the OHLCV data (${i} periods):
202
+ ${p}
203
+ Please analyze this data as requested.`:`这是OHLCV数据(${i}个周期):
204
+ ${p}
205
+ 请按要求分析这些数据。`;const y=[{role:"system",content:c},{role:"user",content:g}];try{const h=await this.chatCompletion(y,t,{temperature:r?.temperature||.5,maxTokens:r?.maxTokens||1500,stream:!1,modelType:this.modelType,topP:r?.topP,frequencyPenalty:r?.frequencyPenalty,presencePenalty:r?.presencePenalty,stop:r?.stop});return this.extractContent(h)}catch(h){throw new Error(`OHLCV analysis failed: ${h.message}`)}}async analyzeOHLCVEnhanced(e,t,s,a,r=!1,o){if(r){const i=t==="en"?`You are a professional financial data analyst. Analyze the OHLCV data and provide a structured response with:
206
+ 1. Summary (brief overview)
207
+ 2. Details (key observations, 3-5 points)
208
+ 3. Recommendations (actionable insights, 2-3 points)
209
+ Format as JSON: {"summary": "...", "details": ["...", "..."], "recommendations": ["...", "..."]}`:`您是一位专业的金融数据分析师。分析OHLCV数据并提供结构化响应:
210
+ 1. 总结(简要概述)
211
+ 2. 详情(关键观察结果,3-5点)
212
+ 3. 建议(可操作的见解,2-3点)
213
+ 格式化为JSON:{"summary": "...", "details": ["...", "..."], "recommendations": ["...", "..."]}`,d=JSON.stringify(e,null,2);let l=t==="en"?`Analyze this OHLCV data (${e.length} periods):
214
+ ${d}`:`分析此OHLCV数据(${e.length}个周期):
215
+ ${d}`;a&&(l+=t==="en"?`
216
+
217
+ Additional request: ${a}`:`
218
+
219
+ 附加要求:${a}`);const u=[{role:"system",content:i},{role:"user",content:l}];try{const m=await this.chatCompletion(u,t,{temperature:o?.temperature||.4,maxTokens:o?.maxTokens||1200,stream:!1,modelType:this.modelType}),c=this.extractContent(m);try{const p=JSON.parse(c);if(p.summary&&Array.isArray(p.details)&&Array.isArray(p.recommendations))return p}catch{}return c}catch(m){throw new Error(`Structured OHLCV analysis failed: ${m.message}`)}}return this.analyzeOHLCV(e,t,s,a,o)}async predictingOHLCV(e,t,s,a){const r=t||"Based on these OHLCV data, predict the next period",o=s||1;if(!Number.isInteger(o)||o<=0)throw new Error(`Invalid count parameter: ${o}. Must be a positive integer.`);const i=50;if(o>i)throw new Error(`Count parameter too large: ${o}. Maximum allowed is ${i}. Please reduce the count or split your request.`);const d=o===1?"Return EXACTLY 1 OHLCV object for the next period.":`Return EXACTLY ${o} consecutive OHLCV objects for the next ${o} periods.`,l=`You are a professional financial data analysis AI. The user will give you an array of OHLCV (Open, High, Low, Close, Volume) data.
220
+ Your task: ${r}
49
221
  CRITICAL RULES:
50
- 1. ${p}
222
+ 1. ${d}
51
223
  2. Return ONLY a JSON array of OHLCV objects, NO explanations, comments, or other text
52
224
  3. The OHLCV array format must match: [{open, high, low, close, volume}, ...]
53
225
  4. All numbers must be valid numbers
@@ -55,14 +227,15 @@ CRITICAL RULES:
55
227
  6. Maintain consistency with historical trends and patterns
56
228
  7. For technical analysis, provide reasonable values based on typical patterns
57
229
  8. Do not include markdown formatting, only pure JSON
58
- ${n===1?`Example of valid response for 1 period:
59
- [{"open": 115.5, "high": 118.0, "low": 114.0, "close": 117.0, "volume": 1350000}]`:`Example of valid response for ${n} periods:
230
+ ${o===1?`Example of valid response for 1 period:
231
+ [{"open": 115.5, "high": 118.0, "low": 114.0, "close": 117.0, "volume": 1350000}]`:`Example of valid response for ${o} periods:
60
232
  [
61
233
  {"open": 115.5, "high": 118.0, "low": 114.0, "close": 117.0, "volume": 1350000},
62
234
  {"open": 117.5, "high": 120.0, "low": 116.0, "close": 119.0, "volume": 1400000}
63
- ${n>2?`,
64
- ... ${n-2} more OHLCV objects following the same pattern`:""}
65
- ]`}`,m=JSON.stringify(e,null,2),d=`Here is the historical OHLCV data (${e.length} periods):
66
- ${m}
67
- Please process this data according to the system instructions. Remember to return EXACTLY ${n} OHLCV object(s) in a JSON array with no additional text.`,l=[{role:"system",content:c},{role:"user",content:d}];try{const u=n*50+100,x=Math.max(a?.maxTokens||1e3,u),T=await this.chatCompletion(l,{temperature:a?.temperature||.3,maxTokens:x,stream:!1,modelType:a?.modelType,topP:a?.topP,frequencyPenalty:a?.frequencyPenalty,presencePenalty:a?.presencePenalty,stop:a?.stop}),E=this.extractContent(T),f=this.parseOHLCVResponse(E);if(f.length!==n)throw new Error(`AI returned ${f.length} OHLCV objects, but expected ${n}.`);return f}catch(u){throw new Error(`OHLCV analysis failed: ${u.message}`)}}parseOHLCVResponse(e){try{const o=JSON.parse(e);if(!Array.isArray(o))throw new Error("Response is not in array format");return o.map((a,i)=>{if(typeof a!="object"||a===null)throw new Error(`Element ${i} is not a valid object`);const{open:n,high:r,low:p,close:c,volume:m}=a,d=["open","high","low","close","volume"];for(const l of d)if(typeof a[l]!="number"||isNaN(a[l]))throw new Error(`Element ${i} field ${l} is not a valid number`);if(r<p)throw new Error(`Element ${i}: high cannot be lower than low`);if(c<p||c>r)throw new Error(`Element ${i}: close must be between low and high`);return{open:Number(n),high:Number(r),low:Number(p),close:Number(c),volume:Number(m)}})}catch(o){const s=e.match(/\[[\s\S]*\]/);if(s)return this.parseOHLCVResponse(s[0]);throw new Error(`Unable to parse AI returned OHLCV data: ${o}
68
- Original content: ${e.substring(0,200)}...`)}}}function te(t,e){return new L({apiKey:t,modelType:e})}exports.ALIYUN_MODELS=b;exports.AliYunModelType=k;exports.AliyunAI=_;exports.DEEPSEEK_MODELS=y;exports.DeepSeekAI=C;exports.DeepSeekModelType=v;exports.OPENAI_MODELS=h;exports.OpenAI=L;exports.OpenAIModelType=g;exports.createAliyunAI=A;exports.createDeepSeekAI=H;exports.createOpenAI=te;exports.estimateCost=Z;exports.getAllDeepSeekModels=R;exports.getAllModels=P;exports.getAllOpenAIModels=w;exports.getAudioModelsOpenAI=F;exports.getAvailableAliYunModelTypes=q;exports.getAvailableDeepSeekModelTypes=I;exports.getAvailableOpenAIModelTypes=U;exports.getChatModels=M;exports.getCompletionModels=Q;exports.getCostEfficientModels=z;exports.getDeepSeekModel=$;exports.getDeepSeekModelByName=D;exports.getEmbeddingModels=j;exports.getHighContextModels=J;exports.getLatestModels=X;exports.getModel=N;exports.getModelByName=O;exports.getMultimodalModelsOpenAI=G;exports.getOpenAIModel=K;exports.getOpenAIModelByName=V;exports.getVisionModelsOpenAI=W;exports.stringToAliYunModelType=S;exports.stringToDeepSeekModelType=B;exports.stringToOpenAIModelType=ee;exports.suggestModel=Y;
235
+ ${o>2?`,
236
+ ... ${o-2} more OHLCV objects following the same pattern`:""}
237
+ ]`}`,u=JSON.stringify(e,null,2),m=`Here is the historical OHLCV data (${e.length} periods):
238
+ ${u}
239
+ Please process this data according to the system instructions. Remember to return EXACTLY ${o} OHLCV object(s) in a JSON array with no additional text.`,c=[{role:"system",content:l},{role:"user",content:m}];try{const p=o*50+100,g=Math.max(a?.maxTokens||1e3,p),y=await this.chatCompletion(c,"en",{temperature:a?.temperature||.3,maxTokens:g,stream:!1,modelType:this.modelType,topP:a?.topP,frequencyPenalty:a?.frequencyPenalty,presencePenalty:a?.presencePenalty,stop:a?.stop}),h=this.extractContent(y),b=this.parseOHLCVResponse(h);if(b.length!==o)throw new Error(`AI returned ${b.length} OHLCV objects, but expected ${o}.`);return b}catch(p){throw new Error(`OHLCV analysis failed: ${p.message}`)}}async generateImage(e,t){const s=t?.modelType||T.DALL_E_3;if(s!==T.DALL_E_2&&s!==T.DALL_E_3)throw new Error("Image generation only supports DALL-E models");const a=x.get(s);if(!a)throw new Error(`Unsupported model type: ${s}`);const r={model:a.name,prompt:e,n:t?.n||1,size:t?.size||"1024x1024",quality:t?.quality||"standard",style:t?.style||"vivid",response_format:t?.responseFormat||"url"};try{const o=await this.makeRequest(a.endpoint,r,!1);if(o.data&&Array.isArray(o.data))return o.data.map(i=>t?.responseFormat==="b64_json"?i.b64_json:i.url);throw new Error("Invalid response format from image generation")}catch(o){throw new Error(`Image generation failed: ${o.message}`)}}async createEmbeddings(e,t){const s=t?.modelType||T.TEXT_EMBEDDING_ADA_002,a=x.get(s);if(!a)throw new Error(`Unsupported model type: ${s}`);const r={model:a.name,input:e};t?.dimensions&&a.name===T.TEXT_EMBEDDING_3_SMALL&&(r.dimensions=t.dimensions);try{const o=await this.makeRequest(a.endpoint,r,!1);if(o.data&&Array.isArray(o.data))return o.data.map(i=>i.embedding);throw new Error("Invalid response format from embeddings")}catch(o){throw new Error(`Embedding creation failed: ${o.message}`)}}async transcribeAudio(e,t){const s=t?.modelType||T.WHISPER_1,a=x.get(s);if(!a)throw new Error(`Unsupported model type: ${s}`);const r=new FormData;if(typeof e=="string")throw new Error("File path/Base64 support requires additional implementation");r.append("file",e),r.append("model",a.name),t?.language&&r.append("language",t.language),t?.prompt&&r.append("prompt",t.prompt),t?.responseFormat&&r.append("response_format",t.responseFormat),t?.temperature!==void 0&&r.append("temperature",t.temperature.toString());try{const o=await this.makeFormDataRequest(a.endpoint,r,!1);return o.text||o.transcription||""}catch(o){throw new Error(`Audio transcription failed: ${o.message}`)}}async textToSpeech(e,t){const s=t?.modelType||T.TTS_1_HD,a=x.get(s);if(!a)throw new Error(`Unsupported model type: ${s}`);const r={model:a.name,input:e,voice:t?.voice||"alloy",response_format:t?.responseFormat||"mp3",speed:t?.speed||1};try{return await this.makeRequest(a.endpoint,r,!1,!0)}catch(o){throw new Error(`Text-to-speech conversion failed: ${o.message}`)}}async moderateContent(e,t){const s=t?.modelType||T.MODERATION_LATEST,a=x.get(s);if(!a)throw new Error(`Unsupported model type: ${s}`);const r={model:a.name,input:e};try{return(await this.makeRequest(a.endpoint,r,!1)).results||[]}catch(o){throw new Error(`Content moderation failed: ${o.message}`)}}setModel(e){if(!x.get(e))throw new Error(`Unsupported model type: ${e}`);this.modelType=e}getCurrentModel(){const e=x.get(this.modelType);if(!e)throw new Error(`Model configuration does not exist: ${this.modelType}`);return{name:e.name,displayName:e.displayName,description:e.description}}async testConnection(){try{const e=await 
this.chat('Hello, respond with "OK" if you can hear me.',"en");return{success:!0,model:this.modelType,response:e}}catch(e){return{success:!1,model:this.modelType,error:e.message}}}estimateCost(e,t=0,s){const a=s||this.modelType,r=x.get(a);if(!r)throw new Error(`Unsupported model type: ${a}`);const o=(r.inputCostPer1KTokens||0)/1e3*e,i=(r.outputCostPer1KTokens||0)/1e3*t;return{inputCost:o,outputCost:i,totalCost:o+i}}buildOpenAIRequest(e,t,s,a,r,o){const i={model:e,messages:t,temperature:s,max_tokens:a,stream:r};return o?.topP!==void 0&&(i.top_p=o.topP),o?.frequencyPenalty!==void 0&&(i.frequency_penalty=o.frequencyPenalty),o?.presencePenalty!==void 0&&(i.presence_penalty=o.presencePenalty),o?.stop!==void 0&&(i.stop=o.stop),i}async makeRequest(e,t,s,a=!1){const r=new AbortController,o=setTimeout(()=>r.abort(),this.timeout);try{const i=e.startsWith("http")?e:`${this.baseURL}${e}`,d={Authorization:`Bearer ${this.apiKey}`,"Content-Type":"application/json"};this.organization&&(d["OpenAI-Organization"]=this.organization);const l=await fetch(i,{method:"POST",headers:d,body:JSON.stringify(t),signal:r.signal});if(clearTimeout(o),!l.ok){const u=await l.text();try{const m=JSON.parse(u);throw new Error(`HTTP ${l.status}: ${m.error?.message||u}`)}catch{throw new Error(`HTTP ${l.status}: ${u}`)}}return a?await l.arrayBuffer():s?l.body:await l.json()}catch(i){throw clearTimeout(o),i.name==="AbortError"?new Error(`Request timeout (${this.timeout}ms)`):i}}async makeFormDataRequest(e,t,s){const a=new AbortController,r=setTimeout(()=>a.abort(),this.timeout);try{const o=e.startsWith("http")?e:`${this.baseURL}${e}`,i={Authorization:`Bearer ${this.apiKey}`};this.organization&&(i["OpenAI-Organization"]=this.organization);const d=await fetch(o,{method:"POST",headers:i,body:t,signal:a.signal});if(clearTimeout(r),!d.ok){const l=await d.text();try{const u=JSON.parse(l);throw new Error(`HTTP ${d.status}: ${u.error?.message||l}`)}catch{throw new Error(`HTTP ${d.status}: ${l}`)}}return s?d.body:await d.json()}catch(o){throw clearTimeout(r),o.name==="AbortError"?new Error(`Request timeout (${this.timeout}ms)`):o}}async makeStreamRequest(e,t,s){const a=await this.makeRequest(e,t,!0);if(!a)throw new Error("Failed to get streaming response");const r=a.getReader(),o=new TextDecoder("utf-8");let i="";try{for(;;){const{done:d,value:l}=await r.read();if(d){s("",!0);break}i+=o.decode(l,{stream:!0});const u=i.split(`
240
+ `);i=u.pop()||"";for(const m of u)if(m.startsWith("data: ")){const c=m.slice(6);if(c==="[DONE]"){s("",!0);return}try{const p=JSON.parse(c);p.choices?.[0]?.delta?.content&&s(p.choices[0].delta.content,!1)}catch{}}}}finally{r.releaseLock()}}extractContent(e){if(e.choices?.[0]?.message?.content)return e.choices[0].message.content;if(e.data?.[0]?.b64_json)return e.data[0].b64_json;if(e.data?.[0]?.url)return e.data[0].url;if(e.text)return e.text;throw new Error("Unable to parse response content")}parseOHLCVResponse(e){try{const t=JSON.parse(e);if(!Array.isArray(t))throw new Error("Response is not in array format");return t.map((a,r)=>{if(typeof a!="object"||a===null)throw new Error(`Element ${r} is not a valid object`);const{open:o,high:i,low:d,close:l,volume:u}=a,m=["open","high","low","close","volume"];for(const c of m)if(typeof a[c]!="number"||isNaN(a[c]))throw new Error(`Element ${r} field ${c} is not a valid number`);if(i<d)throw new Error(`Element ${r}: high cannot be lower than low`);if(l<d||l>i)throw new Error(`Element ${r}: close must be between low and high`);return{open:Number(o),high:Number(i),low:Number(d),close:Number(l),volume:Number(u)}})}catch(t){const s=e.match(/\[[\s\S]*\]/);if(s)return this.parseOHLCVResponse(s[0]);throw new Error(`Unable to parse AI returned OHLCV data: ${t}
241
+ Original content: ${e.substring(0,200)}...`)}}}function ce(n,e){return new H({apiKey:n,modelType:e})}exports.ALIYUN_MODELS=k;exports.AliYunModelType=_;exports.AliyunAI=S;exports.DEEPSEEK_MODELS=$;exports.DeepSeekAI=q;exports.DeepSeekModelType=N;exports.OPENAI_MODELS=x;exports.OpenAI=H;exports.OpenAIModelType=T;exports.createAliyunAI=B;exports.createDeepSeekAI=z;exports.createOpenAI=ce;exports.estimateCost=se;exports.getAllDeepSeekModels=K;exports.getAllModels=A;exports.getAllOpenAIModels=L;exports.getAudioModelsOpenAI=ee;exports.getAvailableAliYunModelTypes=R;exports.getAvailableDeepSeekModelTypes=U;exports.getAvailableOpenAIModelTypes=G;exports.getChatModels=J;exports.getCompletionModels=X;exports.getCostEfficientModels=oe;exports.getDeepSeekModel=M;exports.getDeepSeekModelByName=F;exports.getEmbeddingModels=Y;exports.getHighContextModels=ae;exports.getLatestModels=ne;exports.getModel=V;exports.getModelByName=I;exports.getMultimodalModelsOpenAI=te;exports.getOpenAIModel=j;exports.getOpenAIModelByName=W;exports.getVisionModelsOpenAI=Z;exports.stringToAliYunModelType=D;exports.stringToDeepSeekModelType=Q;exports.stringToOpenAIModelType=re;exports.suggestModel=ie;
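The exports at the end of this file suggest how a consumer would drive the 1.1.1 build. The sketch below is not taken from the package's own documentation; it is a hedged usage example assembled from the minified code above. The `require` target, the environment-variable name, and the sample candle values are illustrative assumptions; `createDeepSeekAI`, `DeepSeekModelType`, and `predictingOHLCV(data, task, count, options)` are the names visible in the exports and method bodies in this diff.

// Hedged usage sketch (assumptions noted inline): a CommonJS consumer of ohlcv-ai 1.1.1.
const { createDeepSeekAI, DeepSeekModelType } = require("ohlcv-ai");

async function main() {
  // createDeepSeekAI(apiKey, modelType) wraps `new DeepSeekAI({ apiKey, modelType })`;
  // the constructor throws if the key is empty or the model type is not registered.
  const ai = createDeepSeekAI(
    process.env.DEEPSEEK_API_KEY,        // placeholder: supply a real DeepSeek key
    DeepSeekModelType.DEEPSEEK_FINANCE   // model id as listed in the DeepSeekModelType enum in this file
  );

  const history = [
    { open: 115.5, high: 118.0, low: 114.0, close: 117.0, volume: 1350000 },
    { open: 117.5, high: 120.0, low: 116.0, close: 119.0, volume: 1400000 },
  ];

  // predictingOHLCV(data, task, count, options): count must be a positive integer <= 50.
  // The client asks the model for a pure JSON array, validates every object
  // (high >= low, close within [low, high], all fields numeric), and rejects if the
  // returned length differs from `count`.
  const next = await ai.predictingOHLCV(
    history,
    "Based on these OHLCV data, predict the next period",
    1,
    { temperature: 0.3 }
  );
  console.log(next); // e.g. [{ open, high, low, close, volume }]
}

main().catch(console.error);

The language-aware analysis path added in this release follows the same shape: the OpenAI client's analyzeOHLCV(data, language, focus, userRequest, options) takes "en" or "cn" plus a focus of "trend", "volume", "technical", or "comprehensive" and returns a prose analysis rather than a JSON prediction.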