aicommit2 2.5.3 → 2.5.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +33 -17
- package/dist/ai.service-6f818099.mjs +11 -0
- package/dist/anthropic.service-d0af34bb.mjs +1 -0
- package/dist/bedrock.service-7f01f1d1.mjs +1 -0
- package/dist/cli-9533dfd6.mjs +297 -0
- package/dist/cli.mjs +1 -316
- package/dist/codestral.service-ccd13cd7.mjs +1 -0
- package/dist/cohere.service-e72f068e.mjs +1 -0
- package/dist/deep-seek.service-f1fce159.mjs +1 -0
- package/dist/gemini.service-ea4399b1.mjs +1 -0
- package/dist/github-models.service-16ce699f.mjs +5 -0
- package/dist/groq.service-b7d23bbc.mjs +1 -0
- package/dist/hugging-face.service-760afbf2.mjs +2 -0
- package/dist/mistral.service-fe74f317.mjs +1 -0
- package/dist/ollama.service-3312d7f0.mjs +1 -0
- package/dist/openai-8b372df6.mjs +16 -0
- package/dist/openai-compatible.service-bf183fc9.mjs +1 -0
- package/dist/openai.service-1d3ec4cc.mjs +1 -0
- package/dist/perplexity.service-85ac5631.mjs +1 -0
- package/package.json +1 -1
|
@@ -0,0 +1 @@
|
|
|
1
|
+
import f from"chalk";import{concatMap as u,from as g,map as C,catchError as v}from"rxjs";import{fromPromise as y}from"rxjs/internal/observable/innerFrom";import{A as M,l as k,a as R,b as _,c as x,e as P,d as E}from"./ai.service-6f818099.mjs";import{D as N,b as $,g as D,k as A,n as S,H as B}from"./cli-9533dfd6.mjs";import"fs";import"path";import"@pacote/xxhash";import"winston";import"cleye";import"module";import"crypto";import"os";import"node:buffer";import"node:path";import"node:child_process";import"node:process";import"child_process";import"node:url";import"node:os";import"assert";import"events";import"node:fs";import"buffer";import"stream";import"util";import"node:util";import"inquirer";import"fs/promises";import"readline";import"figlet";import"gradient-string";import"ora";import"inquirer-reactive-list-prompt";import"winston-daily-rotate-file";import"axios";import"url";import"node:fs/promises";import"chokidar";import"rxjs/operators";class U extends M{constructor(t){super(t),this.params=t,this.apiKey="",this.colors={primary:"#e28c58",secondary:"#fff"},this.serviceName=f.bgHex(this.colors.primary).hex(this.colors.secondary).bold("[Codestral]"),this.errorPrefix=f.red.bold("[Codestral]"),this.apiKey=this.params.config.key}getServiceSpecificErrorMessage(t){const e=t.message||"";return e.includes("API key")||e.includes("api_key")?"Invalid API key. Check your Codestral API key in configuration":e.includes("rate_limit")||e.includes("Rate limit")?"Rate limit exceeded. Wait a moment and try again, or upgrade your Codestral plan":e.includes("model")||e.includes("Model")?"Model not found or not accessible. Check if the Codestral model name is correct":e.includes("Invalid model type")?"Invalid model type. Use supported models: codestral-latest, codestral-2501":e.includes("overloaded")||e.includes("capacity")?"Codestral service is overloaded. Try again in a few minutes":e.includes("403")||e.includes("Forbidden")?"Access denied. 
Your API key may not have permission for this Codestral model":e.includes("404")||e.includes("Not Found")?"Model or endpoint not found. Check your Codestral model configuration":e.includes("500")||e.includes("Internal Server Error")?"Codestral server error. Try again later":null}generateCommitMessage$(){return y(this.generateMessage("commit")).pipe(u(t=>g(t)),C(this.formatAsChoice),v(this.handleError$))}generateCodeReview$(){return y(this.generateMessage("review")).pipe(u(t=>g(t)),C(t=>({name:`${this.serviceName} ${t.title}`,short:t.title,value:t.value,description:t.value,isError:!1})),v(this.handleError$))}async generateMessage(t){const e=this.params.stagedDiff.diff,{systemPrompt:r,systemPromptPath:s,codeReviewPromptPath:m,logging:i,locale:l,generate:a,type:n,maxLength:p}=this.params.config,o={...N,locale:l,maxLength:p,type:n,generate:a,systemPrompt:r,systemPromptPath:s,codeReviewPromptPath:m,vcs_branch:this.params.branchName||""},d=t==="review"?$(o):D(o);this.checkAvailableModels();const c=A(e,t),b=`${this.params.config.url||"https://codestral.mistral.ai"}/v1/chat/completions`,w={Authorization:`Bearer ${this.apiKey}`,"content-type":"application/json"};k(e,t,"Codestral",this.params.config.model,b,w,i),R(e,t,"Codestral",d,c,i);const h=await this.createChatCompletions(d,t);return t==="review"?this.sanitizeResponse(h):this.parseMessage(h,n,a)}checkAvailableModels(){if(["codestral-latest","codestral-2501"].includes(this.params.config.model))return!0;throw new Error("Invalid model type of Codestral AI")}async createChatCompletions(t,e){const 
r=this.params.stagedDiff.diff,{logging:s}=this.params.config,m=this.params.config.url||"https://codestral.mistral.ai",i={model:this.params.config.model,messages:[{role:"system",content:t},{role:"user",content:A(this.params.stagedDiff.diff,e)}],temperature:this.params.config.temperature,top_p:this.params.config.topP,max_tokens:this.params.config.maxTokens,stream:!1,safe_prompt:!1,random_seed:S(10,1e3)};e==="commit"&&(i.response_format={type:"json_object"}),_(r,e,"Codestral",i,s);const l=Date.now();try{const n=await new B({method:"POST",baseURL:`${m}/v1/chat/completions`,timeout:this.params.config.timeout}).setHeaders({Authorization:`Bearer ${this.apiKey}`,"content-type":"application/json"}).setBody(i).execute(),p=Date.now()-l,o=n.data;if(x(r,e,"Codestral",o,s),!o.choices||o.choices.length===0||!o.choices[0].message?.content)throw P(r,e,"Codestral",{message:"No Content on response",result:o},s),new Error("No Content on response. Please open a Bug report");const c=o.choices[0].message.content;return E(r,e,"Codestral",p,c,s),c}catch(a){throw P(r,e,"Codestral",a,s),a}}}export{U as CodestralService};
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
import l from"chalk";import{CohereClientV2 as x}from"cohere-ai";import{concatMap as h,from as d,map as g,catchError as u}from"rxjs";import{fromPromise as f}from"rxjs/internal/observable/innerFrom";import{A as E,l as S,a as $,b as D,c as N,d as _,e as V}from"./ai.service-6f818099.mjs";import{D as O,b as F,g as U,n as H}from"./cli-9533dfd6.mjs";import"fs";import"path";import"@pacote/xxhash";import"winston";import"cleye";import"module";import"crypto";import"os";import"node:buffer";import"node:path";import"node:child_process";import"node:process";import"child_process";import"node:url";import"node:os";import"assert";import"events";import"node:fs";import"buffer";import"stream";import"util";import"node:util";import"inquirer";import"fs/promises";import"readline";import"figlet";import"gradient-string";import"ora";import"inquirer-reactive-list-prompt";import"winston-daily-rotate-file";import"axios";import"url";import"node:fs/promises";import"chokidar";import"rxjs/operators";class L extends E{constructor(r){super(r),this.params=r,this.colors={primary:"#D18EE2",secondary:"#fff"},this.serviceName=l.bgHex(this.colors.primary).hex(this.colors.secondary).bold("[Cohere]"),this.errorPrefix=l.red.bold("[Cohere]"),this.cohere=new x({token:this.params.config.key})}isValidCohereV2Response(r){const e=r;return e?.message?.content!==void 0&&Array.isArray(e.message.content)&&e.message.content.length>0&&typeof e.message.content[0]?.text=="string"}getServiceSpecificErrorMessage(r){const e=r.message||"";return e.includes("API key")||e.includes("api_key")?"Invalid API key. Check your Cohere API key in configuration":e.includes("rate_limit")||e.includes("Rate limit")?"Rate limit exceeded. Wait a moment and try again, or upgrade your Cohere plan":e.includes("model")||e.includes("Model")?"Model not found or not accessible. Check if the Cohere model name is correct":e.includes("overloaded")||e.includes("capacity")?"Cohere service is overloaded. 
Try again in a few minutes":e.includes("403")||e.includes("Forbidden")?"Access denied. Your API key may not have permission for this Cohere model":e.includes("404")||e.includes("Not Found")?"Model or endpoint not found. Check your Cohere model configuration":e.includes("500")||e.includes("Internal Server Error")?"Cohere server error. Try again later":null}generateCommitMessage$(){return f(this.generateMessage("commit")).pipe(h(r=>d(r)),g(this.formatAsChoice),u(this.handleError$))}generateCodeReview$(){return f(this.generateMessage("review")).pipe(h(r=>d(r)),g(r=>({name:`${this.serviceName} ${r.title}`,short:r.title,value:r.value,description:r.value,isError:!1})),u(this.handleError$))}async generateMessage(r){const e=this.params.stagedDiff.diff,{systemPrompt:C,systemPromptPath:v,codeReviewPromptPath:y,logging:o,temperature:P,locale:A,generate:n,type:a,maxLength:I,maxTokens:w}=this.params.config,m={...O,locale:A,maxLength:I,type:a,generate:n,systemPrompt:C,systemPromptPath:v,codeReviewPromptPath:y,vcs_branch:this.params.branchName||""},i=r==="review"?F(m):U(m),c=`Here is the diff: ${e}`,k=[...i?[{role:"system",content:i}]:[],{role:"user",content:c}],M=`${this.params.config.url}/v2/chat`;S(e,r,"Cohere",this.params.config.model,M,{},o),$(e,r,"Cohere",i,c,o);const p={model:this.params.config.model,messages:k,max_tokens:w,temperature:P,seed:H(10,1e3),p:this.params.config.topP};D(e,r,"Cohere",p,o);const b=Date.now();try{const t=await this.cohere.chat(p,{timeoutInSeconds:Math.floor(this.params.config.timeout/1e3)}),R=Date.now()-b;if(!this.isValidCohereV2Response(t))throw new Error("Invalid response structure from Cohere v2 API");const s=t.message.content[0].text;return N(e,r,"Cohere",t,o),_(e,r,"Cohere",R,s,o),r==="review"?this.sanitizeResponse(s):this.parseMessage(s,a,n)}catch(t){throw V(e,r,"Cohere",t,o),t}}}export{L as CohereService};
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
import v from"chalk";import K from"openai";import{concatMap as w,from as A,map as C,catchError as I}from"rxjs";import{fromPromise as M}from"rxjs/internal/observable/innerFrom";import{A as W,l as b,a as x,b as $,c as R,d as _,e as E}from"./ai.service-6f818099.mjs";import{D as U,g as N,b as Y,k as O}from"./cli-9533dfd6.mjs";import"fs";import"path";import"@pacote/xxhash";import"winston";import"cleye";import"module";import"crypto";import"os";import"node:buffer";import"node:path";import"node:child_process";import"node:process";import"child_process";import"node:url";import"node:os";import"assert";import"events";import"node:fs";import"buffer";import"stream";import"util";import"node:util";import"inquirer";import"fs/promises";import"readline";import"figlet";import"gradient-string";import"ora";import"inquirer-reactive-list-prompt";import"winston-daily-rotate-file";import"axios";import"url";import"node:fs/promises";import"chokidar";import"rxjs/operators";class G extends W{constructor(t){super(t),this.params=t,this.generateStreamingCommitMessage$=()=>{const{generate:s,type:o}=this.params.config;return this.createStreamingCommitMessages$(i=>{this.streamChunks(i).catch(a=>i.error(a))},o,s)},this.streamChunks=async s=>{const o=this.params.stagedDiff.diff,{systemPrompt:i,systemPromptPath:a,codeReviewPromptPath:p,logging:r,locale:c,generate:m,type:n,maxLength:l}=this.params.config,h={...U,locale:c,maxLength:l,type:n,generate:m,systemPrompt:i,systemPromptPath:a,codeReviewPromptPath:p,vcs_branch:this.params.branchName||""},f=N(h);this.checkAvailableModels();const d=O(o,"commit"),g=`${this.params.config.url||"https://api.deepseek.com"}/chat/completions`,L={Authorization:`Bearer ${this.params.config.key}`,"Content-Type":"application/json"};b(o,"commit","DeepSeek",this.params.config.model,g,L,r),x(o,"commit","DeepSeek",f,d,r);const 
D={messages:[{role:"system",content:f},{role:"user",content:d}],model:this.params.config.model,max_tokens:this.params.config.maxTokens,top_p:this.params.config.topP,temperature:this.params.config.temperature,stream:!0};$(o,"commit","DeepSeek",D,r);const T=Date.now();let u="";try{const z=await this.deepSeek.chat.completions.create(D,{timeout:this.params.config.timeout});for await(const P of z){const B=P.choices?.[0]?.delta?.content||"",H=P.choices?.[0]?.delta?.reasoning_content||"",S=`${B}${H}`;S&&(u+=S,s.next(S))}const F=Date.now()-T;R(o,"commit","DeepSeek",{streamed:!0,totalLength:u.length},r),_(o,"commit","DeepSeek",F,u,r),s.complete()}catch(k){E(o,"commit","DeepSeek",k,r),s.error(k)}},this.colors={primary:"#53a3f9",secondary:"#fff"},this.serviceName=v.bgHex(this.colors.primary).hex(this.colors.secondary).bold("[DeepSeek]"),this.errorPrefix=v.red.bold("[DeepSeek]");const e=this.params.config.url||"https://api.deepseek.com";this.deepSeek=new K({baseURL:e,apiKey:this.params.config.key})}getServiceSpecificErrorMessage(t){const e=t.message||"";return e.includes("API key")||e.includes("api_key")?"Invalid API key. Check your DeepSeek API key in configuration":e.includes("rate_limit")||e.includes("Rate limit")?"Rate limit exceeded. Wait a moment and try again, or upgrade your DeepSeek plan":e.includes("model")||e.includes("Model")?"Model not found or not accessible. Check if the DeepSeek model name is correct":e.includes("Invalid model type")?"Invalid model type. Use supported models: deepseek-reasoner, deepseek-chat":e.includes("overloaded")||e.includes("capacity")?"DeepSeek service is overloaded. Try again in a few minutes":e.includes("403")||e.includes("Forbidden")?"Access denied. Your API key may not have permission for this DeepSeek model":e.includes("404")||e.includes("Not Found")?"Model or endpoint not found. Check your DeepSeek model configuration":e.includes("500")||e.includes("Internal Server Error")?"DeepSeek server error. 
Try again later":null}generateCommitMessage$(){return this.params.config.stream||!1?this.generateStreamingCommitMessage$():M(this.generateMessage("commit")).pipe(w(e=>A(e)),C(this.formatAsChoice),I(this.handleError$))}generateCodeReview$(){return M(this.generateMessage("review")).pipe(w(t=>A(t)),C(t=>({name:`${this.serviceName} ${t.title}`,short:t.title,value:t.value,description:t.value,isError:!1})),I(this.handleError$))}async generateMessage(t){const e=this.params.stagedDiff.diff,{systemPrompt:s,systemPromptPath:o,codeReviewPromptPath:i,logging:a,locale:p,generate:r,type:c,maxLength:m}=this.params.config,n={...U,locale:p,maxLength:m,type:c,generate:r,systemPrompt:s,systemPromptPath:o,codeReviewPromptPath:i,vcs_branch:this.params.branchName||""},l=t==="review"?Y(n):N(n);this.checkAvailableModels();const h=O(e,t),d=`${this.params.config.url||"https://api.deepseek.com"}/chat/completions`,y={Authorization:`Bearer ${this.params.config.key}`,"Content-Type":"application/json"};b(e,t,"DeepSeek",this.params.config.model,d,y,a),x(e,t,"DeepSeek",l,h,a);const g=await this.createChatCompletions(l,h,t);return t==="review"?this.sanitizeResponse(g):this.parseMessage(g,c,r)}checkAvailableModels(){if(["deepseek-reasoner","deepseek-chat"].includes(this.params.config.model))return!0;throw new Error("Invalid model type of DeepSeek")}async createChatCompletions(t,e,s){const o=this.params.stagedDiff.diff,{logging:i}=this.params.config,a={messages:[{role:"system",content:t},{role:"user",content:e}],model:this.params.config.model,max_tokens:this.params.config.maxTokens,top_p:this.params.config.topP,temperature:this.params.config.temperature};$(o,s,"DeepSeek",a,i);const p=Date.now();try{const r=await this.deepSeek.chat.completions.create(a,{timeout:this.params.config.timeout}),c=Date.now()-p,m=r.choices?.[0];if(!m?.message)throw new Error("DeepSeek API returned invalid response structure");const n=m.message.content||m.message.reasoning_content||"";if(!n)throw new Error("DeepSeek API 
returned empty response");return R(o,s,"DeepSeek",r,i),_(o,s,"DeepSeek",c,n,i),n}catch(r){throw E(o,s,"DeepSeek",r,i),r}}}export{G as DeepSeekService};
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
import{HarmCategory as n,HarmBlockThreshold as a,GoogleGenerativeAI as W}from"@google/generative-ai";import S from"chalk";import{concatMap as v,from as x,map as N,catchError as L}from"rxjs";import{fromPromise as D}from"rxjs/internal/observable/innerFrom";import{A as X,l as w,a as b,b as B,c as H,d as $,e as Y}from"./ai.service-6f818099.mjs";import{D as U,g as K,b as F,k as V}from"./cli-9533dfd6.mjs";import"fs";import"path";import"@pacote/xxhash";import"winston";import"cleye";import"module";import"crypto";import"os";import"node:buffer";import"node:path";import"node:child_process";import"node:process";import"child_process";import"node:url";import"node:os";import"assert";import"events";import"node:fs";import"buffer";import"stream";import"util";import"node:util";import"inquirer";import"fs/promises";import"readline";import"figlet";import"gradient-string";import"ora";import"inquirer-reactive-list-prompt";import"winston-daily-rotate-file";import"axios";import"url";import"node:fs/promises";import"chokidar";import"rxjs/operators";class q extends X{constructor(t){super(t),this.params=t,this.generateStreamingCommitMessage$=()=>{const{generate:e,type:o}=this.params.config;return this.createStreamingCommitMessages$(c=>{this.streamChunks(c).catch(r=>c.error(r))},o,e)},this.streamChunks=async e=>{const 
o=this.params.stagedDiff.diff,{systemPrompt:c,systemPromptPath:r,codeReviewPromptPath:C,logging:s,locale:f,generate:A,type:E,maxLength:T}=this.params.config,_=this.params.config.maxTokens,h={...U,locale:f,maxLength:T,type:E,generate:A,systemPrompt:c,systemPromptPath:r,codeReviewPromptPath:C,vcs_branch:this.params.branchName||""},p=K(h),y={maxOutputTokens:_,temperature:this.params.config.temperature,topP:this.params.config.topP},u=this.genAI.getGenerativeModel({model:this.params.config.model,systemInstruction:p,generationConfig:y,safetySettings:[{category:n.HARM_CATEGORY_HATE_SPEECH,threshold:a.BLOCK_LOW_AND_ABOVE},{category:n.HARM_CATEGORY_SEXUALLY_EXPLICIT,threshold:a.BLOCK_LOW_AND_ABOVE},{category:n.HARM_CATEGORY_HARASSMENT,threshold:a.BLOCK_LOW_AND_ABOVE},{category:n.HARM_CATEGORY_DANGEROUS_CONTENT,threshold:a.BLOCK_LOW_AND_ABOVE}]}),O=V(o,"commit"),k=`${this.params.config.url||"https://generativelanguage.googleapis.com"}/v1beta/models/${this.params.config.model}:streamGenerateContent`,I={"Content-Type":"application/json","x-goog-api-key":this.params.config.key};w(o,"commit","Gemini",this.params.config.model,k,I,s),b(o,"commit","Gemini",p,O,s),B(o,"commit","Gemini",{systemInstruction:{parts:[{text:p}]},contents:[{parts:[{text:O}]}],generationConfig:y},s);const g=Date.now();let m="";try{const i=this.params.config.timeout>1e4?{request:{timeout:this.params.config.timeout}}:void 0,R=await u.generateContentStream(O,i);for await(const P of R.stream){const d=P.text();d&&(m+=d,e.next(d))}const l=Date.now()-g;H(o,"commit","Gemini",{streamed:!0,totalLength:m.length},s),$(o,"commit","Gemini",l,m,s),e.complete()}catch(i){Y(o,"commit","Gemini",i,s),e.error(i)}},this.colors={primary:"#0077FF",secondary:"#fff"},this.serviceName=S.bgHex(this.colors.primary).hex(this.colors.secondary).bold("[Gemini]"),this.errorPrefix=S.red.bold("[Gemini]"),this.genAI=new W(this.params.config.key)}getServiceSpecificErrorMessage(t){const e=t.message||"";return e.includes("API 
key")||e.includes("api_key")?"Invalid API key. Check your Google AI Studio API key in configuration":e.includes("quota")||e.includes("QUOTA_EXCEEDED")?"API quota exceeded. Check your Google AI Studio usage limits":e.includes("model")||e.includes("Model")?"Model not found or not accessible. Check if the Gemini model name is correct":e.includes("SAFETY")||e.includes("safety")?"Content blocked by safety filters. Try rephrasing your request":e.includes("RECITATION")||e.includes("recitation")?"Content blocked due to recitation concerns. Try a different approach":e.includes("403")||e.includes("Forbidden")?"Access denied. Your API key may not have permission for this Gemini model":e.includes("404")||e.includes("Not Found")?"Model or endpoint not found. Check your Gemini model configuration":e.includes("500")||e.includes("Internal Server Error")?"Google AI service error. Try again later":e.includes("MAX_TOKENS")||e.includes("truncated")||e.includes("maxOutputTokens")?"Response truncated due to token limit. Gemini 2.5+ models use thinking tokens. 
Try increasing maxTokens (recommended: 8192+)":null}generateCommitMessage$(){return this.params.config.stream||!1?this.generateStreamingCommitMessage$():D(this.generateMessage("commit")).pipe(v(e=>x(e)),N(this.formatAsChoice),L(this.handleError$))}generateCodeReview$(){return D(this.generateMessage("review")).pipe(v(t=>x(t)),N(t=>({name:`${this.serviceName} ${t.title}`,short:t.title,value:t.value,description:t.value,isError:!1})),L(this.handleError$))}async generateMessage(t){const e=this.params.stagedDiff.diff,{systemPrompt:o,systemPromptPath:c,logging:r,locale:C,codeReviewPromptPath:s,generate:f,type:A,maxLength:E}=this.params.config,T=this.params.config.maxTokens,_={...U,locale:C,maxLength:E,type:A,generate:f,systemPrompt:o,systemPromptPath:c,codeReviewPromptPath:s,vcs_branch:this.params.branchName||""},h=t==="review"?F(_):K(_),p={maxOutputTokens:T,temperature:this.params.config.temperature,topP:this.params.config.topP},y=this.genAI.getGenerativeModel({model:this.params.config.model,systemInstruction:h,generationConfig:p,safetySettings:[{category:n.HARM_CATEGORY_HATE_SPEECH,threshold:a.BLOCK_LOW_AND_ABOVE},{category:n.HARM_CATEGORY_SEXUALLY_EXPLICIT,threshold:a.BLOCK_LOW_AND_ABOVE},{category:n.HARM_CATEGORY_HARASSMENT,threshold:a.BLOCK_LOW_AND_ABOVE},{category:n.HARM_CATEGORY_DANGEROUS_CONTENT,threshold:a.BLOCK_LOW_AND_ABOVE}]}),u=V(e,t),G=`${this.params.config.url||"https://generativelanguage.googleapis.com"}/v1beta/models/${this.params.config.model}:generateContent`,k={"Content-Type":"application/json","x-goog-api-key":this.params.config.key};w(e,t,"Gemini",this.params.config.model,G,k,r),b(e,t,"Gemini",h,u,r),B(e,t,"Gemini",{systemInstruction:{parts:[{text:h}]},contents:[{parts:[{text:u}]}],generationConfig:p},r);const M=Date.now();try{const g=this.params.config.timeout>1e4?{request:{timeout:this.params.config.timeout}}:void 0,m=await y.generateContent(u,g),i=m.response;if(i.candidates?.[0]?.finishReason==="MAX_TOKENS"){const d=i.usageMetadata;throw new 
Error(`Response truncated: maxOutputTokens exceeded. Thinking tokens: ${d?.thoughtsTokenCount??"N/A"}, Output tokens: ${d?.candidatesTokenCount??"N/A"}. Increase maxTokens config for Gemini 2.5+ thinking models.`)}const l=i.text(),P=Date.now()-M;return H(e,t,"Gemini",{response:l,candidates:m.response.candidates,usageMetadata:m.response.usageMetadata},r),$(e,t,"Gemini",P,l,r),t==="review"?this.sanitizeResponse(l):this.parseMessage(l,A,f)}catch(g){throw Y(e,t,"Gemini",g,r),g}}}export{q as GeminiService};
|
|
@@ -0,0 +1,5 @@
|
|
|
1
|
+
import G from"chalk";import{concatMap as N,from as I,map as T,catchError as v}from"rxjs";import{fromPromise as D}from"rxjs/internal/observable/innerFrom";import{A as O,l as _,a as P,b as R,e as M,c as S,d as k}from"./ai.service-6f818099.mjs";import{i as $}from"./openai-8b372df6.mjs";import{D as x,b as y,g as C}from"./cli-9533dfd6.mjs";import"fs";import"path";import"@pacote/xxhash";import"winston";import"http";import"https";import"net";import"tls";import"url";import"assert";import"tty";import"util";import"os";import"events";import"cleye";import"module";import"crypto";import"node:buffer";import"node:path";import"node:child_process";import"node:process";import"child_process";import"node:url";import"node:os";import"node:fs";import"buffer";import"stream";import"node:util";import"inquirer";import"fs/promises";import"readline";import"figlet";import"gradient-string";import"ora";import"inquirer-reactive-list-prompt";import"winston-daily-rotate-file";import"axios";import"node:fs/promises";import"chokidar";import"rxjs/operators";class U extends O{constructor(e){super(e),this.params=e,this.baseURL="https://models.github.ai",this.colors={primary:"#24292e",secondary:"#FFF"},this.serviceName=G.bgHex(this.colors.primary).hex(this.colors.secondary).bold("[GitHub Models]"),this.errorPrefix=G.red.bold("[GitHub Models]")}getServiceSpecificErrorMessage(e){switch(e.code){case"MISSING_TOKEN":return"GitHub token is required. Run: aicommit2 github-login";case"AUTHENTICATION_FAILED":return"Authentication failed. Your GitHub token may be expired or invalid. Run: aicommit2 github-login";case"ACCESS_DENIED":return'Access denied. Make sure your GitHub token has "Models" permission in GitHub settings';case"NO_CONTENT":return"No content received from GitHub Models. 
The model may have failed to generate a response";default:return null}}generateCommitMessage$(){return D(this.generateMessage("commit")).pipe(N(e=>I(e)),T(this.formatAsChoice),v(this.handleError$))}generateCodeReview$(){return D(this.generateMessage("review")).pipe(N(e=>I(e)),T(e=>({name:`${this.serviceName} ${e.title}`,short:e.title,value:e.value,description:e.value,isError:!1})),v(this.handleError$))}async generateMessage(e){if(!this.params.config.key){const r=new Error("GitHub token is required for GitHub Models. Use: aicommit2 github-login");throw r.code="MISSING_TOKEN",r}const s=this.params.stagedDiff.diff,{systemPrompt:i,systemPromptPath:l,codeReviewPromptPath:w,locale:H,generate:p,type:u,maxLength:g}=this.params.config,a={...x,locale:H,maxLength:g,type:u,generate:p,systemPrompt:i,systemPromptPath:l,codeReviewPromptPath:w,vcs_branch:this.params.branchName||""},E=e==="review"?y(a):C(a),h=await this.makeRequest(E,s,e);return e==="review"?this.sanitizeResponse(h):this.parseMessage(h,u,p)}async makeRequest(e,s,i){const l=Array.isArray(this.params.config.model)?this.params.config.model[0]:this.params.config.model||"gpt-4o-mini",w=[{role:"system",content:e},{role:"user",content:i==="review"?s:`Here's the diff:
|
|
2
|
+
|
|
3
|
+
${s}`}],H=$(l),p={messages:w,model:l,stream:!1,...H?{max_completion_tokens:this.params.config.maxTokens||1024,temperature:1}:{max_tokens:this.params.config.maxTokens||1024,top_p:this.params.config.topP||.95,temperature:this.params.config.temperature||.7}},u=`${this.baseURL}/inference/chat/completions`,g={"Content-Type":"application/json",Accept:"application/vnd.github+json",Authorization:`Bearer ${this.params.config.key}`},{logging:a}=this.params.config;_(s,i,"GitHub Models",l,u,g,a),P(s,i,"GitHub Models",e,i==="review"?s:`Here's the diff:
|
|
4
|
+
|
|
5
|
+
${s}`,a),R(s,i,"GitHub Models",p,a);const E=new AbortController,h=setTimeout(()=>E.abort(),this.params.config.timeout);try{const r=Date.now(),o=await fetch(u,{method:"POST",headers:g,body:JSON.stringify(p),signal:E.signal});if(clearTimeout(h),!o.ok){const n=await o.text(),d={status:o.status,statusText:o.statusText,url:u,headers:Object.fromEntries(o.headers),body:n};M(s,i,"GitHub Models",d,a);let b=`GitHub API request failed: ${o.status} ${o.statusText}`;try{const t=JSON.parse(n);t.error?.message?b+=` - ${t.error.message}`:t.message&&(b+=` - ${t.message}`)}catch{n&&(b+=` - ${n}`)}if(o.status===401){const t=new Error("GitHub authentication failed. Please run: aicommit2 github-login");throw t.status=o.status,t.code="AUTHENTICATION_FAILED",t.content=n,t}else if(o.status===403){const t=new Error('GitHub Models access denied. Make sure your token has "Models" permission.');throw t.status=o.status,t.code="ACCESS_DENIED",t.content=n,t}else if(o.status===404){const t=new Error(`Model "${l}" not found. Please check the model name.`);throw t.status=o.status,t.code="MODEL_NOT_FOUND",t.content=n,t}else if(o.status===429){const t=new Error("Rate limit exceeded. 
Please try again later.");throw t.status=o.status,t.code="RATE_LIMIT_EXCEEDED",t.content=n,t}const f=new Error(b);throw f.status=o.status,f.code="API_ERROR",f.content=n,f}const m=await o.json(),A=Date.now()-r;S(s,i,"GitHub Models",m,a);const c=m.choices?.[0]?.message?.content?.trim();if(!c){M(s,i,"GitHub Models",{message:"No content found in GitHub Models response",result:m},a);const d=new Error("No response content received from GitHub Models");throw d.code="NO_CONTENT",d.content=JSON.stringify(m,null,2),d}return k(s,i,"GitHub Models",A,c,a),c}catch(r){if(clearTimeout(h),r instanceof Error&&r.name==="AbortError"){const A={message:`GitHub Models request timeout after ${this.params.config.timeout}ms`,error:r};M(s,i,"GitHub Models",A,a);const c=new Error(`GitHub Models request timed out after ${this.params.config.timeout}ms`);throw c.code="REQUEST_TIMEOUT",c.originalError=r,c}if(r.code)throw r;M(s,i,"GitHub Models",{message:"GitHub Models request failed",error:r},a);const m=new Error(`GitHub Models request failed: ${r instanceof Error?r.message:String(r)}`);throw m.code="REQUEST_FAILED",m.originalError=r,m}}}export{U as GitHubModelsService};
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
import A from"chalk";import L from"groq-sdk";import{concatMap as x,from as C,map as M,catchError as $}from"rxjs";import{fromPromise as I}from"rxjs/internal/observable/innerFrom";import{A as z,l as S,a as b,b as R,c as _,d as D,e as E}from"./ai.service-6f818099.mjs";import{D as T,g as N,b as F}from"./cli-9533dfd6.mjs";import"fs";import"path";import"@pacote/xxhash";import"winston";import"cleye";import"module";import"crypto";import"os";import"node:buffer";import"node:path";import"node:child_process";import"node:process";import"child_process";import"node:url";import"node:os";import"assert";import"events";import"node:fs";import"buffer";import"stream";import"util";import"node:util";import"inquirer";import"fs/promises";import"readline";import"figlet";import"gradient-string";import"ora";import"inquirer-reactive-list-prompt";import"winston-daily-rotate-file";import"axios";import"url";import"node:fs/promises";import"chokidar";import"rxjs/operators";class H extends z{constructor(e){super(e),this.params=e,this.generateStreamingCommitMessage$=()=>{const{generate:t,type:o}=this.params.config;return this.createStreamingCommitMessages$(n=>{this.streamChunks(n).catch(m=>n.error(m))},o,t)},this.streamChunks=async t=>{const o=this.params.stagedDiff.diff,{systemPrompt:n,systemPromptPath:m,codeReviewPromptPath:r,logging:i,locale:y,temperature:l,generate:h,type:P,maxLength:v}=this.params.config,g=this.params.config.maxTokens,d={...T,locale:y,maxLength:v,type:P,generate:h,systemPrompt:n,systemPromptPath:m,codeReviewPromptPath:r,vcs_branch:this.params.branchName||""},c=N(d),G=`Here is the diff: ${o}`,q=`${this.params.config.url||"https://api.groq.com"}/openai/v1/chat/completions`,f={Authorization:`Bearer ${this.params.config.key}`,"Content-Type":"application/json"};S(o,"commit","Groq",this.params.config.model,q,f,i),b(o,"commit","Groq",c,G,i);const 
u={messages:[{role:"system",content:c},{role:"user",content:G}],model:this.params.config.model,max_tokens:g,top_p:this.params.config.topP,temperature:l,stream:!0};R(o,"commit","Groq",u,i);const a=Date.now();let p="";try{const s=await this.groq.chat.completions.create(u,{timeout:this.params.config.timeout});for await(const U of s){const k=U.choices?.[0]?.delta?.content||"";k&&(p+=k,t.next(k))}const O=Date.now()-a;_(o,"commit","Groq",{streamed:!0,totalLength:p.length},i),D(o,"commit","Groq",O,p,i),t.complete()}catch(s){E(o,"commit","Groq",s,i),t.error(s)}},this.colors={primary:"#f55036",secondary:"#fff"},this.serviceName=A.bgHex(this.colors.primary).hex(this.colors.secondary).bold("[Groq]"),this.errorPrefix=A.red.bold("[Groq]"),this.groq=new L({apiKey:this.params.config.key})}getServiceSpecificErrorMessage(e){const t=e.message||"";return t.includes("API key")||t.includes("api_key")?"Invalid API key. Check your Groq API key in configuration":t.includes("rate_limit")||t.includes("Rate limit")?"Rate limit exceeded. Wait a moment and try again, or upgrade your Groq plan":t.includes("model")||t.includes("Model")?"Model not found or not accessible. Check if the Groq model name is correct":t.includes("overloaded")||t.includes("capacity")?"Groq service is overloaded. Try again in a few minutes":t.includes("403")||t.includes("Forbidden")?"Access denied. Your API key may not have permission for this Groq model":t.includes("404")||t.includes("Not Found")?"Model or endpoint not found. Check your Groq model configuration":t.includes("500")||t.includes("Internal Server Error")?"Groq server error. 
Try again later":null}generateCommitMessage$(){return this.params.config.stream||!1?this.generateStreamingCommitMessage$():I(this.generateMessage("commit")).pipe(x(t=>C(t)),M(this.formatAsChoice),$(this.handleError$))}generateCodeReview$(){return I(this.generateMessage("review")).pipe(x(e=>C(e)),M(e=>({name:`${this.serviceName} ${e.title}`,short:e.title,value:e.value,description:e.value,isError:!1})),$(this.handleError$))}async generateMessage(e){const t=this.params.stagedDiff.diff,{systemPrompt:o,systemPromptPath:n,codeReviewPromptPath:m,logging:r,locale:i,temperature:y,generate:l,type:h,maxLength:P}=this.params.config,v=this.params.config.maxTokens,g={...T,locale:i,maxLength:P,type:h,generate:l,systemPrompt:o,systemPromptPath:n,codeReviewPromptPath:m,vcs_branch:this.params.branchName||""},d=e==="review"?F(g):N(g),c=`Here is the diff: ${t}`,w=`${this.params.config.url||"https://api.groq.com"}/openai/v1/chat/completions`,q={Authorization:`Bearer ${this.params.config.key}`,"Content-Type":"application/json"};S(t,e,"Groq",this.params.config.model,w,q,r),b(t,e,"Groq",d,c,r);const f={messages:[{role:"system",content:d},{role:"user",content:c}],model:this.params.config.model,max_tokens:v,top_p:this.params.config.topP,temperature:y};R(t,e,"Groq",f,r);const u=Date.now();try{const a=await this.groq.chat.completions.create(f,{timeout:this.params.config.timeout}),p=Date.now()-u,s=a.choices[0].message.content||"";return _(t,e,"Groq",a,r),D(t,e,"Groq",p,s,r),e==="review"?this.sanitizeResponse(s):this.parseMessage(s,h,l)}catch(a){throw E(t,e,"Groq",a,r),a}}}export{H as GroqService};
|
|
@@ -0,0 +1,2 @@
|
|
|
1
|
+
import b from"chalk";import{concatMap as k,from as D,map as P,catchError as E}from"rxjs";import{fromPromise as F}from"rxjs/internal/observable/innerFrom";import{A as $,l as H,a as R,c as U,d as S,e as N}from"./ai.service-6f818099.mjs";import{D as x,b as T,g as _}from"./cli-9533dfd6.mjs";import"fs";import"path";import"@pacote/xxhash";import"winston";import"cleye";import"module";import"crypto";import"os";import"node:buffer";import"node:path";import"node:child_process";import"node:process";import"child_process";import"node:url";import"node:os";import"assert";import"events";import"node:fs";import"buffer";import"stream";import"util";import"node:util";import"inquirer";import"fs/promises";import"readline";import"figlet";import"gradient-string";import"ora";import"inquirer-reactive-list-prompt";import"winston-daily-rotate-file";import"axios";import"url";import"node:fs/promises";import"chokidar";import"rxjs/operators";class j extends ${constructor(t){super(t),this.params=t,this.headers={},this.models=[],this.currentModelId=null,this.currentConversation=void 0,this.currentConversionID=void 0,this.cookie="",this.colors={primary:"#FED21F",secondary:"#000"},this.serviceName=b.bgHex(this.colors.primary).hex(this.colors.secondary).bold("[HuggingFace]"),this.errorPrefix=b.red.bold("[HuggingFace]"),this.cookie=this.params.config.cookie;const e=this.params.config.url||"https://huggingface.co";this.headers={accept:"*/*","accept-language":"en-US,en;q=0.9","sec-ch-ua":'"Chromium";v="116", "Not)A;Brand";v="24", "Google Chrome";v="116"',"sec-ch-ua-mobile":"?0","sec-ch-ua-platform":'"Windows"',"sec-fetch-dest":"empty","sec-fetch-mode":"cors","sec-fetch-site":"same-origin",origin:e,"Referrer-Policy":"strict-origin-when-cross-origin"}}getServiceSpecificErrorMessage(t){const e=t.message||"";return e.includes("cookie")||e.includes("Cookie")?"Invalid cookie. Check your Hugging Face session cookie in configuration":e.includes("model")||e.includes("Model")?"Model not found or not accessible. 
Check if the Hugging Face model name is correct":e.includes("conversation")||e.includes("conversion")?"Failed to create conversation. Try again or check your session":e.includes("401")||e.includes("Unauthorized")?"Authentication failed. Your Hugging Face session may have expired":e.includes("403")||e.includes("Forbidden")?"Access denied. You may not have permission to access this model":e.includes("404")||e.includes("Not Found")?"Model not found. Check your Hugging Face model configuration":e.includes("500")||e.includes("Internal Server Error")?"Hugging Face server error. Try again later":e.includes("overloaded")||e.includes("capacity")?"Hugging Face service is overloaded. Try again in a few minutes":null}generateCommitMessage$(){return F(this.generateMessage("commit")).pipe(k(t=>D(t)),P(this.formatAsChoice),E(this.handleError$))}generateCodeReview$(){return F(this.generateMessage("review")).pipe(k(t=>D(t)),P(t=>({name:`${this.serviceName} ${t.title}`,short:t.title,value:t.value,description:t.value,isError:!1})),E(this.handleError$))}async generateMessage(t){await this.initialize();const e=this.params.stagedDiff.diff,{systemPrompt:r,systemPromptPath:o,codeReviewPromptPath:p,logging:a,locale:i,generate:d,type:s,maxLength:u,temperature:m,maxTokens:v,topP:h,timeout:g}=this.params.config,n={...x,locale:i,maxLength:u,type:s,generate:d,systemPrompt:r,systemPromptPath:o,codeReviewPromptPath:p,vcs_branch:this.params.branchName||""},c=t==="review"?T(n):_(n),l=`Here is the diff: ${e}`,w=`${this.params.config.url||"https://huggingface.co"}/chat/conversation`,I={...this.headers,cookie:this.cookie};H(e,t,"HuggingFace",this.params.config.model,w,I,a),R(e,t,"HuggingFace",c,l,a);const A=Date.now();try{const y=await this.getNewChat(c),C=await(await this.sendMessage(l,y.id)).completeResponsePromise();await this.deleteConversation(y.id);const M=Date.now()-A;return 
U(e,t,"HuggingFace",{response:C},a),S(e,t,"HuggingFace",M,C,a),t==="review"?this.sanitizeResponse(C):this.parseMessage(C,s,d)}catch(y){throw N(e,t,"HuggingFace",y,a),y}}async initialize(){const t=await this.getRemoteLlms(),e=t.find(r=>r.name?.toLowerCase()===this.params.config.model.toLowerCase());if(e){this.currentModel=e,this.currentModelId=e.id;return}this.currentModel=t[0],this.currentModelId=t[0].id}async getRemoteLlms(){const t=this.params.config.url||"https://huggingface.co",e=await fetch(`${t}/chat/__data.json`,{headers:{...this.headers,cookie:this.cookie},body:null,method:"GET"});if(e.status!==200)throw new Error(`Failed to get remote LLMs with status code: ${e.status}`);const o=(await e.json()).nodes[0].data,p=o[o[0].models],a=[],i=d=>d===-1?null:o[d];for(const d of p){const s=o[d];if(o[s.unlisted])continue;const u={id:i(s.id),name:i(s.name),displayName:i(s.displayName),preprompt:i(s.preprompt),promptExamples:[],websiteUrl:i(s.websiteUrl),description:i(s.description),datasetName:i(s.datasetName),datasetUrl:i(s.datasetUrl),modelUrl:i(s.modelUrl),parameters:{}},m=i(s.promptExamples);if(m!==null){const g=m.map(n=>i(n));u.promptExamples=g.map(n=>({title:o[n.title],prompt:o[n.prompt]}))}const v=i(s.parameters),h={};for(const[g,n]of Object.entries(v)){if(n===-1){h[g]=null;continue}if(Array.isArray(o[n])){h[g]=o[n].map(c=>o[c]);continue}h[g]=o[n]}u.parameters=h,a.push(u)}return this.models=a,a}async getNewChat(t){const e={model:this.currentModelId,preprompt:t};let r=0;const o=this.params.config.url||"https://huggingface.co";for(;r<5;){const p=await fetch(`${o}/chat/conversation`,{headers:{...this.headers,"content-type":"application/json",cookie:this.cookie,Referer:`${o}/chat/`},body:JSON.stringify(e),method:"POST"}),{conversationId:a}=await p.json();if(a){this.currentConversionID=a;break}else r++}if(!this.currentConversionID)throw new Error("Failed to create new conversion");return await this.getConversationHistory(this.currentConversionID)}async 
getConversationHistory(t){if(!t)throw new Error("conversationId is required for getConversationHistory");const e=this.params.config.url||"https://huggingface.co",r=await fetch(`${e}/chat/conversation/${t}/__data.json`,{headers:{...this.headers,cookie:this.cookie,Referer:`${e}/chat/`},body:null,method:"GET"});if(r.status!=200)throw new Error("Unable get conversation details "+r);{const o=await r.json();return this.metadataParser(o,t)}}metadataParser(t,e){const r={id:"",model:"",systemPrompt:"",title:"",history:[]},o=t.nodes[1].data,p=o[o[0].model],a=o[o[0].preprompt],i=o[o[0].title],d=o[o[0].messages],s=[];for(const u of d){const m=o[u],v=new Date(o[m.createdAt][1]).getTime()/1e3,h=new Date(o[m.updatedAt][1]).getTime()/1e3;s.push({id:o[m.id],role:o[m.from],content:o[m.content],createdAt:v,updatedAt:h})}return r.id=e,r.model=p,r.systemPrompt=a,r.title=i,r.history=s,this.currentConversation=r,r}async sendMessage(t,e){if(t==="")throw new Error("the prompt can not be empty.");if(!e&&!this.currentConversionID?await this.getNewChat():e?(this.currentConversionID=e,await this.getConversationHistory(e)):this.currentConversionID&&await this.getConversationHistory(this.currentConversionID),!this.currentConversation)throw new Error("Failed to create new conversion");const r={inputs:t,id:this.currentConversation.history[this.currentConversation.history.length-1].id,is_retry:!1,is_continue:!1,web_search:!1,tools:[]},o=new FormData;o.append("data",JSON.stringify(r));const p=this.params.config.url||"https://huggingface.co",a=new AbortController,i=setTimeout(()=>a.abort(),this.params.config.timeout),d=await fetch(`${p}/chat/conversation/${this.currentConversionID}`,{headers:{...this.headers,cookie:this.cookie,Referer:`${p}/chat/conversation/${this.currentConversionID}`},body:o,method:"POST",signal:a.signal});clearTimeout(i);function s(n){try{const c=n.split(`
|
|
2
|
+
`),l=[];for(const f of c)f.trim()&&l.push(JSON.parse(f));return l}catch{return[{}]}}const u=new TextDecoder;let m="";const v=new TransformStream({async transform(n,c){const l=u.decode(n);try{const f=s(l);for(const w of f)w.type==="finalAnswer"?(m=w?.text||"",c.terminate()):w.type==="stream"&&c.enqueue(w?.token||"")}catch{throw new Error("Error during parsing response")}}}),h=d.body?.pipeThrough(v);async function g(){return new Promise(async(n,c)=>{try{if(!h)c("ModifiedStream undefined");else{const l=h.getReader();for(;;){const{done:f,value:w}=await l.read();if(f){n(m);break}}}}catch(l){c(l)}})}return{id:this.currentConversionID,stream:h,completeResponsePromise:g}}async deleteConversation(t){const e=this.params.config.url||"https://huggingface.co";return(await fetch(`${e}/chat/conversation/${t}`,{headers:{...this.headers,cookie:this.cookie,Referer:`${e}/chat/`},body:null,method:"DELETE"})).json()}}export{j as HuggingFaceService};
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
import f from"chalk";import{concatMap as g,from as u,map as A,catchError as M}from"rxjs";import{fromPromise as v}from"rxjs/internal/observable/innerFrom";import{A as C,l as k,a as $,b as R,c as x,e as y,d as E}from"./ai.service-6f818099.mjs";import{D as N,b as D,g as S,H as I,n as _}from"./cli-9533dfd6.mjs";import"fs";import"path";import"@pacote/xxhash";import"winston";import"cleye";import"module";import"crypto";import"os";import"node:buffer";import"node:path";import"node:child_process";import"node:process";import"child_process";import"node:url";import"node:os";import"assert";import"events";import"node:fs";import"buffer";import"stream";import"util";import"node:util";import"inquirer";import"fs/promises";import"readline";import"figlet";import"gradient-string";import"ora";import"inquirer-reactive-list-prompt";import"winston-daily-rotate-file";import"axios";import"url";import"node:fs/promises";import"chokidar";import"rxjs/operators";class U extends C{constructor(e){super(e),this.params=e,this.apiKey="",this.colors={primary:"#ff7000",secondary:"#fff"},this.serviceName=f.bgHex(this.colors.primary).hex(this.colors.secondary).bold("[MistralAI]"),this.errorPrefix=f.red.bold("[MistralAI]"),this.apiKey=this.params.config.key}getServiceSpecificErrorMessage(e){const t=e.message||"";return t.includes("API key")||t.includes("api_key")?"Invalid API key. Check your Mistral AI API key in configuration":t.includes("quota")||t.includes("usage")?"API quota exceeded. Check your Mistral AI usage limits":t.includes("model")||t.includes("Model")?"Model not found or not accessible. Check if the Mistral model name is correct":t.includes("403")||t.includes("Forbidden")?"Access denied. Your API key may not have permission for this Mistral model":t.includes("404")||t.includes("Not Found")?"Model or endpoint not found. Check your Mistral model configuration":t.includes("500")||t.includes("Internal Server Error")?"Mistral AI server error. 
Try again later":null}generateCommitMessage$(){return v(this.generateMessage("commit")).pipe(g(e=>u(e)),A(this.formatAsChoice),M(this.handleError$))}generateCodeReview$(){return v(this.generateMessage("review")).pipe(g(e=>u(e)),A(e=>({name:`${this.serviceName} ${e.title}`,short:e.title,value:e.value,description:e.value,isError:!1})),M(this.handleError$))}async generateMessage(e){const t=this.params.stagedDiff.diff,{systemPrompt:o,systemPromptPath:s,codeReviewPromptPath:i,logging:n,locale:l,generate:a,type:c,maxLength:p}=this.params.config,r={...N,locale:l,maxLength:p,type:c,generate:a,systemPrompt:o,systemPromptPath:s,codeReviewPromptPath:i,vcs_branch:this.params.branchName||""},h=e==="review"?D(r):S(r);await this.checkAvailableModels();const m=`Here is the diff: ${t}`,b=`${this.params.config.url||"https://api.mistral.ai"}/v1/chat/completions`,w={Authorization:`Bearer ${this.apiKey}`,"content-type":"application/json"};k(t,e,"MistralAI",this.params.config.model,b,w,n),$(t,e,"MistralAI",h,m,n);const d=await this.createChatCompletions(h,m,e);return e==="review"?this.sanitizeResponse(d):this.parseMessage(d,c,a)}async checkAvailableModels(){if((await this.getAvailableModels()).includes(this.params.config.model))return!0;throw new Error(`Invalid model type of Mistral AI: ${this.params.config.model}`)}async getAvailableModels(){const e=this.params.config.url||"https://api.mistral.ai";return(await new I({method:"GET",baseURL:`${e}/v1/models`,timeout:this.params.config.timeout}).setHeaders({Authorization:`Bearer ${this.apiKey}`,"content-type":"application/json"}).execute()).data.data.filter(o=>o.object==="model").map(o=>o.id)}async createChatCompletions(e,t,o){const 
s=this.params.stagedDiff.diff,{logging:i}=this.params.config,n={model:this.params.config.model,messages:[{role:"system",content:e},{role:"user",content:t}],temperature:this.params.config.temperature,top_p:this.params.config.topP,max_tokens:this.params.config.maxTokens,stream:!1,safe_prompt:!1,random_seed:_(10,1e3)};R(s,o,"MistralAI",n,i);const l=Date.now();try{const a=this.params.config.url||"https://api.mistral.ai",c=await new I({method:"POST",baseURL:`${a}/v1/chat/completions`,timeout:this.params.config.timeout}).setHeaders({Authorization:`Bearer ${this.apiKey}`,"content-type":"application/json"}).setBody(n).execute(),p=Date.now()-l,r=c.data;if(x(s,o,"MistralAI",r,i),!r.choices||r.choices.length===0||!r.choices[0].message?.content)throw y(s,o,"MistralAI",{message:"No Content on response",result:r},i),new Error("No Content on response. Please open a Bug report");const m=r.choices[0].message.content;return E(s,o,"MistralAI",p,m,i),m}catch(a){throw y(s,o,"MistralAI",a,i),a}}}export{U as MistralService};
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
import P from"chalk";import{Ollama as z}from"ollama";import{concatMap as v,from as k,map as w,catchError as A}from"rxjs";import{fromPromise as x}from"rxjs/internal/observable/innerFrom";import{fetch as L,Agent as T}from"undici";import{A as H,l as C,a as D,b as S,c as _,d as M,e as O}from"./ai.service-6f818099.mjs";import{o as R,t as U,n as E,c as N,P as b,D as B,b as G,g as J,H as K}from"./cli-9533dfd6.mjs";import"fs";import"path";import"@pacote/xxhash";import"winston";import"cleye";import"module";import"crypto";import"os";import"node:buffer";import"node:path";import"node:child_process";import"node:process";import"child_process";import"node:url";import"node:os";import"assert";import"events";import"node:fs";import"buffer";import"stream";import"util";import"node:util";import"inquirer";import"fs/promises";import"readline";import"figlet";import"gradient-string";import"ora";import"inquirer-reactive-list-prompt";import"winston-daily-rotate-file";import"axios";import"url";import"node:fs/promises";import"chokidar";import"rxjs/operators";class Q extends H{constructor(e){super(e),this.params=e,this.host=R,this.model="",this.key="",this.auth="",this.getTruncatedDiff=()=>{const t=this.params.stagedDiff.diff,s=this.params.config.maxDiffSize||0;if(s<=0)return t;const{diff:o}=U(t,s);return o},this.generateStreamingCommitMessage$=()=>{const{generate:t,type:s}=this.params.config;return this.createStreamingCommitMessages$(o=>{this.streamChunks(o).catch(n=>o.error(n))},s,t)},this.streamChunks=async t=>{const s=this.getTruncatedDiff(),{logging:o}=this.params.config,n=this.buildCommitPrompt();await this.checkIsAvailableOllama();const h=`Here is the diff: ${s}`,i=`Ollama_${this.model}`,f=`${this.host}/api/chat`,a=this.key?{Authorization:`${this.auth} 
${this.key}`}:{};C(s,"commit",i,this.model,f,a,o),D(s,"commit",i,n,h,o);const{numCtx:c,temperature:m,topP:p,timeout:d,maxTokens:r}=this.params.config,u={model:this.model,messages:[{role:"system",content:n},{role:"user",content:h}],stream:!0,keep_alive:d,options:{num_ctx:c,temperature:m,top_p:p,seed:E(10,1e3),num_predict:r??-1}};S(s,"commit",i,u,o);const l=Date.now();let g="";try{const $=await this.ollama.chat(u);for await(const F of $){const y=F.message.content;y&&(g+=y,t.next(y))}const I=Date.now()-l;_(s,"commit",i,{streamed:!0,totalLength:g.length},o),M(s,"commit",i,I,g,o),t.complete()}catch($){O(s,"commit",i,$,o),t.error($)}},this.setupFetch=(t,s={})=>L(t,{...s,dispatcher:new T({headersTimeout:this.params.config.timeout})}),this.colors={primary:"#FFF",secondary:"#000"},this.model=this.params.keyName,this.serviceName=P.bgHex(this.colors.primary).hex(this.colors.secondary).bold(`[${N(this.model)}]`),this.errorPrefix=P.red.bold(`[${N(this.model)}]`),this.host=this.params.config.host||R,this.auth=this.params.config.auth||"Bearer",this.key=this.params.config.key||"",this.ollama=new z({host:this.host,fetch:this.setupFetch,...this.key&&{headers:{Authorization:`${this.auth} ${this.key}`}}})}getServiceSpecificErrorMessage(e){const t=e.message||"";return t.includes("ECONNREFUSED")||t.includes("connection refused")?b.ollamaNotRunning():t.includes("model")&&(t.includes("not found")||t.includes("404"))?b.ollamaModelNotPulled(this.model):null}generateCommitMessage$(){return this.params.config.stream||!1?this.generateStreamingCommitMessage$():x(this.generateMessage("commit")).pipe(v(t=>k(t)),w(this.formatAsChoice),A(this.handleError$))}generateCodeReview$(){return x(this.generateMessage("review")).pipe(v(e=>k(e)),w(e=>({name:`${this.serviceName} ${e.title}`,short:e.title,value:e.value,description:e.value,isError:!1})),A(this.handleError$))}async generateMessage(e){const 
t=this.getTruncatedDiff(),{systemPrompt:s,systemPromptPath:o,codeReviewPromptPath:n,logging:h,locale:i,generate:f,type:a,maxLength:c}=this.params.config,m={...B,locale:i,maxLength:c,type:a,generate:f,systemPrompt:s,systemPromptPath:o,codeReviewPromptPath:n,vcs_branch:this.params.branchName||""},p=e==="review"?G(m):J(m);await this.checkIsAvailableOllama();const d=`Here is the diff: ${t}`,r=`Ollama_${this.model}`,u=`${this.host}/api/chat`,l=this.key?{Authorization:`${this.auth} ${this.key}`}:{};C(t,e,r,this.model,u,l,h),D(t,e,r,p,d,h);const g=await this.createChatCompletions(p,d,e);return e==="review"?this.sanitizeResponse(g):this.parseMessage(g,a,f)}async checkIsAvailableOllama(){const e=new K({method:"GET",baseURL:`${this.host}`,timeout:this.params.config.timeout});return this.key&&e.setHeaders({Authorization:`${this.auth} ${this.key}`}),(await e.execute()).data}async createChatCompletions(e,t,s){const{numCtx:o,temperature:n,topP:h,timeout:i,maxTokens:f,logging:a}=this.params.config,c=this.params.stagedDiff.diff,m=`Ollama_${this.model}`,p={model:this.model,messages:[{role:"system",content:e},{role:"user",content:t}],stream:!1,keep_alive:i,options:{num_ctx:o,temperature:n,top_p:h,seed:E(10,1e3),num_predict:f??-1}};S(c,s,m,p,a);const d=Date.now();try{const r=await this.ollama.chat(p),u=Date.now()-d,l=r.message.content;return _(c,s,m,{response:l,fullResponse:r},a),M(c,s,m,u,l,a),l}catch(r){throw O(c,s,m,r,a),r}}}export{Q as OllamaService};
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
import ae from"http";import ce from"https";import{i as y,j as ue,k as le,K as M}from"./cli-9533dfd6.mjs";import fe from"net";import pe from"tls";import de from"url";import he from"assert";import V from"tty";import me from"util";import Ce from"os";import ge from"events";import{l as ye,a as be,b as we,c as ve,d as Fe,e as _e}from"./ai.service-6f818099.mjs";var T={},k={exports:{}},E={exports:{}},S,W;function Oe(){if(W)return S;W=1;var t=1e3,n=t*60,i=n*60,e=i*24,l=e*7,s=e*365.25;S=function(u,r){r=r||{};var o=typeof u;if(o==="string"&&u.length>0)return d(u);if(o==="number"&&isFinite(u))return r.long?a(u):h(u);throw new Error("val is not a non-empty string or a valid number. val="+JSON.stringify(u))};function d(u){if(u=String(u),!(u.length>100)){var r=/^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec(u);if(r){var o=parseFloat(r[1]),c=(r[2]||"ms").toLowerCase();switch(c){case"years":case"year":case"yrs":case"yr":case"y":return o*s;case"weeks":case"week":case"w":return o*l;case"days":case"day":case"d":return o*e;case"hours":case"hour":case"hrs":case"hr":case"h":return o*i;case"minutes":case"minute":case"mins":case"min":case"m":return o*n;case"seconds":case"second":case"secs":case"sec":case"s":return o*t;case"milliseconds":case"millisecond":case"msecs":case"msec":case"ms":return o;default:return}}}}function h(u){var r=Math.abs(u);return r>=e?Math.round(u/e)+"d":r>=i?Math.round(u/i)+"h":r>=n?Math.round(u/n)+"m":r>=t?Math.round(u/t)+"s":u+"ms"}function a(u){var r=Math.abs(u);return r>=e?f(u,r,e,"day"):r>=i?f(u,r,i,"hour"):r>=n?f(u,r,n,"minute"):r>=t?f(u,r,t,"second"):u+" ms"}function f(u,r,o,c){var p=r>=o*1.5;return Math.round(u/o)+" "+c+(p?"s":"")}return S}var L,Y;function K(){if(Y)return L;Y=1;function t(n){e.debug=e,e.default=e,e.coerce=f,e.disable=h,e.enable=s,e.enabled=a,e.humanize=Oe(),e.destroy=u,Object.keys(n).forEach(r=>{e[r]=n[r]}),e.names=[],e.skips=[],e.formatters={};function 
i(r){let o=0;for(let c=0;c<r.length;c++)o=(o<<5)-o+r.charCodeAt(c),o|=0;return e.colors[Math.abs(o)%e.colors.length]}e.selectColor=i;function e(r){let o,c=null,p,m;function C(...g){if(!C.enabled)return;const b=C,F=Number(new Date),I=F-(o||F);b.diff=I,b.prev=o,b.curr=F,o=F,g[0]=e.coerce(g[0]),typeof g[0]!="string"&&g.unshift("%O");let v=0;g[0]=g[0].replace(/%([a-zA-Z%])/g,(O,w)=>{if(O==="%%")return"%";v++;const J=e.formatters[w];if(typeof J=="function"){const ie=g[v];O=J.call(b,ie),g.splice(v,1),v--}return O}),e.formatArgs.call(b,g),(b.log||e.log).apply(b,g)}return C.namespace=r,C.useColors=e.useColors(),C.color=e.selectColor(r),C.extend=l,C.destroy=e.destroy,Object.defineProperty(C,"enabled",{enumerable:!0,configurable:!1,get:()=>c!==null?c:(p!==e.namespaces&&(p=e.namespaces,m=e.enabled(r)),m),set:g=>{c=g}}),typeof e.init=="function"&&e.init(C),C}function l(r,o){const c=e(this.namespace+(typeof o>"u"?":":o)+r);return c.log=this.log,c}function s(r){e.save(r),e.namespaces=r,e.names=[],e.skips=[];const o=(typeof r=="string"?r:"").trim().replace(/\s+/g,",").split(",").filter(Boolean);for(const c of o)c[0]==="-"?e.skips.push(c.slice(1)):e.names.push(c)}function d(r,o){let c=0,p=0,m=-1,C=0;for(;c<r.length;)if(p<o.length&&(o[p]===r[c]||o[p]==="*"))o[p]==="*"?(m=p,C=c,p++):(c++,p++);else if(m!==-1)p=m+1,C++,c=C;else return!1;for(;p<o.length&&o[p]==="*";)p++;return p===o.length}function h(){const r=[...e.names,...e.skips.map(o=>"-"+o)].join(",");return e.enable(""),r}function a(r){for(const o of e.skips)if(d(r,o))return!1;for(const o of e.names)if(d(r,o))return!0;return!1}function f(r){return r instanceof Error?r.stack||r.message:r}function u(){console.warn("Instance method `debug.destroy()` is deprecated and no longer does anything. 
It will be removed in the next major version of `debug`.")}return e.enable(e.load()),e}return L=t,L}E.exports;var Z;function Ee(){return Z||(Z=1,function(t,n){n.formatArgs=e,n.save=l,n.load=s,n.useColors=i,n.storage=d(),n.destroy=(()=>{let a=!1;return()=>{a||(a=!0,console.warn("Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`."))}})(),n.colors=["#0000CC","#0000FF","#0033CC","#0033FF","#0066CC","#0066FF","#0099CC","#0099FF","#00CC00","#00CC33","#00CC66","#00CC99","#00CCCC","#00CCFF","#3300CC","#3300FF","#3333CC","#3333FF","#3366CC","#3366FF","#3399CC","#3399FF","#33CC00","#33CC33","#33CC66","#33CC99","#33CCCC","#33CCFF","#6600CC","#6600FF","#6633CC","#6633FF","#66CC00","#66CC33","#9900CC","#9900FF","#9933CC","#9933FF","#99CC00","#99CC33","#CC0000","#CC0033","#CC0066","#CC0099","#CC00CC","#CC00FF","#CC3300","#CC3333","#CC3366","#CC3399","#CC33CC","#CC33FF","#CC6600","#CC6633","#CC9900","#CC9933","#CCCC00","#CCCC33","#FF0000","#FF0033","#FF0066","#FF0099","#FF00CC","#FF00FF","#FF3300","#FF3333","#FF3366","#FF3399","#FF33CC","#FF33FF","#FF6600","#FF6633","#FF9900","#FF9933","#FFCC00","#FFCC33"];function i(){if(typeof window<"u"&&window.process&&(window.process.type==="renderer"||window.process.__nwjs))return!0;if(typeof navigator<"u"&&navigator.userAgent&&navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/))return!1;let a;return typeof document<"u"&&document.documentElement&&document.documentElement.style&&document.documentElement.style.WebkitAppearance||typeof window<"u"&&window.console&&(window.console.firebug||window.console.exception&&window.console.table)||typeof navigator<"u"&&navigator.userAgent&&(a=navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/))&&parseInt(a[1],10)>=31||typeof navigator<"u"&&navigator.userAgent&&navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)}function e(a){if(a[0]=(this.useColors?"%c":"")+this.namespace+(this.useColors?" 
%c":" ")+a[0]+(this.useColors?"%c ":" ")+"+"+t.exports.humanize(this.diff),!this.useColors)return;const f="color: "+this.color;a.splice(1,0,f,"color: inherit");let u=0,r=0;a[0].replace(/%[a-zA-Z%]/g,o=>{o!=="%%"&&(u++,o==="%c"&&(r=u))}),a.splice(r,0,f)}n.log=console.debug||console.log||(()=>{});function l(a){try{a?n.storage.setItem("debug",a):n.storage.removeItem("debug")}catch{}}function s(){let a;try{a=n.storage.getItem("debug")||n.storage.getItem("DEBUG")}catch{}return!a&&typeof process<"u"&&"env"in process&&(a=process.env.DEBUG),a}function d(){try{return localStorage}catch{}}t.exports=K()(n);const{formatters:h}=t.exports;h.j=function(a){try{return JSON.stringify(a)}catch(f){return"[UnexpectedJSONParseError]: "+f.message}}}(E,E.exports)),E.exports}var $={exports:{}},D,X;function $e(){return X||(X=1,D=(t,n=process.argv)=>{const i=t.startsWith("-")?"":t.length===1?"-":"--",e=n.indexOf(i+t),l=n.indexOf("--");return e!==-1&&(l===-1||e<l)}),D}var j,Q;function xe(){if(Q)return j;Q=1;const t=Ce,n=V,i=$e(),{env:e}=process;let l;i("no-color")||i("no-colors")||i("color=false")||i("color=never")?l=0:(i("color")||i("colors")||i("color=true")||i("color=always"))&&(l=1),"FORCE_COLOR"in e&&(e.FORCE_COLOR==="true"?l=1:e.FORCE_COLOR==="false"?l=0:l=e.FORCE_COLOR.length===0?1:Math.min(parseInt(e.FORCE_COLOR,10),3));function s(a){return a===0?!1:{level:a,hasBasic:!0,has256:a>=2,has16m:a>=3}}function d(a,f){if(l===0)return 0;if(i("color=16m")||i("color=full")||i("color=truecolor"))return 3;if(i("color=256"))return 2;if(a&&!f&&l===void 0)return 0;const u=l||0;if(e.TERM==="dumb")return u;if(process.platform==="win32"){const r=t.release().split(".");return Number(r[0])>=10&&Number(r[2])>=10586?Number(r[2])>=14931?3:2:1}if("CI"in e)return["TRAVIS","CIRCLECI","APPVEYOR","GITLAB_CI","GITHUB_ACTIONS","BUILDKITE"].some(r=>r in e)||e.CI_NAME==="codeship"?1:u;if("TEAMCITY_VERSION"in e)return/^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(e.TEAMCITY_VERSION)?1:0;if(e.COLORTERM==="truecolor")return 
3;if("TERM_PROGRAM"in e){const r=parseInt((e.TERM_PROGRAM_VERSION||"").split(".")[0],10);switch(e.TERM_PROGRAM){case"iTerm.app":return r>=3?3:2;case"Apple_Terminal":return 2}}return/-256(color)?$/i.test(e.TERM)?2:/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(e.TERM)||"COLORTERM"in e?1:u}function h(a){const f=d(a,a&&a.isTTY);return s(f)}return j={supportsColor:h,stdout:s(d(!0,n.isatty(1))),stderr:s(d(!0,n.isatty(2)))},j}$.exports;var ee;function Ae(){return ee||(ee=1,function(t,n){const i=V,e=me;n.init=u,n.log=h,n.formatArgs=s,n.save=a,n.load=f,n.useColors=l,n.destroy=e.deprecate(()=>{},"Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`."),n.colors=[6,2,3,4,5,1];try{const o=xe();o&&(o.stderr||o).level>=2&&(n.colors=[20,21,26,27,32,33,38,39,40,41,42,43,44,45,56,57,62,63,68,69,74,75,76,77,78,79,80,81,92,93,98,99,112,113,128,129,134,135,148,149,160,161,162,163,164,165,166,167,168,169,170,171,172,173,178,179,184,185,196,197,198,199,200,201,202,203,204,205,206,207,208,209,214,215,220,221])}catch{}n.inspectOpts=Object.keys(process.env).filter(o=>/^debug_/i.test(o)).reduce((o,c)=>{const p=c.substring(6).toLowerCase().replace(/_([a-z])/g,(C,g)=>g.toUpperCase());let m=process.env[c];return/^(yes|on|true|enabled)$/i.test(m)?m=!0:/^(no|off|false|disabled)$/i.test(m)?m=!1:m==="null"?m=null:m=Number(m),o[p]=m,o},{});function l(){return"colors"in n.inspectOpts?!!n.inspectOpts.colors:i.isatty(process.stderr.fd)}function s(o){const{namespace:c,useColors:p}=this;if(p){const m=this.color,C="\x1B[3"+(m<8?m:"8;5;"+m),g=` ${C};1m${c} \x1B[0m`;o[0]=g+o[0].split(`
|
|
2
|
+
`).join(`
|
|
3
|
+
`+g),o.push(C+"m+"+t.exports.humanize(this.diff)+"\x1B[0m")}else o[0]=d()+c+" "+o[0]}function d(){return n.inspectOpts.hideDate?"":new Date().toISOString()+" "}function h(...o){return process.stderr.write(e.formatWithOptions(n.inspectOpts,...o)+`
|
|
4
|
+
`)}function a(o){o?process.env.DEBUG=o:delete process.env.DEBUG}function f(){return process.env.DEBUG}function u(o){o.inspectOpts={};const c=Object.keys(n.inspectOpts);for(let p=0;p<c.length;p++)o.inspectOpts[c[p]]=n.inspectOpts[c[p]]}t.exports=K()(n);const{formatters:r}=t.exports;r.o=function(o){return this.inspectOpts.colors=this.useColors,e.inspect(o,this.inspectOpts).split(`
|
|
5
|
+
`).map(c=>c.trim()).join(" ")},r.O=function(o){return this.inspectOpts.colors=this.useColors,e.inspect(o,this.inspectOpts)}}($,$.exports)),$.exports}typeof process>"u"||process.type==="renderer"||process.browser===!0||process.__nwjs?k.exports=Ee():k.exports=Ae();var B=k.exports,N={};Object.defineProperty(N,"__esModule",{value:!0});function Pe(t){return function(n,i){return new Promise((e,l)=>{t.call(this,n,i,(s,d)=>{s?l(s):e(d)})})}}N.default=Pe;var te=y&&y.__importDefault||function(t){return t&&t.__esModule?t:{default:t}};const Re=ge,Ie=te(B),Me=te(N),x=Ie.default("agent-base");function Te(t){return!!t&&typeof t.addRequest=="function"}function U(){const{stack:t}=new Error;return typeof t!="string"?!1:t.split(`
|
|
6
|
+
`).some(n=>n.indexOf("(https.js:")!==-1||n.indexOf("node:https:")!==-1)}function R(t,n){return new R.Agent(t,n)}(function(t){class n extends Re.EventEmitter{constructor(e,l){super();let s=l;typeof e=="function"?this.callback=e:e&&(s=e),this.timeout=null,s&&typeof s.timeout=="number"&&(this.timeout=s.timeout),this.maxFreeSockets=1,this.maxSockets=1,this.maxTotalSockets=1/0,this.sockets={},this.freeSockets={},this.requests={},this.options={}}get defaultPort(){return typeof this.explicitDefaultPort=="number"?this.explicitDefaultPort:U()?443:80}set defaultPort(e){this.explicitDefaultPort=e}get protocol(){return typeof this.explicitProtocol=="string"?this.explicitProtocol:U()?"https:":"http:"}set protocol(e){this.explicitProtocol=e}callback(e,l,s){throw new Error('"agent-base" has no default implementation, you must subclass and override `callback()`')}addRequest(e,l){const s=Object.assign({},l);typeof s.secureEndpoint!="boolean"&&(s.secureEndpoint=U()),s.host==null&&(s.host="localhost"),s.port==null&&(s.port=s.secureEndpoint?443:80),s.protocol==null&&(s.protocol=s.secureEndpoint?"https:":"http:"),s.host&&s.path&&delete s.path,delete s.agent,delete s.hostname,delete s._defaultAgent,delete s.defaultPort,delete s.createConnection,e._last=!0,e.shouldKeepAlive=!1;let d=!1,h=null;const a=s.timeout||this.timeout,f=c=>{e._hadError||(e.emit("error",c),e._hadError=!0)},u=()=>{h=null,d=!0;const c=new Error(`A "socket" was not created for HTTP request before ${a}ms`);c.code="ETIMEOUT",f(c)},r=c=>{d||(h!==null&&(clearTimeout(h),h=null),f(c))},o=c=>{if(d)return;if(h!=null&&(clearTimeout(h),h=null),Te(c)){x("Callback returned another Agent instance %o",c.constructor.name),c.addRequest(e,s);return}if(c){c.once("free",()=>{this.freeSocket(c,s)}),e.onSocket(c);return}const p=new Error(`no Duplex stream was returned to agent-base for \`${e.method} ${e.path}\``);f(p)};if(typeof this.callback!="function"){f(new Error("`callback` is not 
defined"));return}this.promisifiedCallback||(this.callback.length>=3?(x("Converting legacy callback function to promise"),this.promisifiedCallback=Me.default(this.callback)):this.promisifiedCallback=this.callback),typeof a=="number"&&a>0&&(h=setTimeout(u,a)),"port"in s&&typeof s.port!="number"&&(s.port=Number(s.port));try{x("Resolving socket for %o request: %o",s.protocol,`${e.method} ${e.path}`),Promise.resolve(this.promisifiedCallback(e,s)).then(o,r)}catch(c){Promise.reject(c).catch(r)}}freeSocket(e,l){x("Freeing socket %o %o",e.constructor.name,l),e.destroy()}destroy(){x("Destroying agent %o",this.constructor.name)}}t.Agent=n,t.prototype=t.Agent.prototype})(R||(R={}));var ke=R,q={},Se=y&&y.__importDefault||function(t){return t&&t.__esModule?t:{default:t}};Object.defineProperty(q,"__esModule",{value:!0});const Le=Se(B),A=Le.default("https-proxy-agent:parse-proxy-response");function De(t){return new Promise((n,i)=>{let e=0;const l=[];function s(){const r=t.read();r?u(r):t.once("readable",s)}function d(){t.removeListener("end",a),t.removeListener("error",f),t.removeListener("close",h),t.removeListener("readable",s)}function h(r){A("onclose had error %o",r)}function a(){A("onend")}function f(r){d(),A("onerror %o",r),i(r)}function u(r){l.push(r),e+=r.length;const o=Buffer.concat(l,e);if(o.indexOf(`\r
|
|
7
|
+
\r
|
|
8
|
+
`)===-1){A("have not received end of HTTP headers yet..."),s();return}const p=o.toString("ascii",0,o.indexOf(`\r
|
|
9
|
+
`)),m=+p.split(" ")[1];A("got proxy server response: %o",p),n({statusCode:m,buffered:o})}t.on("error",f),t.on("close",h),t.on("end",a),s()})}q.default=De;var je=y&&y.__awaiter||function(t,n,i,e){function l(s){return s instanceof i?s:new i(function(d){d(s)})}return new(i||(i=Promise))(function(s,d){function h(u){try{f(e.next(u))}catch(r){d(r)}}function a(u){try{f(e.throw(u))}catch(r){d(r)}}function f(u){u.done?s(u.value):l(u.value).then(h,a)}f((e=e.apply(t,n||[])).next())})},_=y&&y.__importDefault||function(t){return t&&t.__esModule?t:{default:t}};Object.defineProperty(T,"__esModule",{value:!0});const re=_(fe),oe=_(pe),Be=_(de),Ne=_(he),Ue=_(B),qe=ke,He=_(q),P=Ue.default("https-proxy-agent:agent");class Ge extends qe.Agent{constructor(n){let i;if(typeof n=="string"?i=Be.default.parse(n):i=n,!i)throw new Error("an HTTP(S) proxy server `host` and `port` must be specified!");P("creating new HttpsProxyAgent instance: %o",i),super(i);const e=Object.assign({},i);this.secureProxy=i.secureProxy||Ve(e.protocol),e.host=e.hostname||e.host,typeof e.port=="string"&&(e.port=parseInt(e.port,10)),!e.port&&e.host&&(e.port=this.secureProxy?443:80),this.secureProxy&&!("ALPNProtocols"in e)&&(e.ALPNProtocols=["http 1.1"]),e.host&&e.path&&(delete e.path,delete e.pathname),this.proxy=e}callback(n,i){return je(this,void 0,void 0,function*(){const{proxy:e,secureProxy:l}=this;let s;l?(P("Creating `tls.Socket`: %o",e),s=oe.default.connect(e)):(P("Creating `net.Socket`: %o",e),s=re.default.connect(e));const d=Object.assign({},e.headers);let a=`CONNECT ${`${i.host}:${i.port}`} HTTP/1.1\r
|
|
10
|
+
`;e.auth&&(d["Proxy-Authorization"]=`Basic ${Buffer.from(e.auth).toString("base64")}`);let{host:f,port:u,secureEndpoint:r}=i;Je(u,r)||(f+=`:${u}`),d.Host=f,d.Connection="close";for(const C of Object.keys(d))a+=`${C}: ${d[C]}\r
|
|
11
|
+
`;const o=He.default(s);s.write(`${a}\r
|
|
12
|
+
`);const{statusCode:c,buffered:p}=yield o;if(c===200){if(n.once("socket",ze),i.secureEndpoint){P("Upgrading socket connection to TLS");const C=i.servername||i.host;return oe.default.connect(Object.assign(Object.assign({},We(i,"host","hostname","path","port")),{socket:s,servername:C}))}return s}s.destroy();const m=new re.default.Socket({writable:!1});return m.readable=!0,n.once("socket",C=>{P("replaying proxy buffer for failed request"),Ne.default(C.listenerCount("data")>0),C.push(p),C.push(null)}),m})}}T.default=Ge;function ze(t){t.resume()}function Je(t,n){return!!(!n&&t===80||n&&t===443)}function Ve(t){return typeof t=="string"?/^https:?$/i.test(t):!1}function We(t,...n){const i={};let e;for(e in t)n.includes(e)||(i[e]=t[e]);return i}var Ye=y&&y.__importDefault||function(t){return t&&t.__esModule?t:{default:t}};const H=Ye(T);function G(t){return new H.default(t)}(function(t){t.HttpsProxyAgent=H.default,t.prototype=H.default.prototype})(G||(G={}));var Ke=G,Ze=ue(Ke);const Xe=async(t,n,i,e,l,s,d)=>new Promise((h,a)=>{const f=JSON.stringify(e),r=(t.protocol.includes("https")?ce:ae).request({port:d||void 0,hostname:t.hostname,path:n,method:"POST",headers:{"Content-Type":"application/json","Content-Length":Buffer.byteLength(f),...i},timeout:l,agent:s?Ze(s):void 0},o=>{const c=[];o.on("data",p=>c.push(p)),o.on("end",()=>{h({request:r,response:o,data:Buffer.concat(c).toString()})})});r.on("error",a),r.on("timeout",()=>{r.destroy(),a(new M(`Time out error: request took over ${l}ms. Try increasing the \`timeout\` config`))}),r.write(f),r.end()}),Qe=async(t,n,i,e,l,s)=>{const d=new URL(t),{response:h,data:a}=await Xe(d,n,{Authorization:`Bearer ${i}`},e,l,s);if(!h.statusCode||h.statusCode<200||h.statusCode>299){let f=`API Error: ${h.statusCode} - ${h.statusMessage}`;throw a&&(f+=`
|
|
13
|
+
|
|
14
|
+
${a}`),h.statusCode===500&&(f+=`
|
|
15
|
+
|
|
16
|
+
Check the API status: ${t}`),new M(f)}return JSON.parse(a)},ne=t=>t.trim(),et=["gpt-5","gpt-5-mini","gpt-5-nano","gpt-5-codex","o1","o1-mini","o1-pro","o3","o3-mini","o3-pro","o4-mini"],se=t=>{const n=t.toLowerCase();return et.some(i=>n===i||n.startsWith(`${i}-`)||n.startsWith(`${i}.`))},tt=async(t,n,i,e,l,s,d,h,a,f,u,r,o,c)=>{try{const p=le(s,o),m=se(l),C={model:l,messages:[{role:"system",content:u},{role:"user",content:p}],stream:!1,n:1,frequency_penalty:0,presence_penalty:0,...m?{max_completion_tokens:h,temperature:1}:{max_tokens:h,top_p:f,temperature:a}},g=new URL(n),b=`${g.protocol}//${g.host}${i}`,F={Authorization:`Bearer ${e}`,"Content-Type":"application/json"};ye(s,o,t,l,b,F,r),be(s,o,t,u,p,r),we(s,o,t,C,r);const I=Date.now(),v=await Qe(n,i,e,C,d,c),z=Date.now()-I;ve(s,o,t,v,r);const O=v.choices.filter(w=>w.message?.content).map(w=>ne(w.message.content)).join();return Fe(s,o,t,z,O,r),v.choices.filter(w=>w.message?.content).map(w=>ne(w.message.content))}catch(p){_e(s,o,t,p,r);const m=p;throw m.code==="ENOTFOUND"?new M(`Error connecting to ${m.hostname} (${m.syscall})`):m}};export{tt as g,se as i};
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
import S from"chalk";import Q from"openai";import{concatMap as _,from as N,map as b,catchError as O}from"rxjs";import{fromPromise as R}from"rxjs/internal/observable/innerFrom";import{A as V,l as E,a as T,b as D,c as L,d as z,e as F}from"./ai.service-6f818099.mjs";import{i as B}from"./openai-8b372df6.mjs";import{D as H,g as U,a as X,c as K,b as Z}from"./cli-9533dfd6.mjs";import"fs";import"path";import"@pacote/xxhash";import"winston";import"http";import"https";import"net";import"tls";import"url";import"assert";import"tty";import"util";import"os";import"events";import"cleye";import"module";import"crypto";import"node:buffer";import"node:path";import"node:child_process";import"node:process";import"child_process";import"node:url";import"node:os";import"node:fs";import"buffer";import"stream";import"node:util";import"inquirer";import"fs/promises";import"readline";import"figlet";import"gradient-string";import"ora";import"inquirer-reactive-list-prompt";import"winston-daily-rotate-file";import"axios";import"node:fs/promises";import"chokidar";import"rxjs/operators";class j extends V{constructor(t){super(t),this.params=t,this.generateStreamingCommitMessage$=()=>{const{generate:m,type:o}=this.params.config;return this.createStreamingCommitMessages$(c=>{this.streamChunks(c).catch(r=>c.error(r))},o,m)},this.streamChunks=async m=>{const o=this.params.stagedDiff.diff,{systemPrompt:c,systemPromptPath:r,codeReviewPromptPath:A,logging:s,locale:g,temperature:d,generate:v,type:I,maxLength:f,timeout:u}=this.params.config,p=this.params.config.maxTokens,y={...H,locale:g,maxLength:f,type:I,generate:v,systemPrompt:c,systemPromptPath:r,codeReviewPromptPath:A,vcs_branch:this.params.branchName||""},i=U(y),P=`Here is the diff: ${o}`,a=this.params.keyName||"OpenAI-Compatible",C=`${this.params.config.url}${this.params.config.path}`,k={Authorization:`Bearer ${this.params.config.key}`,"Content-Type":"application/json"};E(o,"commit",a,this.params.config.model,C,k,s),T(o,"commit",a,i,P,s);const 
$=B(this.params.config.model),n={messages:[{role:"system",content:i},{role:"user",content:P}],model:this.params.config.model,stream:!0,...$?{max_completion_tokens:p,temperature:1}:{max_tokens:p,top_p:this.params.config.topP,temperature:d}};D(o,"commit",a,n,s);const l=Date.now();let h="";try{const W=await this.openAI.chat.completions.create(n,{timeout:u});for await(const M of W){const G=M.choices?.[0]?.delta?.content||"",J=M.choices?.[0]?.delta?.reasoning_content||"",w=`${G}${J}`;w&&(h+=w,m.next(w))}const Y=Date.now()-l;L(o,"commit",a,{streamed:!0,totalLength:h.length},s),z(o,"commit",a,Y,h,s),m.complete()}catch(x){F(o,"commit",a,x,s),m.error(x)}};const e=this.params.keyName||"OPENAI_COMPATIBLE";this.colors=X(e),this.serviceName=S.bgHex(this.colors.primary).hex(this.colors.secondary).bold(`[${K(e)}]`),this.errorPrefix=S.red.bold(`[${K(e)}]`),this.openAI=new Q({apiKey:this.params.config.key,baseURL:`${this.params.config.url}${this.params.config.path}`})}getServiceSpecificErrorMessage(t){const e=t.message||"";return e.includes("API key")||e.includes("api_key")?"Invalid API key. Check your OpenAI-compatible API key in configuration":e.includes("rate_limit")||e.includes("Rate limit")?"Rate limit exceeded. Wait a moment and try again, or check your service limits":e.includes("model")||e.includes("Model")?"Model not found or not accessible. Check if the model name is correct":e.includes("network")||e.includes("connection")?"Network error. Check your internet connection and API endpoint":e.includes("quota")||e.includes("usage")?"API quota exceeded. Check your usage limits":e.includes("403")||e.includes("Forbidden")?"Access denied. Your API key may not have permission for this model":e.includes("404")||e.includes("Not Found")?"Model or endpoint not found. Check your API configuration":e.includes("500")||e.includes("Internal Server Error")?"Server error. Try again later":e.includes("overloaded")||e.includes("capacity")?"Service is overloaded. 
Try again in a few minutes":null}generateCommitMessage$(){return this.params.config.stream||!1?this.generateStreamingCommitMessage$():R(this.generateMessage("commit")).pipe(_(e=>N(e)),b(this.formatAsChoice),O(this.handleError$))}generateCodeReview$(){return R(this.generateMessage("review")).pipe(_(t=>N(t)),b(t=>({name:`${this.serviceName} ${t.title}`,short:t.title,value:t.value,description:t.value,isError:!1})),O(this.handleError$))}async generateMessage(t){const e=this.params.stagedDiff.diff,{systemPrompt:m,systemPromptPath:o,codeReviewPromptPath:c,logging:r,locale:A,temperature:s,generate:g,type:d,maxLength:v,timeout:I}=this.params.config,f=this.params.config.maxTokens,u={...H,locale:A,maxLength:v,type:d,generate:g,systemPrompt:m,systemPromptPath:o,codeReviewPromptPath:c,vcs_branch:this.params.branchName||""},p=t==="review"?Z(u):U(u),y=`Here is the diff: ${e}`,i=this.params.keyName||"OpenAI-Compatible",P=`${this.params.config.url}${this.params.config.path}`,a={Authorization:`Bearer ${this.params.config.key}`,"Content-Type":"application/json"};E(e,t,i,this.params.config.model,P,a,r),T(e,t,i,p,y,r);const C=B(this.params.config.model),k={messages:[{role:"system",content:p},{role:"user",content:y}],model:this.params.config.model,stream:!1,...C?{max_completion_tokens:f,temperature:1}:{max_tokens:f,top_p:this.params.config.topP,temperature:s}};D(e,t,i,k,r);const $=Date.now();try{const n=await this.openAI.chat.completions.create(k,{timeout:I}),l=n.choices?.[0]?.message.content||"",h=Date.now()-$;return L(e,t,i,n,r),z(e,t,i,h,l,r),t==="review"?this.sanitizeResponse(l):this.parseMessage(l,d,g)}catch(n){throw F(e,t,i,n,r),n}}}export{j as OpenAICompatibleService};
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
import k from"chalk";import F from"openai";import{concatMap as T,from as $,map as x,catchError as M}from"rxjs";import{fromPromise as w}from"rxjs/internal/observable/innerFrom";import{A as U,l as z,a as H,b as q,c as B,d as K,e as J}from"./ai.service-6f818099.mjs";import{i as Q,g as V}from"./openai-8b372df6.mjs";import{D as S,g as O,b as W,m as R}from"./cli-9533dfd6.mjs";import"fs";import"path";import"@pacote/xxhash";import"winston";import"http";import"https";import"net";import"tls";import"url";import"assert";import"tty";import"util";import"os";import"events";import"cleye";import"module";import"crypto";import"node:buffer";import"node:path";import"node:child_process";import"node:process";import"child_process";import"node:url";import"node:os";import"node:fs";import"buffer";import"stream";import"node:util";import"inquirer";import"fs/promises";import"readline";import"figlet";import"gradient-string";import"ora";import"inquirer-reactive-list-prompt";import"winston-daily-rotate-file";import"axios";import"node:fs/promises";import"chokidar";import"rxjs/operators";class X extends U{constructor(t){super(t),this.params=t,this.generateStreamingCommitMessage$=()=>{const{generate:s,type:e}=this.params.config;return this.createStreamingCommitMessages$(i=>{this.streamChunks(i).catch(m=>i.error(m))},e,s)},this.streamChunks=async s=>{const e=this.params.stagedDiff.diff,{systemPrompt:i,systemPromptPath:m,codeReviewPromptPath:g,temperature:p,logging:o,locale:l,generate:f,type:u,maxLength:d,maxTokens:n,timeout:P}=this.params.config,c={...S,locale:l,maxLength:d,type:u,generate:f,systemPrompt:i,systemPromptPath:m,codeReviewPromptPath:g,vcs_branch:this.params.branchName||""},a=O(c),v=`Here is the diff: ${e}`,b=`${this.params.config.url}${this.params.config.path}`,G={Authorization:`Bearer ${this.params.config.key}`,"Content-Type":"application/json"};z(e,"commit","ChatGPT",this.params.config.model,b,G,o),H(e,"commit","ChatGPT",a,v,o);const 
_=Q(this.params.config.model),I={messages:[{role:"system",content:a},{role:"user",content:v}],model:this.params.config.model,stream:!0,..._?{max_completion_tokens:n,temperature:1}:{max_tokens:n,top_p:this.params.config.topP,temperature:p}};q(e,"commit","ChatGPT",I,o);const D=Date.now();let y="";try{const E=await this.openAI.chat.completions.create(I,{timeout:P});for await(const N of E){const C=N.choices?.[0]?.delta?.content||"";C&&(y+=C,s.next(C))}const L=Date.now()-D;B(e,"commit","ChatGPT",{streamed:!0,totalLength:y.length},o),K(e,"commit","ChatGPT",L,y,o),s.complete()}catch(A){J(e,"commit","ChatGPT",A,o),s.error(A)}},this.colors={primary:"#74AA9C",secondary:"#FFF"},this.serviceName=k.bgHex(this.colors.primary).hex(this.colors.secondary).bold("[ChatGPT]"),this.errorPrefix=k.red.bold("[ChatGPT]");const r=this.params.config.url||"https://api.openai.com",h=(this.params.config.path||"/v1/chat/completions").replace(/\/chat\/completions\/?$/,"");this.openAI=new F({apiKey:this.params.config.key,baseURL:`${r}${h}`})}getServiceSpecificErrorMessage(t){const r=t.message||"";return r.includes("API key")?"Invalid API key. Check your OpenAI API key in configuration":r.includes("quota")?"API quota exceeded. Check your OpenAI usage limits":r.includes("500")?"OpenAI server error. 
Try again later":null}generateCommitMessage$(){return this.params.config.stream||!1?this.generateStreamingCommitMessage$():w(this.generateMessage("commit")).pipe(T(r=>$(r)),x(this.formatAsChoice),M(this.handleError$))}generateCodeReview$(){return w(this.generateMessage("review")).pipe(T(t=>$(t)),x(t=>({name:`${this.serviceName} ${t.title}`,short:t.title,value:t.value,description:t.value,isError:!1})),M(this.handleError$))}async generateMessage(t){const r=this.params.stagedDiff.diff,{systemPrompt:h,systemPromptPath:s,codeReviewPromptPath:e,temperature:i,logging:m,locale:g,generate:p,type:o,maxLength:l,proxy:f,maxTokens:u,timeout:d}=this.params.config,n={...S,locale:g,maxLength:l,type:o,generate:p,systemPrompt:h,systemPromptPath:s,codeReviewPromptPath:e,vcs_branch:this.params.branchName||""},P=t==="review"?W(n):O(n),c=await V("ChatGPT",this.params.config.url,this.params.config.path,this.params.config.key,this.params.config.model,r,d,u,i,this.params.config.topP,P,m,t,f);return t==="review"?R(c.map(a=>this.sanitizeResponse(a))):R(c.map(a=>this.parseMessage(a,o,p)))}}export{X as OpenAIService};
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// PerplexityService — minified bundler output (aicommit2 dist) adapting the
// Perplexity chat-completions HTTP API to the app's AI-service interface.
// FIX(review): two literals were broken across physical lines in this chunk:
// (1) the double-quoted "Model or endpoint not found. …" message was split
// mid-string — a literal newline inside a double-quoted string is a syntax
// error in JavaScript; (2) the `Bearer ${key}` Authorization template literal
// contained a stray newline, yielding an invalid HTTP header value. Both are
// rejoined below. No other token is changed; line breaks are added only at
// statement/method boundaries, where they are syntactically neutral.
import d from"chalk";import{concatMap as g,from as u,map as f,catchError as y}from"rxjs";import{fromPromise as P}from"rxjs/internal/observable/innerFrom";import{A as b,l as I,a as k,b as M,c as E,e as x,d as N}from"./ai.service-6f818099.mjs";import{D as R,b as $,g as S,H as D}from"./cli-9533dfd6.mjs";
import"fs";import"path";import"@pacote/xxhash";import"winston";import"cleye";import"module";import"crypto";import"os";import"node:buffer";import"node:path";import"node:child_process";import"node:process";import"child_process";import"node:url";import"node:os";import"assert";import"events";import"node:fs";import"buffer";import"stream";import"util";import"node:util";import"inquirer";import"fs/promises";import"readline";import"figlet";import"gradient-string";import"ora";import"inquirer-reactive-list-prompt";import"winston-daily-rotate-file";import"axios";import"url";import"node:fs/promises";import"chokidar";import"rxjs/operators";
class O extends b{
// Sets service branding (badge colors, chalk prefixes) and caches the configured API key.
constructor(e){super(e),this.params=e,this.apiKey="",this.colors={primary:"#20808D",secondary:"#FFF"},this.serviceName=d.bgHex(this.colors.primary).hex(this.colors.secondary).bold("[Perplexity]"),this.errorPrefix=d.red.bold("[Perplexity]"),this.apiKey=this.params.config.key}
// Maps raw error text to a user-facing hint string; returns null when no pattern matches.
getServiceSpecificErrorMessage(e){const t=e.message||"";return t.includes("API key")||t.includes("api_key")?"Invalid API key. Check your Perplexity API key in configuration":t.includes("rate_limit")||t.includes("Rate limit")?"Rate limit exceeded. Wait a moment and try again, or upgrade your Perplexity plan":t.includes("model")||t.includes("Model")?"Model not found or not accessible. Check if the Perplexity model name is correct":t.includes("overloaded")||t.includes("capacity")?"Perplexity service is overloaded. Try again in a few minutes":t.includes("403")||t.includes("Forbidden")?"Access denied. Your API key may not have permission for this Perplexity model":t.includes("404")||t.includes("Not Found")?"Model or endpoint not found. Check your Perplexity model configuration":t.includes("500")||t.includes("Internal Server Error")?"Perplexity server error. Try again later":null}
// Observable pipeline: generate commit-message candidates and format them as prompt choices.
generateCommitMessage$(){return P(this.generateMessage("commit")).pipe(g(e=>u(e)),f(this.formatAsChoice),y(this.handleError$))}
// Observable pipeline: generate a code review and format it as a prompt choice object.
generateCodeReview$(){return P(this.generateMessage("review")).pipe(g(e=>u(e)),f(e=>({name:`${this.serviceName} ${e.title}`,short:e.title,value:e.value,description:e.value,isError:!1})),y(this.handleError$))}
// Best-effort: pull JSON object(s) out of an error string and merge them; fallback shape otherwise.
extractJSONFromError(e){const t=/[{[]{1}([,:{}[\]0-9.\-+Eaeflnr-u \n\r\t]|".*?")+[}\]]{1}/gis,r=e.match(t);return r?Object.assign({},...r.map(o=>JSON.parse(o))):{error:{message:"Unknown error"}}}
// Builds system/user prompts from config plus the staged diff, logs the request,
// delegates to createChatCompletions, and parses the response into messages.
async generateMessage(e){const t=this.params.stagedDiff.diff,{systemPrompt:r,systemPromptPath:o,codeReviewPromptPath:s,logging:n,locale:m,generate:a,type:c,maxLength:l}=this.params.config,i={...R,locale:m,maxLength:l,type:c,generate:a,systemPrompt:r,systemPromptPath:o,codeReviewPromptPath:s,vcs_branch:this.params.branchName||""},h=e==="review"?$(i):S(i),p=`Here is the diff: ${t}`,A=`${this.params.config.url||"https://api.perplexity.ai"}/chat/completions`,w={Authorization:`Bearer ${this.apiKey}`,"content-type":"application/json"};I(t,e,"Perplexity",this.params.config.model,A,w,n),k(t,e,"Perplexity",h,p,n);const C=await this.createChatCompletions(h,p,e);return this.parseMessage(C,c,a)}
// POSTs a non-streaming chat-completion request; throws when the response carries no content.
async createChatCompletions(e,t,r){const o=this.params.stagedDiff.diff,{logging:s}=this.params.config,n={model:this.params.config.model,messages:[{role:"system",content:e},{role:"user",content:t}],temperature:this.params.config.temperature,top_p:this.params.config.topP,max_tokens:this.params.config.maxTokens,stream:!1};M(o,r,"Perplexity",n,s);const m=Date.now();try{const a=this.params.config.url||"https://api.perplexity.ai",c=await new D({method:"POST",baseURL:`${a}/chat/completions`,timeout:this.params.config.timeout}).setHeaders({Authorization:`Bearer ${this.apiKey}`,"content-type":"application/json"}).setBody(n).execute(),l=Date.now()-m,i=c.data;if(E(o,r,"Perplexity",i,s),!i.choices||i.choices.length===0||!i.choices[0].message?.content)throw x(o,r,"Perplexity",{message:"No Content on response",result:i},s),new Error("No Content on response. Please open a Bug report");const p=i.choices[0].message.content;return N(o,r,"Perplexity",l,p,s),p}catch(a){throw x(o,r,"Perplexity",a,s),a}}
}
export{O as PerplexityService};
|