aicommit2 2.5.5 → 2.5.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +79 -1
- package/dist/ai.service-da8345b1.mjs +17 -0
- package/dist/{anthropic.service-d0af34bb.mjs → anthropic.service-bff34bb1.mjs} +1 -1
- package/dist/bedrock.service-419a856c.mjs +1 -0
- package/dist/cli-83e0874c.mjs +273 -0
- package/dist/cli.mjs +1 -1
- package/dist/codestral.service-59a12e78.mjs +1 -0
- package/dist/cohere.service-80e89971.mjs +1 -0
- package/dist/{deep-seek.service-f1fce159.mjs → deep-seek.service-86125741.mjs} +1 -1
- package/dist/{gemini.service-ea4399b1.mjs → gemini.service-2c3b0cd4.mjs} +1 -1
- package/dist/github-models.service-37847903.mjs +5 -0
- package/dist/{groq.service-b7d23bbc.mjs → groq.service-5cf5b8c9.mjs} +1 -1
- package/dist/hugging-face.service-966de0cf.mjs +2 -0
- package/dist/mistral.service-3389f888.mjs +1 -0
- package/dist/ollama.service-c5b469be.mjs +1 -0
- package/dist/{openai-8b372df6.mjs → openai-3092588f.mjs} +1 -1
- package/dist/{openai-compatible.service-bf183fc9.mjs → openai-compatible.service-599ec5d0.mjs} +1 -1
- package/dist/openai.service-df984ad9.mjs +1 -0
- package/dist/openrouter.service-107eec99.mjs +1 -0
- package/dist/perplexity.service-eda88944.mjs +1 -0
- package/package.json +1 -1
- package/dist/ai.service-6f818099.mjs +0 -11
- package/dist/bedrock.service-7f01f1d1.mjs +0 -1
- package/dist/cli-9533dfd6.mjs +0 -297
- package/dist/codestral.service-ccd13cd7.mjs +0 -1
- package/dist/cohere.service-e72f068e.mjs +0 -1
- package/dist/github-models.service-16ce699f.mjs +0 -5
- package/dist/hugging-face.service-760afbf2.mjs +0 -2
- package/dist/mistral.service-fe74f317.mjs +0 -1
- package/dist/ollama.service-3312d7f0.mjs +0 -1
- package/dist/openai.service-1d3ec4cc.mjs +0 -1
- package/dist/perplexity.service-85ac5631.mjs +0 -1
|
@@ -1 +0,0 @@
|
|
|
1
|
-
import f from"chalk";import{concatMap as u,from as g,map as C,catchError as v}from"rxjs";import{fromPromise as y}from"rxjs/internal/observable/innerFrom";import{A as M,l as k,a as R,b as _,c as x,e as P,d as E}from"./ai.service-6f818099.mjs";import{D as N,b as $,g as D,k as A,n as S,H as B}from"./cli-9533dfd6.mjs";import"fs";import"path";import"@pacote/xxhash";import"winston";import"cleye";import"module";import"crypto";import"os";import"node:buffer";import"node:path";import"node:child_process";import"node:process";import"child_process";import"node:url";import"node:os";import"assert";import"events";import"node:fs";import"buffer";import"stream";import"util";import"node:util";import"inquirer";import"fs/promises";import"readline";import"figlet";import"gradient-string";import"ora";import"inquirer-reactive-list-prompt";import"winston-daily-rotate-file";import"axios";import"url";import"node:fs/promises";import"chokidar";import"rxjs/operators";class U extends M{constructor(t){super(t),this.params=t,this.apiKey="",this.colors={primary:"#e28c58",secondary:"#fff"},this.serviceName=f.bgHex(this.colors.primary).hex(this.colors.secondary).bold("[Codestral]"),this.errorPrefix=f.red.bold("[Codestral]"),this.apiKey=this.params.config.key}getServiceSpecificErrorMessage(t){const e=t.message||"";return e.includes("API key")||e.includes("api_key")?"Invalid API key. Check your Codestral API key in configuration":e.includes("rate_limit")||e.includes("Rate limit")?"Rate limit exceeded. Wait a moment and try again, or upgrade your Codestral plan":e.includes("model")||e.includes("Model")?"Model not found or not accessible. Check if the Codestral model name is correct":e.includes("Invalid model type")?"Invalid model type. Use supported models: codestral-latest, codestral-2501":e.includes("overloaded")||e.includes("capacity")?"Codestral service is overloaded. Try again in a few minutes":e.includes("403")||e.includes("Forbidden")?"Access denied. 
Your API key may not have permission for this Codestral model":e.includes("404")||e.includes("Not Found")?"Model or endpoint not found. Check your Codestral model configuration":e.includes("500")||e.includes("Internal Server Error")?"Codestral server error. Try again later":null}generateCommitMessage$(){return y(this.generateMessage("commit")).pipe(u(t=>g(t)),C(this.formatAsChoice),v(this.handleError$))}generateCodeReview$(){return y(this.generateMessage("review")).pipe(u(t=>g(t)),C(t=>({name:`${this.serviceName} ${t.title}`,short:t.title,value:t.value,description:t.value,isError:!1})),v(this.handleError$))}async generateMessage(t){const e=this.params.stagedDiff.diff,{systemPrompt:r,systemPromptPath:s,codeReviewPromptPath:m,logging:i,locale:l,generate:a,type:n,maxLength:p}=this.params.config,o={...N,locale:l,maxLength:p,type:n,generate:a,systemPrompt:r,systemPromptPath:s,codeReviewPromptPath:m,vcs_branch:this.params.branchName||""},d=t==="review"?$(o):D(o);this.checkAvailableModels();const c=A(e,t),b=`${this.params.config.url||"https://codestral.mistral.ai"}/v1/chat/completions`,w={Authorization:`Bearer ${this.apiKey}`,"content-type":"application/json"};k(e,t,"Codestral",this.params.config.model,b,w,i),R(e,t,"Codestral",d,c,i);const h=await this.createChatCompletions(d,t);return t==="review"?this.sanitizeResponse(h):this.parseMessage(h,n,a)}checkAvailableModels(){if(["codestral-latest","codestral-2501"].includes(this.params.config.model))return!0;throw new Error("Invalid model type of Codestral AI")}async createChatCompletions(t,e){const 
r=this.params.stagedDiff.diff,{logging:s}=this.params.config,m=this.params.config.url||"https://codestral.mistral.ai",i={model:this.params.config.model,messages:[{role:"system",content:t},{role:"user",content:A(this.params.stagedDiff.diff,e)}],temperature:this.params.config.temperature,top_p:this.params.config.topP,max_tokens:this.params.config.maxTokens,stream:!1,safe_prompt:!1,random_seed:S(10,1e3)};e==="commit"&&(i.response_format={type:"json_object"}),_(r,e,"Codestral",i,s);const l=Date.now();try{const n=await new B({method:"POST",baseURL:`${m}/v1/chat/completions`,timeout:this.params.config.timeout}).setHeaders({Authorization:`Bearer ${this.apiKey}`,"content-type":"application/json"}).setBody(i).execute(),p=Date.now()-l,o=n.data;if(x(r,e,"Codestral",o,s),!o.choices||o.choices.length===0||!o.choices[0].message?.content)throw P(r,e,"Codestral",{message:"No Content on response",result:o},s),new Error("No Content on response. Please open a Bug report");const c=o.choices[0].message.content;return E(r,e,"Codestral",p,c,s),c}catch(a){throw P(r,e,"Codestral",a,s),a}}}export{U as CodestralService};
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
import l from"chalk";import{CohereClientV2 as x}from"cohere-ai";import{concatMap as h,from as d,map as g,catchError as u}from"rxjs";import{fromPromise as f}from"rxjs/internal/observable/innerFrom";import{A as E,l as S,a as $,b as D,c as N,d as _,e as V}from"./ai.service-6f818099.mjs";import{D as O,b as F,g as U,n as H}from"./cli-9533dfd6.mjs";import"fs";import"path";import"@pacote/xxhash";import"winston";import"cleye";import"module";import"crypto";import"os";import"node:buffer";import"node:path";import"node:child_process";import"node:process";import"child_process";import"node:url";import"node:os";import"assert";import"events";import"node:fs";import"buffer";import"stream";import"util";import"node:util";import"inquirer";import"fs/promises";import"readline";import"figlet";import"gradient-string";import"ora";import"inquirer-reactive-list-prompt";import"winston-daily-rotate-file";import"axios";import"url";import"node:fs/promises";import"chokidar";import"rxjs/operators";class L extends E{constructor(r){super(r),this.params=r,this.colors={primary:"#D18EE2",secondary:"#fff"},this.serviceName=l.bgHex(this.colors.primary).hex(this.colors.secondary).bold("[Cohere]"),this.errorPrefix=l.red.bold("[Cohere]"),this.cohere=new x({token:this.params.config.key})}isValidCohereV2Response(r){const e=r;return e?.message?.content!==void 0&&Array.isArray(e.message.content)&&e.message.content.length>0&&typeof e.message.content[0]?.text=="string"}getServiceSpecificErrorMessage(r){const e=r.message||"";return e.includes("API key")||e.includes("api_key")?"Invalid API key. Check your Cohere API key in configuration":e.includes("rate_limit")||e.includes("Rate limit")?"Rate limit exceeded. Wait a moment and try again, or upgrade your Cohere plan":e.includes("model")||e.includes("Model")?"Model not found or not accessible. Check if the Cohere model name is correct":e.includes("overloaded")||e.includes("capacity")?"Cohere service is overloaded. 
Try again in a few minutes":e.includes("403")||e.includes("Forbidden")?"Access denied. Your API key may not have permission for this Cohere model":e.includes("404")||e.includes("Not Found")?"Model or endpoint not found. Check your Cohere model configuration":e.includes("500")||e.includes("Internal Server Error")?"Cohere server error. Try again later":null}generateCommitMessage$(){return f(this.generateMessage("commit")).pipe(h(r=>d(r)),g(this.formatAsChoice),u(this.handleError$))}generateCodeReview$(){return f(this.generateMessage("review")).pipe(h(r=>d(r)),g(r=>({name:`${this.serviceName} ${r.title}`,short:r.title,value:r.value,description:r.value,isError:!1})),u(this.handleError$))}async generateMessage(r){const e=this.params.stagedDiff.diff,{systemPrompt:C,systemPromptPath:v,codeReviewPromptPath:y,logging:o,temperature:P,locale:A,generate:n,type:a,maxLength:I,maxTokens:w}=this.params.config,m={...O,locale:A,maxLength:I,type:a,generate:n,systemPrompt:C,systemPromptPath:v,codeReviewPromptPath:y,vcs_branch:this.params.branchName||""},i=r==="review"?F(m):U(m),c=`Here is the diff: ${e}`,k=[...i?[{role:"system",content:i}]:[],{role:"user",content:c}],M=`${this.params.config.url}/v2/chat`;S(e,r,"Cohere",this.params.config.model,M,{},o),$(e,r,"Cohere",i,c,o);const p={model:this.params.config.model,messages:k,max_tokens:w,temperature:P,seed:H(10,1e3),p:this.params.config.topP};D(e,r,"Cohere",p,o);const b=Date.now();try{const t=await this.cohere.chat(p,{timeoutInSeconds:Math.floor(this.params.config.timeout/1e3)}),R=Date.now()-b;if(!this.isValidCohereV2Response(t))throw new Error("Invalid response structure from Cohere v2 API");const s=t.message.content[0].text;return N(e,r,"Cohere",t,o),_(e,r,"Cohere",R,s,o),r==="review"?this.sanitizeResponse(s):this.parseMessage(s,a,n)}catch(t){throw V(e,r,"Cohere",t,o),t}}}export{L as CohereService};
|
|
@@ -1,5 +0,0 @@
|
|
|
1
|
-
import G from"chalk";import{concatMap as N,from as I,map as T,catchError as v}from"rxjs";import{fromPromise as D}from"rxjs/internal/observable/innerFrom";import{A as O,l as _,a as P,b as R,e as M,c as S,d as k}from"./ai.service-6f818099.mjs";import{i as $}from"./openai-8b372df6.mjs";import{D as x,b as y,g as C}from"./cli-9533dfd6.mjs";import"fs";import"path";import"@pacote/xxhash";import"winston";import"http";import"https";import"net";import"tls";import"url";import"assert";import"tty";import"util";import"os";import"events";import"cleye";import"module";import"crypto";import"node:buffer";import"node:path";import"node:child_process";import"node:process";import"child_process";import"node:url";import"node:os";import"node:fs";import"buffer";import"stream";import"node:util";import"inquirer";import"fs/promises";import"readline";import"figlet";import"gradient-string";import"ora";import"inquirer-reactive-list-prompt";import"winston-daily-rotate-file";import"axios";import"node:fs/promises";import"chokidar";import"rxjs/operators";class U extends O{constructor(e){super(e),this.params=e,this.baseURL="https://models.github.ai",this.colors={primary:"#24292e",secondary:"#FFF"},this.serviceName=G.bgHex(this.colors.primary).hex(this.colors.secondary).bold("[GitHub Models]"),this.errorPrefix=G.red.bold("[GitHub Models]")}getServiceSpecificErrorMessage(e){switch(e.code){case"MISSING_TOKEN":return"GitHub token is required. Run: aicommit2 github-login";case"AUTHENTICATION_FAILED":return"Authentication failed. Your GitHub token may be expired or invalid. Run: aicommit2 github-login";case"ACCESS_DENIED":return'Access denied. Make sure your GitHub token has "Models" permission in GitHub settings';case"NO_CONTENT":return"No content received from GitHub Models. 
The model may have failed to generate a response";default:return null}}generateCommitMessage$(){return D(this.generateMessage("commit")).pipe(N(e=>I(e)),T(this.formatAsChoice),v(this.handleError$))}generateCodeReview$(){return D(this.generateMessage("review")).pipe(N(e=>I(e)),T(e=>({name:`${this.serviceName} ${e.title}`,short:e.title,value:e.value,description:e.value,isError:!1})),v(this.handleError$))}async generateMessage(e){if(!this.params.config.key){const r=new Error("GitHub token is required for GitHub Models. Use: aicommit2 github-login");throw r.code="MISSING_TOKEN",r}const s=this.params.stagedDiff.diff,{systemPrompt:i,systemPromptPath:l,codeReviewPromptPath:w,locale:H,generate:p,type:u,maxLength:g}=this.params.config,a={...x,locale:H,maxLength:g,type:u,generate:p,systemPrompt:i,systemPromptPath:l,codeReviewPromptPath:w,vcs_branch:this.params.branchName||""},E=e==="review"?y(a):C(a),h=await this.makeRequest(E,s,e);return e==="review"?this.sanitizeResponse(h):this.parseMessage(h,u,p)}async makeRequest(e,s,i){const l=Array.isArray(this.params.config.model)?this.params.config.model[0]:this.params.config.model||"gpt-4o-mini",w=[{role:"system",content:e},{role:"user",content:i==="review"?s:`Here's the diff:
|
|
2
|
-
|
|
3
|
-
${s}`}],H=$(l),p={messages:w,model:l,stream:!1,...H?{max_completion_tokens:this.params.config.maxTokens||1024,temperature:1}:{max_tokens:this.params.config.maxTokens||1024,top_p:this.params.config.topP||.95,temperature:this.params.config.temperature||.7}},u=`${this.baseURL}/inference/chat/completions`,g={"Content-Type":"application/json",Accept:"application/vnd.github+json",Authorization:`Bearer ${this.params.config.key}`},{logging:a}=this.params.config;_(s,i,"GitHub Models",l,u,g,a),P(s,i,"GitHub Models",e,i==="review"?s:`Here's the diff:
|
|
4
|
-
|
|
5
|
-
${s}`,a),R(s,i,"GitHub Models",p,a);const E=new AbortController,h=setTimeout(()=>E.abort(),this.params.config.timeout);try{const r=Date.now(),o=await fetch(u,{method:"POST",headers:g,body:JSON.stringify(p),signal:E.signal});if(clearTimeout(h),!o.ok){const n=await o.text(),d={status:o.status,statusText:o.statusText,url:u,headers:Object.fromEntries(o.headers),body:n};M(s,i,"GitHub Models",d,a);let b=`GitHub API request failed: ${o.status} ${o.statusText}`;try{const t=JSON.parse(n);t.error?.message?b+=` - ${t.error.message}`:t.message&&(b+=` - ${t.message}`)}catch{n&&(b+=` - ${n}`)}if(o.status===401){const t=new Error("GitHub authentication failed. Please run: aicommit2 github-login");throw t.status=o.status,t.code="AUTHENTICATION_FAILED",t.content=n,t}else if(o.status===403){const t=new Error('GitHub Models access denied. Make sure your token has "Models" permission.');throw t.status=o.status,t.code="ACCESS_DENIED",t.content=n,t}else if(o.status===404){const t=new Error(`Model "${l}" not found. Please check the model name.`);throw t.status=o.status,t.code="MODEL_NOT_FOUND",t.content=n,t}else if(o.status===429){const t=new Error("Rate limit exceeded. 
Please try again later.");throw t.status=o.status,t.code="RATE_LIMIT_EXCEEDED",t.content=n,t}const f=new Error(b);throw f.status=o.status,f.code="API_ERROR",f.content=n,f}const m=await o.json(),A=Date.now()-r;S(s,i,"GitHub Models",m,a);const c=m.choices?.[0]?.message?.content?.trim();if(!c){M(s,i,"GitHub Models",{message:"No content found in GitHub Models response",result:m},a);const d=new Error("No response content received from GitHub Models");throw d.code="NO_CONTENT",d.content=JSON.stringify(m,null,2),d}return k(s,i,"GitHub Models",A,c,a),c}catch(r){if(clearTimeout(h),r instanceof Error&&r.name==="AbortError"){const A={message:`GitHub Models request timeout after ${this.params.config.timeout}ms`,error:r};M(s,i,"GitHub Models",A,a);const c=new Error(`GitHub Models request timed out after ${this.params.config.timeout}ms`);throw c.code="REQUEST_TIMEOUT",c.originalError=r,c}if(r.code)throw r;M(s,i,"GitHub Models",{message:"GitHub Models request failed",error:r},a);const m=new Error(`GitHub Models request failed: ${r instanceof Error?r.message:String(r)}`);throw m.code="REQUEST_FAILED",m.originalError=r,m}}}export{U as GitHubModelsService};
|
|
@@ -1,2 +0,0 @@
|
|
|
1
|
-
import b from"chalk";import{concatMap as k,from as D,map as P,catchError as E}from"rxjs";import{fromPromise as F}from"rxjs/internal/observable/innerFrom";import{A as $,l as H,a as R,c as U,d as S,e as N}from"./ai.service-6f818099.mjs";import{D as x,b as T,g as _}from"./cli-9533dfd6.mjs";import"fs";import"path";import"@pacote/xxhash";import"winston";import"cleye";import"module";import"crypto";import"os";import"node:buffer";import"node:path";import"node:child_process";import"node:process";import"child_process";import"node:url";import"node:os";import"assert";import"events";import"node:fs";import"buffer";import"stream";import"util";import"node:util";import"inquirer";import"fs/promises";import"readline";import"figlet";import"gradient-string";import"ora";import"inquirer-reactive-list-prompt";import"winston-daily-rotate-file";import"axios";import"url";import"node:fs/promises";import"chokidar";import"rxjs/operators";class j extends ${constructor(t){super(t),this.params=t,this.headers={},this.models=[],this.currentModelId=null,this.currentConversation=void 0,this.currentConversionID=void 0,this.cookie="",this.colors={primary:"#FED21F",secondary:"#000"},this.serviceName=b.bgHex(this.colors.primary).hex(this.colors.secondary).bold("[HuggingFace]"),this.errorPrefix=b.red.bold("[HuggingFace]"),this.cookie=this.params.config.cookie;const e=this.params.config.url||"https://huggingface.co";this.headers={accept:"*/*","accept-language":"en-US,en;q=0.9","sec-ch-ua":'"Chromium";v="116", "Not)A;Brand";v="24", "Google Chrome";v="116"',"sec-ch-ua-mobile":"?0","sec-ch-ua-platform":'"Windows"',"sec-fetch-dest":"empty","sec-fetch-mode":"cors","sec-fetch-site":"same-origin",origin:e,"Referrer-Policy":"strict-origin-when-cross-origin"}}getServiceSpecificErrorMessage(t){const e=t.message||"";return e.includes("cookie")||e.includes("Cookie")?"Invalid cookie. Check your Hugging Face session cookie in configuration":e.includes("model")||e.includes("Model")?"Model not found or not accessible. 
Check if the Hugging Face model name is correct":e.includes("conversation")||e.includes("conversion")?"Failed to create conversation. Try again or check your session":e.includes("401")||e.includes("Unauthorized")?"Authentication failed. Your Hugging Face session may have expired":e.includes("403")||e.includes("Forbidden")?"Access denied. You may not have permission to access this model":e.includes("404")||e.includes("Not Found")?"Model not found. Check your Hugging Face model configuration":e.includes("500")||e.includes("Internal Server Error")?"Hugging Face server error. Try again later":e.includes("overloaded")||e.includes("capacity")?"Hugging Face service is overloaded. Try again in a few minutes":null}generateCommitMessage$(){return F(this.generateMessage("commit")).pipe(k(t=>D(t)),P(this.formatAsChoice),E(this.handleError$))}generateCodeReview$(){return F(this.generateMessage("review")).pipe(k(t=>D(t)),P(t=>({name:`${this.serviceName} ${t.title}`,short:t.title,value:t.value,description:t.value,isError:!1})),E(this.handleError$))}async generateMessage(t){await this.initialize();const e=this.params.stagedDiff.diff,{systemPrompt:r,systemPromptPath:o,codeReviewPromptPath:p,logging:a,locale:i,generate:d,type:s,maxLength:u,temperature:m,maxTokens:v,topP:h,timeout:g}=this.params.config,n={...x,locale:i,maxLength:u,type:s,generate:d,systemPrompt:r,systemPromptPath:o,codeReviewPromptPath:p,vcs_branch:this.params.branchName||""},c=t==="review"?T(n):_(n),l=`Here is the diff: ${e}`,w=`${this.params.config.url||"https://huggingface.co"}/chat/conversation`,I={...this.headers,cookie:this.cookie};H(e,t,"HuggingFace",this.params.config.model,w,I,a),R(e,t,"HuggingFace",c,l,a);const A=Date.now();try{const y=await this.getNewChat(c),C=await(await this.sendMessage(l,y.id)).completeResponsePromise();await this.deleteConversation(y.id);const M=Date.now()-A;return 
U(e,t,"HuggingFace",{response:C},a),S(e,t,"HuggingFace",M,C,a),t==="review"?this.sanitizeResponse(C):this.parseMessage(C,s,d)}catch(y){throw N(e,t,"HuggingFace",y,a),y}}async initialize(){const t=await this.getRemoteLlms(),e=t.find(r=>r.name?.toLowerCase()===this.params.config.model.toLowerCase());if(e){this.currentModel=e,this.currentModelId=e.id;return}this.currentModel=t[0],this.currentModelId=t[0].id}async getRemoteLlms(){const t=this.params.config.url||"https://huggingface.co",e=await fetch(`${t}/chat/__data.json`,{headers:{...this.headers,cookie:this.cookie},body:null,method:"GET"});if(e.status!==200)throw new Error(`Failed to get remote LLMs with status code: ${e.status}`);const o=(await e.json()).nodes[0].data,p=o[o[0].models],a=[],i=d=>d===-1?null:o[d];for(const d of p){const s=o[d];if(o[s.unlisted])continue;const u={id:i(s.id),name:i(s.name),displayName:i(s.displayName),preprompt:i(s.preprompt),promptExamples:[],websiteUrl:i(s.websiteUrl),description:i(s.description),datasetName:i(s.datasetName),datasetUrl:i(s.datasetUrl),modelUrl:i(s.modelUrl),parameters:{}},m=i(s.promptExamples);if(m!==null){const g=m.map(n=>i(n));u.promptExamples=g.map(n=>({title:o[n.title],prompt:o[n.prompt]}))}const v=i(s.parameters),h={};for(const[g,n]of Object.entries(v)){if(n===-1){h[g]=null;continue}if(Array.isArray(o[n])){h[g]=o[n].map(c=>o[c]);continue}h[g]=o[n]}u.parameters=h,a.push(u)}return this.models=a,a}async getNewChat(t){const e={model:this.currentModelId,preprompt:t};let r=0;const o=this.params.config.url||"https://huggingface.co";for(;r<5;){const p=await fetch(`${o}/chat/conversation`,{headers:{...this.headers,"content-type":"application/json",cookie:this.cookie,Referer:`${o}/chat/`},body:JSON.stringify(e),method:"POST"}),{conversationId:a}=await p.json();if(a){this.currentConversionID=a;break}else r++}if(!this.currentConversionID)throw new Error("Failed to create new conversion");return await this.getConversationHistory(this.currentConversionID)}async 
getConversationHistory(t){if(!t)throw new Error("conversationId is required for getConversationHistory");const e=this.params.config.url||"https://huggingface.co",r=await fetch(`${e}/chat/conversation/${t}/__data.json`,{headers:{...this.headers,cookie:this.cookie,Referer:`${e}/chat/`},body:null,method:"GET"});if(r.status!=200)throw new Error("Unable get conversation details "+r);{const o=await r.json();return this.metadataParser(o,t)}}metadataParser(t,e){const r={id:"",model:"",systemPrompt:"",title:"",history:[]},o=t.nodes[1].data,p=o[o[0].model],a=o[o[0].preprompt],i=o[o[0].title],d=o[o[0].messages],s=[];for(const u of d){const m=o[u],v=new Date(o[m.createdAt][1]).getTime()/1e3,h=new Date(o[m.updatedAt][1]).getTime()/1e3;s.push({id:o[m.id],role:o[m.from],content:o[m.content],createdAt:v,updatedAt:h})}return r.id=e,r.model=p,r.systemPrompt=a,r.title=i,r.history=s,this.currentConversation=r,r}async sendMessage(t,e){if(t==="")throw new Error("the prompt can not be empty.");if(!e&&!this.currentConversionID?await this.getNewChat():e?(this.currentConversionID=e,await this.getConversationHistory(e)):this.currentConversionID&&await this.getConversationHistory(this.currentConversionID),!this.currentConversation)throw new Error("Failed to create new conversion");const r={inputs:t,id:this.currentConversation.history[this.currentConversation.history.length-1].id,is_retry:!1,is_continue:!1,web_search:!1,tools:[]},o=new FormData;o.append("data",JSON.stringify(r));const p=this.params.config.url||"https://huggingface.co",a=new AbortController,i=setTimeout(()=>a.abort(),this.params.config.timeout),d=await fetch(`${p}/chat/conversation/${this.currentConversionID}`,{headers:{...this.headers,cookie:this.cookie,Referer:`${p}/chat/conversation/${this.currentConversionID}`},body:o,method:"POST",signal:a.signal});clearTimeout(i);function s(n){try{const c=n.split(`
|
|
2
|
-
`),l=[];for(const f of c)f.trim()&&l.push(JSON.parse(f));return l}catch{return[{}]}}const u=new TextDecoder;let m="";const v=new TransformStream({async transform(n,c){const l=u.decode(n);try{const f=s(l);for(const w of f)w.type==="finalAnswer"?(m=w?.text||"",c.terminate()):w.type==="stream"&&c.enqueue(w?.token||"")}catch{throw new Error("Error during parsing response")}}}),h=d.body?.pipeThrough(v);async function g(){return new Promise(async(n,c)=>{try{if(!h)c("ModifiedStream undefined");else{const l=h.getReader();for(;;){const{done:f,value:w}=await l.read();if(f){n(m);break}}}}catch(l){c(l)}})}return{id:this.currentConversionID,stream:h,completeResponsePromise:g}}async deleteConversation(t){const e=this.params.config.url||"https://huggingface.co";return(await fetch(`${e}/chat/conversation/${t}`,{headers:{...this.headers,cookie:this.cookie,Referer:`${e}/chat/`},body:null,method:"DELETE"})).json()}}export{j as HuggingFaceService};
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
import f from"chalk";import{concatMap as g,from as u,map as A,catchError as M}from"rxjs";import{fromPromise as v}from"rxjs/internal/observable/innerFrom";import{A as C,l as k,a as $,b as R,c as x,e as y,d as E}from"./ai.service-6f818099.mjs";import{D as N,b as D,g as S,H as I,n as _}from"./cli-9533dfd6.mjs";import"fs";import"path";import"@pacote/xxhash";import"winston";import"cleye";import"module";import"crypto";import"os";import"node:buffer";import"node:path";import"node:child_process";import"node:process";import"child_process";import"node:url";import"node:os";import"assert";import"events";import"node:fs";import"buffer";import"stream";import"util";import"node:util";import"inquirer";import"fs/promises";import"readline";import"figlet";import"gradient-string";import"ora";import"inquirer-reactive-list-prompt";import"winston-daily-rotate-file";import"axios";import"url";import"node:fs/promises";import"chokidar";import"rxjs/operators";class U extends C{constructor(e){super(e),this.params=e,this.apiKey="",this.colors={primary:"#ff7000",secondary:"#fff"},this.serviceName=f.bgHex(this.colors.primary).hex(this.colors.secondary).bold("[MistralAI]"),this.errorPrefix=f.red.bold("[MistralAI]"),this.apiKey=this.params.config.key}getServiceSpecificErrorMessage(e){const t=e.message||"";return t.includes("API key")||t.includes("api_key")?"Invalid API key. Check your Mistral AI API key in configuration":t.includes("quota")||t.includes("usage")?"API quota exceeded. Check your Mistral AI usage limits":t.includes("model")||t.includes("Model")?"Model not found or not accessible. Check if the Mistral model name is correct":t.includes("403")||t.includes("Forbidden")?"Access denied. Your API key may not have permission for this Mistral model":t.includes("404")||t.includes("Not Found")?"Model or endpoint not found. Check your Mistral model configuration":t.includes("500")||t.includes("Internal Server Error")?"Mistral AI server error. 
Try again later":null}generateCommitMessage$(){return v(this.generateMessage("commit")).pipe(g(e=>u(e)),A(this.formatAsChoice),M(this.handleError$))}generateCodeReview$(){return v(this.generateMessage("review")).pipe(g(e=>u(e)),A(e=>({name:`${this.serviceName} ${e.title}`,short:e.title,value:e.value,description:e.value,isError:!1})),M(this.handleError$))}async generateMessage(e){const t=this.params.stagedDiff.diff,{systemPrompt:o,systemPromptPath:s,codeReviewPromptPath:i,logging:n,locale:l,generate:a,type:c,maxLength:p}=this.params.config,r={...N,locale:l,maxLength:p,type:c,generate:a,systemPrompt:o,systemPromptPath:s,codeReviewPromptPath:i,vcs_branch:this.params.branchName||""},h=e==="review"?D(r):S(r);await this.checkAvailableModels();const m=`Here is the diff: ${t}`,b=`${this.params.config.url||"https://api.mistral.ai"}/v1/chat/completions`,w={Authorization:`Bearer ${this.apiKey}`,"content-type":"application/json"};k(t,e,"MistralAI",this.params.config.model,b,w,n),$(t,e,"MistralAI",h,m,n);const d=await this.createChatCompletions(h,m,e);return e==="review"?this.sanitizeResponse(d):this.parseMessage(d,c,a)}async checkAvailableModels(){if((await this.getAvailableModels()).includes(this.params.config.model))return!0;throw new Error(`Invalid model type of Mistral AI: ${this.params.config.model}`)}async getAvailableModels(){const e=this.params.config.url||"https://api.mistral.ai";return(await new I({method:"GET",baseURL:`${e}/v1/models`,timeout:this.params.config.timeout}).setHeaders({Authorization:`Bearer ${this.apiKey}`,"content-type":"application/json"}).execute()).data.data.filter(o=>o.object==="model").map(o=>o.id)}async createChatCompletions(e,t,o){const 
s=this.params.stagedDiff.diff,{logging:i}=this.params.config,n={model:this.params.config.model,messages:[{role:"system",content:e},{role:"user",content:t}],temperature:this.params.config.temperature,top_p:this.params.config.topP,max_tokens:this.params.config.maxTokens,stream:!1,safe_prompt:!1,random_seed:_(10,1e3)};R(s,o,"MistralAI",n,i);const l=Date.now();try{const a=this.params.config.url||"https://api.mistral.ai",c=await new I({method:"POST",baseURL:`${a}/v1/chat/completions`,timeout:this.params.config.timeout}).setHeaders({Authorization:`Bearer ${this.apiKey}`,"content-type":"application/json"}).setBody(n).execute(),p=Date.now()-l,r=c.data;if(x(s,o,"MistralAI",r,i),!r.choices||r.choices.length===0||!r.choices[0].message?.content)throw y(s,o,"MistralAI",{message:"No Content on response",result:r},i),new Error("No Content on response. Please open a Bug report");const m=r.choices[0].message.content;return E(s,o,"MistralAI",p,m,i),m}catch(a){throw y(s,o,"MistralAI",a,i),a}}}export{U as MistralService};
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
// --- Minified bundle: OllamaService (dist/ollama.service-*.mjs) --------------
// AI provider adapter for a local/remote Ollama instance. Extends the shared
// abstract AI service base (imported as `H` from ai.service-*.mjs). The
// single-letter imports (C, D, S, _, M, O, R, U, E, N, b, B, G, J, K) are
// presumably shared logging / prompt-building / HTTP helpers from the bundled
// cli module — NOTE(review): their roles below are inferred from call sites;
// confirm against the unminified sources.
//
// constructor(params):
//   - model taken from params.keyName; host defaults to the bundled constant
//     `R` unless config.host is set; auth scheme defaults to "Bearer"; key is
//     optional.
//   - builds an `ollama` client whose fetch is undici's fetch with an Agent
//     using config.timeout as headersTimeout; an Authorization header is
//     attached only when a key is configured.
// getTruncatedDiff(): truncates the staged diff via helper `U` when
//   config.maxDiffSize > 0; otherwise returns the raw diff unchanged.
// generateStreamingCommitMessage$ / streamChunks(observer): streams chat
//   chunks from `{host}/api/chat` (stream:true), pushing each non-empty chunk
//   content to the observer; completes on stream end, forwards failures via
//   observer.error.
// getServiceSpecificErrorMessage(err): maps connection-refused errors to an
//   "Ollama not running" hint and model-not-found / 404 to a "model not
//   pulled" hint; returns null for anything unrecognized.
// generateCommitMessage$(): takes the streaming path when config.stream is
//   truthy, otherwise a one-shot rxjs pipeline mapping the generated messages
//   to choice objects; generateCodeReview$() is always one-shot.
// generateMessage(type): builds a review or commit system prompt, verifies the
//   Ollama host is reachable (checkIsAvailableOllama: GET on the host root
//   with optional auth header), then runs a non-streaming chat completion;
//   "review" responses are sanitized, commit responses parsed.
// createChatCompletions(): non-streaming this.ollama.chat call; the seed is
//   re-randomized via E(10,1e3) on every call (so repeated runs differ), and
//   num_predict = maxTokens ?? -1 (-1 presumably meaning unlimited).
// NOTE: the line breaks inside the minified text below (including one inside a
// template literal) are artifacts of the packaging diff; the code is kept
// byte-identical.
import P from"chalk";import{Ollama as z}from"ollama";import{concatMap as v,from as k,map as w,catchError as A}from"rxjs";import{fromPromise as x}from"rxjs/internal/observable/innerFrom";import{fetch as L,Agent as T}from"undici";import{A as H,l as C,a as D,b as S,c as _,d as M,e as O}from"./ai.service-6f818099.mjs";import{o as R,t as U,n as E,c as N,P as b,D as B,b as G,g as J,H as K}from"./cli-9533dfd6.mjs";import"fs";import"path";import"@pacote/xxhash";import"winston";import"cleye";import"module";import"crypto";import"os";import"node:buffer";import"node:path";import"node:child_process";import"node:process";import"child_process";import"node:url";import"node:os";import"assert";import"events";import"node:fs";import"buffer";import"stream";import"util";import"node:util";import"inquirer";import"fs/promises";import"readline";import"figlet";import"gradient-string";import"ora";import"inquirer-reactive-list-prompt";import"winston-daily-rotate-file";import"axios";import"url";import"node:fs/promises";import"chokidar";import"rxjs/operators";class Q extends H{constructor(e){super(e),this.params=e,this.host=R,this.model="",this.key="",this.auth="",this.getTruncatedDiff=()=>{const t=this.params.stagedDiff.diff,s=this.params.config.maxDiffSize||0;if(s<=0)return t;const{diff:o}=U(t,s);return o},this.generateStreamingCommitMessage$=()=>{const{generate:t,type:s}=this.params.config;return this.createStreamingCommitMessages$(o=>{this.streamChunks(o).catch(n=>o.error(n))},s,t)},this.streamChunks=async t=>{const s=this.getTruncatedDiff(),{logging:o}=this.params.config,n=this.buildCommitPrompt();await this.checkIsAvailableOllama();const h=`Here is the diff: ${s}`,i=`Ollama_${this.model}`,f=`${this.host}/api/chat`,a=this.key?{Authorization:`${this.auth} 
${this.key}`}:{};C(s,"commit",i,this.model,f,a,o),D(s,"commit",i,n,h,o);const{numCtx:c,temperature:m,topP:p,timeout:d,maxTokens:r}=this.params.config,u={model:this.model,messages:[{role:"system",content:n},{role:"user",content:h}],stream:!0,keep_alive:d,options:{num_ctx:c,temperature:m,top_p:p,seed:E(10,1e3),num_predict:r??-1}};S(s,"commit",i,u,o);const l=Date.now();let g="";try{const $=await this.ollama.chat(u);for await(const F of $){const y=F.message.content;y&&(g+=y,t.next(y))}const I=Date.now()-l;_(s,"commit",i,{streamed:!0,totalLength:g.length},o),M(s,"commit",i,I,g,o),t.complete()}catch($){O(s,"commit",i,$,o),t.error($)}},this.setupFetch=(t,s={})=>L(t,{...s,dispatcher:new T({headersTimeout:this.params.config.timeout})}),this.colors={primary:"#FFF",secondary:"#000"},this.model=this.params.keyName,this.serviceName=P.bgHex(this.colors.primary).hex(this.colors.secondary).bold(`[${N(this.model)}]`),this.errorPrefix=P.red.bold(`[${N(this.model)}]`),this.host=this.params.config.host||R,this.auth=this.params.config.auth||"Bearer",this.key=this.params.config.key||"",this.ollama=new z({host:this.host,fetch:this.setupFetch,...this.key&&{headers:{Authorization:`${this.auth} ${this.key}`}}})}getServiceSpecificErrorMessage(e){const t=e.message||"";return t.includes("ECONNREFUSED")||t.includes("connection refused")?b.ollamaNotRunning():t.includes("model")&&(t.includes("not found")||t.includes("404"))?b.ollamaModelNotPulled(this.model):null}generateCommitMessage$(){return this.params.config.stream||!1?this.generateStreamingCommitMessage$():x(this.generateMessage("commit")).pipe(v(t=>k(t)),w(this.formatAsChoice),A(this.handleError$))}generateCodeReview$(){return x(this.generateMessage("review")).pipe(v(e=>k(e)),w(e=>({name:`${this.serviceName} ${e.title}`,short:e.title,value:e.value,description:e.value,isError:!1})),A(this.handleError$))}async generateMessage(e){const 
t=this.getTruncatedDiff(),{systemPrompt:s,systemPromptPath:o,codeReviewPromptPath:n,logging:h,locale:i,generate:f,type:a,maxLength:c}=this.params.config,m={...B,locale:i,maxLength:c,type:a,generate:f,systemPrompt:s,systemPromptPath:o,codeReviewPromptPath:n,vcs_branch:this.params.branchName||""},p=e==="review"?G(m):J(m);await this.checkIsAvailableOllama();const d=`Here is the diff: ${t}`,r=`Ollama_${this.model}`,u=`${this.host}/api/chat`,l=this.key?{Authorization:`${this.auth} ${this.key}`}:{};C(t,e,r,this.model,u,l,h),D(t,e,r,p,d,h);const g=await this.createChatCompletions(p,d,e);return e==="review"?this.sanitizeResponse(g):this.parseMessage(g,a,f)}async checkIsAvailableOllama(){const e=new K({method:"GET",baseURL:`${this.host}`,timeout:this.params.config.timeout});return this.key&&e.setHeaders({Authorization:`${this.auth} ${this.key}`}),(await e.execute()).data}async createChatCompletions(e,t,s){const{numCtx:o,temperature:n,topP:h,timeout:i,maxTokens:f,logging:a}=this.params.config,c=this.params.stagedDiff.diff,m=`Ollama_${this.model}`,p={model:this.model,messages:[{role:"system",content:e},{role:"user",content:t}],stream:!1,keep_alive:i,options:{num_ctx:o,temperature:n,top_p:h,seed:E(10,1e3),num_predict:f??-1}};S(c,s,m,p,a);const d=Date.now();try{const r=await this.ollama.chat(p),u=Date.now()-d,l=r.message.content;return _(c,s,m,{response:l,fullResponse:r},a),M(c,s,m,u,l,a),l}catch(r){throw O(c,s,m,r,a),r}}}export{Q as OllamaService};
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
// --- Minified bundle: OpenAIService (dist/openai.service-*.mjs) --------------
// AI provider adapter for OpenAI (ChatGPT) built on the official `openai` SDK.
// Extends the shared abstract base (`U` from ai.service-*.mjs). Single-letter
// imports are presumably shared logging / prompt helpers from the bundled cli
// module — NOTE(review): roles inferred from call sites; confirm against
// unminified sources.
//
// constructor(params):
//   - baseURL = (config.url || "https://api.openai.com") + config.path with a
//     trailing "/chat/completions" stripped, because the SDK appends that
//     route itself.
// streamChunks(observer): streaming path via
//   this.openAI.chat.completions.create(..., stream:true). `Q(model)`
//   (imported from openai-*.mjs) appears to detect a model family that needs
//   different request options: when true the body uses max_completion_tokens
//   and a forced temperature of 1; otherwise max_tokens / top_p / the
//   configured temperature. Each delta chunk is forwarded to the observer;
//   errors are reported via observer.error.
// getServiceSpecificErrorMessage(err): maps "API key" / "quota" / "500"
//   substrings to user-facing hints; null otherwise.
// generateCommitMessage$(): streaming when config.stream, else a one-shot
//   rxjs pipeline; generateCodeReview$() is always one-shot.
// generateMessage(type): builds a review or commit system prompt, then
//   delegates the HTTP call to helper `V`, which returns an array of raw
//   completions; the result array is passed through `R` (presumably a
//   dedupe/flatten helper — TODO confirm) after sanitizing ("review") or
//   parsing (commit) each entry.
// NOTE: the line breaks inside the minified text below (one falls inside a
// plain string literal) are artifacts of the packaging diff; the code is kept
// byte-identical.
import k from"chalk";import F from"openai";import{concatMap as T,from as $,map as x,catchError as M}from"rxjs";import{fromPromise as w}from"rxjs/internal/observable/innerFrom";import{A as U,l as z,a as H,b as q,c as B,d as K,e as J}from"./ai.service-6f818099.mjs";import{i as Q,g as V}from"./openai-8b372df6.mjs";import{D as S,g as O,b as W,m as R}from"./cli-9533dfd6.mjs";import"fs";import"path";import"@pacote/xxhash";import"winston";import"http";import"https";import"net";import"tls";import"url";import"assert";import"tty";import"util";import"os";import"events";import"cleye";import"module";import"crypto";import"node:buffer";import"node:path";import"node:child_process";import"node:process";import"child_process";import"node:url";import"node:os";import"node:fs";import"buffer";import"stream";import"node:util";import"inquirer";import"fs/promises";import"readline";import"figlet";import"gradient-string";import"ora";import"inquirer-reactive-list-prompt";import"winston-daily-rotate-file";import"axios";import"node:fs/promises";import"chokidar";import"rxjs/operators";class X extends U{constructor(t){super(t),this.params=t,this.generateStreamingCommitMessage$=()=>{const{generate:s,type:e}=this.params.config;return this.createStreamingCommitMessages$(i=>{this.streamChunks(i).catch(m=>i.error(m))},e,s)},this.streamChunks=async s=>{const e=this.params.stagedDiff.diff,{systemPrompt:i,systemPromptPath:m,codeReviewPromptPath:g,temperature:p,logging:o,locale:l,generate:f,type:u,maxLength:d,maxTokens:n,timeout:P}=this.params.config,c={...S,locale:l,maxLength:d,type:u,generate:f,systemPrompt:i,systemPromptPath:m,codeReviewPromptPath:g,vcs_branch:this.params.branchName||""},a=O(c),v=`Here is the diff: ${e}`,b=`${this.params.config.url}${this.params.config.path}`,G={Authorization:`Bearer ${this.params.config.key}`,"Content-Type":"application/json"};z(e,"commit","ChatGPT",this.params.config.model,b,G,o),H(e,"commit","ChatGPT",a,v,o);const 
_=Q(this.params.config.model),I={messages:[{role:"system",content:a},{role:"user",content:v}],model:this.params.config.model,stream:!0,..._?{max_completion_tokens:n,temperature:1}:{max_tokens:n,top_p:this.params.config.topP,temperature:p}};q(e,"commit","ChatGPT",I,o);const D=Date.now();let y="";try{const E=await this.openAI.chat.completions.create(I,{timeout:P});for await(const N of E){const C=N.choices?.[0]?.delta?.content||"";C&&(y+=C,s.next(C))}const L=Date.now()-D;B(e,"commit","ChatGPT",{streamed:!0,totalLength:y.length},o),K(e,"commit","ChatGPT",L,y,o),s.complete()}catch(A){J(e,"commit","ChatGPT",A,o),s.error(A)}},this.colors={primary:"#74AA9C",secondary:"#FFF"},this.serviceName=k.bgHex(this.colors.primary).hex(this.colors.secondary).bold("[ChatGPT]"),this.errorPrefix=k.red.bold("[ChatGPT]");const r=this.params.config.url||"https://api.openai.com",h=(this.params.config.path||"/v1/chat/completions").replace(/\/chat\/completions\/?$/,"");this.openAI=new F({apiKey:this.params.config.key,baseURL:`${r}${h}`})}getServiceSpecificErrorMessage(t){const r=t.message||"";return r.includes("API key")?"Invalid API key. Check your OpenAI API key in configuration":r.includes("quota")?"API quota exceeded. Check your OpenAI usage limits":r.includes("500")?"OpenAI server error. 
Try again later":null}generateCommitMessage$(){return this.params.config.stream||!1?this.generateStreamingCommitMessage$():w(this.generateMessage("commit")).pipe(T(r=>$(r)),x(this.formatAsChoice),M(this.handleError$))}generateCodeReview$(){return w(this.generateMessage("review")).pipe(T(t=>$(t)),x(t=>({name:`${this.serviceName} ${t.title}`,short:t.title,value:t.value,description:t.value,isError:!1})),M(this.handleError$))}async generateMessage(t){const r=this.params.stagedDiff.diff,{systemPrompt:h,systemPromptPath:s,codeReviewPromptPath:e,temperature:i,logging:m,locale:g,generate:p,type:o,maxLength:l,proxy:f,maxTokens:u,timeout:d}=this.params.config,n={...S,locale:g,maxLength:l,type:o,generate:p,systemPrompt:h,systemPromptPath:s,codeReviewPromptPath:e,vcs_branch:this.params.branchName||""},P=t==="review"?W(n):O(n),c=await V("ChatGPT",this.params.config.url,this.params.config.path,this.params.config.key,this.params.config.model,r,d,u,i,this.params.config.topP,P,m,t,f);return t==="review"?R(c.map(a=>this.sanitizeResponse(a))):R(c.map(a=>this.parseMessage(a,o,p)))}}export{X as OpenAIService};
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
// --- Minified bundle: PerplexityService (dist/perplexity.service-*.mjs) ------
// AI provider adapter for the Perplexity chat-completions API. Extends the
// shared abstract base (`b` from ai.service-*.mjs). Non-streaming only: both
// generateCommitMessage$ and generateCodeReview$ are one-shot rxjs pipelines
// over generateMessage. Single-letter imports are presumably shared logging /
// prompt / HTTP helpers from the bundled cli module — NOTE(review): roles
// inferred from call sites; confirm against unminified sources.
//
// getServiceSpecificErrorMessage(err): long substring-match chain mapping API
//   key, rate-limit, model, capacity, 403/404/500 errors to user-facing hints;
//   returns null when nothing matches.
// extractJSONFromError(msg): finds every JSON-looking fragment in an error
//   string with a global regex and Object.assign-merges the parsed fragments
//   (later fragments' keys win); falls back to a generic "Unknown error"
//   shape when nothing matches.
// generateMessage(type): builds a review or commit system prompt, then runs a
//   non-streaming completion against
//   (config.url || "https://api.perplexity.ai")/chat/completions.
//   NOTE(review): unlike the sibling services, the result is passed through
//   parseMessage even when type === "review" (no sanitizeResponse branch) —
//   looks like an inconsistency; verify intent before relying on review
//   output here.
// createChatCompletions(): POSTs via the bundled request builder `D` with
//   Bearer auth; throws "No Content on response" when choices are missing or
//   empty, logging the failure first.
// NOTE: the line breaks inside the minified text below (one inside a plain
// string literal, one inside a template literal) are artifacts of the
// packaging diff; the code is kept byte-identical.
import d from"chalk";import{concatMap as g,from as u,map as f,catchError as y}from"rxjs";import{fromPromise as P}from"rxjs/internal/observable/innerFrom";import{A as b,l as I,a as k,b as M,c as E,e as x,d as N}from"./ai.service-6f818099.mjs";import{D as R,b as $,g as S,H as D}from"./cli-9533dfd6.mjs";import"fs";import"path";import"@pacote/xxhash";import"winston";import"cleye";import"module";import"crypto";import"os";import"node:buffer";import"node:path";import"node:child_process";import"node:process";import"child_process";import"node:url";import"node:os";import"assert";import"events";import"node:fs";import"buffer";import"stream";import"util";import"node:util";import"inquirer";import"fs/promises";import"readline";import"figlet";import"gradient-string";import"ora";import"inquirer-reactive-list-prompt";import"winston-daily-rotate-file";import"axios";import"url";import"node:fs/promises";import"chokidar";import"rxjs/operators";class O extends b{constructor(e){super(e),this.params=e,this.apiKey="",this.colors={primary:"#20808D",secondary:"#FFF"},this.serviceName=d.bgHex(this.colors.primary).hex(this.colors.secondary).bold("[Perplexity]"),this.errorPrefix=d.red.bold("[Perplexity]"),this.apiKey=this.params.config.key}getServiceSpecificErrorMessage(e){const t=e.message||"";return t.includes("API key")||t.includes("api_key")?"Invalid API key. Check your Perplexity API key in configuration":t.includes("rate_limit")||t.includes("Rate limit")?"Rate limit exceeded. Wait a moment and try again, or upgrade your Perplexity plan":t.includes("model")||t.includes("Model")?"Model not found or not accessible. Check if the Perplexity model name is correct":t.includes("overloaded")||t.includes("capacity")?"Perplexity service is overloaded. Try again in a few minutes":t.includes("403")||t.includes("Forbidden")?"Access denied. Your API key may not have permission for this Perplexity model":t.includes("404")||t.includes("Not Found")?"Model or endpoint not found. 
Check your Perplexity model configuration":t.includes("500")||t.includes("Internal Server Error")?"Perplexity server error. Try again later":null}generateCommitMessage$(){return P(this.generateMessage("commit")).pipe(g(e=>u(e)),f(this.formatAsChoice),y(this.handleError$))}generateCodeReview$(){return P(this.generateMessage("review")).pipe(g(e=>u(e)),f(e=>({name:`${this.serviceName} ${e.title}`,short:e.title,value:e.value,description:e.value,isError:!1})),y(this.handleError$))}extractJSONFromError(e){const t=/[{[]{1}([,:{}[\]0-9.\-+Eaeflnr-u \n\r\t]|".*?")+[}\]]{1}/gis,r=e.match(t);return r?Object.assign({},...r.map(o=>JSON.parse(o))):{error:{message:"Unknown error"}}}async generateMessage(e){const t=this.params.stagedDiff.diff,{systemPrompt:r,systemPromptPath:o,codeReviewPromptPath:s,logging:n,locale:m,generate:a,type:c,maxLength:l}=this.params.config,i={...R,locale:m,maxLength:l,type:c,generate:a,systemPrompt:r,systemPromptPath:o,codeReviewPromptPath:s,vcs_branch:this.params.branchName||""},h=e==="review"?$(i):S(i),p=`Here is the diff: ${t}`,A=`${this.params.config.url||"https://api.perplexity.ai"}/chat/completions`,w={Authorization:`Bearer ${this.apiKey}`,"content-type":"application/json"};I(t,e,"Perplexity",this.params.config.model,A,w,n),k(t,e,"Perplexity",h,p,n);const C=await this.createChatCompletions(h,p,e);return this.parseMessage(C,c,a)}async createChatCompletions(e,t,r){const o=this.params.stagedDiff.diff,{logging:s}=this.params.config,n={model:this.params.config.model,messages:[{role:"system",content:e},{role:"user",content:t}],temperature:this.params.config.temperature,top_p:this.params.config.topP,max_tokens:this.params.config.maxTokens,stream:!1};M(o,r,"Perplexity",n,s);const m=Date.now();try{const a=this.params.config.url||"https://api.perplexity.ai",c=await new D({method:"POST",baseURL:`${a}/chat/completions`,timeout:this.params.config.timeout}).setHeaders({Authorization:`Bearer 
${this.apiKey}`,"content-type":"application/json"}).setBody(n).execute(),l=Date.now()-m,i=c.data;if(E(o,r,"Perplexity",i,s),!i.choices||i.choices.length===0||!i.choices[0].message?.content)throw x(o,r,"Perplexity",{message:"No Content on response",result:i},s),new Error("No Content on response. Please open a Bug report");const p=i.choices[0].message.content;return N(o,r,"Perplexity",l,p,s),p}catch(a){throw x(o,r,"Perplexity",a,s),a}}}export{O as PerplexityService};
|