aicommit2 2.0.1 → 2.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3) hide show
  1. package/README.md +46 -33
  2. package/dist/cli.mjs +4 -4
  3. package/package.json +1 -1
package/README.md CHANGED
@@ -62,7 +62,7 @@ npm install -g aicommit2
62
62
 
63
63
  ```sh
64
64
  aicommit2 config set OPENAI.key=<your key>
65
- aicommit2 config set OLLAMA.model=<your local model>
65
+ aicommit2 config set ANTHROPIC.key=<your key>
66
66
  # ... (similar commands for other providers)
67
67
  ```
68
68
 
@@ -381,13 +381,13 @@ aicommit2 config set ignoreBody="true"
381
381
 
382
382
  ### OpenAI
383
383
 
384
- | Setting | Description | Default |
385
- |--------------------|---------------------------------------------------------------------|------------------------|
386
- | `key` | API key | - |
387
- | `model` | Model to use | `gpt-3.5-turbo` |
388
- | `url` | API endpoint URL | https://api.openai.com |
389
- | `path` | API path | /v1/chat/completions |
390
- | `proxy` | Proxy settings | - |
384
+ | Setting | Description | Default |
385
+ |--------------------|--------------------|------------------------|
386
+ | `key` | API key | - |
387
+ | `model` | Model to use | `gpt-3.5-turbo` |
388
+ | `url` | API endpoint URL | https://api.openai.com |
389
+ | `path` | API path | /v1/chat/completions |
390
+ | `proxy` | Proxy settings | - |
391
391
 
392
392
  ##### OPENAI.key
393
393
 
@@ -425,13 +425,26 @@ Default: `/v1/chat/completions`
425
425
 
426
426
  The OpenAI Path.
427
427
 
428
+ ##### OPENAI.topP
429
+
430
+ Default: `1`
431
+
432
+ The `top_p` parameter selects tokens whose combined probability meets a threshold. Please see [detail](https://platform.openai.com/docs/api-reference/chat/create#chat-create-top_p)
433
+
434
+ ```sh
435
+ aicommit2 config set OPENAI.topP=0
436
+ ```
437
+
438
+ > NOTE: If `topP` is less than or equal to 0, it does not deliver the `top_p` parameter to the request.
439
+ > - You can use it when you don't need a `top_p` parameter on other compatible platforms.
440
+
428
441
  ### Ollama
429
442
 
430
- | Setting | Description | Default |
431
- |--------------------|------------------------------------------------------------------------------------------------------------------|------------------------|
432
- | `model` | Model(s) to use (comma-separated list) | - |
433
- | `host` | Ollama host URL | http://localhost:11434 |
434
- | `timeout` | Request timeout (milliseconds) | 100_000 (100sec) |
443
+ | Setting | Description | Default |
444
+ |--------------------|----------------------------------------------|------------------------|
445
+ | `model` | Model(s) to use (comma-separated list) | - |
446
+ | `host` | Ollama host URL | http://localhost:11434 |
447
+ | `timeout` | Request timeout (milliseconds) | 100_000 (100sec) |
435
448
 
436
449
  ##### OLLAMA.model
437
450
 
@@ -474,10 +487,10 @@ Ollama does not support the following options in General Settings.
474
487
 
475
488
  ### HuggingFace
476
489
 
477
- | Setting | Description | Default |
478
- |--------------------|------------------------------------------------------------------------------------------------------------------|----------------------------------------|
479
- | `cookie` | Authentication cookie | - |
480
- | `model` | Model to use | `CohereForAI/c4ai-command-r-plus` |
490
+ | Setting | Description | Default |
491
+ |--------------------|----------------------------|----------------------------------------|
492
+ | `cookie` | Authentication cookie | - |
493
+ | `model` | Model to use | `CohereForAI/c4ai-command-r-plus` |
481
494
 
482
495
  ##### HUGGINGFACE.cookie
483
496
 
@@ -516,10 +529,10 @@ Huggingface does not support the following options in General Settings.
516
529
 
517
530
  ### Gemini
518
531
 
519
- | Setting | Description | Default |
520
- |--------------------|------------------------------------------------------------------------------------------------------------------|-------------------|
521
- | `key` | API key | - |
522
- | `model` | Model to use | `gemini-1.5-pro` |
532
+ | Setting | Description | Default |
533
+ |--------------------|------------------------|-------------------|
534
+ | `key` | API key | - |
535
+ | `model` | Model to use | `gemini-1.5-pro` |
523
536
 
524
537
  ##### GEMINI.key
525
538
 
@@ -581,10 +594,10 @@ Anthropic does not support the following options in General Settings.
581
594
 
582
595
  ### Mistral
583
596
 
584
- | Setting | Description | Default |
585
- |--------------------|------------------------------------------------------------------------------------------------------------------|----------------|
586
- | `key` | API key | - |
587
- | `model` | Model to use | `mistral-tiny` |
597
+ | Setting | Description | Default |
598
+ |--------------------|--------------|----------------|
599
+ | `key` | API key | - |
600
+ | `model` | Model to use | `mistral-tiny` |
588
601
 
589
602
  ##### MISTRAL.key
590
603
 
@@ -612,10 +625,10 @@ Supported:
612
625
 
613
626
  ### Codestral
614
627
 
615
- | Setting | Description | Default |
616
- |--------------------|------------------------------------------------------------------------------------------------------------------|--------------------|
617
- | `key` | API key | - |
618
- | `model` | Model to use | `codestral-latest` |
628
+ | Setting | Description | Default |
629
+ |--------------------|-----------------|--------------------|
630
+ | `key` | API key | - |
631
+ | `model` | Model to use | `codestral-latest` |
619
632
 
620
633
  ##### CODESTRAL.key
621
634
 
@@ -635,10 +648,10 @@ aicommit2 config set CODESTRAL.model="codestral-2405"
635
648
 
636
649
  #### Cohere
637
650
 
638
- | Setting | Description | Default |
639
- |--------------------|------------------------------------------------------------------------------------------------------------------|-------------|
640
- | `key` | API key | - |
641
- | `model` | Model to use | `command` |
651
+ | Setting | Description | Default |
652
+ |--------------------|--------------|-------------|
653
+ | `key` | API key | - |
654
+ | `model` | Model to use | `command` |
642
655
 
643
656
  ##### COHERE.key
644
657
 
package/dist/cli.mjs CHANGED
@@ -33,7 +33,7 @@ ${u.footer}`:""}`}));return s.length>n?s.slice(0,n):s}catch{const s=/\[[\s\S]*?\
33
33
 
34
34
  ${m.body}`:""}${m.footer?`
35
35
 
36
- ${m.footer}`:""}`}));return l.length>n?l.slice(0,n):l}catch{return[]}}}extractMessageAsType(t,r){switch(r){case"conventional":const n=/(\w+)(?:\(.*?\))?:\s*(.*)/,o=t.subject.match(n),s=o?o[0]:t.subject;return{...t,subject:this.normalizeCommitMessage(s)};case"gitmoji":const u=/:\w*:\s*(.*)/,i=t.subject.match(u);return{...t,subject:i?i[0].toLowerCase():t.subject};default:return t}}normalizeCommitMessage(t){const r=/^(\w+)(\(.*?\))?:\s(.*)$/,n=t.match(r);if(n){const[,o,s,u]=n,i=o.toLowerCase(),a=u.charAt(0).toLowerCase()+u.slice(1);t=`${i}${s||""}: ${a}`}return t}}var Jr="2.0.1",vi="A Reactive CLI that generates git commit messages with various AI";class F extends Error{}const Et=" ",me=e=>{e instanceof Error&&(e instanceof F||(e.stack&&console.error(C.dim(e.stack.split(`
36
+ ${m.footer}`:""}`}));return l.length>n?l.slice(0,n):l}catch{return[]}}}extractMessageAsType(t,r){switch(r){case"conventional":const n=/(\w+)(?:\(.*?\))?:\s*(.*)/,o=t.subject.match(n),s=o?o[0]:t.subject;return{...t,subject:this.normalizeCommitMessage(s)};case"gitmoji":const u=/:\w*:\s*(.*)/,i=t.subject.match(u);return{...t,subject:i?i[0].toLowerCase():t.subject};default:return t}}normalizeCommitMessage(t){const r=/^(\w+)(\(.*?\))?:\s(.*)$/,n=t.match(r);if(n){const[,o,s,u]=n,i=o.toLowerCase(),a=u.charAt(0).toLowerCase()+u.slice(1);t=`${i}${s||""}: ${a}`}return t}}var Jr="2.0.2",vi="A Reactive CLI that generates git commit messages with various AI";class F extends Error{}const Et=" ",me=e=>{e instanceof Error&&(e instanceof F||(e.stack&&console.error(C.dim(e.stack.split(`
37
37
  `).slice(1).join(`
38
38
  `))),console.error(`
39
39
  ${Et}${C.dim(`aicommit2 v${Jr}`)}`),console.error(`
@@ -95,11 +95,11 @@ ${Oe(n,o)}`}};class Ii extends U{constructor(t){super(t),this.params=t,this.hand
95
95
 
96
96
  ${a}`),i.statusCode===500&&(l+=`
97
97
 
98
- Check the API status: https://status.openai.com`),new F(l)}return JSON.parse(a)},ke=e=>e.trim().replace(/[\n\r]/g,"").replace(/(\w)\.$/,"$1"),fa=async(e,t,r,n,o,s,u,i,a,l,m,c,D,p)=>{try{const d=await pa(e,t,r,{model:n,messages:[{role:"system",content:c},{role:"user",content:`Here are diff: ${s}`}],temperature:m,top_p:1,frequency_penalty:0,presence_penalty:0,max_tokens:l,stream:!1,n:1},a,p),h=d.choices.filter(g=>g.message?.content).map(g=>ke(g.message.content)).join();return D&&H("OPEN AI",s,c,h),d.choices.filter(g=>g.message?.content).map(g=>ke(g.message.content))}catch(d){const h=d;throw h.code==="ENOTFOUND"?new F(`Error connecting to ${h.hostname} (${h.syscall})`):h}};class da extends U{constructor(t){super(t),this.params=t,this.handleError$=r=>{let n="An error occurred";const o=/"message":\s*"([^"]*)"/,s=r.message.match(o);s&&s[1]&&(n=s[1]);const u=`${r.status} ${n}`;return O({name:`${this.errorPrefix} ${u}`,value:n,isError:!0,disabled:!0})},this.colors={primary:"#f55036",secondary:"#fff"},this.serviceName=C.bgHex(this.colors.primary).hex(this.colors.secondary).bold("[Groq]"),this.errorPrefix=C.red.bold("[Groq]"),this.groq=new qn({apiKey:this.params.config.key})}generateCommitMessage$(){return j(this.generateMessage()).pipe(R(t=>M(t)),T(t=>({name:`${this.serviceName} ${t.title}`,short:t.title,value:this.params.config.ignoreBody?t.title:t.value,description:this.params.config.ignoreBody?"":t.value,isError:!1})),k(this.handleError$))}async generateMessage(){try{const t=this.params.stagedDiff.diff,{systemPrompt:r,systemPromptPath:n,logging:o,locale:s,temperature:u,generate:i,type:a,maxLength:l}=this.params.config,m=this.params.config.maxTokens,c={...L,locale:s,maxLength:l,type:a,generate:i,systemPrompt:r,systemPromptPath:n},D=q(c),p=await this.groq.chat.completions.create({messages:[{role:"system",content:D},{role:"user",content:`Here are diff: 
${t}`}],model:this.params.config.model,max_tokens:m,temperature:u},{timeout:this.params.config.timeout}),d=p.choices.filter(g=>g.message?.content).map(g=>ke(g.message.content)).join();o&&H("Groq",t,D,d);const h=p.choices.filter(g=>g.message?.content).map(g=>ke(g.message.content));return Ie(h.map(g=>this.parseMessage(g,a,i)))}catch(t){throw t}}}class ha extends U{constructor(t){super(t),this.params=t,this.headers={accept:"*/*","accept-language":"en-US,en;q=0.9","sec-ch-ua":'"Chromium";v="116", "Not)A;Brand";v="24", "Google Chrome";v="116"',"sec-ch-ua-mobile":"?0","sec-ch-ua-platform":'"Windows"',"sec-fetch-dest":"empty","sec-fetch-mode":"cors","sec-fetch-site":"same-origin",origin:"https://huggingface.co","Referrer-Policy":"strict-origin-when-cross-origin"},this.models=[],this.currentModelId=null,this.currentConversation=void 0,this.currentConversionID=void 0,this.cookie="",this.colors={primary:"#FED21F",secondary:"#000"},this.serviceName=C.bgHex(this.colors.primary).hex(this.colors.secondary).bold("[HuggingFace]"),this.errorPrefix=C.red.bold("[HuggingFace]"),this.cookie=this.params.config.cookie}generateCommitMessage$(){return j(this.generateMessage()).pipe(R(t=>M(t)),T(t=>({name:`${this.serviceName} ${t.title}`,short:t.title,value:this.params.config.ignoreBody?t.title:t.value,description:this.params.config.ignoreBody?"":t.value,isError:!1})),k(this.handleError$))}async generateMessage(){try{await this.intialize();const t=this.params.stagedDiff.diff,{systemPrompt:r,systemPromptPath:n,logging:o,locale:s,generate:u,type:i,maxLength:a}=this.params.config,l={...L,locale:s,maxLength:a,type:i,generate:u,systemPrompt:r,systemPromptPath:n},m=q(l),c=await this.getNewChat(m),p=await(await this.sendMessage(`Here are diff: ${t}`,c.id)).completeResponsePromise();return o&&H("HuggingFace",t,m,p),this.parseMessage(p,i,u)}catch(t){const r=t;throw r.code==="ENOTFOUND"?new F(`Error connecting to ${r.hostname} (${r.syscall})`):r}}async intialize(){const t=await 
this.getRemoteLlms(),r=t.find(n=>n.name?.toLowerCase()===this.params.config.model.toLowerCase());if(r){this.currentModel=r,this.currentModelId=r.id;return}this.currentModel=t[0],this.currentModelId=t[0].id}async getRemoteLlms(){const t=await fetch("https://huggingface.co/chat/__data.json",{headers:{...this.headers,cookie:this.cookie},body:null,method:"GET"});if(t.status!==200)throw new Error(`Failed to get remote LLMs with status code: ${t.status}`);const n=(await t.json()).nodes[0].data,o=n[n[0].models],s=[],u=i=>i===-1?null:n[i];for(const i of o){const a=n[i];if(n[a.unlisted])continue;const l={id:u(a.id),name:u(a.name),displayName:u(a.displayName),preprompt:u(a.preprompt),promptExamples:[],websiteUrl:u(a.websiteUrl),description:u(a.description),datasetName:u(a.datasetName),datasetUrl:u(a.datasetUrl),modelUrl:u(a.modelUrl),parameters:{}},m=u(a.promptExamples);if(m!==null){const p=m.map(d=>u(d));l.promptExamples=p.map(d=>({title:n[d.title],prompt:n[d.prompt]}))}const c=u(a.parameters),D={};for(const[p,d]of Object.entries(c)){if(d===-1){D[p]=null;continue}if(Array.isArray(n[d])){D[p]=n[d].map(h=>n[h]);continue}D[p]=n[d]}l.parameters=D,s.push(l)}return this.models=s,s}async getNewChat(t){const r={model:this.currentModelId,preprompt:t};let n=0;for(;n<5;){const o=await fetch("https://huggingface.co/chat/conversation",{headers:{...this.headers,"content-type":"application/json",cookie:this.cookie,Referer:"https://huggingface.co/chat/"},body:JSON.stringify(r),method:"POST"}),{conversationId:s}=await o.json();if(s){this.currentConversionID=s;break}else n++}if(!this.currentConversionID)throw new Error("Failed to create new conversion");return await this.getConversationHistory(this.currentConversionID)}async getConversationHistory(t){if(!t)throw new Error("conversationId is required for getConversationHistory");const r=await 
fetch("https://huggingface.co/chat/conversation/"+t+"/__data.json",{headers:{...this.headers,cookie:this.cookie,Referer:"https://huggingface.co/chat/"},body:null,method:"GET"});if(r.status!=200)throw new Error("Unable get conversation details "+r);{const n=await r.json();return this.metadataParser(n,t)}}metadataParser(t,r){const n={id:"",model:"",systemPrompt:"",title:"",history:[]},o=t.nodes[1].data,s=o[o[0].model],u=o[o[0].preprompt],i=o[o[0].title],a=o[o[0].messages],l=[];for(const m of a){const c=o[m],D=new Date(o[c.createdAt][1]).getTime()/1e3,p=new Date(o[c.updatedAt][1]).getTime()/1e3;l.push({id:o[c.id],role:o[c.from],content:o[c.content],createdAt:D,updatedAt:p})}return n.id=r,n.model=s,n.systemPrompt=u,n.title=i,n.history=l,this.currentConversation=n,n}async sendMessage(t,r){if(t==="")throw new Error("the prompt can not be empty.");if(!r&&!this.currentConversionID?await this.getNewChat():r?(this.currentConversionID=r,await this.getConversationHistory(r)):this.currentConversionID&&await this.getConversationHistory(this.currentConversionID),!this.currentConversation)throw new Error("Failed to create new conversion");const n={inputs:t,id:this.currentConversation.history[this.currentConversation.history.length-1].id,is_retry:!1,is_continue:!1,web_search:!1,tools:{}},o=new FormData;o.append("data",JSON.stringify(n));const s=await fetch("https://huggingface.co/chat/conversation/"+this.currentConversionID,{headers:{...this.headers,cookie:this.cookie,Referer:"https://huggingface.co/chat/conversation/"+this.currentConversionID},body:o,method:"POST"});function u(D){try{const p=D.split(`
98
+ Check the API status: https://status.openai.com`),new F(l)}return JSON.parse(a)},ke=e=>e.trim().replace(/[\n\r]/g,"").replace(/(\w)\.$/,"$1"),fa=async(e,t,r,n,o,s,u,i,a,l,m,c)=>{try{const D={model:n,messages:[{role:"system",content:l},{role:"user",content:`Here are diff: ${o}`}],temperature:i,max_tokens:u,stream:!1,n:1,top_p:a,frequency_penalty:0,presence_penalty:0};a<=0&&delete D.top_p;const p=await pa(e,t,r,D,s,c),d=p.choices.filter(h=>h.message?.content).map(h=>ke(h.message.content)).join();return m&&H("OPENAI",o,l,d),p.choices.filter(h=>h.message?.content).map(h=>ke(h.message.content))}catch(D){const p=D;throw p.code==="ENOTFOUND"?new F(`Error connecting to ${p.hostname} (${p.syscall})`):p}};class da extends U{constructor(t){super(t),this.params=t,this.handleError$=r=>{let n="An error occurred";const o=/"message":\s*"([^"]*)"/,s=r.message.match(o);s&&s[1]&&(n=s[1]);const u=`${r.status} ${n}`;return O({name:`${this.errorPrefix} ${u}`,value:n,isError:!0,disabled:!0})},this.colors={primary:"#f55036",secondary:"#fff"},this.serviceName=C.bgHex(this.colors.primary).hex(this.colors.secondary).bold("[Groq]"),this.errorPrefix=C.red.bold("[Groq]"),this.groq=new qn({apiKey:this.params.config.key})}generateCommitMessage$(){return j(this.generateMessage()).pipe(R(t=>M(t)),T(t=>({name:`${this.serviceName} ${t.title}`,short:t.title,value:this.params.config.ignoreBody?t.title:t.value,description:this.params.config.ignoreBody?"":t.value,isError:!1})),k(this.handleError$))}async generateMessage(){try{const t=this.params.stagedDiff.diff,{systemPrompt:r,systemPromptPath:n,logging:o,locale:s,temperature:u,generate:i,type:a,maxLength:l}=this.params.config,m=this.params.config.maxTokens,c={...L,locale:s,maxLength:l,type:a,generate:i,systemPrompt:r,systemPromptPath:n},D=q(c),p=await this.groq.chat.completions.create({messages:[{role:"system",content:D},{role:"user",content:`Here are diff: 
${t}`}],model:this.params.config.model,max_tokens:m,temperature:u},{timeout:this.params.config.timeout}),d=p.choices.filter(g=>g.message?.content).map(g=>ke(g.message.content)).join();o&&H("Groq",t,D,d);const h=p.choices.filter(g=>g.message?.content).map(g=>ke(g.message.content));return Ie(h.map(g=>this.parseMessage(g,a,i)))}catch(t){throw t}}}class ha extends U{constructor(t){super(t),this.params=t,this.headers={accept:"*/*","accept-language":"en-US,en;q=0.9","sec-ch-ua":'"Chromium";v="116", "Not)A;Brand";v="24", "Google Chrome";v="116"',"sec-ch-ua-mobile":"?0","sec-ch-ua-platform":'"Windows"',"sec-fetch-dest":"empty","sec-fetch-mode":"cors","sec-fetch-site":"same-origin",origin:"https://huggingface.co","Referrer-Policy":"strict-origin-when-cross-origin"},this.models=[],this.currentModelId=null,this.currentConversation=void 0,this.currentConversionID=void 0,this.cookie="",this.colors={primary:"#FED21F",secondary:"#000"},this.serviceName=C.bgHex(this.colors.primary).hex(this.colors.secondary).bold("[HuggingFace]"),this.errorPrefix=C.red.bold("[HuggingFace]"),this.cookie=this.params.config.cookie}generateCommitMessage$(){return j(this.generateMessage()).pipe(R(t=>M(t)),T(t=>({name:`${this.serviceName} ${t.title}`,short:t.title,value:this.params.config.ignoreBody?t.title:t.value,description:this.params.config.ignoreBody?"":t.value,isError:!1})),k(this.handleError$))}async generateMessage(){try{await this.intialize();const t=this.params.stagedDiff.diff,{systemPrompt:r,systemPromptPath:n,logging:o,locale:s,generate:u,type:i,maxLength:a}=this.params.config,l={...L,locale:s,maxLength:a,type:i,generate:u,systemPrompt:r,systemPromptPath:n},m=q(l),c=await this.getNewChat(m),p=await(await this.sendMessage(`Here are diff: ${t}`,c.id)).completeResponsePromise();return o&&H("HuggingFace",t,m,p),this.parseMessage(p,i,u)}catch(t){const r=t;throw r.code==="ENOTFOUND"?new F(`Error connecting to ${r.hostname} (${r.syscall})`):r}}async intialize(){const t=await 
this.getRemoteLlms(),r=t.find(n=>n.name?.toLowerCase()===this.params.config.model.toLowerCase());if(r){this.currentModel=r,this.currentModelId=r.id;return}this.currentModel=t[0],this.currentModelId=t[0].id}async getRemoteLlms(){const t=await fetch("https://huggingface.co/chat/__data.json",{headers:{...this.headers,cookie:this.cookie},body:null,method:"GET"});if(t.status!==200)throw new Error(`Failed to get remote LLMs with status code: ${t.status}`);const n=(await t.json()).nodes[0].data,o=n[n[0].models],s=[],u=i=>i===-1?null:n[i];for(const i of o){const a=n[i];if(n[a.unlisted])continue;const l={id:u(a.id),name:u(a.name),displayName:u(a.displayName),preprompt:u(a.preprompt),promptExamples:[],websiteUrl:u(a.websiteUrl),description:u(a.description),datasetName:u(a.datasetName),datasetUrl:u(a.datasetUrl),modelUrl:u(a.modelUrl),parameters:{}},m=u(a.promptExamples);if(m!==null){const p=m.map(d=>u(d));l.promptExamples=p.map(d=>({title:n[d.title],prompt:n[d.prompt]}))}const c=u(a.parameters),D={};for(const[p,d]of Object.entries(c)){if(d===-1){D[p]=null;continue}if(Array.isArray(n[d])){D[p]=n[d].map(h=>n[h]);continue}D[p]=n[d]}l.parameters=D,s.push(l)}return this.models=s,s}async getNewChat(t){const r={model:this.currentModelId,preprompt:t};let n=0;for(;n<5;){const o=await fetch("https://huggingface.co/chat/conversation",{headers:{...this.headers,"content-type":"application/json",cookie:this.cookie,Referer:"https://huggingface.co/chat/"},body:JSON.stringify(r),method:"POST"}),{conversationId:s}=await o.json();if(s){this.currentConversionID=s;break}else n++}if(!this.currentConversionID)throw new Error("Failed to create new conversion");return await this.getConversationHistory(this.currentConversionID)}async getConversationHistory(t){if(!t)throw new Error("conversationId is required for getConversationHistory");const r=await 
fetch("https://huggingface.co/chat/conversation/"+t+"/__data.json",{headers:{...this.headers,cookie:this.cookie,Referer:"https://huggingface.co/chat/"},body:null,method:"GET"});if(r.status!=200)throw new Error("Unable get conversation details "+r);{const n=await r.json();return this.metadataParser(n,t)}}metadataParser(t,r){const n={id:"",model:"",systemPrompt:"",title:"",history:[]},o=t.nodes[1].data,s=o[o[0].model],u=o[o[0].preprompt],i=o[o[0].title],a=o[o[0].messages],l=[];for(const m of a){const c=o[m],D=new Date(o[c.createdAt][1]).getTime()/1e3,p=new Date(o[c.updatedAt][1]).getTime()/1e3;l.push({id:o[c.id],role:o[c.from],content:o[c.content],createdAt:D,updatedAt:p})}return n.id=r,n.model=s,n.systemPrompt=u,n.title=i,n.history=l,this.currentConversation=n,n}async sendMessage(t,r){if(t==="")throw new Error("the prompt can not be empty.");if(!r&&!this.currentConversionID?await this.getNewChat():r?(this.currentConversionID=r,await this.getConversationHistory(r)):this.currentConversionID&&await this.getConversationHistory(this.currentConversionID),!this.currentConversation)throw new Error("Failed to create new conversion");const n={inputs:t,id:this.currentConversation.history[this.currentConversation.history.length-1].id,is_retry:!1,is_continue:!1,web_search:!1,tools:{}},o=new FormData;o.append("data",JSON.stringify(n));const s=await fetch("https://huggingface.co/chat/conversation/"+this.currentConversionID,{headers:{...this.headers,cookie:this.cookie,Referer:"https://huggingface.co/chat/conversation/"+this.currentConversionID},body:o,method:"POST"});function u(D){try{const p=D.split(`
99
99
  `),d=[];for(const h of p)h.trim()&&d.push(JSON.parse(h));return d}catch{return[{}]}}const i=new TextDecoder;let a="";const l=new TransformStream({async transform(D,p){const d=i.decode(D);try{const h=u(d);for(const g of h)g.type==="finalAnswer"?(a=g?.text||"",p.terminate()):g.type==="stream"&&p.enqueue(g?.token||"")}catch{throw new Error("Error during parsing response")}}}),m=s.body?.pipeThrough(l);async function c(){return new Promise(async(D,p)=>{try{if(!m)p("ModifiedStream undefined");else{const d=m.getReader();for(;;){const{done:h,value:g}=await d.read();if(h){D(a);break}}}}catch(d){p(d)}})}return{id:this.currentConversionID,stream:m,completeResponsePromise:c}}async deleteConversation(t){return(await fetch(`https://huggingface.co/chat/conversation/${t}`,{headers:{...this.headers,cookie:this.cookie,Referer:"https://huggingface.co/chat/"},body:null,method:"DELETE"})).json()}}class ga extends U{constructor(t){super(t),this.params=t,this.host="https://api.mistral.ai",this.apiKey="",this.handleError$=r=>{const n=r.message?.replace(/(\r\n|\n|\r)/gm,"")||"An error occurred";return O({name:`${this.errorPrefix} ${n}`,value:n,isError:!0,disabled:!0})},this.colors={primary:"#ff7000",secondary:"#fff"},this.serviceName=C.bgHex(this.colors.primary).hex(this.colors.secondary).bold("[MistralAI]"),this.errorPrefix=C.red.bold("[MistralAI]"),this.apiKey=this.params.config.key}generateCommitMessage$(){return j(this.generateMessage()).pipe(R(t=>M(t)),T(t=>({name:`${this.serviceName} ${t.title}`,short:t.title,value:this.params.config.ignoreBody?t.title:t.value,description:this.params.config.ignoreBody?"":t.value,isError:!1})),k(this.handleError$))}async generateMessage(){try{const t=this.params.stagedDiff.diff,{systemPrompt:r,systemPromptPath:n,logging:o,locale:s,generate:u,type:i,maxLength:a}=this.params.config,l={...L,locale:s,maxLength:a,type:i,generate:u,systemPrompt:r,systemPromptPath:n},m=q(l);await this.checkAvailableModels();const c=await this.createChatCompletions(m,`Here 
are diff: ${t}`);return o&&H("MistralAI",t,m,c),this.parseMessage(c,this.params.config.type,u)}catch(t){const r=t;throw r.code==="ENOTFOUND"?new F(`Error connecting to ${r.hostname} (${r.syscall})`):r}}async checkAvailableModels(){if((await this.getAvailableModels()).includes(this.params.config.model))return!0;throw new Error(`Invalid model type of Mistral AI: ${this.params.config.model}`)}async getAvailableModels(){return(await new pe({method:"GET",baseURL:`${this.host}/v1/models`,timeout:this.params.config.timeout}).setHeaders({Authorization:`Bearer ${this.apiKey}`,"content-type":"application/json"}).execute()).data.data.filter(r=>r.object==="model").map(r=>r.id)}async createChatCompletions(t,r){const o=(await new pe({method:"POST",baseURL:`${this.host}/v1/chat/completions`,timeout:this.params.config.timeout}).setHeaders({Authorization:`Bearer ${this.apiKey}`,"content-type":"application/json"}).setBody({model:this.params.config.model,messages:[{role:"system",content:t},{role:"user",content:r}],temperature:this.params.config.temperature,top_p:1,max_tokens:this.params.config.maxTokens,stream:!1,safe_prompt:!1,random_seed:Me(10,1e3)}).execute()).data;if(!o.choices||o.choices.length===0||!o.choices[0].message?.content)throw new Error("No Content on response. Please open a Bug report");return o.choices[0].message.content}}const{hasOwnProperty:Tt}=Object.prototype,Le=typeof process<"u"&&process.platform==="win32"?`\r
100
100
  `:`
101
- `,kt=(e,t)=>{const r=[];let n="";typeof t=="string"?t={section:t,whitespace:!1}:(t=t||Object.create(null),t.whitespace=t.whitespace===!0);const o=t.whitespace?" = ":"=";for(const s of Object.keys(e)){const u=e[s];if(u&&Array.isArray(u))for(const i of u)n+=se(s+"[]")+o+se(i)+Le;else u&&typeof u=="object"?r.push(s):n+=se(s)+o+se(u)+Le}t.section&&n.length&&(n="["+se(t.section)+"]"+Le+n);for(const s of r){const u=Dn(s).join("\\."),i=(t.section?t.section+".":"")+u,{whitespace:a}=t,l=kt(e[s],{section:i,whitespace:a});n.length&&l.length&&(n+=Le),n+=l}return n},Dn=e=>e.replace(/\1/g,"LITERAL\\1LITERAL").replace(/\\\./g,"").split(/\./).map(t=>t.replace(/\1/g,"\\.").replace(/\2LITERAL\\1LITERAL\2/g,"")),ln=e=>{const t=Object.create(null);let r=t,n=null;const o=/^\[([^\]]*)\]$|^([^=]+)(=(.*))?$/i,s=e.split(/[\r\n]+/g);for(const i of s){if(!i||i.match(/^\s*[;#]/))continue;const a=i.match(o);if(!a)continue;if(a[1]!==void 0){if(n=Ne(a[1]),n==="__proto__"){r=Object.create(null);continue}r=t[n]=t[n]||Object.create(null);continue}const l=Ne(a[2]),m=l.length>2&&l.slice(-2)==="[]",c=m?l.slice(0,-2):l;if(c==="__proto__")continue;const D=a[3]?Ne(a[4]):!0,p=D==="true"||D==="false"||D==="null"?JSON.parse(D):D;m&&(Tt.call(r,c)?Array.isArray(r[c])||(r[c]=[r[c]]):r[c]=[]),Array.isArray(r[c])?r[c].push(p):r[c]=p}const u=[];for(const i of Object.keys(t)){if(!Tt.call(t,i)||typeof t[i]!="object"||Array.isArray(t[i]))continue;const a=Dn(i);r=t;const l=a.pop(),m=l.replace(/\\\./g,".");for(const c of a)c!=="__proto__"&&((!Tt.call(r,c)||typeof r[c]!="object")&&(r[c]=Object.create(null)),r=r[c]);r===t&&m===l||(r[m]=t[i],u.push(i))}for(const i of u)delete t[i];return t},mn=e=>e.startsWith('"')&&e.endsWith('"')||e.startsWith("'")&&e.endsWith("'"),se=e=>typeof 
e!="string"||e.match(/[=\r\n]/)||e.match(/^\[/)||e.length>1&&mn(e)||e!==e.trim()?JSON.stringify(e):e.split(";").join("\\;").split("#").join("\\#"),Ne=(e,t)=>{if(e=(e||"").trim(),mn(e)){e.charAt(0)==="'"&&(e=e.slice(1,-1));try{e=JSON.parse(e)}catch{}}else{let r=!1,n="";for(let o=0,s=e.length;o<s;o++){const u=e.charAt(o);if(r)"\\;#".indexOf(u)!==-1?n+=u:n+="\\"+u,r=!1;else{if(";#".indexOf(u)!==-1)break;u==="\\"?r=!0:n+=u}}return r&&(n+="\\"),n.trim()}return e};var Ca={parse:ln,decode:ln,stringify:kt,encode:kt,safe:se,unsafe:Ne},Lt=Z(Ca);const pn=e=>P.lstat(e).then(()=>!0,()=>!1),Fa=["","conventional","gitmoji"],Nt="http://localhost:11434",{hasOwnProperty:ya}=Object.prototype,Re=(e,t)=>ya.call(e,t),Rt=["OPENAI","OLLAMA","HUGGINGFACE","GEMINI","ANTHROPIC","MISTRAL","CODESTRAL","COHERE","GROQ","PERPLEXITY"],y=(e,t,r)=>{if(!t)throw new F(`Invalid config property ${e}: ${r}`)},f={systemPrompt(e){return e||""},systemPromptPath(e){return e||""},timeout(e){if(!e)return 1e4;y("timeout",/^\d+$/.test(e),"Must be an integer");const t=Number(e);return y("timeout",t>=500,"Must be greater than 500ms"),t},temperature(e){if(!e)return .7;y("temperature",/^(2|\d)(\.\d{1,2})?$/.test(e),"Must be decimal between 0 and 2");const t=Number(e);return y("temperature",t>0,"Must be greater than 0"),y("temperature",t<=2,"Must be less than or equal to 2"),t},maxTokens(e){return e?(y("maxTokens",/^\d+$/.test(e),"Must be an integer"),Number(e)):1024},logging(e){return typeof e=="boolean"?e:e==null?!0:(y("logging",/^(?:true|false)$/.test(e),"Must be a boolean(true or false)"),e==="true")},locale(e){return e?(y("locale",e,"Cannot be empty"),y("locale",/^[a-z-]+$/i.test(e),"Must be a valid locale (letters and dashes/underscores). 
You can consult the list of codes in: https://wikipedia.org/wiki/List_of_ISO_639-1_codes"),e):"en"},generate(e){if(!e)return 1;y("generate",/^\d+$/.test(e),"Must be an integer");const t=Number(e);return y("generate",t>0,"Must be greater than 0"),y("generate",t<=5,"Must be less or equal to 5"),t},type(e){return e?(y("type",Fa.includes(e),"Invalid commit type"),e):"conventional"},maxLength(e){if(!e)return 50;y("maxLength",/^\d+$/.test(e),"Must be an integer");const t=Number(e);return y("maxLength",t>=20,"Must be greater than 20 characters"),t},ignoreBody(e){return typeof e=="boolean"?e:e==null?!0:(y("ignoreBody",/^(?:true|false)$/.test(e),"Must be a boolean(true or false)"),e==="true")},exclude:e=>e?(typeof e=="string"?e?.split(","):e).map(r=>r.trim()).filter(r=>!!r&&r.length>0):[]},ue={OPENAI:{key:e=>e||"",model:e=>e||"gpt-3.5-turbo",url:e=>e?(y("OPENAI.url",/^https?:\/\//.test(e),"Must be a valid URL"),e):"https://api.openai.com",path:e=>e||"/v1/chat/completions",proxy:e=>e||"",systemPrompt:f.systemPrompt,systemPromptPath:f.systemPromptPath,timeout:f.timeout,temperature:f.temperature,maxTokens:f.maxTokens,logging:f.logging,locale:f.locale,generate:f.generate,type:f.type,maxLength:f.maxLength,ignoreBody:f.ignoreBody},HUGGINGFACE:{cookie:e=>e||"",model:e=>e?(y("HUGGINGFACE.model",["CohereForAI/c4ai-command-r-plus","meta-llama/Meta-Llama-3-70B-Instruct","HuggingFaceH4/zephyr-orpo-141b-A35b-v0.1","mistralai/Mixtral-8x7B-Instruct-v0.1","NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO","01-ai/Yi-1.5-34B-Chat","mistralai/Mistral-7B-Instruct-v0.2","microsoft/Phi-3-mini-4k-instruct"].includes(e),"Invalid model type of HuggingFace 
chat"),e):"CohereForAI/c4ai-command-r-plus",systemPrompt:f.systemPrompt,systemPromptPath:f.systemPromptPath,logging:f.logging,locale:f.locale,generate:f.generate,type:f.type,maxLength:f.maxLength,ignoreBody:f.ignoreBody},GEMINI:{key:e=>e||"",model:e=>!e||e.length===0?"gemini-1.5-pro":(y("GEMINI.model",["gemini-1.5-flash","gemini-1.5-pro","gemini-1.5-pro-exp-0801"].includes(e),"Invalid model type of Gemini"),e),systemPrompt:f.systemPrompt,systemPromptPath:f.systemPromptPath,temperature:f.temperature,maxTokens:f.maxTokens,logging:f.logging,locale:f.locale,generate:f.generate,type:f.type,maxLength:f.maxLength,ignoreBody:f.ignoreBody},ANTHROPIC:{key:e=>e||"",model:e=>!e||e.length===0?"claude-3-haiku-20240307":(y("ANTHROPIC.model",["claude-2.1","claude-2.0","claude-instant-1.2","claude-3-haiku-20240307","claude-3-sonnet-20240229","claude-3-opus-20240229"].includes(e),"Invalid model type of Anthropic"),e),systemPrompt:f.systemPrompt,systemPromptPath:f.systemPromptPath,temperature:f.temperature,maxTokens:f.maxTokens,logging:f.logging,locale:f.locale,generate:f.generate,type:f.type,maxLength:f.maxLength,ignoreBody:f.ignoreBody},MISTRAL:{key:e=>e||"",model:e=>!e||e.length===0?"mistral-tiny":(y("MISTRAL.model",["open-mistral-7b","mistral-tiny-2312","mistral-tiny","open-mixtral-8x7b","mistral-small-2312","mistral-small","mistral-small-2402","mistral-small-latest","mistral-medium-latest","mistral-medium-2312","mistral-medium","mistral-large-latest","mistral-large-2402","mistral-embed"].includes(e),"Invalid model type of Mistral AI"),e),systemPrompt:f.systemPrompt,systemPromptPath:f.systemPromptPath,timeout:f.timeout,temperature:f.temperature,maxTokens:f.maxTokens,logging:f.logging,locale:f.locale,generate:f.generate,type:f.type,maxLength:f.maxLength,ignoreBody:f.ignoreBody},CODESTRAL:{key:e=>e||"",model:e=>!e||e.length===0?"codestral-latest":(y("CODESTRAL.model",["codestral-latest","codestral-2405"].includes(e),"Invalid model type of 
Codestral"),e),systemPrompt:f.systemPrompt,systemPromptPath:f.systemPromptPath,timeout:f.timeout,temperature:f.temperature,maxTokens:f.maxTokens,logging:f.logging,locale:f.locale,generate:f.generate,type:f.type,maxLength:f.maxLength,ignoreBody:f.ignoreBody},OLLAMA:{model:e=>e?(typeof e=="string"?e?.split(","):e).map(r=>r.trim()).filter(r=>!!r&&r.length>0):[],host:e=>e?(y("OLLAMA.host",/^https?:\/\//.test(e),"Must be a valid URL"),e):Nt,timeout:e=>{if(!e)return 1e5;y("OLLAMA.timeout",/^\d+$/.test(e),"Must be an integer");const t=Number(e);return y("OLLAMA.timeout",t>=500,"Must be greater than 500ms"),t},systemPrompt:f.systemPrompt,systemPromptPath:f.systemPromptPath,temperature:f.temperature,logging:f.logging,locale:f.locale,generate:f.generate,type:f.type,maxLength:f.maxLength,ignoreBody:f.ignoreBody},COHERE:{key:e=>e||"",model:e=>!e||e.length===0?"command":(y("COHERE.model",["command","command-nightly","command-light","command-light-nightly"].includes(e),"Invalid model type of Cohere"),e),systemPrompt:f.systemPrompt,systemPromptPath:f.systemPromptPath,temperature:f.temperature,maxTokens:f.maxTokens,logging:f.logging,locale:f.locale,generate:f.generate,type:f.type,maxLength:f.maxLength,ignoreBody:f.ignoreBody},GROQ:{key:e=>e||"",model:e=>!e||e.length===0?"gemma2-9b-it":(y("GROQ.model",["gemma2-9b-it","gemma-7b-it","llama-3.1-70b-versatile","llama-3.1-8b-instant","llama3-70b-8192","llama3-8b-8192","llama3-groq-70b-8192-tool-use-preview","llama3-groq-8b-8192-tool-use-preview"].includes(e),"Invalid model type of 
Groq"),e),systemPrompt:f.systemPrompt,systemPromptPath:f.systemPromptPath,timeout:f.timeout,temperature:f.temperature,maxTokens:f.maxTokens,logging:f.logging,locale:f.locale,generate:f.generate,type:f.type,maxLength:f.maxLength,ignoreBody:f.ignoreBody},PERPLEXITY:{key:e=>e||"",model:e=>!e||e.length===0?"llama-3.1-sonar-small-128k-chat":(y("PERPLEXITY.model",["llama-3.1-sonar-small-128k-online","llama-3.1-sonar-small-128k-chat","llama-3.1-sonar-large-128k-online","llama-3.1-sonar-large-128k-chat","llama-3.1-8b-instruct","llama-3.1-70b-instruct"].includes(e),"Invalid model type of Perplexity"),e),systemPrompt:f.systemPrompt,systemPromptPath:f.systemPromptPath,timeout:f.timeout,temperature:f.temperature,maxTokens:f.maxTokens,logging:f.logging,locale:f.locale,generate:f.generate,type:f.type,maxLength:f.maxLength,ignoreBody:f.ignoreBody}},je=I.join(We.homedir(),".aicommit2"),Ea=(e=[])=>{const t={};for(const r of e)if(r.startsWith("--")){const[n,o]=r.slice(2).split("="),[s,u]=n.split(".");s&&u&&s in ue?(t[s]||(t[s]={}),t[s][u]=o):t[n]=o}return t},jt=async()=>{if(!await pn(je))return Object.create(null);const t=await P.readFile(je,"utf8");let r=Lt.parse(t);return Re(r,"OLLAMA")&&Re(r.OLLAMA,"model")&&(r={...r,OLLAMA:{...r.OLLAMA,model:typeof r.OLLAMA.model=="string"?[r.OLLAMA.model]:r.OLLAMA.model}}),Re(r,"exclude")&&(r={...r,exclude:typeof r.exclude=="string"?[r.exclude]:r.exclude}),r},_t=async(e,t=[])=>{const r=await jt(),n=Ea(t),o={...e,...n},s={},u=(i,a)=>{const l=o[`${i}.${a}`]??o[i]?.[a],m=r[i]?.[a],c=o[a]??r[a];return l!==void 0?l:m!==void 0?m:c};for(const[i,a]of Object.entries(f)){const l=o[i]??r[i];s[i]=a(l)}for(const[i,a]of Object.entries(ue)){s[i]={};for(const[l,m]of Object.entries(a)){const c=u(i,l);s[i][l]=m(c)}}return s},ba=async e=>{const t=await jt();for(const[r,n]of e){const[o,s]=r.split(".");if(o in ue){t[o]||(t[o]={});const u=ue[o][s];if(!u)throw new F(`Invalid config property: ${r}`);t[o][s]=u(n)}else{const u=f[r];if(!u)throw new F(`Invalid config 
property: ${r}`);t[r]=u(n)}}await P.writeFile(je,Lt.stringify(t),"utf8")},wa=async e=>{const t=await jt();for(const[r,n]of e){const[o,s]=r.split(".");if(o in ue){t[o]||(t[o]={});const u=o==="OLLAMA"&&s==="model",i=ue[o][s];if(!i||!u)throw new F(`Invalid config property: ${r}. Only supports OLLAMA.model`);const a=t[o][s]||[];t[o][s]=Ie([...a,i(n)])}else throw new F(`Invalid config property: ${r}. Only supports OLLAMA.model`)}await P.writeFile(je,Lt.stringify(t),"utf8")};class Aa extends U{constructor(t){super(t),this.params=t,this.host=Nt,this.model="",this.handleError$=r=>{if(r.response&&r.response.data?.error)return O({name:`${this.errorPrefix} ${r.response.data?.error}`,value:r.response.data?.error,isError:!0,disabled:!0});const n=r.message?.replace(/(\r\n|\n|\r)/gm,"")||"An error occurred";return O({name:`${this.errorPrefix} ${n}`,value:n,isError:!0,disabled:!0})},this.colors={primary:"#FFF",secondary:"#000"},this.model=this.params.keyName,this.serviceName=C.bgHex(this.colors.primary).hex(this.colors.secondary).bold(`[${Zr(this.model)}]`),this.errorPrefix=C.red.bold(`[${Zr(this.model)}]`),this.host=this.params.config.host||Nt,this.ollama=new Zn({host:this.host})}generateCommitMessage$(){return j(this.generateMessage()).pipe(R(t=>M(t)),T(t=>({name:`${this.serviceName} ${t.title}`,short:t.title,value:this.params.config.ignoreBody?t.title:t.value,description:this.params.config.ignoreBody?"":t.value,isError:!1})),k(this.handleError$))}async generateMessage(){try{const t=this.params.stagedDiff.diff,{systemPrompt:r,systemPromptPath:n,logging:o,locale:s,generate:u,type:i,maxLength:a}=this.params.config,l={...L,locale:s,maxLength:a,type:i,generate:u,systemPrompt:r,systemPromptPath:n},m=q(l);await this.checkIsAvailableOllama();const c=await this.createChatCompletions(m,`Here are diff: ${t}`);return o&&H(`Ollama_${this.model}`,t,m,c),this.parseMessage(c,i,u)}catch(t){const r=t;throw r.code==="ENOTFOUND"?new F(`Error connecting to ${r.hostname} (${r.syscall})`):r}}async 
checkIsAvailableOllama(){try{return(await new pe({method:"GET",baseURL:`${this.host}`,timeout:this.params.config.timeout}).execute()).data}catch(t){throw t.code==="ECONNREFUSED"?new F(`Error connecting to ${this.host}. Please run Ollama or check host`):t}}async createChatCompletions(t,r){return(await this.ollama.chat({model:this.model,messages:[{role:"system",content:t},{role:"user",content:r}],stream:!1,options:{temperature:this.params.config.temperature,seed:Me(10,1e3)}})).message.content}}class $a extends U{constructor(t){super(t),this.params=t,this.handleError$=r=>{let n="An error occurred";if(r.message){n=r.message.split(`
102
- `)[0];const o=this.extractJSONFromError(r.message);n+=`: ${o.error.message}`}return O({name:`${this.errorPrefix} ${n}`,value:n,isError:!0,disabled:!0})},this.colors={primary:"#74AA9C",secondary:"#FFF"},this.serviceName=C.bgHex(this.colors.primary).hex(this.colors.secondary).bold("[ChatGPT]"),this.errorPrefix=C.red.bold("[ChatGPT]")}generateCommitMessage$(){return j(this.generateMessage()).pipe(R(t=>M(t)),T(t=>({name:`${this.serviceName} ${t.title}`,short:t.title,value:this.params.config.ignoreBody?t.title:t.value,description:this.params.config.ignoreBody?"":t.value,isError:!1})),k(this.handleError$))}extractJSONFromError(t){const r=/[{[]{1}([,:{}[\]0-9.\-+Eaeflnr-u \n\r\t]|".*?")+[}\]]{1}/gis,n=t.match(r);return n?Object.assign({},...n.map(o=>JSON.parse(o))):{error:{message:"Unknown error"}}}async generateMessage(){const t=this.params.stagedDiff.diff,{systemPrompt:r,systemPromptPath:n,temperature:o,logging:s,locale:u,generate:i,type:a,maxLength:l,proxy:m,maxTokens:c,timeout:D}=this.params.config,p={...L,locale:u,maxLength:l,type:a,generate:i,systemPrompt:r,systemPromptPath:n},d=q(p),h=await fa(this.params.config.url,this.params.config.path,this.params.config.key,this.params.config.model,u,t,i,a,D,c,o,d,s,m);return Ie(h.map(g=>this.parseMessage(g,a,i)))}}class va extends U{constructor(t){super(t),this.params=t,this.host="https://api.perplexity.ai",this.apiKey="",this.handleError$=r=>{let n="An error occurred";if(r.message){n=r.message.split(`
101
+ `,kt=(e,t)=>{const r=[];let n="";typeof t=="string"?t={section:t,whitespace:!1}:(t=t||Object.create(null),t.whitespace=t.whitespace===!0);const o=t.whitespace?" = ":"=";for(const s of Object.keys(e)){const u=e[s];if(u&&Array.isArray(u))for(const i of u)n+=se(s+"[]")+o+se(i)+Le;else u&&typeof u=="object"?r.push(s):n+=se(s)+o+se(u)+Le}t.section&&n.length&&(n="["+se(t.section)+"]"+Le+n);for(const s of r){const u=Dn(s).join("\\."),i=(t.section?t.section+".":"")+u,{whitespace:a}=t,l=kt(e[s],{section:i,whitespace:a});n.length&&l.length&&(n+=Le),n+=l}return n},Dn=e=>e.replace(/\1/g,"LITERAL\\1LITERAL").replace(/\\\./g,"").split(/\./).map(t=>t.replace(/\1/g,"\\.").replace(/\2LITERAL\\1LITERAL\2/g,"")),ln=e=>{const t=Object.create(null);let r=t,n=null;const o=/^\[([^\]]*)\]$|^([^=]+)(=(.*))?$/i,s=e.split(/[\r\n]+/g);for(const i of s){if(!i||i.match(/^\s*[;#]/))continue;const a=i.match(o);if(!a)continue;if(a[1]!==void 0){if(n=Ne(a[1]),n==="__proto__"){r=Object.create(null);continue}r=t[n]=t[n]||Object.create(null);continue}const l=Ne(a[2]),m=l.length>2&&l.slice(-2)==="[]",c=m?l.slice(0,-2):l;if(c==="__proto__")continue;const D=a[3]?Ne(a[4]):!0,p=D==="true"||D==="false"||D==="null"?JSON.parse(D):D;m&&(Tt.call(r,c)?Array.isArray(r[c])||(r[c]=[r[c]]):r[c]=[]),Array.isArray(r[c])?r[c].push(p):r[c]=p}const u=[];for(const i of Object.keys(t)){if(!Tt.call(t,i)||typeof t[i]!="object"||Array.isArray(t[i]))continue;const a=Dn(i);r=t;const l=a.pop(),m=l.replace(/\\\./g,".");for(const c of a)c!=="__proto__"&&((!Tt.call(r,c)||typeof r[c]!="object")&&(r[c]=Object.create(null)),r=r[c]);r===t&&m===l||(r[m]=t[i],u.push(i))}for(const i of u)delete t[i];return t},mn=e=>e.startsWith('"')&&e.endsWith('"')||e.startsWith("'")&&e.endsWith("'"),se=e=>typeof 
e!="string"||e.match(/[=\r\n]/)||e.match(/^\[/)||e.length>1&&mn(e)||e!==e.trim()?JSON.stringify(e):e.split(";").join("\\;").split("#").join("\\#"),Ne=(e,t)=>{if(e=(e||"").trim(),mn(e)){e.charAt(0)==="'"&&(e=e.slice(1,-1));try{e=JSON.parse(e)}catch{}}else{let r=!1,n="";for(let o=0,s=e.length;o<s;o++){const u=e.charAt(o);if(r)"\\;#".indexOf(u)!==-1?n+=u:n+="\\"+u,r=!1;else{if(";#".indexOf(u)!==-1)break;u==="\\"?r=!0:n+=u}}return r&&(n+="\\"),n.trim()}return e};var Ca={parse:ln,decode:ln,stringify:kt,encode:kt,safe:se,unsafe:Ne},Lt=Z(Ca);const pn=e=>P.lstat(e).then(()=>!0,()=>!1),Fa=["","conventional","gitmoji"],Nt="http://localhost:11434",{hasOwnProperty:ya}=Object.prototype,Re=(e,t)=>ya.call(e,t),Rt=["OPENAI","OLLAMA","HUGGINGFACE","GEMINI","ANTHROPIC","MISTRAL","CODESTRAL","COHERE","GROQ","PERPLEXITY"],y=(e,t,r)=>{if(!t)throw new F(`Invalid config property ${e}: ${r}`)},f={systemPrompt(e){return e||""},systemPromptPath(e){return e||""},timeout(e){if(!e)return 1e4;y("timeout",/^\d+$/.test(e),"Must be an integer");const t=Number(e);return y("timeout",t>=500,"Must be greater than 500ms"),t},temperature(e){if(!e)return .7;y("temperature",/^(2|\d)(\.\d{1,2})?$/.test(e),"Must be decimal between 0 and 2");const t=Number(e);return y("temperature",t>0,"Must be greater than 0"),y("temperature",t<=2,"Must be less than or equal to 2"),t},maxTokens(e){return e?(y("maxTokens",/^\d+$/.test(e),"Must be an integer"),Number(e)):1024},logging(e){return typeof e=="boolean"?e:e==null?!0:(y("logging",/^(?:true|false)$/.test(e),"Must be a boolean(true or false)"),e==="true")},locale(e){return e?(y("locale",e,"Cannot be empty"),y("locale",/^[a-z-]+$/i.test(e),"Must be a valid locale (letters and dashes/underscores). 
You can consult the list of codes in: https://wikipedia.org/wiki/List_of_ISO_639-1_codes"),e):"en"},generate(e){if(!e)return 1;y("generate",/^\d+$/.test(e),"Must be an integer");const t=Number(e);return y("generate",t>0,"Must be greater than 0"),y("generate",t<=5,"Must be less or equal to 5"),t},type(e){return e?(y("type",Fa.includes(e),"Invalid commit type"),e):"conventional"},maxLength(e){if(!e)return 50;y("maxLength",/^\d+$/.test(e),"Must be an integer");const t=Number(e);return y("maxLength",t>=20,"Must be greater than 20 characters"),t},ignoreBody(e){return typeof e=="boolean"?e:e==null?!0:(y("ignoreBody",/^(?:true|false)$/.test(e),"Must be a boolean(true or false)"),e==="true")},exclude:e=>e?(typeof e=="string"?e?.split(","):e).map(r=>r.trim()).filter(r=>!!r&&r.length>0):[]},ue={OPENAI:{key:e=>e||"",model:e=>e||"gpt-3.5-turbo",url:e=>e?(y("OPENAI.url",/^https?:\/\//.test(e),"Must be a valid URL"),e):"https://api.openai.com",path:e=>e||"/v1/chat/completions",proxy:e=>e||"",topP:e=>{if(!e)return 1;const t=Number(e);return y("OPENAI.topP",t<=1,"Must be less than or equal to 1"),t},systemPrompt:f.systemPrompt,systemPromptPath:f.systemPromptPath,timeout:f.timeout,temperature:f.temperature,maxTokens:f.maxTokens,logging:f.logging,locale:f.locale,generate:f.generate,type:f.type,maxLength:f.maxLength,ignoreBody:f.ignoreBody},HUGGINGFACE:{cookie:e=>e||"",model:e=>e?(y("HUGGINGFACE.model",["CohereForAI/c4ai-command-r-plus","meta-llama/Meta-Llama-3-70B-Instruct","HuggingFaceH4/zephyr-orpo-141b-A35b-v0.1","mistralai/Mixtral-8x7B-Instruct-v0.1","NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO","01-ai/Yi-1.5-34B-Chat","mistralai/Mistral-7B-Instruct-v0.2","microsoft/Phi-3-mini-4k-instruct"].includes(e),"Invalid model type of HuggingFace 
chat"),e):"CohereForAI/c4ai-command-r-plus",systemPrompt:f.systemPrompt,systemPromptPath:f.systemPromptPath,logging:f.logging,locale:f.locale,generate:f.generate,type:f.type,maxLength:f.maxLength,ignoreBody:f.ignoreBody},GEMINI:{key:e=>e||"",model:e=>!e||e.length===0?"gemini-1.5-pro":(y("GEMINI.model",["gemini-1.5-flash","gemini-1.5-pro","gemini-1.5-pro-exp-0801"].includes(e),"Invalid model type of Gemini"),e),systemPrompt:f.systemPrompt,systemPromptPath:f.systemPromptPath,temperature:f.temperature,maxTokens:f.maxTokens,logging:f.logging,locale:f.locale,generate:f.generate,type:f.type,maxLength:f.maxLength,ignoreBody:f.ignoreBody},ANTHROPIC:{key:e=>e||"",model:e=>!e||e.length===0?"claude-3-haiku-20240307":(y("ANTHROPIC.model",["claude-3-haiku-20240307","claude-3-sonnet-20240229","claude-3-opus-20240229","claude-3-5-sonnet-20240620"].includes(e),"Invalid model type of Anthropic"),e),systemPrompt:f.systemPrompt,systemPromptPath:f.systemPromptPath,temperature:f.temperature,maxTokens:f.maxTokens,logging:f.logging,locale:f.locale,generate:f.generate,type:f.type,maxLength:f.maxLength,ignoreBody:f.ignoreBody},MISTRAL:{key:e=>e||"",model:e=>!e||e.length===0?"mistral-tiny":(y("MISTRAL.model",["open-mistral-7b","mistral-tiny-2312","mistral-tiny","open-mixtral-8x7b","mistral-small-2312","mistral-small","mistral-small-2402","mistral-small-latest","mistral-medium-latest","mistral-medium-2312","mistral-medium","mistral-large-latest","mistral-large-2402","mistral-embed"].includes(e),"Invalid model type of Mistral AI"),e),systemPrompt:f.systemPrompt,systemPromptPath:f.systemPromptPath,timeout:f.timeout,temperature:f.temperature,maxTokens:f.maxTokens,logging:f.logging,locale:f.locale,generate:f.generate,type:f.type,maxLength:f.maxLength,ignoreBody:f.ignoreBody},CODESTRAL:{key:e=>e||"",model:e=>!e||e.length===0?"codestral-latest":(y("CODESTRAL.model",["codestral-latest","codestral-2405"].includes(e),"Invalid model type of 
Codestral"),e),systemPrompt:f.systemPrompt,systemPromptPath:f.systemPromptPath,timeout:f.timeout,temperature:f.temperature,maxTokens:f.maxTokens,logging:f.logging,locale:f.locale,generate:f.generate,type:f.type,maxLength:f.maxLength,ignoreBody:f.ignoreBody},OLLAMA:{model:e=>e?(typeof e=="string"?e?.split(","):e).map(r=>r.trim()).filter(r=>!!r&&r.length>0):[],host:e=>e?(y("OLLAMA.host",/^https?:\/\//.test(e),"Must be a valid URL"),e):Nt,timeout:e=>{if(!e)return 1e5;y("OLLAMA.timeout",/^\d+$/.test(e),"Must be an integer");const t=Number(e);return y("OLLAMA.timeout",t>=500,"Must be greater than 500ms"),t},systemPrompt:f.systemPrompt,systemPromptPath:f.systemPromptPath,temperature:f.temperature,logging:f.logging,locale:f.locale,generate:f.generate,type:f.type,maxLength:f.maxLength,ignoreBody:f.ignoreBody},COHERE:{key:e=>e||"",model:e=>!e||e.length===0?"command":(y("COHERE.model",["command","command-nightly","command-light","command-light-nightly"].includes(e),"Invalid model type of Cohere"),e),systemPrompt:f.systemPrompt,systemPromptPath:f.systemPromptPath,temperature:f.temperature,maxTokens:f.maxTokens,logging:f.logging,locale:f.locale,generate:f.generate,type:f.type,maxLength:f.maxLength,ignoreBody:f.ignoreBody},GROQ:{key:e=>e||"",model:e=>!e||e.length===0?"gemma2-9b-it":(y("GROQ.model",["gemma2-9b-it","gemma-7b-it","llama-3.1-70b-versatile","llama-3.1-8b-instant","llama3-70b-8192","llama3-8b-8192","llama3-groq-70b-8192-tool-use-preview","llama3-groq-8b-8192-tool-use-preview"].includes(e),"Invalid model type of 
Groq"),e),systemPrompt:f.systemPrompt,systemPromptPath:f.systemPromptPath,timeout:f.timeout,temperature:f.temperature,maxTokens:f.maxTokens,logging:f.logging,locale:f.locale,generate:f.generate,type:f.type,maxLength:f.maxLength,ignoreBody:f.ignoreBody},PERPLEXITY:{key:e=>e||"",model:e=>!e||e.length===0?"llama-3.1-sonar-small-128k-chat":(y("PERPLEXITY.model",["llama-3.1-sonar-small-128k-online","llama-3.1-sonar-small-128k-chat","llama-3.1-sonar-large-128k-online","llama-3.1-sonar-large-128k-chat","llama-3.1-8b-instruct","llama-3.1-70b-instruct"].includes(e),"Invalid model type of Perplexity"),e),systemPrompt:f.systemPrompt,systemPromptPath:f.systemPromptPath,timeout:f.timeout,temperature:f.temperature,maxTokens:f.maxTokens,logging:f.logging,locale:f.locale,generate:f.generate,type:f.type,maxLength:f.maxLength,ignoreBody:f.ignoreBody}},je=I.join(We.homedir(),".aicommit2"),Ea=(e=[])=>{const t={};for(const r of e)if(r.startsWith("--")){const[n,o]=r.slice(2).split("="),[s,u]=n.split(".");s&&u&&s in ue?(t[s]||(t[s]={}),t[s][u]=o):t[n]=o}return t},jt=async()=>{if(!await pn(je))return Object.create(null);const t=await P.readFile(je,"utf8");let r=Lt.parse(t);return Re(r,"OLLAMA")&&Re(r.OLLAMA,"model")&&(r={...r,OLLAMA:{...r.OLLAMA,model:typeof r.OLLAMA.model=="string"?[r.OLLAMA.model]:r.OLLAMA.model}}),Re(r,"exclude")&&(r={...r,exclude:typeof r.exclude=="string"?[r.exclude]:r.exclude}),r},_t=async(e,t=[])=>{const r=await jt(),n=Ea(t),o={...e,...n},s={},u=(i,a)=>{const l=o[`${i}.${a}`]??o[i]?.[a],m=r[i]?.[a],c=o[a]??r[a];return l!==void 0?l:m!==void 0?m:c};for(const[i,a]of Object.entries(f)){const l=o[i]??r[i];s[i]=a(l)}for(const[i,a]of Object.entries(ue)){s[i]={};for(const[l,m]of Object.entries(a)){const c=u(i,l);s[i][l]=m(c)}}return s},ba=async e=>{const t=await jt();for(const[r,n]of e){const[o,s]=r.split(".");if(o in ue){t[o]||(t[o]={});const u=ue[o][s];if(!u)throw new F(`Invalid config property: ${r}`);t[o][s]=u(n)}else{const u=f[r];if(!u)throw new F(`Invalid config 
property: ${r}`);t[r]=u(n)}}await P.writeFile(je,Lt.stringify(t),"utf8")},wa=async e=>{const t=await jt();for(const[r,n]of e){const[o,s]=r.split(".");if(o in ue){t[o]||(t[o]={});const u=o==="OLLAMA"&&s==="model",i=ue[o][s];if(!i||!u)throw new F(`Invalid config property: ${r}. Only supports OLLAMA.model`);const a=t[o][s]||[];t[o][s]=Ie([...a,i(n)])}else throw new F(`Invalid config property: ${r}. Only supports OLLAMA.model`)}await P.writeFile(je,Lt.stringify(t),"utf8")};class Aa extends U{constructor(t){super(t),this.params=t,this.host=Nt,this.model="",this.handleError$=r=>{if(r.response&&r.response.data?.error)return O({name:`${this.errorPrefix} ${r.response.data?.error}`,value:r.response.data?.error,isError:!0,disabled:!0});const n=r.message?.replace(/(\r\n|\n|\r)/gm,"")||"An error occurred";return O({name:`${this.errorPrefix} ${n}`,value:n,isError:!0,disabled:!0})},this.colors={primary:"#FFF",secondary:"#000"},this.model=this.params.keyName,this.serviceName=C.bgHex(this.colors.primary).hex(this.colors.secondary).bold(`[${Zr(this.model)}]`),this.errorPrefix=C.red.bold(`[${Zr(this.model)}]`),this.host=this.params.config.host||Nt,this.ollama=new Zn({host:this.host})}generateCommitMessage$(){return j(this.generateMessage()).pipe(R(t=>M(t)),T(t=>({name:`${this.serviceName} ${t.title}`,short:t.title,value:this.params.config.ignoreBody?t.title:t.value,description:this.params.config.ignoreBody?"":t.value,isError:!1})),k(this.handleError$))}async generateMessage(){try{const t=this.params.stagedDiff.diff,{systemPrompt:r,systemPromptPath:n,logging:o,locale:s,generate:u,type:i,maxLength:a}=this.params.config,l={...L,locale:s,maxLength:a,type:i,generate:u,systemPrompt:r,systemPromptPath:n},m=q(l);await this.checkIsAvailableOllama();const c=await this.createChatCompletions(m,`Here are diff: ${t}`);return o&&H(`Ollama_${this.model}`,t,m,c),this.parseMessage(c,i,u)}catch(t){const r=t;throw r.code==="ENOTFOUND"?new F(`Error connecting to ${r.hostname} (${r.syscall})`):r}}async 
checkIsAvailableOllama(){try{return(await new pe({method:"GET",baseURL:`${this.host}`,timeout:this.params.config.timeout}).execute()).data}catch(t){throw t.code==="ECONNREFUSED"?new F(`Error connecting to ${this.host}. Please run Ollama or check host`):t}}async createChatCompletions(t,r){return(await this.ollama.chat({model:this.model,messages:[{role:"system",content:t},{role:"user",content:r}],stream:!1,options:{temperature:this.params.config.temperature,seed:Me(10,1e3)}})).message.content}}class $a extends U{constructor(t){super(t),this.params=t,this.handleError$=r=>{let n="An error occurred";if(r.message){n=r.message.split(`
102
+ `)[0];const o=this.extractJSONFromError(r.message);n+=`: ${o.error.message}`}return O({name:`${this.errorPrefix} ${n}`,value:n,isError:!0,disabled:!0})},this.colors={primary:"#74AA9C",secondary:"#FFF"},this.serviceName=C.bgHex(this.colors.primary).hex(this.colors.secondary).bold("[ChatGPT]"),this.errorPrefix=C.red.bold("[ChatGPT]")}generateCommitMessage$(){return j(this.generateMessage()).pipe(R(t=>M(t)),T(t=>({name:`${this.serviceName} ${t.title}`,short:t.title,value:this.params.config.ignoreBody?t.title:t.value,description:this.params.config.ignoreBody?"":t.value,isError:!1})),k(this.handleError$))}extractJSONFromError(t){const r=/[{[]{1}([,:{}[\]0-9.\-+Eaeflnr-u \n\r\t]|".*?")+[}\]]{1}/gis,n=t.match(r);return n?Object.assign({},...n.map(o=>JSON.parse(o))):{error:{message:"Unknown error"}}}async generateMessage(){const t=this.params.stagedDiff.diff,{systemPrompt:r,systemPromptPath:n,temperature:o,logging:s,locale:u,generate:i,type:a,maxLength:l,proxy:m,maxTokens:c,timeout:D}=this.params.config,p={...L,locale:u,maxLength:l,type:a,generate:i,systemPrompt:r,systemPromptPath:n},d=q(p),h=await fa(this.params.config.url,this.params.config.path,this.params.config.key,this.params.config.model,t,D,c,o,this.params.config.topP,d,s,m);return Ie(h.map(g=>this.parseMessage(g,a,i)))}}class va extends U{constructor(t){super(t),this.params=t,this.host="https://api.perplexity.ai",this.apiKey="",this.handleError$=r=>{let n="An error occurred";if(r.message){n=r.message.split(`
103
103
  `)[0];const o=this.extractJSONFromError(r.message);n+=`: ${o.error.message}`}return O({name:`${this.errorPrefix} ${n}`,value:n,isError:!0,disabled:!0})},this.colors={primary:"#20808D",secondary:"#FFF"},this.serviceName=C.bgHex(this.colors.primary).hex(this.colors.secondary).bold("[Perplexity]"),this.errorPrefix=C.red.bold("[Perplexity]"),this.apiKey=this.params.config.key}generateCommitMessage$(){return j(this.generateMessage()).pipe(R(t=>M(t)),T(t=>({name:`${this.serviceName} ${t.title}`,short:t.title,value:this.params.config.ignoreBody?t.title:t.value,description:this.params.config.ignoreBody?"":t.value,isError:!1})),k(this.handleError$))}extractJSONFromError(t){const r=/[{[]{1}([,:{}[\]0-9.\-+Eaeflnr-u \n\r\t]|".*?")+[}\]]{1}/gis,n=t.match(r);return n?Object.assign({},...n.map(o=>JSON.parse(o))):{error:{message:"Unknown error"}}}async generateMessage(){try{const t=this.params.stagedDiff.diff,{systemPrompt:r,systemPromptPath:n,logging:o,locale:s,generate:u,type:i,maxLength:a}=this.params.config,l={...L,locale:s,maxLength:a,type:i,generate:u,systemPrompt:r,systemPromptPath:n},m=q(l),c=await this.createChatCompletions(m,t);return o&&H("Perplexity",t,m,c),this.parseMessage(c,i,u)}catch(t){const r=t;throw r.code==="ENOTFOUND"?new F(`Error connecting to ${r.hostname} (${r.syscall})`):r}}async createChatCompletions(t,r){const o=(await new pe({method:"POST",baseURL:`${this.host}/chat/completions`,timeout:this.params.config.timeout}).setHeaders({Authorization:`Bearer ${this.apiKey}`,"content-type":"application/json"}).setBody({model:this.params.config.model,messages:[{role:"system",content:`${t}`},{role:"user",content:`Here are diff: ${r}`}],temperature:this.params.config.temperature,top_p:1,max_tokens:this.params.config.maxTokens,stream:!1}).execute()).data;if(!o.choices||o.choices.length===0||!o.choices[0].message?.content)throw new Error("No Content on response. 
Please open a Bug report");return o.choices[0].message.content}}class fn{constructor(t,r){this.config=t,this.stagedDiff=r}createAIRequests$(t){return M(t).pipe(Yt(r=>{switch(r){case"OPENAI":return G.create($a,{config:this.config.OPENAI,stagedDiff:this.stagedDiff,keyName:r}).generateCommitMessage$();case"GEMINI":return G.create(Ni,{config:this.config.GEMINI,stagedDiff:this.stagedDiff,keyName:r}).generateCommitMessage$();case"ANTHROPIC":return G.create(Ii,{config:this.config.ANTHROPIC,stagedDiff:this.stagedDiff,keyName:r}).generateCommitMessage$();case"HUGGINGFACE":return G.create(ha,{config:this.config.HUGGINGFACE,stagedDiff:this.stagedDiff,keyName:r}).generateCommitMessage$();case"MISTRAL":return G.create(ga,{config:this.config.MISTRAL,stagedDiff:this.stagedDiff,keyName:r}).generateCommitMessage$();case"CODESTRAL":return G.create(ki,{config:this.config.CODESTRAL,stagedDiff:this.stagedDiff,keyName:r}).generateCommitMessage$();case"OLLAMA":return M(this.config.OLLAMA.model).pipe(Yt(o=>G.create(Aa,{config:this.config.OLLAMA,keyName:o,stagedDiff:this.stagedDiff}).generateCommitMessage$()));case"COHERE":return G.create(Li,{config:this.config.COHERE,stagedDiff:this.stagedDiff,keyName:r}).generateCommitMessage$();case"GROQ":return G.create(da,{config:this.config.GROQ,stagedDiff:this.stagedDiff,keyName:r}).generateCommitMessage$();case"PERPLEXITY":return G.create(va,{config:this.config.PERPLEXITY,stagedDiff:this.stagedDiff,keyName:r}).generateCommitMessage$();default:const n=C.red.bold(`[${r}]`);return O({name:n+" Invalid AI type",value:"Invalid AI type",isError:!0,disabled:!0})}}),k(r=>{const n=C.red.bold("[UNKNOWN]");return O({name:n+` ${r.message||""}`,value:"Unknown error",isError:!0,disabled:!0})}))}}const dn=async()=>{const{stdout:e,failed:t}=await ne("git",["rev-parse","--show-toplevel"],{reject:!1});if(t)throw new F("The current directory must be a Git repository!");return 
e},_e=e=>`:(exclude)${e}`,hn=["package-lock.json","pnpm-lock.yaml","*.lock","*.gif","*.png"].map(_e),gn=async(e,t)=>{const r=["diff","--cached","--diff-algorithm=minimal"],{stdout:n}=await ne("git",[...r,"--name-only",...hn,...e?e.map(_e):[],...t?t.map(_e):[]]);if(!n)return null;const{stdout:o}=await ne("git",[...r,...hn,...e?e.map(_e):[]]);return{files:n.split(`
104
104
  `),diff:o}},Ba=e=>`Detected ${e.length.toLocaleString()} staged file${e.length>1?"s":""}`;class Fe{constructor(){this.title="aicommit2"}printTitle(){console.log(Qn.textSync(this.title,{font:"Small"}))}displaySpinner(t){return ze(t).start()}stopSpinner(t){t.stop(),t.clear()}printStagedFiles(t){console.log(C.bold.green("\u2714 ")+C.bold(`${Ba(t.files)}:`)),console.log(`${t.files.map(r=>` ${r}`).join(`
105
105
  `)}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "aicommit2",
3
- "version": "2.0.1",
3
+ "version": "2.0.2",
4
4
  "description": "A Reactive CLI that generates git commit messages with various AI",
5
5
  "keywords": [
6
6
  "cli",