@devangkumar/dvai 1.0.2 → 1.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/package.json +1 -1
  2. package/src/index.js +36 -65
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@devangkumar/dvai",
3
- "version": "1.0.2",
3
+ "version": "1.0.3",
4
4
  "description": "A powerful AI assistant for your terminal",
5
5
  "type": "module",
6
6
  "main": "src/index.js",
package/src/index.js CHANGED
@@ -10,83 +10,54 @@ const model = genAI.getGenerativeModel({ model: "gemini-2.5-flash" });
10
10
 
11
11
  const program = new Command();
12
12
 
13
- program
14
- .name("myai")
15
- .description("AI Assistant")
16
- .version("1.0.2");
13
+ async function startChat(initialHistory = []) {
14
+ let chatHistory = initialHistory;
15
+ while (true) {
16
+ const { userInput } = await inquirer.prompt([{
17
+ type: "input",
18
+ name: "userInput",
19
+ message: ">",
20
+ }]);
17
21
 
18
- async function getAIResponse(prompt, chatHistory = []) {
19
- const spinner = ora({
20
- text: "",
21
- spinner: "dots",
22
- isSilent: true
23
- }).start();
22
+ const cmd = userInput.toLowerCase();
23
+ if (cmd === "exit" || cmd === "quit" || cmd === "bye") break;
24
24
 
25
- try {
26
- if (chatHistory.length > 0) {
27
- const chat = model.startChat({
28
- history: chatHistory,
29
- });
30
- const result = await chat.sendMessage(prompt);
31
- spinner.stop();
32
- return result.response.text();
33
- } else {
34
- const result = await model.generateContent(prompt);
35
- spinner.stop();
36
- return result.response.text();
25
+ const spinner = ora({ text: "", isSilent: true }).start();
26
+ try {
27
+ const chat = model.startChat({ history: chatHistory });
28
+ const result = await chat.sendMessage(userInput);
29
+ spinner.stop();
30
+ const response = result.response.text();
31
+ process.stdout.write(response + "\n\n");
32
+ chatHistory.push({ role: "user", parts: [{ text: userInput }] });
33
+ chatHistory.push({ role: "model", parts: [{ text: response }] });
34
+ } catch (error) {
35
+ spinner.stop();
36
+ console.error("Error:", error.message);
37
+ }
37
38
  }
38
- } catch (error) {
39
- spinner.stop();
40
- console.error("Error:", error.message);
41
- return null;
42
- }
43
39
  }
44
40
 
45
- /**
46
- * Handle Single Question
47
- */
48
41
  program
42
+ .name("myai")
43
+ .version("1.0.3")
49
44
  .argument("[prompt]", "Prompt to send to AI")
50
45
  .action(async (prompt) => {
46
+ // If no prompt, start chat immediately
51
47
  if (!prompt) {
48
+ await startChat();
52
49
  return;
53
50
  }
54
51
 
55
- const response = await getAIResponse(prompt);
56
- if (response) {
57
- process.stdout.write(response + "\n");
58
- }
59
- });
60
-
61
- /**
62
- * Handle Interactive Chat
63
- */
64
- program
65
- .command("chat")
66
- .description("Start chat session")
67
- .action(async () => {
68
- let chatHistory = [];
69
-
70
- while (true) {
71
- const { userInput } = await inquirer.prompt([
72
- {
73
- type: "input",
74
- name: "userInput",
75
- message: ">",
76
- }
77
- ]);
78
-
79
- const cmd = userInput.toLowerCase();
80
- if (cmd === "exit" || cmd === "quit" || cmd === "bye") break;
81
-
82
- const response = await getAIResponse(userInput, chatHistory);
83
-
84
- if (response) {
85
- process.stdout.write(response + "\n\n");
86
-
87
- chatHistory.push({ role: "user", parts: [{ text: userInput }] });
88
- chatHistory.push({ role: "model", parts: [{ text: response }] });
89
- }
52
+ // Otherwise, handle single question
53
+ const spinner = ora({ text: "", isSilent: true }).start();
54
+ try {
55
+ const result = await model.generateContent(prompt);
56
+ spinner.stop();
57
+ process.stdout.write(result.response.text() + "\n");
58
+ } catch (error) {
59
+ spinner.stop();
60
+ console.error("Error:", error.message);
90
61
  }
91
62
  });
92
63