@hkyadav/agent-ic 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.js +53 -0
- package/package.json +28 -0
- package/test.js +20 -0
- package/tools/fileRead.js +0 -0
- package/utils/ai_service.js +30 -0
- package/utils/apiKey_setup.js +38 -0
- package/utils/welcome.js +31 -0
package/index.js
ADDED
@@ -0,0 +1,53 @@
#!/usr/bin/env node

import inquirer from "inquirer";
import Welcome from "./utils/welcome.js";
import { marked } from "marked";
import { markedTerminal } from "marked-terminal";
import { getAiStream } from "./utils/ai_service.js";

marked.use(markedTerminal());

const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

async function main() {
  await Welcome();

  while (true) {
    console.log("\n");
    try {
      const { command } = await inquirer.prompt([
        {
          type: "input",
          name: "command",
          message: "✨ How may I help u?",
        },
      ]);

      if (command.toLowerCase() === "exit") break;
      console.log("Processing...");

      let fullResponse = "";

      for await (const chunk of getAiStream(command)) {
        fullResponse += chunk;

        for (const char of chunk) {
          process.stdout.write(char);

          await sleep(5);
        }
      }
    } catch (error) {
      if (error.name === "ExitPromptError") {
        console.log("\n👋 Bye! See you next time.");
        process.exit(0);
      } else {
        console.error("An error occurred:", error);
        break;
      }
    }
  }
}

main();

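Note: index.js configures marked with the marked-terminal renderer and accumulates the stream into fullResponse, but this version never passes the accumulated text back through marked. A minimal sketch of rendering it once the stream finishes, assuming the same setup (the sample string is hypothetical):

import { marked } from "marked";
import { markedTerminal } from "marked-terminal";

// Same renderer setup as index.js: marked output is formatted for the terminal.
marked.use(markedTerminal());

// Hypothetical stand-in for the fullResponse accumulated in index.js.
const fullResponse = "# Heading\n\nSome **markdown** returned by the model.";

// parse() returns an ANSI-formatted string when marked-terminal is in use.
console.log(marked.parse(fullResponse));
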
package/package.json
ADDED
@@ -0,0 +1,28 @@
{
  "name": "@hkyadav/agent-ic",
  "version": "1.0.0",
  "bin": {
    "agentIC": "./index.js"
  },
  "type": "module",
  "description": "",
  "main": "index.js",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "keywords": [],
  "author": "hkyadav",
  "license": "ISC",
  "dependencies": {
    "chalk": "^5.6.2",
    "chalk-animation": "^2.0.3",
    "commander": "^14.0.2",
    "dotenv": "^17.2.3",
    "figlet": "^1.9.4",
    "gradient-string": "^3.0.0",
    "inquirer": "^13.1.0",
    "marked": "^15.0.12",
    "marked-terminal": "^7.3.0",
    "openai": "^6.15.0"
  }
}

package/test.js
ADDED
@@ -0,0 +1,20 @@
import OpenAI from "openai";
import "dotenv/config";

const openai = new OpenAI({
  apiKey: process.env.GEMINI_API_KEY,
  baseURL: "https://generativelanguage.googleapis.com/v1beta/openai/",
});

const response = await openai.chat.completions.create({
  model: "gemini-2.5-flash",
  messages: [
    { role: "system", content: "You are a helpful assistant." },
    {
      role: "user",
      content: "Explain to me how AI works",
    },
  ],
});

console.log(response.choices[0].message);

package/tools/fileRead.js
File without changes

package/utils/ai_service.js
ADDED
@@ -0,0 +1,30 @@
import OpenAI from "openai";
import { getApiKey } from "./apiKey_setup.js";

const apiKey = await getApiKey();

const openai = new OpenAI({
  apiKey: apiKey,
  baseURL: "https://generativelanguage.googleapis.com/v1beta/openai/",
});

export async function* getAiStream(userInput) {
  const stream = await openai.chat.completions.create({
    model: "gemini-2.5-flash",
    messages: [
      {
        role: "system",
        content: "You are a helpful cli based coding assistant.",
      },
      { role: "user", content: userInput },
    ],
    stream: true,
  });

  for await (const chunk of stream) {
    const content = chunk.choices[0]?.delta?.content || "";
    if (content) {
      yield content;
    }
  }
}

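Note: getAiStream is an async generator, so callers consume it with for await...of exactly as index.js does. A minimal standalone sketch of that consumption, assuming it runs from the package root with an API key already configured through apiKey_setup.js:

import { getAiStream } from "./utils/ai_service.js";

// Each yielded chunk is the delta text of one streamed completion event.
let answer = "";
for await (const chunk of getAiStream("Summarize what this CLI does")) {
  answer += chunk;
  process.stdout.write(chunk); // print as it arrives
}
console.log(`\nReceived ${answer.length} characters.`);
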
package/utils/apiKey_setup.js
ADDED
@@ -0,0 +1,38 @@
import fs from "fs";
import path from "path";
import os from "os";
import readline from "readline";
import dotenv from "dotenv";

const configDir = path.join(os.homedir(), ".config", "agentIC");
const envPath = path.join(configDir, ".env");

export async function getApiKey() {
  if (process.env.API_KEY) return process.env.API_KEY;

  if (fs.existsSync(envPath)) {
    dotenv.config({ path: envPath });
    return process.env.API_KEY;
  }

  const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout,
  });

  const userInput = await new Promise((resolve) => {
    rl.question("No API Key found. Please paste it here: ", (answer) => {
      rl.close();
      resolve(answer.trim());
    });
  });

  if (!fs.existsSync(configDir)) {
    fs.mkdirSync(configDir, { recursive: true });
  }

  fs.writeFileSync(envPath, `API_KEY=${userInput}\n`, { mode: 0o600 });

  process.env.API_KEY = userInput;
  return userInput;
}

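Note: getApiKey resolves the key in order: an existing API_KEY environment variable, then ~/.config/agentIC/.env loaded via dotenv, then an interactive prompt whose answer is persisted with 0o600 permissions. A minimal sketch of calling it directly, assuming an ESM context:

import { getApiKey } from "./utils/apiKey_setup.js";

// Falls through env var → config file → interactive prompt, in that order.
const key = await getApiKey();
console.log(key ? "API key loaded." : "No API key configured.");
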
package/utils/welcome.js
ADDED
@@ -0,0 +1,31 @@
import { setTimeout } from "timers/promises";
import figlet from "figlet";
import gradient from "gradient-string";

export default async function Welcome() {
  console.clear();

  const startupText = figlet.textSync("Agent-IC CLI", {
    font: "Slant",
    horizontalLayout: "fitted",
  });

  const startupGradient = gradient([
    "#4285F4",
    "#8AB4F8",
    "#A142F4",
    "#E84033",
  ]);

  console.log(startupGradient.multiline(startupText));

  const geminiGradient = gradient(["#4285F4", "#9B72CB", "#D96570"]);

  console.log(
    geminiGradient(
      " — Your AI Coding Companion\n",
    ),
  );

  await setTimeout(1000);
}