@projectservan8n/cnapse 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +1 -0
- package/dist/index.js +496 -0
- package/package.json +61 -0
- package/src/components/App.tsx +183 -0
- package/src/components/ChatInput.tsx +32 -0
- package/src/components/ChatMessage.tsx +34 -0
- package/src/components/Header.tsx +27 -0
- package/src/components/StatusBar.tsx +16 -0
- package/src/index.tsx +119 -0
- package/src/lib/api.ts +176 -0
- package/src/lib/config.ts +60 -0
- package/tsconfig.json +18 -0
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,496 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
// src/index.tsx
|
|
4
|
+
import { render } from "ink";
|
|
5
|
+
|
|
6
|
+
// src/components/App.tsx
|
|
7
|
+
import { useState } from "react";
|
|
8
|
+
import { Box as Box5, Text as Text5, useApp, useInput } from "ink";
|
|
9
|
+
|
|
10
|
+
// src/components/Header.tsx
|
|
11
|
+
import { Box, Text } from "ink";
|
|
12
|
+
|
|
13
|
+
// src/lib/config.ts
|
|
14
|
+
import Conf from "conf";
|
|
15
|
+
// Persistent user settings, stored on disk by `conf` under project "cnapse".
var config = new Conf({
  projectName: "cnapse",
  defaults: {
    provider: "ollama",
    model: "qwen2.5:0.5b",
    apiKeys: {},
    ollamaHost: "http://localhost:11434",
    openrouter: {
      siteUrl: "https://github.com/projectservan8n/C-napse",
      appName: "C-napse"
    }
  }
});
// Returns a snapshot of every user-facing setting.
function getConfig() {
  return {
    provider: config.get("provider"),
    model: config.get("model"),
    apiKeys: config.get("apiKeys"),
    ollamaHost: config.get("ollamaHost"),
    openrouter: config.get("openrouter")
  };
}
// Persists the active chat provider (e.g. "ollama", "openrouter").
function setProvider(provider) {
  config.set("provider", provider);
}
// Persists the model identifier used for chat requests.
function setModel(model) {
  config.set("model", model);
}
// Stores an API key for the given provider.
// Falls back to an empty map when the stored value is missing or corrupt,
// so a damaged config file cannot crash `cnapse auth`.
function setApiKey(provider, key) {
  const keys = config.get("apiKeys") ?? {};
  keys[provider] = key;
  config.set("apiKeys", keys);
}
// Returns the stored API key for the provider, or undefined if none is set.
function getApiKey(provider) {
  return (config.get("apiKeys") ?? {})[provider];
}
|
|
51
|
+
|
|
52
|
+
// src/components/Header.tsx
|
|
53
|
+
import { jsx, jsxs } from "react/jsx-runtime";
|
|
54
|
+
// Logo banner rendered above the chat; trimmed so the leading/trailing
// newlines of the template literal are dropped.
var ASCII_BANNER = `
\u2588\u2588\u2588\u2588\u2588\u2588\u2557      \u2588\u2588\u2588\u2557   \u2588\u2588\u2557 \u2588\u2588\u2588\u2588\u2588\u2557 \u2588\u2588\u2588\u2588\u2588\u2588\u2557 \u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2557\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2557
\u2588\u2588\u2554\u2550\u2550\u2550\u2550\u255D      \u2588\u2588\u2588\u2588\u2557  \u2588\u2588\u2551\u2588\u2588\u2554\u2550\u2550\u2588\u2588\u2557\u2588\u2588\u2554\u2550\u2550\u2588\u2588\u2557\u2588\u2588\u2554\u2550\u2550\u2550\u2550\u255D\u2588\u2588\u2554\u2550\u2550\u2550\u2550\u255D
\u2588\u2588\u2551  \u2588\u2588\u2588\u2588\u2588\u2557\u2588\u2588\u2554\u2588\u2588\u2557 \u2588\u2588\u2551\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2551\u2588\u2588\u2588\u2588\u2588\u2588\u2554\u255D\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2557\u2588\u2588\u2588\u2588\u2588\u2557
\u2588\u2588\u2551  \u255A\u2550\u2550\u2550\u2550\u255D\u2588\u2588\u2551\u255A\u2588\u2588\u2557\u2588\u2588\u2551\u2588\u2588\u2554\u2550\u2550\u2588\u2588\u2551\u2588\u2588\u2554\u2550\u2550\u2550\u255D \u255A\u2550\u2550\u2550\u2550\u2588\u2588\u2551\u2588\u2588\u2554\u2550\u2550\u255D
\u255A\u2588\u2588\u2588\u2588\u2588\u2588\u2557      \u2588\u2588\u2551 \u255A\u2588\u2588\u2588\u2588\u2551\u2588\u2588\u2551  \u2588\u2588\u2551\u2588\u2588\u2551     \u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2551\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2557
 \u255A\u2550\u2550\u2550\u2550\u2550\u255D      \u255A\u2550\u255D  \u255A\u2550\u2550\u2550\u255D\u255A\u2550\u255D  \u255A\u2550\u255D\u255A\u2550\u255D     \u255A\u2550\u2550\u2550\u2550\u2550\u2550\u255D\u255A\u2550\u2550\u2550\u2550\u2550\u2550\u255D
`.trim();
// Banner plus a centered "provider │ model" line showing the active config.
function Header() {
  const { provider, model } = getConfig();
  return /* @__PURE__ */ jsxs(Box, { flexDirection: "column", marginBottom: 1, children: [
    /* @__PURE__ */ jsx(Text, { color: "cyan", children: ASCII_BANNER }),
    /* @__PURE__ */ jsx(Box, { justifyContent: "center", children: /* @__PURE__ */ jsxs(Text, { color: "gray", children: [
      provider,
      " \u2502 ",
      model
    ] }) })
  ] });
}
|
|
73
|
+
|
|
74
|
+
// src/components/ChatMessage.tsx
|
|
75
|
+
import { Box as Box2, Text as Text2 } from "ink";
|
|
76
|
+
import { jsx as jsx2, jsxs as jsxs2 } from "react/jsx-runtime";
|
|
77
|
+
// One transcript entry: colored role label, timestamp, optional streaming
// indicator dot, then the body indented two columns and word-wrapped.
function ChatMessage({ role, content, timestamp, isStreaming }) {
  const ROLE_STYLES = {
    user: { label: "You", color: "green" },
    assistant: { label: "C-napse", color: "cyan" },
    system: { label: "System", color: "yellow" }
  };
  const { label, color } = ROLE_STYLES[role];
  let time = "";
  if (timestamp) {
    time = timestamp.toLocaleTimeString("en-US", { hour: "2-digit", minute: "2-digit", hour12: false });
  }
  // While streaming with no content yet, show a placeholder instead of blank.
  const body = content || (isStreaming ? "Thinking..." : "");
  return /* @__PURE__ */ jsxs2(Box2, { flexDirection: "column", marginBottom: 1, children: [
    /* @__PURE__ */ jsxs2(Box2, { children: [
      /* @__PURE__ */ jsx2(Text2, { bold: true, color, children: label }),
      /* @__PURE__ */ jsxs2(Text2, { color: "gray", children: [
        " ",
        time
      ] }),
      isStreaming && /* @__PURE__ */ jsx2(Text2, { color: "yellow", children: " \u25CF" })
    ] }),
    /* @__PURE__ */ jsx2(Box2, { marginLeft: 2, children: /* @__PURE__ */ jsx2(Text2, { wrap: "wrap", children: body }) })
  ] });
}
|
|
97
|
+
|
|
98
|
+
// src/components/ChatInput.tsx
|
|
99
|
+
import { Box as Box3, Text as Text3 } from "ink";
|
|
100
|
+
import TextInput from "ink-text-input";
|
|
101
|
+
import { jsx as jsx3, jsxs as jsxs3 } from "react/jsx-runtime";
|
|
102
|
+
// Bordered single-line input row. While a request is in flight the text
// field is hidden and a "Processing..." label is shown instead.
function ChatInput({ value, onChange, onSubmit, isProcessing }) {
  const accent = isProcessing ? "yellow" : "blue";
  const prompt = isProcessing ? "Processing... " : "Message: ";
  const field = isProcessing ? false : /* @__PURE__ */ jsx3(TextInput, {
    value,
    onChange,
    onSubmit,
    placeholder: "Type your message..."
  });
  return /* @__PURE__ */ jsxs3(Box3, {
    borderStyle: "round",
    borderColor: accent,
    paddingX: 1,
    children: [
      /* @__PURE__ */ jsx3(Text3, { color: accent, children: prompt }),
      field
    ]
  });
}
|
|
124
|
+
|
|
125
|
+
// src/components/StatusBar.tsx
|
|
126
|
+
import { Box as Box4, Text as Text4 } from "ink";
|
|
127
|
+
import { jsx as jsx4 } from "react/jsx-runtime";
|
|
128
|
+
// Single-line footer showing the current status and keyboard hints.
function StatusBar({ status }) {
  const hints = ` ${status} \u2502 Ctrl+C: Exit \u2502 Enter: Send `;
  return /* @__PURE__ */ jsx4(Box4, { children: /* @__PURE__ */ jsx4(Text4, { backgroundColor: "gray", color: "white", children: hints }) });
}
|
|
131
|
+
|
|
132
|
+
// src/lib/api.ts
|
|
133
|
+
// System prompt prepended to every conversation sent to a provider.
var SYSTEM_PROMPT = `You are C-napse, a helpful AI assistant for PC automation running on the user's desktop.
You can help with coding, file management, shell commands, and more. Be concise and helpful.

When responding:
- Be direct and practical
- Use markdown formatting for code blocks
- If asked to do something, explain what you'll do first`;
// Dispatches a chat request to the provider currently selected in config,
// prefixing the system prompt. Throws for an unrecognized provider.
async function chat(messages) {
  const config2 = getConfig();
  const allMessages = [
    { role: "system", content: SYSTEM_PROMPT },
    ...messages
  ];
  const handlers = {
    openrouter: chatOpenRouter,
    ollama: chatOllama,
    anthropic: chatAnthropic,
    openai: chatOpenAI
  };
  const handler = handlers[config2.provider];
  if (!handler) {
    throw new Error(`Unknown provider: ${config2.provider}`);
  }
  return handler(allMessages, config2.model);
}
|
|
159
|
+
// Sends a non-streaming chat completion to OpenRouter.
// Requires an API key stored via `cnapse auth openrouter <key>`.
async function chatOpenRouter(messages, model) {
  const apiKey = getApiKey("openrouter");
  if (!apiKey) {
    throw new Error("OpenRouter API key not configured. Run: cnapse auth openrouter <key>");
  }
  const { openrouter } = getConfig();
  const payload = {
    model,
    messages,
    max_tokens: 2048,
    temperature: 0.7
  };
  const response = await fetch("https://openrouter.ai/api/v1/chat/completions", {
    method: "POST",
    headers: {
      "Authorization": `Bearer ${apiKey}`,
      "Content-Type": "application/json",
      // OpenRouter attribution headers.
      "HTTP-Referer": openrouter.siteUrl,
      "X-Title": openrouter.appName
    },
    body: JSON.stringify(payload)
  });
  if (!response.ok) {
    const error = await response.text();
    throw new Error(`OpenRouter error: ${response.status} - ${error}`);
  }
  const data = await response.json();
  return { content: data.choices?.[0]?.message?.content || "", model };
}
|
|
188
|
+
// Sends a non-streaming chat request to a local Ollama server at the
// configured host. No API key is needed.
async function chatOllama(messages, model) {
  const { ollamaHost } = getConfig();
  const payload = { model, messages, stream: false };
  const response = await fetch(`${ollamaHost}/api/chat`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(payload)
  });
  if (!response.ok) {
    const error = await response.text();
    throw new Error(`Ollama error: ${response.status} - ${error}`);
  }
  const data = await response.json();
  return { content: data.message?.content || "", model };
}
|
|
207
|
+
// Sends a request to the Anthropic Messages API. Anthropic takes the system
// prompt as a top-level field, so it is split out of the message list here.
async function chatAnthropic(messages, model) {
  const apiKey = getApiKey("anthropic");
  if (!apiKey) {
    throw new Error("Anthropic API key not configured. Run: cnapse auth anthropic <key>");
  }
  const systemMsg = messages.find((m) => m.role === "system");
  const chatMessages = messages.filter((m) => m.role !== "system");
  const payload = {
    model,
    max_tokens: 2048,
    system: systemMsg?.content || "",
    messages: chatMessages
  };
  const response = await fetch("https://api.anthropic.com/v1/messages", {
    method: "POST",
    headers: {
      "x-api-key": apiKey,
      "Content-Type": "application/json",
      "anthropic-version": "2023-06-01"
    },
    body: JSON.stringify(payload)
  });
  if (!response.ok) {
    const error = await response.text();
    throw new Error(`Anthropic error: ${response.status} - ${error}`);
  }
  const data = await response.json();
  return { content: data.content?.[0]?.text || "", model };
}
|
|
236
|
+
// Sends a non-streaming chat completion to the OpenAI API.
// Requires an API key stored via `cnapse auth openai <key>`.
async function chatOpenAI(messages, model) {
  const apiKey = getApiKey("openai");
  if (!apiKey) {
    throw new Error("OpenAI API key not configured. Run: cnapse auth openai <key>");
  }
  const payload = {
    model,
    messages,
    max_tokens: 2048,
    temperature: 0.7
  };
  const response = await fetch("https://api.openai.com/v1/chat/completions", {
    method: "POST",
    headers: {
      "Authorization": `Bearer ${apiKey}`,
      "Content-Type": "application/json"
    },
    body: JSON.stringify(payload)
  });
  if (!response.ok) {
    const error = await response.text();
    throw new Error(`OpenAI error: ${response.status} - ${error}`);
  }
  const data = await response.json();
  return { content: data.choices?.[0]?.message?.content || "", model };
}
|
|
262
|
+
|
|
263
|
+
// src/components/App.tsx
|
|
264
|
+
import { jsx as jsx5, jsxs as jsxs4 } from "react/jsx-runtime";
|
|
265
|
+
// Root Ink component: banner, scrolling transcript, error line, input box,
// and status bar. Owns the message list and the request lifecycle.
function App() {
  const { exit } = useApp();
  // Transcript, seeded with a system welcome message; index 0 is preserved
  // by /clear and Ctrl+L.
  const [messages, setMessages] = useState([
    {
      id: "0",
      role: "system",
      content: "Welcome to C-napse! Type your message and press Enter.\n\nShortcuts:\n  Ctrl+C - Exit\n  /clear - Clear chat\n  /help - Show help",
      timestamp: /* @__PURE__ */ new Date()
    }
  ]);
  const [input, setInput] = useState("");
  const [isProcessing, setIsProcessing] = useState(false);
  const [status, setStatus] = useState("Ready");
  const [error, setError] = useState(null);
  // Global key bindings: Ctrl+C exits, Ctrl+L clears the chat.
  useInput((inputChar, key) => {
    if (key.ctrl && inputChar === "c") {
      exit();
    }
    if (key.ctrl && inputChar === "l") {
      setMessages([messages[0]]);
      setError(null);
    }
  });
  // Submits the typed line: slash-commands are handled locally; anything else
  // becomes a user message followed by a provider round-trip.
  const handleSubmit = async (value) => {
    if (!value.trim() || isProcessing) return;
    const userInput = value.trim();
    setInput("");
    setError(null);
    if (userInput.startsWith("/")) {
      handleCommand(userInput);
      return;
    }
    const userMsg = {
      id: Date.now().toString(),
      role: "user",
      content: userInput,
      timestamp: /* @__PURE__ */ new Date()
    };
    setMessages((prev) => [...prev, userMsg]);
    // Placeholder assistant entry; filled in (or replaced with an error
    // message) once the request settles.
    const assistantId = (Date.now() + 1).toString();
    setMessages((prev) => [
      ...prev,
      {
        id: assistantId,
        role: "assistant",
        content: "",
        timestamp: /* @__PURE__ */ new Date(),
        isStreaming: true
      }
    ]);
    setIsProcessing(true);
    setStatus("Thinking...");
    try {
      // Send the last 10 user/assistant turns. `messages` here is the
      // pre-submit snapshot, so the new input is appended explicitly below.
      const apiMessages = messages.filter((m) => m.role === "user" || m.role === "assistant").slice(-10).map((m) => ({ role: m.role, content: m.content }));
      apiMessages.push({ role: "user", content: userInput });
      const response = await chat(apiMessages);
      setMessages(
        (prev) => prev.map(
          (m) => m.id === assistantId ? { ...m, content: response.content || "(no response)", isStreaming: false } : m
        )
      );
    } catch (err) {
      const errorMsg = err instanceof Error ? err.message : "Unknown error";
      setError(errorMsg);
      // Surface the failure inside the placeholder assistant message too.
      setMessages(
        (prev) => prev.map(
          (m) => m.id === assistantId ? { ...m, content: `Error: ${errorMsg}`, isStreaming: false } : m
        )
      );
    } finally {
      setIsProcessing(false);
      setStatus("Ready");
    }
  };
  // Local slash-commands (/clear, /help); unknown commands get a system note.
  const handleCommand = (cmd) => {
    const parts = cmd.slice(1).split(" ");
    const command = parts[0];
    switch (command) {
      case "clear":
        setMessages([messages[0]]);
        addSystemMessage("Chat cleared.");
        break;
      case "help":
        addSystemMessage(
          "Commands:\n  /clear - Clear chat history\n  /help - Show this help\n\nJust type naturally to chat with the AI!"
        );
        break;
      default:
        addSystemMessage(`Unknown command: ${command}`);
    }
  };
  // Appends a system-role message to the transcript.
  const addSystemMessage = (content) => {
    setMessages((prev) => [
      ...prev,
      {
        id: Date.now().toString(),
        role: "system",
        content,
        timestamp: /* @__PURE__ */ new Date()
      }
    ]);
  };
  // Only render the most recent 20 messages.
  const visibleMessages = messages.slice(-20);
  return /* @__PURE__ */ jsxs4(Box5, { flexDirection: "column", height: "100%", children: [
    /* @__PURE__ */ jsx5(Header, {}),
    /* @__PURE__ */ jsxs4(Box5, { flexDirection: "column", flexGrow: 1, borderStyle: "round", borderColor: "gray", padding: 1, children: [
      /* @__PURE__ */ jsx5(Text5, { bold: true, color: "gray", children: " Chat " }),
      visibleMessages.map((msg) => /* @__PURE__ */ jsx5(
        ChatMessage,
        {
          role: msg.role,
          content: msg.content,
          timestamp: msg.timestamp,
          isStreaming: msg.isStreaming
        },
        msg.id
      ))
    ] }),
    error && /* @__PURE__ */ jsx5(Box5, { marginY: 1, children: /* @__PURE__ */ jsxs4(Text5, { color: "red", children: [
      "Error: ",
      error
    ] }) }),
    /* @__PURE__ */ jsx5(
      ChatInput,
      {
        value: input,
        onChange: setInput,
        onSubmit: handleSubmit,
        isProcessing
      }
    ),
    /* @__PURE__ */ jsx5(StatusBar, { status })
  ] });
}
|
|
399
|
+
|
|
400
|
+
// src/index.tsx
|
|
401
|
+
import { jsx as jsx6 } from "react/jsx-runtime";
|
|
402
|
+
// CLI entry point. Non-interactive subcommands (auth/config/help/version)
// are handled here and exit; any other invocation falls through to the
// interactive chat UI rendered at the bottom.
var args = process.argv.slice(2);
if (args.length > 0) {
  const command = args[0];
  switch (command) {
    case "auth": {
      // cnapse auth <provider> <api-key>
      const provider = args[1];
      const key = args[2];
      if (!provider || !key) {
        console.log("Usage: cnapse auth <provider> <api-key>");
        console.log("Providers: openrouter, anthropic, openai");
        process.exit(1);
      }
      if (!["openrouter", "anthropic", "openai"].includes(provider)) {
        console.log(`Invalid provider: ${provider}`);
        console.log("Valid providers: openrouter, anthropic, openai");
        process.exit(1);
      }
      setApiKey(provider, key);
      console.log(`\u2713 ${provider} API key saved`);
      process.exit(0);
    }
    case "config": {
      const subcommand = args[1];
      if (subcommand === "set") {
        const key = args[2];
        const value = args[3];
        // Guard against a missing value so we never persist `undefined`
        // (previously `cnapse config set model` would wipe the model).
        if (!key || value === void 0) {
          console.log("Usage: cnapse config set <provider|model> <value>");
          process.exit(1);
        }
        if (key === "provider") {
          if (!["openrouter", "ollama", "anthropic", "openai"].includes(value)) {
            console.log("Valid providers: openrouter, ollama, anthropic, openai");
            process.exit(1);
          }
          setProvider(value);
          console.log(`\u2713 Provider set to: ${value}`);
        } else if (key === "model") {
          setModel(value);
          console.log(`\u2713 Model set to: ${value}`);
        } else {
          console.log("Usage: cnapse config set <provider|model> <value>");
        }
        process.exit(0);
      }
      if (subcommand === "show" || !subcommand) {
        // Print current settings; API keys are shown as configured/missing
        // flags only, never their values.
        const config2 = getConfig();
        console.log("\nC-napse Configuration:");
        console.log(`  Provider: ${config2.provider}`);
        console.log(`  Model: ${config2.model}`);
        console.log(`  Ollama Host: ${config2.ollamaHost}`);
        console.log(`  API Keys configured:`);
        console.log(`    - OpenRouter: ${config2.apiKeys.openrouter ? "\u2713" : "\u2717"}`);
        console.log(`    - Anthropic: ${config2.apiKeys.anthropic ? "\u2713" : "\u2717"}`);
        console.log(`    - OpenAI: ${config2.apiKeys.openai ? "\u2713" : "\u2717"}`);
        console.log("");
        process.exit(0);
      }
      console.log("Usage: cnapse config [show|set <key> <value>]");
      process.exit(1);
    }
    case "help":
    case "--help":
    case "-h": {
      console.log(`
C-napse - Autonomous PC Intelligence

Usage:
  cnapse                        Start interactive chat
  cnapse auth <provider> <key>  Set API key
  cnapse config                 Show configuration
  cnapse config set <k> <v>     Set config value
  cnapse help                   Show this help

Providers:
  ollama      - Local AI (default)
  openrouter  - OpenRouter API
  anthropic   - Anthropic Claude
  openai      - OpenAI GPT

Examples:
  cnapse auth openrouter sk-or-xxxxx
  cnapse config set provider openrouter
  cnapse config set model qwen/qwen-2.5-coder-32b-instruct
`);
      process.exit(0);
    }
    case "version":
    case "--version":
    case "-v": {
      console.log("cnapse v0.2.0");
      process.exit(0);
    }
    default: {
      // Unknown argument: fall through and start the chat UI.
      break;
    }
  }
}
render(/* @__PURE__ */ jsx6(App, {}));
|
package/package.json
ADDED
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@projectservan8n/cnapse",
|
|
3
|
+
"version": "0.2.0",
|
|
4
|
+
"description": "Autonomous PC intelligence - AI assistant for desktop automation",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"main": "dist/index.js",
|
|
7
|
+
"bin": {
|
|
8
|
+
"cnapse": "./dist/index.js"
|
|
9
|
+
},
|
|
10
|
+
"scripts": {
|
|
11
|
+
"dev": "tsx src/index.tsx",
|
|
12
|
+
"build": "tsup src/index.tsx --format esm --dts --clean",
|
|
13
|
+
"start": "node dist/index.js",
|
|
14
|
+
"typecheck": "tsc --noEmit"
|
|
15
|
+
},
|
|
16
|
+
"keywords": [
|
|
17
|
+
"ai",
|
|
18
|
+
"cli",
|
|
19
|
+
"assistant",
|
|
20
|
+
"automation",
|
|
21
|
+
"openrouter",
|
|
22
|
+
"ollama"
|
|
23
|
+
],
|
|
24
|
+
"author": "projectservan8n",
|
|
25
|
+
"repository": {
|
|
26
|
+
"type": "git",
|
|
27
|
+
"url": "https://github.com/projectservan8n/C-napse.git"
|
|
28
|
+
},
|
|
29
|
+
"homepage": "https://c-napse.up.railway.app",
|
|
30
|
+
"license": "MIT",
|
|
31
|
+
"dependencies": {
|
|
32
|
+
"ink": "^5.0.1",
|
|
33
|
+
"react": "^18.3.1",
|
|
34
|
+
"ink-text-input": "^6.0.0",
|
|
35
|
+
"ink-spinner": "^5.0.0",
|
|
36
|
+
"chalk": "^5.3.0",
|
|
37
|
+
"conf": "^13.0.1",
|
|
38
|
+
"ora": "^8.0.1",
|
|
39
|
+
"marked": "^14.1.0",
|
|
40
|
+
"marked-terminal": "^7.1.0",
|
|
41
|
+
"clipboardy": "^4.0.0",
|
|
42
|
+
"execa": "^9.3.0",
|
|
43
|
+
"globby": "^14.0.2",
|
|
44
|
+
"node-fetch": "^3.3.2",
|
|
45
|
+
"figlet": "^1.7.0",
|
|
46
|
+
"gradient-string": "^2.0.2",
|
|
47
|
+
"boxen": "^8.0.1"
|
|
48
|
+
},
|
|
49
|
+
"devDependencies": {
|
|
50
|
+
"@types/node": "^22.0.0",
|
|
51
|
+
"@types/react": "^18.3.3",
|
|
52
|
+
"@types/figlet": "^1.5.8",
|
|
53
|
+
"@types/gradient-string": "^1.1.6",
|
|
54
|
+
"tsup": "^8.2.4",
|
|
55
|
+
"tsx": "^4.16.5",
|
|
56
|
+
"typescript": "^5.5.4"
|
|
57
|
+
},
|
|
58
|
+
"engines": {
|
|
59
|
+
"node": ">=18"
|
|
60
|
+
}
|
|
61
|
+
}
|
|
@@ -0,0 +1,183 @@
|
|
|
1
|
+
import React, { useState, useEffect } from 'react';
|
|
2
|
+
import { Box, Text, useApp, useInput } from 'ink';
|
|
3
|
+
import { Header } from './Header.js';
|
|
4
|
+
import { ChatMessage } from './ChatMessage.js';
|
|
5
|
+
import { ChatInput } from './ChatInput.js';
|
|
6
|
+
import { StatusBar } from './StatusBar.js';
|
|
7
|
+
import { chat, Message } from '../lib/api.js';
|
|
8
|
+
|
|
9
|
+
// A single transcript entry as rendered by <ChatMessage/>.
interface ChatMsg {
  id: string;
  role: 'user' | 'assistant' | 'system';
  content: string;
  timestamp: Date;
  isStreaming?: boolean;
}

/**
 * Root component: renders the banner, transcript, error line, input box and
 * status bar, and owns the message list plus the request lifecycle.
 */
export function App() {
  const { exit } = useApp();
  // Transcript, seeded with a system welcome message; index 0 is preserved
  // by /clear and Ctrl+L.
  const [messages, setMessages] = useState<ChatMsg[]>([
    {
      id: '0',
      role: 'system',
      content: 'Welcome to C-napse! Type your message and press Enter.\n\nShortcuts:\n  Ctrl+C - Exit\n  /clear - Clear chat\n  /help - Show help',
      timestamp: new Date(),
    },
  ]);
  const [input, setInput] = useState('');
  const [isProcessing, setIsProcessing] = useState(false);
  const [status, setStatus] = useState('Ready');
  const [error, setError] = useState<string | null>(null);

  // Global key bindings: Ctrl+C exits, Ctrl+L clears the chat.
  useInput((inputChar, key) => {
    if (key.ctrl && inputChar === 'c') {
      exit();
    }
    if (key.ctrl && inputChar === 'l') {
      setMessages([messages[0]!]); // Keep welcome message
      setError(null);
    }
  });

  // Submits the typed line: slash-commands are handled locally; anything
  // else becomes a user message followed by a provider round-trip.
  const handleSubmit = async (value: string) => {
    if (!value.trim() || isProcessing) return;

    const userInput = value.trim();
    setInput('');
    setError(null);

    // Handle commands
    if (userInput.startsWith('/')) {
      handleCommand(userInput);
      return;
    }

    // Add user message
    const userMsg: ChatMsg = {
      id: Date.now().toString(),
      role: 'user',
      content: userInput,
      timestamp: new Date(),
    };
    setMessages((prev) => [...prev, userMsg]);

    // Add placeholder for assistant
    const assistantId = (Date.now() + 1).toString();
    setMessages((prev) => [
      ...prev,
      {
        id: assistantId,
        role: 'assistant',
        content: '',
        timestamp: new Date(),
        isStreaming: true,
      },
    ]);

    setIsProcessing(true);
    setStatus('Thinking...');

    try {
      // Build message history for API; `messages` is the pre-submit snapshot,
      // so the new input is appended explicitly below.
      const apiMessages: Message[] = messages
        .filter((m) => m.role === 'user' || m.role === 'assistant')
        .slice(-10)
        .map((m) => ({ role: m.role as 'user' | 'assistant', content: m.content }));

      apiMessages.push({ role: 'user', content: userInput });

      const response = await chat(apiMessages);

      // Update assistant message with response
      setMessages((prev) =>
        prev.map((m) =>
          m.id === assistantId
            ? { ...m, content: response.content || '(no response)', isStreaming: false }
            : m
        )
      );
    } catch (err) {
      const errorMsg = err instanceof Error ? err.message : 'Unknown error';
      setError(errorMsg);
      // Update assistant message with error
      setMessages((prev) =>
        prev.map((m) =>
          m.id === assistantId
            ? { ...m, content: `Error: ${errorMsg}`, isStreaming: false }
            : m
        )
      );
    } finally {
      setIsProcessing(false);
      setStatus('Ready');
    }
  };

  // Local slash-commands (/clear, /help); unknown commands get a system note.
  const handleCommand = (cmd: string) => {
    const parts = cmd.slice(1).split(' ');
    const command = parts[0];

    switch (command) {
      case 'clear':
        setMessages([messages[0]!]);
        addSystemMessage('Chat cleared.');
        break;
      case 'help':
        addSystemMessage(
          'Commands:\n  /clear - Clear chat history\n  /help - Show this help\n\nJust type naturally to chat with the AI!'
        );
        break;
      default:
        addSystemMessage(`Unknown command: ${command}`);
    }
  };

  // Appends a system-role message to the transcript.
  const addSystemMessage = (content: string) => {
    setMessages((prev) => [
      ...prev,
      {
        id: Date.now().toString(),
        role: 'system',
        content,
        timestamp: new Date(),
      },
    ]);
  };

  // Only show last N messages that fit
  const visibleMessages = messages.slice(-20);

  return (
    <Box flexDirection="column" height="100%">
      <Header />

      <Box flexDirection="column" flexGrow={1} borderStyle="round" borderColor="gray" padding={1}>
        <Text bold color="gray"> Chat </Text>
        {visibleMessages.map((msg) => (
          <ChatMessage
            key={msg.id}
            role={msg.role}
            content={msg.content}
            timestamp={msg.timestamp}
            isStreaming={msg.isStreaming}
          />
        ))}
      </Box>

      {error && (
        <Box marginY={1}>
          <Text color="red">Error: {error}</Text>
        </Box>
      )}

      <ChatInput
        value={input}
        onChange={setInput}
        onSubmit={handleSubmit}
        isProcessing={isProcessing}
      />

      <StatusBar status={status} />
    </Box>
  );
}
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
import React from 'react';
|
|
2
|
+
import { Box, Text } from 'ink';
|
|
3
|
+
import TextInput from 'ink-text-input';
|
|
4
|
+
|
|
5
|
+
interface ChatInputProps {
|
|
6
|
+
value: string;
|
|
7
|
+
onChange: (value: string) => void;
|
|
8
|
+
onSubmit: (value: string) => void;
|
|
9
|
+
isProcessing: boolean;
|
|
10
|
+
}
|
|
11
|
+
|
|
12
|
+
export function ChatInput({ value, onChange, onSubmit, isProcessing }: ChatInputProps) {
|
|
13
|
+
return (
|
|
14
|
+
<Box
|
|
15
|
+
borderStyle="round"
|
|
16
|
+
borderColor={isProcessing ? 'yellow' : 'blue'}
|
|
17
|
+
paddingX={1}
|
|
18
|
+
>
|
|
19
|
+
<Text color={isProcessing ? 'yellow' : 'blue'}>
|
|
20
|
+
{isProcessing ? 'Processing... ' : 'Message: '}
|
|
21
|
+
</Text>
|
|
22
|
+
{!isProcessing && (
|
|
23
|
+
<TextInput
|
|
24
|
+
value={value}
|
|
25
|
+
onChange={onChange}
|
|
26
|
+
onSubmit={onSubmit}
|
|
27
|
+
placeholder="Type your message..."
|
|
28
|
+
/>
|
|
29
|
+
)}
|
|
30
|
+
</Box>
|
|
31
|
+
);
|
|
32
|
+
}
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
import React from 'react';
|
|
2
|
+
import { Box, Text } from 'ink';
|
|
3
|
+
|
|
4
|
+
interface ChatMessageProps {
  role: 'user' | 'assistant' | 'system';
  content: string;
  timestamp?: Date;
  isStreaming?: boolean;
}

/**
 * One transcript entry: colored role label, timestamp, optional streaming
 * indicator dot, then the body indented two columns and word-wrapped.
 */
export function ChatMessage({ role, content, timestamp, isStreaming }: ChatMessageProps) {
  const ROLE_STYLES = {
    user: { label: 'You', color: 'green' as const },
    assistant: { label: 'C-napse', color: 'cyan' as const },
    system: { label: 'System', color: 'yellow' as const },
  };
  const { label, color } = ROLE_STYLES[role];

  const time = timestamp
    ? timestamp.toLocaleTimeString('en-US', { hour: '2-digit', minute: '2-digit', hour12: false })
    : '';

  // While streaming with no content yet, show a placeholder instead of blank.
  const body = content || (isStreaming ? 'Thinking...' : '');

  return (
    <Box flexDirection="column" marginBottom={1}>
      <Box>
        <Text bold color={color}>{label}</Text>
        <Text color="gray"> {time}</Text>
        {isStreaming && <Text color="yellow"> ●</Text>}
      </Box>
      <Box marginLeft={2}>
        <Text wrap="wrap">{body}</Text>
      </Box>
    </Box>
  );
}
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
import React from 'react';
import { Box, Text } from 'ink';
import { getConfig } from '../lib/config.js';

// Large-text banner printed once at the top of the TUI.
const ASCII_BANNER = `
 ██████╗ ███╗   ██╗ █████╗ ██████╗ ███████╗███████╗
██╔════╝ ████╗  ██║██╔══██╗██╔══██╗██╔════╝██╔════╝
██║      █████╗██╔██╗ ██║███████║██████╔╝███████╗█████╗
██║      ╚════╝██║╚██╗██║██╔══██║██╔═══╝ ╚════██║██╔══╝
╚██████╗      ██║ ╚████║██║  ██║██║     ███████║███████╗
 ╚═════╝      ╚═╝  ╚═══╝╚═╝  ╚═╝╚═╝     ╚══════╝╚══════╝
`.trim();

/**
 * App header: the C-napse banner plus a centered gray line showing
 * the currently configured provider and model.
 */
export function Header() {
  const { provider, model } = getConfig();

  return (
    <Box flexDirection="column" marginBottom={1}>
      <Text color="cyan">{ASCII_BANNER}</Text>
      <Box justifyContent="center">
        <Text color="gray">
          {provider} │ {model}
        </Text>
      </Box>
    </Box>
  );
}
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
import React from 'react';
import { Box, Text } from 'ink';

/** Props for the bottom status bar. */
interface StatusBarProps {
  status: string;
}

/**
 * One-line bottom bar rendered white-on-gray: the current status
 * followed by the key hints for exiting and sending.
 */
export function StatusBar({ status }: StatusBarProps) {
  // Compose the bar text; output is identical to the inline template.
  const hints = 'Ctrl+C: Exit │ Enter: Send';

  return (
    <Box>
      <Text backgroundColor="gray" color="white">
        {` ${status} │ ${hints} `}
      </Text>
    </Box>
  );
}
|
package/src/index.tsx
ADDED
|
@@ -0,0 +1,119 @@
|
|
|
1
|
+
#!/usr/bin/env node
import React from 'react';
import { render } from 'ink';
import { App } from './components/App.js';
import { setApiKey, setProvider, setModel, getConfig } from './lib/config.js';

// Providers that authenticate with an API key (local ollama needs none).
const KEY_PROVIDERS = ['openrouter', 'anthropic', 'openai'] as const;
// Every provider accepted by `config set provider`.
const ALL_PROVIDERS = ['openrouter', 'ollama', 'anthropic', 'openai'] as const;

// Type predicate used to narrow user-supplied strings without `as any`.
function isOneOf<T extends string>(list: readonly T[], v: string): v is T {
  return (list as readonly string[]).includes(v);
}

const args = process.argv.slice(2);

// Handle one-shot CLI commands; anything unrecognized falls through
// to the interactive TUI at the bottom of this file.
if (args.length > 0) {
  const command = args[0];

  switch (command) {
    case 'auth': {
      const provider = args[1];
      const key = args[2];

      if (!provider || !key) {
        console.log('Usage: cnapse auth <provider> <api-key>');
        console.log('Providers: openrouter, anthropic, openai');
        process.exit(1);
      }

      // Narrow via the const list instead of an unchecked cast.
      if (!isOneOf(KEY_PROVIDERS, provider)) {
        console.log(`Invalid provider: ${provider}`);
        console.log('Valid providers: openrouter, anthropic, openai');
        process.exit(1);
      }

      setApiKey(provider, key);
      console.log(`✓ ${provider} API key saved`);
      process.exit(0);
    }

    case 'config': {
      const subcommand = args[1];

      if (subcommand === 'set') {
        const key = args[2];
        const value = args[3];

        // BUG FIX: the original used `value!`, so a missing value could
        // persist `undefined` into the config store. Validate up front
        // and exit non-zero instead.
        if (!key || !value) {
          console.log('Usage: cnapse config set <provider|model> <value>');
          process.exit(1);
        }

        if (key === 'provider') {
          if (!isOneOf(ALL_PROVIDERS, value)) {
            console.log('Valid providers: openrouter, ollama, anthropic, openai');
            process.exit(1);
          }
          setProvider(value);
          console.log(`✓ Provider set to: ${value}`);
        } else if (key === 'model') {
          setModel(value);
          console.log(`✓ Model set to: ${value}`);
        } else {
          console.log('Usage: cnapse config set <provider|model> <value>');
        }
        process.exit(0);
      }

      if (subcommand === 'show' || !subcommand) {
        const config = getConfig();
        console.log('\nC-napse Configuration:');
        console.log(`  Provider: ${config.provider}`);
        console.log(`  Model: ${config.model}`);
        console.log(`  Ollama Host: ${config.ollamaHost}`);
        console.log(`  API Keys configured:`);
        console.log(`    - OpenRouter: ${config.apiKeys.openrouter ? '✓' : '✗'}`);
        console.log(`    - Anthropic: ${config.apiKeys.anthropic ? '✓' : '✗'}`);
        console.log(`    - OpenAI: ${config.apiKeys.openai ? '✓' : '✗'}`);
        console.log('');
        process.exit(0);
      }

      console.log('Usage: cnapse config [show|set <key> <value>]');
      process.exit(1);
    }

    case 'help':
    case '--help':
    case '-h': {
      console.log(`
C-napse - Autonomous PC Intelligence

Usage:
  cnapse                        Start interactive chat
  cnapse auth <provider> <key>  Set API key
  cnapse config                 Show configuration
  cnapse config set <k> <v>     Set config value
  cnapse help                   Show this help

Providers:
  ollama     - Local AI (default)
  openrouter - OpenRouter API
  anthropic  - Anthropic Claude
  openai     - OpenAI GPT

Examples:
  cnapse auth openrouter sk-or-xxxxx
  cnapse config set provider openrouter
  cnapse config set model qwen/qwen-2.5-coder-32b-instruct
`);
      process.exit(0);
    }

    case 'version':
    case '--version':
    case '-v': {
      console.log('cnapse v0.2.0');
      process.exit(0);
    }

    default: {
      // Treat as a direct question
      // For now, just start the app
      break;
    }
  }
}

// Start interactive TUI
render(<App />);
|
package/src/lib/api.ts
ADDED
|
@@ -0,0 +1,176 @@
|
|
|
1
|
+
import { getConfig, getApiKey } from './config.js';

/** A single chat turn in the provider-neutral (OpenAI-style) shape. */
export interface Message {
  role: 'system' | 'user' | 'assistant';
  content: string;
}

/** Normalized result every provider backend returns from chat(). */
export interface ChatResponse {
  content: string;
  model: string;
}

// System prompt prepended to every conversation by chat() before it is
// dispatched to whichever provider is configured.
const SYSTEM_PROMPT = `You are C-napse, a helpful AI assistant for PC automation running on the user's desktop.
You can help with coding, file management, shell commands, and more. Be concise and helpful.

When responding:
- Be direct and practical
- Use markdown formatting for code blocks
- If asked to do something, explain what you'll do first`;
|
|
20
|
+
|
|
21
|
+
export async function chat(messages: Message[]): Promise<ChatResponse> {
|
|
22
|
+
const config = getConfig();
|
|
23
|
+
|
|
24
|
+
const allMessages: Message[] = [
|
|
25
|
+
{ role: 'system', content: SYSTEM_PROMPT },
|
|
26
|
+
...messages,
|
|
27
|
+
];
|
|
28
|
+
|
|
29
|
+
switch (config.provider) {
|
|
30
|
+
case 'openrouter':
|
|
31
|
+
return chatOpenRouter(allMessages, config.model);
|
|
32
|
+
case 'ollama':
|
|
33
|
+
return chatOllama(allMessages, config.model);
|
|
34
|
+
case 'anthropic':
|
|
35
|
+
return chatAnthropic(allMessages, config.model);
|
|
36
|
+
case 'openai':
|
|
37
|
+
return chatOpenAI(allMessages, config.model);
|
|
38
|
+
default:
|
|
39
|
+
throw new Error(`Unknown provider: ${config.provider}`);
|
|
40
|
+
}
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
async function chatOpenRouter(messages: Message[], model: string): Promise<ChatResponse> {
|
|
44
|
+
const apiKey = getApiKey('openrouter');
|
|
45
|
+
if (!apiKey) {
|
|
46
|
+
throw new Error('OpenRouter API key not configured. Run: cnapse auth openrouter <key>');
|
|
47
|
+
}
|
|
48
|
+
|
|
49
|
+
const config = getConfig();
|
|
50
|
+
|
|
51
|
+
const response = await fetch('https://openrouter.ai/api/v1/chat/completions', {
|
|
52
|
+
method: 'POST',
|
|
53
|
+
headers: {
|
|
54
|
+
'Authorization': `Bearer ${apiKey}`,
|
|
55
|
+
'Content-Type': 'application/json',
|
|
56
|
+
'HTTP-Referer': config.openrouter.siteUrl,
|
|
57
|
+
'X-Title': config.openrouter.appName,
|
|
58
|
+
},
|
|
59
|
+
body: JSON.stringify({
|
|
60
|
+
model,
|
|
61
|
+
messages,
|
|
62
|
+
max_tokens: 2048,
|
|
63
|
+
temperature: 0.7,
|
|
64
|
+
}),
|
|
65
|
+
});
|
|
66
|
+
|
|
67
|
+
if (!response.ok) {
|
|
68
|
+
const error = await response.text();
|
|
69
|
+
throw new Error(`OpenRouter error: ${response.status} - ${error}`);
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
const data = await response.json() as any;
|
|
73
|
+
const content = data.choices?.[0]?.message?.content || '';
|
|
74
|
+
|
|
75
|
+
return { content, model };
|
|
76
|
+
}
|
|
77
|
+
|
|
78
|
+
async function chatOllama(messages: Message[], model: string): Promise<ChatResponse> {
|
|
79
|
+
const config = getConfig();
|
|
80
|
+
|
|
81
|
+
const response = await fetch(`${config.ollamaHost}/api/chat`, {
|
|
82
|
+
method: 'POST',
|
|
83
|
+
headers: { 'Content-Type': 'application/json' },
|
|
84
|
+
body: JSON.stringify({
|
|
85
|
+
model,
|
|
86
|
+
messages,
|
|
87
|
+
stream: false,
|
|
88
|
+
}),
|
|
89
|
+
});
|
|
90
|
+
|
|
91
|
+
if (!response.ok) {
|
|
92
|
+
const error = await response.text();
|
|
93
|
+
throw new Error(`Ollama error: ${response.status} - ${error}`);
|
|
94
|
+
}
|
|
95
|
+
|
|
96
|
+
const data = await response.json() as any;
|
|
97
|
+
const content = data.message?.content || '';
|
|
98
|
+
|
|
99
|
+
return { content, model };
|
|
100
|
+
}
|
|
101
|
+
|
|
102
|
+
async function chatAnthropic(messages: Message[], model: string): Promise<ChatResponse> {
|
|
103
|
+
const apiKey = getApiKey('anthropic');
|
|
104
|
+
if (!apiKey) {
|
|
105
|
+
throw new Error('Anthropic API key not configured. Run: cnapse auth anthropic <key>');
|
|
106
|
+
}
|
|
107
|
+
|
|
108
|
+
// Extract system message
|
|
109
|
+
const systemMsg = messages.find(m => m.role === 'system');
|
|
110
|
+
const chatMessages = messages.filter(m => m.role !== 'system');
|
|
111
|
+
|
|
112
|
+
const response = await fetch('https://api.anthropic.com/v1/messages', {
|
|
113
|
+
method: 'POST',
|
|
114
|
+
headers: {
|
|
115
|
+
'x-api-key': apiKey,
|
|
116
|
+
'Content-Type': 'application/json',
|
|
117
|
+
'anthropic-version': '2023-06-01',
|
|
118
|
+
},
|
|
119
|
+
body: JSON.stringify({
|
|
120
|
+
model,
|
|
121
|
+
max_tokens: 2048,
|
|
122
|
+
system: systemMsg?.content || '',
|
|
123
|
+
messages: chatMessages,
|
|
124
|
+
}),
|
|
125
|
+
});
|
|
126
|
+
|
|
127
|
+
if (!response.ok) {
|
|
128
|
+
const error = await response.text();
|
|
129
|
+
throw new Error(`Anthropic error: ${response.status} - ${error}`);
|
|
130
|
+
}
|
|
131
|
+
|
|
132
|
+
const data = await response.json() as any;
|
|
133
|
+
const content = data.content?.[0]?.text || '';
|
|
134
|
+
|
|
135
|
+
return { content, model };
|
|
136
|
+
}
|
|
137
|
+
|
|
138
|
+
async function chatOpenAI(messages: Message[], model: string): Promise<ChatResponse> {
|
|
139
|
+
const apiKey = getApiKey('openai');
|
|
140
|
+
if (!apiKey) {
|
|
141
|
+
throw new Error('OpenAI API key not configured. Run: cnapse auth openai <key>');
|
|
142
|
+
}
|
|
143
|
+
|
|
144
|
+
const response = await fetch('https://api.openai.com/v1/chat/completions', {
|
|
145
|
+
method: 'POST',
|
|
146
|
+
headers: {
|
|
147
|
+
'Authorization': `Bearer ${apiKey}`,
|
|
148
|
+
'Content-Type': 'application/json',
|
|
149
|
+
},
|
|
150
|
+
body: JSON.stringify({
|
|
151
|
+
model,
|
|
152
|
+
messages,
|
|
153
|
+
max_tokens: 2048,
|
|
154
|
+
temperature: 0.7,
|
|
155
|
+
}),
|
|
156
|
+
});
|
|
157
|
+
|
|
158
|
+
if (!response.ok) {
|
|
159
|
+
const error = await response.text();
|
|
160
|
+
throw new Error(`OpenAI error: ${response.status} - ${error}`);
|
|
161
|
+
}
|
|
162
|
+
|
|
163
|
+
const data = await response.json() as any;
|
|
164
|
+
const content = data.choices?.[0]?.message?.content || '';
|
|
165
|
+
|
|
166
|
+
return { content, model };
|
|
167
|
+
}
|
|
168
|
+
|
|
169
|
+
export async function testConnection(): Promise<boolean> {
|
|
170
|
+
try {
|
|
171
|
+
await chat([{ role: 'user', content: 'hi' }]);
|
|
172
|
+
return true;
|
|
173
|
+
} catch {
|
|
174
|
+
return false;
|
|
175
|
+
}
|
|
176
|
+
}
|
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
import Conf from 'conf';

// Shape of the persisted settings managed by the `conf` store.
interface ConfigSchema {
  provider: 'openrouter' | 'ollama' | 'anthropic' | 'openai';
  model: string;
  // Per-provider API keys; each is absent until set via setApiKey().
  apiKeys: {
    openrouter?: string;
    anthropic?: string;
    openai?: string;
  };
  ollamaHost: string;
  // Attribution values sent as headers with OpenRouter requests.
  openrouter: {
    siteUrl: string;
    appName: string;
  };
}

// Module-level singleton store. Defaults favor a key-free local setup
// (ollama on localhost with a small model).
const config = new Conf<ConfigSchema>({
  projectName: 'cnapse',
  defaults: {
    provider: 'ollama',
    model: 'qwen2.5:0.5b',
    apiKeys: {},
    ollamaHost: 'http://localhost:11434',
    openrouter: {
      siteUrl: 'https://github.com/projectservan8n/C-napse',
      appName: 'C-napse',
    },
  },
});
|
|
31
|
+
|
|
32
|
+
export function getConfig() {
|
|
33
|
+
return {
|
|
34
|
+
provider: config.get('provider'),
|
|
35
|
+
model: config.get('model'),
|
|
36
|
+
apiKeys: config.get('apiKeys'),
|
|
37
|
+
ollamaHost: config.get('ollamaHost'),
|
|
38
|
+
openrouter: config.get('openrouter'),
|
|
39
|
+
};
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
export function setProvider(provider: ConfigSchema['provider']) {
|
|
43
|
+
config.set('provider', provider);
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
export function setModel(model: string) {
|
|
47
|
+
config.set('model', model);
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
export function setApiKey(provider: keyof ConfigSchema['apiKeys'], key: string) {
|
|
51
|
+
const keys = config.get('apiKeys');
|
|
52
|
+
keys[provider] = key;
|
|
53
|
+
config.set('apiKeys', keys);
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
export function getApiKey(provider: keyof ConfigSchema['apiKeys']): string | undefined {
|
|
57
|
+
return config.get('apiKeys')[provider];
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
export { config };
|
package/tsconfig.json
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
{
|
|
2
|
+
"compilerOptions": {
|
|
3
|
+
"target": "ES2022",
|
|
4
|
+
"module": "ESNext",
|
|
5
|
+
"moduleResolution": "bundler",
|
|
6
|
+
"esModuleInterop": true,
|
|
7
|
+
"strict": true,
|
|
8
|
+
"jsx": "react-jsx",
|
|
9
|
+
"outDir": "dist",
|
|
10
|
+
"rootDir": "src",
|
|
11
|
+
"declaration": true,
|
|
12
|
+
"skipLibCheck": true,
|
|
13
|
+
"forceConsistentCasingInFileNames": true,
|
|
14
|
+
"resolveJsonModule": true
|
|
15
|
+
},
|
|
16
|
+
"include": ["src/**/*"],
|
|
17
|
+
"exclude": ["node_modules", "dist"]
|
|
18
|
+
}
|