node-red-contrib-linux-copilot 1.0.4 → 1.0.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/linux-agent.js +63 -0
- package/linux-copilot.js +65 -55
- package/package.json +5 -19
- package/README.md +0 -21
package/linux-agent.js
ADDED
@@ -0,0 +1,63 @@
+module.exports = function (RED) {
+    const { exec } = require("child_process");
+    const axios = require("axios");
+
+    function GenericAgent(config, personality, modelName) {
+        RED.nodes.createNode(this, config);
+        const node = this;
+
+        node.on("input", async function (msg, send, done) {
+            send = send || function () { node.send.apply(node, arguments); };
+            const targetChatId = config.chatId || "1457427557";
+            let history = node.context().flow.get("chat_history") || [];
+
+            if (msg.fromTerminal) {
+                history.push({ role: "user", content: "[TERM]: " + msg.payload });
+            } else {
+                history = [{ role: "user", content: "MISSION: " + (msg.payload.content || msg.payload) }];
+            }
+
+            node.status({ fill: "blue", shape: "dot", text: "Alpha..." });
+
+            try {
+                const response = await axios.post("https://api.deepseek.com/chat/completions", {
+                    model: modelName,
+                    messages: [
+                        { role: "system", content: personality + " Réponds en JSON: {\"speech\": \"...\", \"cmd\": \"...\"}. Si fini: 'FIN DE MISSION' et cmd:'none'." },
+                        ...history
+                    ],
+                    response_format: { type: "json_object" }
+                }, { headers: { "Authorization": "Bearer " + config.apiKey } });
+
+                const parsed = JSON.parse(response.data.choices[0].message.content);
+
+                if (parsed.cmd === "none" || parsed.speech.includes("FIN DE MISSION")) {
+                    node.status({ fill: "green", shape: "dot", text: "Repos" });
+                    send([{ payload: { chatId: targetChatId, type: 'message', content: "✅ " + parsed.speech } }, null]);
+                    node.context().flow.set("chat_history", []);
+                    return done();
+                }
+
+                history.push({ role: "assistant", content: JSON.stringify(parsed) });
+                node.context().flow.set("chat_history", history);
+
+                send([{ payload: { chatId: targetChatId, type: 'message', content: "🧠 " + parsed.speech } }, null]);
+
+                exec(parsed.cmd, { shell: "/bin/bash" }, (error, stdout, stderr) => {
+                    let res = (stdout || stderr || "OK").trim();
+                    send([{ payload: { chatId: targetChatId, type: 'message', content: "📟 <pre>" + res + "</pre>", options: { parse_mode: "HTML" } } }, { payload: res, fromTerminal: true }]);
+                });
+                done();
+            } catch (e) {
+                node.status({ fill: "red", shape: "ring", text: "Erreur" });
+                node.context().flow.set("chat_history", []);
+                done();
+            }
+        });
+    }
+
+    RED.nodes.registerType("linux-polyvalent", function(config) { GenericAgent.call(this, config, "Agent Polyvalent", "deepseek-chat"); });
+    RED.nodes.registerType("linux-security", function(config) { GenericAgent.call(this, config, "Expert Sécurité", "deepseek-chat"); });
+    RED.nodes.registerType("linux-hacker", function(config) { GenericAgent.call(this, config, "White Hat", "deepseek-chat"); });
+    RED.nodes.registerType("linux-creator", function(config) { GenericAgent.call(this, config, "Dev App", "deepseek-chat"); });
+};
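
For orientation, the added agent enforces a simple contract: the model must answer `{"speech": "...", "cmd": "..."}`, the command is run through `/bin/bash`, and its output is pushed back into the chat history as a `[TERM]:` turn until the model returns `cmd: "none"` or a speech containing 'FIN DE MISSION'. Below is a minimal standalone sketch of one iteration of that loop; the hard-coded reply stands in for the DeepSeek response and no API call is made.

```js
// Hypothetical single iteration of the linux-agent.js loop, with the model
// reply stubbed out. In the real node, the exec result is re-emitted on
// output 2 with fromTerminal: true and becomes the next "[TERM]: ..." turn.
const { exec } = require("child_process");

const modelReply = '{"speech": "Checking disk usage", "cmd": "df -h"}'; // stand-in for the API response
const parsed = JSON.parse(modelReply);

if (parsed.cmd === "none" || parsed.speech.includes("FIN DE MISSION")) {
  console.log("✅ " + parsed.speech);          // mission over: the flow history would be cleared
} else {
  exec(parsed.cmd, { shell: "/bin/bash" }, (error, stdout, stderr) => {
    const res = (stdout || stderr || "OK").trim();
    console.log("[TERM]: " + res);             // fed back to the model on the next turn
  });
}
```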
package/linux-copilot.js
CHANGED
@@ -6,39 +6,58 @@ module.exports = function(RED) {
         RED.nodes.createNode(this, config);
         const node = this;
 
-        const omniPrompt = `
-
-
-2. ANALYSE : Analyse brièvement les résultats techniques.
-3. ACTION : Propose la commande suivante pour continuer le diagnostic.
-4. FORMAT JSON : {"speech": "ton explication dans la langue de l'utilisateur", "cmd": "commande linux ou none"}`;
+        const omniPrompt = `INSTRUCTIONS: Senior Linux SRE Expert. Respond in the user's language.
+Return ONLY JSON: {"speech": "explanation", "cmd": "command or none"}.
+STRICT: No interactive commands. Use 'top -b -n 1', 'ps aux'.\n\n`;
 
-        const
-
-
-
+        const parseAIResponse = (raw) => {
+            try {
+                if (!raw) return null;
+                let clean = raw.replace(/<think>[\s\S]*?<\/think>/gi, '')
+                               .replace(/```json/gi, '')
+                               .replace(/```/gi, '').trim();
+                const match = clean.match(/\{[\s\S]*\}/);
+                return JSON.parse(match ? match[0] : clean);
+            } catch (e) { return null; }
+        };
 
         const engines = {
             gemini: async (history, key) => {
                 const res = await axios.post(`https://generativelanguage.googleapis.com/v1beta/models/gemini-2.0-flash-exp:generateContent?key=${key.trim()}`, {
-                    contents: history.map(h => ({ role: h.role === "assistant" ? "model" :
+                    contents: history.map(h => ({ role: h.role === "assistant" ? "model" : "user", parts: [{ text: h.content }] })),
                     system_instruction: { parts: [{ text: omniPrompt }] },
-                    generationConfig: { responseMimeType: "application/json" }
-                }, { timeout:
+                    generationConfig: { responseMimeType: "application/json", temperature: 0.1 }
+                }, { timeout: 12000 });
                 return JSON.parse(res.data.candidates[0].content.parts[0].text);
             },
             openrouter: async (history, key) => {
+                const messages = history.map((h, i) => ({
+                    role: h.role === "assistant" ? "assistant" : "user",
+                    content: (i === 0 ? omniPrompt : "") + h.content
+                }));
                 const res = await axios.post('https://openrouter.ai/api/v1/chat/completions', {
-                    model: "
-                    messages:
-
-
-
+                    model: "google/gemma-3-12b-it:free",
+                    messages: messages,
+                    temperature: 0.1
+                }, {
+                    headers: {
+                        'Authorization': `Bearer ${key.trim()}`,
+                        'Content-Type': 'application/json',
+                        'HTTP-Referer': 'http://localhost:1880',
+                        'X-Title': 'Node-RED Linux Copilot'
+                    },
+                    timeout: 25000
+                });
+                return parseAIResponse(res.data.choices[0].message.content);
             },
             deepseek: async (history, key) => {
+                const messages = history.map((h, i) => ({
+                    role: h.role === "assistant" ? "assistant" : "user",
+                    content: (i === 0 ? omniPrompt : "") + h.content
+                }));
                 const res = await axios.post('https://api.deepseek.com/chat/completions', {
-                    model: "deepseek-chat",
-                    messages:
+                    model: "deepseek-chat",
+                    messages: messages,
                     response_format: { type: 'json_object' }
                 }, { headers: { 'Authorization': `Bearer ${key.trim()}` }, timeout: 20000 });
                 return JSON.parse(res.data.choices[0].message.content);
@@ -46,62 +65,53 @@ module.exports = function(RED) {
         };
 
         node.on('input', async function(msg) {
-            const chatId = msg.payload.chatId || config.chatId
+            const chatId = msg.payload.chatId || config.chatId;
             let userText = msg.payload.content || (typeof msg.payload === 'string' ? msg.payload : "");
             let loopCount = msg.loopCount || 0;
 
-            if (loopCount >
-
-
-            if (userText.toLowerCase() === "reset") {
-                node.context().set('history', []);
-                return node.send({ payload: { chatId, type: "message", content: "♻️ Historique effacé." } });
+            if (loopCount > 3 || userText.toLowerCase() === "reset") {
+                if (userText.toLowerCase() === "reset") node.context().set('history', []);
+                return;
             }
 
-
+            let history = node.context().get('history') || [];
             history.push({ role: "user", content: userText });
 
-
-                { id: '
-                { id: 'openrouter',
-                { id: '
-            ].filter(q => q.k).sort((a, b) => a.p - b.p);
-
-            let aiData = null;
-            let engineUsed = "";
+            const queue = [
+                { id: 'gemini', k: node.credentials.geminiKey, n: "Gemini", p: parseInt(config.prioGEM) || 1 },
+                { id: 'openrouter', k: node.credentials.openrouterKey, n: "Gemma 3", p: parseInt(config.prioOR) || 2 },
+                { id: 'deepseek', k: node.credentials.deepseekKey, n: "DeepSeek", p: parseInt(config.prioDS) || 3 }
+            ].filter(q => q.k && q.k.length > 10).sort((a, b) => a.p - b.p);
 
+            let aiData = null; let engineUsed = "";
             for (let e of queue) {
                 try {
-                    node.status({fill:"yellow", text: `
+                    node.status({fill:"yellow", text: `Chef: ${e.n}...`});
                     aiData = await engines[e.id](history, e.k);
-                    if (aiData
-                } catch (err) { node.warn(
+                    if (aiData) { engineUsed = e.n; break; }
+                } catch (err) { node.warn(`Fail ${e.n}: ${err.message}`); }
             }
 
-            if (!aiData) return node.status({fill:"red", text:"
+            if (!aiData) return node.status({fill:"red", text:"Cluster Error"});
 
-            node.send({ payload: { chatId, type: "message", content: `🤖 <b>${engineUsed}</b
+            node.send({ payload: { chatId, type: "message", content: `🤖 <b>${engineUsed}</b>: ${aiData.speech}`, options: { parse_mode: "HTML" } } });
 
             let cmd = (aiData.cmd || "").trim();
-
-
-
-
-            exec(cmd, { timeout: 10000 }, (err, stdout, stderr) => {
-                let res = (stdout || stderr || "OK").substring(0, 800);
-                node.send({ payload: { chatId, type: "message", content: `📟 <b>Terminal (${cmd})</b> :\n<pre>${res}</pre>`, options: { parse_mode: "HTML" } } });
+            if (cmd && cmd.toLowerCase() !== "none") {
+                exec(cmd, { timeout: 5000, killSignal: 'SIGKILL' }, (err, stdout, stderr) => {
+                    let out = (stdout || stderr || "OK").substring(0, 800);
+                    node.send({ payload: { chatId, type: "message", content: `📟 <code>${cmd}</code>\n<pre>${out}</pre>`, options: { parse_mode: "HTML" } } });
                     setTimeout(() => {
-                    node.emit("input", { payload: { chatId, content: `
-                },
+                        node.emit("input", { payload: { chatId, content: `OUTPUT:\n${out}` }, loopCount: loopCount + 1 });
+                    }, 2000);
                 });
             }
-
-            history.push({ role: "assistant", content: aiData.speech || "Action" });
+            history.push({ role: "assistant", content: aiData.speech });
            node.context().set('history', history.slice(-10));
-            node.status({fill:"green", text:`
+            node.status({fill:"green", text:`Dernier: ${engineUsed}`});
         });
     }
     RED.nodes.registerType('linux-copilot', LinuxCopilotNode, {
-        credentials: { geminiKey: {type:"password"},
+        credentials: { geminiKey: {type:"password"}, deepseekKey: {type:"password"}, openrouterKey: {type:"password"} }
     });
-}
+};
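
The most notable addition above is `parseAIResponse`, which makes the OpenRouter path tolerant of models that wrap their JSON in `<think>` blocks or markdown fences. Here is a standalone copy of the helper with a usage example; the sample reply below is made up for illustration.

````js
// Copy of the parseAIResponse helper added in 1.0.7: strip <think> blocks and
// markdown fences, then parse the first {...} object found in the reply.
const parseAIResponse = (raw) => {
  try {
    if (!raw) return null;
    let clean = raw.replace(/<think>[\s\S]*?<\/think>/gi, '')
                   .replace(/```json/gi, '')
                   .replace(/```/gi, '').trim();
    const match = clean.match(/\{[\s\S]*\}/);
    return JSON.parse(match ? match[0] : clean);
  } catch (e) { return null; }
};

// Example: a reply that reasons out loud and fences its JSON still parses.
const raw = '<think>load looks fine</think>\n```json\n{"speech": "Load is normal", "cmd": "none"}\n```';
console.log(parseAIResponse(raw)); // -> { speech: 'Load is normal', cmd: 'none' }
````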
package/package.json
CHANGED
@@ -1,26 +1,12 @@
 {
   "name": "node-red-contrib-linux-copilot",
-  "version": "1.0.
-  "description": "
-  "main": "linux-
-  "scripts": {
-    "test": "echo \"Error: no test specified\" && exit 1"
-  },
-  "keywords": [
-    "node-red",
-    "linux",
-    "sre",
-    "copilot",
-    "deepseek",
-    "gemini",
-    "bash"
-  ],
+  "version": "1.0.7",
+  "description": "AI Linux Copilot for Node-RED",
+  "main": "linux-agent.js",
   "node-red": {
     "nodes": {
-      "linux-copilot": "linux-
+      "linux-copilot": "linux-agent.js"
     }
   },
-  "dependencies": {
-    "axios": "^1.6.0"
-  }
+  "dependencies": {}
 }
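
For reference, the `node-red.nodes` map is how Node-RED discovers the node set: each entry points at a file exporting a `function (RED)` that registers its node types, which is why `main` and `nodes` now point at `linux-agent.js`. A rough sketch of that loading step with a stubbed runtime follows; it is illustrative only, and it assumes `axios` is still installed, since `linux-agent.js` requires it at load time even though `dependencies` is now empty.

```js
// Simplified sketch of how the manifest's "node-red".nodes entries are loaded.
// The RED object here is a minimal stub, not the real Node-RED runtime.
// Assumes axios is installed (linux-agent.js requires it when loaded).
const path = require("path");
const manifest = require("./package.json");   // run from the package root

const RED = {
  nodes: {
    createNode() {},                           // no-op: nothing is instantiated here
    registerType(name) { console.log("registered node type:", name); }
  }
};

for (const file of Object.values(manifest["node-red"].nodes)) {
  require(path.resolve(file))(RED);            // loads linux-agent.js and registers its types
}
```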
package/README.md
DELETED
@@ -1,21 +0,0 @@
-cat > ~/.node-red/node_modules/node-red-contrib-linux-copilot/README.md << 'EOF'
-# node-red-contrib-linux-copilot
-
-An advanced, autonomous SRE (Site Reliability Engineering) agent for Node-RED. This node doesn't just talk; it thinks, executes Linux commands, analyzes the output, and follows up until the diagnostic is complete.
-
-## 🚀 Key Features
-
-* **Multi-Engine Failover**: Supports **DeepSeek**, **OpenRouter (Llama 3.3)**, and **Google Gemini 2.0**.
-* **Autonomous Diagnostic Loop**: If the AI suggests a command, the node executes it, reads the output, and sends it back to the AI for further analysis (up to 5 iterations).
-* **Auto-Language Detection**: Speak to it in French, English, Spanish, or any language; it will detect and respond in kind.
-* **SRE Expertise**: Specialized in system health, performance bottleneck identification, and log analysis.
-* **Secure by Design**: Restricted to a safe list of audit and monitoring commands (`df`, `top`, `free`, `systemctl`, etc.).
-
----
-
-## 🛠 Installation
-
-Run the following command in your Node-RED user directory (typically `~/.node-red`):
-
-```bash
-npm install node-red-contrib-linux-copilot