@rainfall-devkit/sdk 0.1.7 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +51 -0
- package/dist/chunk-7MRE4ZVI.mjs +662 -0
- package/dist/chunk-AQFC7YAX.mjs +27 -0
- package/dist/chunk-RA3HDYF4.mjs +778 -0
- package/dist/chunk-V5QWJVLC.mjs +662 -0
- package/dist/chunk-VDPKDC3R.mjs +869 -0
- package/dist/chunk-WOITG5TG.mjs +84 -0
- package/dist/cli/index.js +2756 -607
- package/dist/cli/index.mjs +404 -46
- package/dist/config-DDTQQBN7.mjs +14 -0
- package/dist/config-ZKNHII2A.mjs +8 -0
- package/dist/daemon/index.d.mts +136 -0
- package/dist/daemon/index.d.ts +136 -0
- package/dist/daemon/index.js +2473 -0
- package/dist/daemon/index.mjs +836 -0
- package/dist/errors-BMPseAnM.d.mts +47 -0
- package/dist/errors-BMPseAnM.d.ts +47 -0
- package/dist/errors-CZdRoYyw.d.ts +332 -0
- package/dist/errors-Chjq1Mev.d.mts +332 -0
- package/dist/index.d.mts +3 -1
- package/dist/index.d.ts +3 -1
- package/dist/index.js +762 -5
- package/dist/index.mjs +14 -2
- package/dist/listeners-BbYIaNCs.d.mts +372 -0
- package/dist/listeners-CP2A9J_2.d.ts +372 -0
- package/dist/listeners-CTRSofnm.d.mts +372 -0
- package/dist/listeners-CYI-YwIF.d.mts +372 -0
- package/dist/listeners-QJeEtLbV.d.ts +372 -0
- package/dist/listeners-hp0Ib2Ox.d.ts +372 -0
- package/dist/mcp.d.mts +3 -2
- package/dist/mcp.d.ts +3 -2
- package/dist/mcp.js +95 -3
- package/dist/mcp.mjs +1 -1
- package/dist/sdk-CJ9g5lFo.d.mts +772 -0
- package/dist/sdk-CJ9g5lFo.d.ts +772 -0
- package/dist/sdk-DD1OeGRJ.d.mts +871 -0
- package/dist/sdk-DD1OeGRJ.d.ts +871 -0
- package/dist/types-GnRAfH-h.d.mts +489 -0
- package/dist/types-GnRAfH-h.d.ts +489 -0
- package/package.json +14 -5
|
@@ -0,0 +1,836 @@
|
|
|
1
|
+
import {
|
|
2
|
+
RainfallDaemonContext,
|
|
3
|
+
RainfallListenerRegistry,
|
|
4
|
+
RainfallNetworkedExecutor
|
|
5
|
+
} from "../chunk-7MRE4ZVI.mjs";
|
|
6
|
+
import {
|
|
7
|
+
Rainfall
|
|
8
|
+
} from "../chunk-VDPKDC3R.mjs";
|
|
9
|
+
|
|
10
|
+
// src/daemon/index.ts
|
|
11
|
+
import { WebSocketServer } from "ws";
|
|
12
|
+
import express from "express";
|
|
13
|
+
// RainfallDaemon bridges the Rainfall cloud SDK to two local surfaces:
//   1. a WebSocket server speaking MCP JSON-RPC ("initialize", "tools/list",
//      "tools/call", "ping"), and
//   2. an Express HTTP server exposing an OpenAI-compatible chat-completions
//      proxy (with a bounded local tool-execution loop) plus /health,
//      /status and /v1/queue endpoints.
var RainfallDaemon = class {
  wss;               // WebSocketServer for MCP clients; undefined until start()
  openaiApp;         // Express app serving the OpenAI-compatible proxy
  rainfall;          // Rainfall SDK client; set by initializeRainfall()
  port;              // WebSocket (MCP) listen port
  openaiPort;        // HTTP proxy listen port
  rainfallConfig;    // optional SDK config supplied via the constructor
  tools = [];        // tool descriptors fetched from the backend by loadTools()
  toolSchemas = /* @__PURE__ */ new Map(); // toolId -> schema cache
  clients = /* @__PURE__ */ new Set();     // currently-connected MCP sockets
  debug;             // when true, log() forwards to console.log
  // New services
  networkedExecutor; // edge-node registration + distributed job queue
  context;           // local memory / message + execution history
  listeners;         // passive trigger registry (file watch, cron, ...)
  /**
   * @param {object} [config] - supports { port, openaiPort, rainfallConfig, debug }.
   *   Defaults: MCP WebSocket on 8765, OpenAI proxy on 8787, debug off.
   */
  constructor(config = {}) {
    this.port = config.port || 8765;
    this.openaiPort = config.openaiPort || 8787;
    this.rainfallConfig = config.rainfallConfig;
    this.debug = config.debug || false;
    this.openaiApp = express();
    this.openaiApp.use(express.json());
  }
  /**
   * Boot sequence: SDK -> context -> networked executor -> listener registry,
   * then load tools and start both servers. Also installs SIGINT/SIGTERM
   * handlers that invoke stop().
   * @throws {Error} when the Rainfall SDK cannot be initialized.
   */
  async start() {
    this.log("\u{1F327}\uFE0F Rainfall Daemon starting...");
    await this.initializeRainfall();
    if (!this.rainfall) {
      throw new Error("Failed to initialize Rainfall SDK");
    }
    this.context = new RainfallDaemonContext(this.rainfall, {
      maxLocalMemories: 1e3,
      maxMessageHistory: 100,
      ...this.rainfallConfig
    });
    await this.context.initialize();
    this.networkedExecutor = new RainfallNetworkedExecutor(this.rainfall, {
      wsPort: this.port,
      httpPort: this.openaiPort,
      hostname: process.env.HOSTNAME || "local-daemon",
      capabilities: {
        localExec: true,
        fileWatch: true,
        passiveListen: true
      }
    });
    await this.networkedExecutor.registerEdgeNode();
    // Log completion/failure of distributed jobs as results come back.
    await this.networkedExecutor.subscribeToResults((jobId, result, error) => {
      this.log(`\u{1F4EC} Job ${jobId} ${error ? "failed" : "completed"}`, error || result);
    });
    this.listeners = new RainfallListenerRegistry(
      this.rainfall,
      this.context,
      this.networkedExecutor
    );
    await this.loadTools();
    await this.startWebSocketServer();
    await this.startOpenAIProxy();
    console.log(`\u{1F680} Rainfall daemon running`);
    console.log(`   WebSocket (MCP): ws://localhost:${this.port}`);
    console.log(`   OpenAI API: http://localhost:${this.openaiPort}/v1/chat/completions`);
    console.log(`   Health Check: http://localhost:${this.openaiPort}/health`);
    console.log(`   Edge Node ID: ${this.networkedExecutor.getEdgeNodeId() || "local"}`);
    console.log(`   Tools loaded: ${this.tools.length}`);
    console.log(`   Press Ctrl+C to stop`);
    process.on("SIGINT", () => this.stop());
    process.on("SIGTERM", () => this.stop());
  }
  /**
   * Gracefully tear down: listeners, edge-node registration, every MCP
   * client socket, then the WebSocket server itself.
   */
  async stop() {
    this.log("\u{1F6D1} Shutting down Rainfall daemon...");
    if (this.listeners) {
      await this.listeners.stopAll();
    }
    if (this.networkedExecutor) {
      await this.networkedExecutor.unregisterEdgeNode();
    }
    for (const client of this.clients) {
      client.close();
    }
    this.clients.clear();
    if (this.wss) {
      this.wss.close();
      this.wss = void 0;
    }
    console.log("\u{1F44B} Rainfall daemon stopped");
  }
  /**
   * Get the networked executor for distributed job management
   */
  getNetworkedExecutor() {
    return this.networkedExecutor;
  }
  /**
   * Get the context for memory/session management
   */
  getContext() {
    return this.context;
  }
  /**
   * Get the listener registry for passive triggers
   */
  getListenerRegistry() {
    return this.listeners;
  }
  /**
   * Create the Rainfall SDK client, preferring an explicit constructor config
   * and falling back to the on-disk CLI config.
   * @throws {Error} when no API key can be found in either place.
   */
  async initializeRainfall() {
    if (this.rainfallConfig?.apiKey) {
      this.rainfall = new Rainfall(this.rainfallConfig);
    } else {
      const { loadConfig } = await import("../config-DDTQQBN7.mjs");
      const config = loadConfig();
      if (config.apiKey) {
        this.rainfall = new Rainfall({
          apiKey: config.apiKey,
          baseUrl: config.baseUrl
        });
      } else {
        throw new Error("No API key configured. Run: rainfall auth login <api-key>");
      }
    }
  }
  /** Fetch the tool catalog from the backend; failures degrade to an empty list. */
  async loadTools() {
    if (!this.rainfall) return;
    try {
      this.tools = await this.rainfall.listTools();
      this.log(`\u{1F4E6} Loaded ${this.tools.length} tools`);
    } catch (error) {
      console.warn("\u26A0\uFE0F Failed to load tools:", error instanceof Error ? error.message : error);
      this.tools = [];
    }
  }
  /**
   * Fetch (and memoize) a tool's schema.
   * Returns null when the SDK is missing or the lookup fails.
   */
  async getToolSchema(toolId) {
    if (this.toolSchemas.has(toolId)) {
      return this.toolSchemas.get(toolId);
    }
    if (!this.rainfall) return null;
    try {
      const schema = await this.rainfall.getToolSchema(toolId);
      this.toolSchemas.set(toolId, schema);
      return schema;
    } catch {
      return null;
    }
  }
  /** Start the MCP WebSocket server and wire per-connection message handling. */
  async startWebSocketServer() {
    this.wss = new WebSocketServer({ port: this.port });
    this.wss.on("connection", (ws) => {
      this.log("\u{1F7E2} MCP client connected");
      this.clients.add(ws);
      ws.on("message", async (data) => {
        try {
          const message = JSON.parse(data.toString());
          const response = await this.handleMCPMessage(message);
          ws.send(JSON.stringify(response));
        } catch (error) {
          // JSON-RPC parse error (-32700); the request id is unknown here.
          const errorResponse = {
            jsonrpc: "2.0",
            id: void 0,
            error: {
              code: -32700,
              message: error instanceof Error ? error.message : "Parse error"
            }
          };
          ws.send(JSON.stringify(errorResponse));
        }
      });
      ws.on("close", () => {
        this.log("\u{1F534} MCP client disconnected");
        this.clients.delete(ws);
      });
      ws.on("error", (error) => {
        console.error("WebSocket error:", error);
        this.clients.delete(ws);
      });
    });
  }
  /**
   * Dispatch one MCP JSON-RPC request. Supported methods: initialize,
   * tools/list, tools/call, ping; anything else yields -32601.
   * @returns {object} a JSON-RPC 2.0 response object.
   */
  async handleMCPMessage(message) {
    const { id, method, params } = message;
    switch (method) {
      case "initialize":
        return {
          jsonrpc: "2.0",
          id,
          result: {
            protocolVersion: "2024-11-05",
            capabilities: {
              tools: { listChanged: true }
            },
            serverInfo: {
              name: "rainfall-daemon",
              version: "0.1.0"
            }
          }
        };
      case "tools/list":
        return {
          jsonrpc: "2.0",
          id,
          result: {
            tools: await this.getMCPTools()
          }
        };
      case "tools/call": {
        const toolName = params?.name;
        const toolParams = params?.arguments;
        try {
          const startTime = Date.now();
          const result = await this.executeTool(toolName, toolParams);
          const duration = Date.now() - startTime;
          if (this.context) {
            this.context.recordExecution(toolName, toolParams || {}, result, { duration });
          }
          return {
            jsonrpc: "2.0",
            id,
            result: {
              content: [
                {
                  type: "text",
                  text: typeof result === "string" ? result : JSON.stringify(result, null, 2)
                }
              ]
            }
          };
        } catch (error) {
          const errorMessage = error instanceof Error ? error.message : "Tool execution failed";
          if (this.context) {
            this.context.recordExecution(toolName, toolParams || {}, null, {
              error: errorMessage,
              duration: 0
            });
          }
          return {
            jsonrpc: "2.0",
            id,
            error: {
              code: -32603,
              message: errorMessage
            }
          };
        }
      }
      case "ping":
        return {
          jsonrpc: "2.0",
          id,
          result: {}
        };
      default:
        return {
          jsonrpc: "2.0",
          id,
          error: {
            code: -32601,
            message: `Method not found: ${method}`
          }
        };
    }
  }
  /**
   * Convert the cached tool catalog into MCP tool descriptors.
   * Tools whose schema cannot be fetched are skipped.
   */
  async getMCPTools() {
    const mcpTools = [];
    for (const tool of this.tools) {
      const schema = await this.getToolSchema(tool.id);
      if (schema) {
        const toolSchema = schema;
        mcpTools.push({
          name: tool.id,
          description: toolSchema.description || tool.description,
          inputSchema: toolSchema.parameters || { type: "object", properties: {} }
        });
      }
    }
    return mcpTools;
  }
  /**
   * Execute a tool through the Rainfall SDK.
   * @throws {Error} when the SDK has not been initialized.
   */
  async executeTool(toolId, params) {
    if (!this.rainfall) {
      throw new Error("Rainfall SDK not initialized");
    }
    return this.rainfall.executeTool(toolId, params);
  }
  /**
   * Register the OpenAI-compatible HTTP routes (/v1/models,
   * /v1/chat/completions, /health, /status, /v1/queue) and start listening.
   * The chat-completions route runs a bounded tool-execution loop before
   * returning the final assistant message.
   */
  async startOpenAIProxy() {
    this.openaiApp.get("/v1/models", async (_req, res) => {
      try {
        if (this.rainfall) {
          const models = await this.rainfall.listModels();
          res.json({
            object: "list",
            data: models.map((m) => ({
              id: m.id,
              object: "model",
              created: Math.floor(Date.now() / 1e3),
              owned_by: "rainfall"
            }))
          });
        } else {
          // Static fallback list when the SDK is unavailable.
          res.json({
            object: "list",
            data: [
              { id: "llama-3.3-70b-versatile", object: "model", created: Date.now(), owned_by: "groq" },
              { id: "gpt-4o", object: "model", created: Date.now(), owned_by: "openai" },
              { id: "claude-3-5-sonnet", object: "model", created: Date.now(), owned_by: "anthropic" },
              { id: "gemini-2.0-flash-exp", object: "model", created: Date.now(), owned_by: "gemini" }
            ]
          });
        }
      } catch (error) {
        res.status(500).json({ error: "Failed to fetch models" });
      }
    });
    this.openaiApp.post("/v1/chat/completions", async (req, res) => {
      const body = req.body;
      if (!body.messages || !Array.isArray(body.messages)) {
        res.status(400).json({
          error: {
            message: "Missing required field: messages",
            type: "invalid_request_error"
          }
        });
        return;
      }
      if (!this.rainfall) {
        res.status(503).json({
          error: {
            message: "Rainfall SDK not initialized",
            type: "service_unavailable"
          }
        });
        return;
      }
      try {
        const me = await this.rainfall.getMe();
        const subscriberId = me.id;
        const localToolMap = await this.buildLocalToolMap();
        let allTools = [];
        if (body.tools && body.tools.length > 0) {
          allTools = body.tools;
        } else if (body.tool_choice) {
          const openaiTools = await this.getOpenAITools();
          allTools = openaiTools;
        }
        let messages = [...body.messages];
        // Bounded agent loop: keep calling the LLM until it stops
        // requesting tools (or we hit the iteration cap).
        const maxToolIterations = 10;
        let toolIterations = 0;
        while (toolIterations < maxToolIterations) {
          toolIterations++;
          const llmResponse = await this.callLLM({
            subscriberId,
            model: body.model,
            messages,
            tools: allTools.length > 0 ? allTools : void 0,
            tool_choice: body.tool_choice,
            temperature: body.temperature,
            max_tokens: body.max_tokens,
            stream: false,
            // Always non-streaming for tool loop
            tool_priority: body.tool_priority,
            enable_stacked: body.enable_stacked
          });
          const choice = llmResponse.choices?.[0];
          let toolCalls = choice?.message?.tool_calls || [];
          const content = choice?.message?.content || "";
          const reasoningContent = choice?.message?.reasoning_content || "";
          const fullContent = content + " " + reasoningContent;
          // Some models emit tool calls as XML inside their text instead of
          // structured tool_calls; when present those take precedence.
          const xmlToolCalls = this.parseXMLToolCalls(fullContent);
          if (xmlToolCalls.length > 0) {
            this.log(`\u{1F4CB} Parsed ${xmlToolCalls.length} XML tool calls from content`);
            toolCalls = xmlToolCalls;
          }
          if (!toolCalls || toolCalls.length === 0) {
            // No tool requests: send the final answer, optionally re-chunked
            // as SSE when the caller asked for streaming.
            if (body.stream) {
              await this.streamResponse(res, llmResponse);
            } else {
              res.json(llmResponse);
            }
            this.updateContext(body.messages, llmResponse);
            return;
          }
          messages.push({
            role: "assistant",
            content: choice?.message?.content || "",
            tool_calls: toolCalls
          });
          for (const toolCall of toolCalls) {
            const toolName = toolCall.function?.name;
            const toolArgsStr = toolCall.function?.arguments || "{}";
            if (!toolName) continue;
            this.log(`\u{1F527} Tool call: ${toolName}`);
            let toolResult;
            let toolError;
            try {
              const localTool = this.findLocalTool(toolName, localToolMap);
              if (localTool) {
                this.log(`  \u2192 Executing locally`);
                const args = JSON.parse(toolArgsStr);
                toolResult = await this.executeLocalTool(localTool.id, args);
              } else {
                // Unknown tool: try the backend when the caller prefers
                // local/stacked execution, otherwise mark it pending.
                const shouldExecuteLocal = body.tool_priority === "local" || body.tool_priority === "stacked";
                if (shouldExecuteLocal) {
                  try {
                    const args = JSON.parse(toolArgsStr);
                    toolResult = await this.rainfall.executeTool(toolName.replace(/_/g, "-"), args);
                  } catch {
                    toolResult = { _pending: true, tool: toolName, args: toolArgsStr };
                  }
                } else {
                  toolResult = { _pending: true, tool: toolName, args: toolArgsStr };
                }
              }
            } catch (error) {
              toolError = error instanceof Error ? error.message : String(error);
              this.log(`  \u2192 Error: ${toolError}`);
            }
            messages.push({
              role: "tool",
              content: toolError ? JSON.stringify({ error: toolError }) : typeof toolResult === "string" ? toolResult : JSON.stringify(toolResult),
              tool_call_id: toolCall.id
            });
            if (this.context) {
              // NOTE(review): JSON.parse here can throw on malformed tool
              // arguments and abort the whole request — confirm upstream
              // guarantees the arguments string is valid JSON.
              this.context.recordExecution(
                toolName,
                JSON.parse(toolArgsStr || "{}"),
                toolResult,
                { error: toolError, duration: 0 }
              );
            }
          }
        }
        res.status(500).json({
          error: {
            message: "Maximum tool execution iterations reached",
            type: "tool_execution_error"
          }
        });
      } catch (error) {
        this.log("Chat completions error:", error);
        res.status(500).json({
          error: {
            message: error instanceof Error ? error.message : "Internal server error",
            type: "internal_error"
          }
        });
      }
    });
    this.openaiApp.get("/health", (_req, res) => {
      res.json({
        status: "ok",
        daemon: "rainfall",
        version: "0.1.0",
        tools_loaded: this.tools.length,
        edge_node_id: this.networkedExecutor?.getEdgeNodeId(),
        clients_connected: this.clients.size
      });
    });
    this.openaiApp.get("/status", (_req, res) => {
      res.json(this.getStatus());
    });
    // Queue a tool execution on the distributed executor instead of
    // running it inline.
    this.openaiApp.post("/v1/queue", async (req, res) => {
      const { tool_id, params, execution_mode = "any" } = req.body;
      if (!tool_id) {
        res.status(400).json({ error: "Missing required field: tool_id" });
        return;
      }
      if (!this.networkedExecutor) {
        res.status(503).json({ error: "Networked executor not available" });
        return;
      }
      try {
        const jobId = await this.networkedExecutor.queueToolExecution(
          tool_id,
          params || {},
          { executionMode: execution_mode }
        );
        res.json({ job_id: jobId, status: "queued" });
      } catch (error) {
        res.status(500).json({
          error: error instanceof Error ? error.message : "Failed to queue job"
        });
      }
    });
    // Resolve once the HTTP server is actually accepting connections.
    return new Promise((resolve) => {
      this.openaiApp.listen(this.openaiPort, () => {
        resolve();
      });
    });
  }
  /**
   * Build a map of local Rainfall tools for quick lookup
   * Maps OpenAI-style underscore names to Rainfall tool IDs
   */
  async buildLocalToolMap() {
    const map = /* @__PURE__ */ new Map();
    for (const tool of this.tools) {
      const openAiName = tool.id.replace(/-/g, "_");
      // Index under both spellings so either name form resolves.
      map.set(openAiName, {
        id: tool.id,
        name: openAiName,
        description: tool.description
      });
      map.set(tool.id, {
        id: tool.id,
        name: openAiName,
        description: tool.description
      });
    }
    return map;
  }
  /**
   * Find a local Rainfall tool by name (OpenAI underscore format or original)
   */
  findLocalTool(toolName, localToolMap) {
    if (localToolMap.has(toolName)) {
      return localToolMap.get(toolName);
    }
    const dashedName = toolName.replace(/_/g, "-");
    if (localToolMap.has(dashedName)) {
      return localToolMap.get(dashedName);
    }
    return void 0;
  }
  /**
   * Execute a local Rainfall tool
   * @throws {Error} when the SDK is missing, or rethrows the tool's failure.
   */
  async executeLocalTool(toolId, args) {
    if (!this.rainfall) {
      throw new Error("Rainfall SDK not initialized");
    }
    const startTime = Date.now();
    try {
      const result = await this.rainfall.executeTool(toolId, args);
      const duration = Date.now() - startTime;
      this.log(`  \u2713 Completed in ${duration}ms`);
      return result;
    } catch (error) {
      const duration = Date.now() - startTime;
      this.log(`  \u2717 Failed after ${duration}ms`);
      throw error;
    }
  }
  /**
   * Parse XML-style tool calls from model output
   * Handles formats like: <function=name><parameter=key>value</parameter></function>
   * Note: all parameter values are captured as strings.
   */
  parseXMLToolCalls(content) {
    const toolCalls = [];
    const functionRegex = /<function=([^>]+)>([\s\S]*?)<\/function>/gi;
    let match;
    while ((match = functionRegex.exec(content)) !== null) {
      const functionName = match[1].trim();
      const paramsBlock = match[2];
      const params = {};
      const paramRegex = /<parameter=([^>]+)>([\s\S]*?)<\/parameter>/gi;
      let paramMatch;
      while ((paramMatch = paramRegex.exec(paramsBlock)) !== null) {
        const paramName = paramMatch[1].trim();
        const paramValue = paramMatch[2].trim();
        params[paramName] = paramValue;
      }
      toolCalls.push({
        id: `xml-${Date.now()}-${Math.random().toString(36).slice(2, 9)}`,
        type: "function",
        function: {
          name: functionName,
          arguments: JSON.stringify(params)
        }
      });
      this.log(`\u{1F4CB} Parsed XML tool call: ${functionName}(${JSON.stringify(params)})`);
    }
    return toolCalls;
  }
  /**
   * Call the LLM via Rainfall backend, LM Studio, RunPod, or other providers
   *
   * Provider priority:
   * 1. Config file (llm.provider, llm.baseUrl)
   * 2. Environment variables (OPENAI_API_KEY, OLLAMA_HOST, etc.)
   * 3. Default to Rainfall (credits-based)
   */
  async callLLM(params) {
    if (!this.rainfall) {
      throw new Error("Rainfall SDK not initialized");
    }
    // NOTE(review): getProviderBaseUrl is imported but unused in this method.
    const { loadConfig, getProviderBaseUrl } = await import("../config-DDTQQBN7.mjs");
    const config = loadConfig();
    const provider = config.llm?.provider || "rainfall";
    switch (provider) {
      case "local":
      case "ollama":
        return this.callLocalLLM(params, config);
      case "openai":
      case "anthropic":
        return this.callExternalLLM(params, config, provider);
      case "rainfall":
      default:
        return this.rainfall.chatCompletions({
          subscriber_id: params.subscriberId,
          model: params.model,
          messages: params.messages,
          stream: params.stream || false,
          temperature: params.temperature,
          max_tokens: params.max_tokens,
          tools: params.tools,
          tool_choice: params.tool_choice,
          tool_priority: params.tool_priority,
          enable_stacked: params.enable_stacked
        });
    }
  }
  /**
   * Call external LLM provider (OpenAI, Anthropic) via their OpenAI-compatible APIs
   * @throws {Error} when no API key is configured or the provider returns non-2xx.
   */
  async callExternalLLM(params, config, provider) {
    const { getProviderBaseUrl } = await import("../config-DDTQQBN7.mjs");
    const baseUrl = config.llm?.baseUrl || getProviderBaseUrl({ llm: { provider } });
    const apiKey = config.llm?.apiKey;
    if (!apiKey) {
      throw new Error(`${provider} API key not configured. Set via: rainfall config set llm.apiKey <key>`);
    }
    const model = params.model || config.llm?.model || (provider === "anthropic" ? "claude-3-5-sonnet-20241022" : "gpt-4o");
    const url = `${baseUrl}/chat/completions`;
    const response = await fetch(url, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "Authorization": `Bearer ${apiKey}`
      },
      body: JSON.stringify({
        model,
        messages: params.messages,
        tools: params.tools,
        tool_choice: params.tool_choice,
        temperature: params.temperature,
        max_tokens: params.max_tokens,
        stream: false
        // Tool loop requires non-streaming
      })
    });
    if (!response.ok) {
      const error = await response.text();
      throw new Error(`${provider} API error: ${error}`);
    }
    return response.json();
  }
  /**
   * Call a local LLM (LM Studio, Ollama, etc.)
   * Defaults to LM Studio's endpoint; most local servers ignore the API key.
   */
  async callLocalLLM(params, config) {
    const baseUrl = config.llm?.baseUrl || "http://localhost:1234/v1";
    const apiKey = config.llm?.apiKey || "not-needed";
    const model = params.model || config.llm?.model || "local-model";
    const url = `${baseUrl}/chat/completions`;
    const response = await fetch(url, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "Authorization": `Bearer ${apiKey}`
      },
      body: JSON.stringify({
        model,
        messages: params.messages,
        tools: params.tools,
        tool_choice: params.tool_choice,
        temperature: params.temperature,
        max_tokens: params.max_tokens,
        stream: false
        // Tool loop requires non-streaming
      })
    });
    if (!response.ok) {
      const error = await response.text();
      throw new Error(`Local LLM error: ${error}`);
    }
    return response.json();
  }
  /**
   * Stream a response to the client (converts non-streaming to SSE format)
   * Emits a role chunk, the content in 10-character delta chunks, a stop
   * chunk, then the [DONE] sentinel.
   */
  async streamResponse(res, response) {
    res.setHeader("Content-Type", "text/event-stream");
    res.setHeader("Cache-Control", "no-cache");
    res.setHeader("Connection", "keep-alive");
    const message = response.choices?.[0]?.message;
    const id = response.id || `chatcmpl-${Date.now()}`;
    const model = response.model || "unknown";
    const created = Math.floor(Date.now() / 1e3);
    res.write(`data: ${JSON.stringify({
      id,
      object: "chat.completion.chunk",
      created,
      model,
      choices: [{ index: 0, delta: { role: "assistant" }, finish_reason: null }]
    })}

`);
    const content = message?.content || "";
    const chunkSize = 10;
    for (let i = 0; i < content.length; i += chunkSize) {
      const chunk = content.slice(i, i + chunkSize);
      res.write(`data: ${JSON.stringify({
        id,
        object: "chat.completion.chunk",
        created,
        model,
        choices: [{ index: 0, delta: { content: chunk }, finish_reason: null }]
      })}

`);
    }
    res.write(`data: ${JSON.stringify({
      id,
      object: "chat.completion.chunk",
      created,
      model,
      choices: [{ index: 0, delta: {}, finish_reason: "stop" }]
    })}

`);
    res.write("data: [DONE]\n\n");
    res.end();
  }
  /**
   * Update context with conversation history
   * Records only the last user message and the assistant's final content.
   */
  updateContext(originalMessages, response) {
    if (!this.context) return;
    const lastUserMessage = originalMessages.filter((m) => m.role === "user").pop();
    if (lastUserMessage) {
      this.context.addMessage("user", lastUserMessage.content);
    }
    const assistantContent = response.choices?.[0]?.message?.content;
    if (assistantContent) {
      this.context.addMessage("assistant", assistantContent);
    }
  }
  /**
   * Convert cached tools into OpenAI function-tool descriptors.
   * Capped at 128 entries; tools without a fetchable schema are skipped.
   */
  async getOpenAITools() {
    const tools = [];
    for (const tool of this.tools.slice(0, 128)) {
      const schema = await this.getToolSchema(tool.id);
      if (schema) {
        const toolSchema = schema;
        let parameters = { type: "object", properties: {}, required: [] };
        if (toolSchema.parameters && typeof toolSchema.parameters === "object") {
          const rawParams = toolSchema.parameters;
          parameters = {
            type: rawParams.type || "object",
            properties: rawParams.properties || {},
            required: rawParams.required || []
          };
        }
        tools.push({
          type: "function",
          function: {
            name: tool.id.replace(/-/g, "_"),
            // OpenAI requires underscore names
            description: toolSchema.description || tool.description,
            parameters
          }
        });
      }
    }
    return tools;
  }
  /** Canned greeting describing the daemon's current capabilities. */
  buildResponseContent() {
    const edgeNodeId = this.networkedExecutor?.getEdgeNodeId();
    const toolCount = this.tools.length;
    return `Rainfall daemon online. Edge node: ${edgeNodeId || "local"}. ${toolCount} tools available. What would you like to execute locally or in the cloud?`;
  }
  /** Snapshot of daemon state, served by the /status endpoint. */
  getStatus() {
    return {
      running: !!this.wss,
      port: this.port,
      openaiPort: this.openaiPort,
      toolsLoaded: this.tools.length,
      clientsConnected: this.clients.size,
      edgeNodeId: this.networkedExecutor?.getEdgeNodeId(),
      context: this.context?.getStatus() || {
        memoriesCached: 0,
        activeSessions: 0,
        executionHistorySize: 0
      },
      listeners: this.listeners?.getStatus() || {
        fileWatchers: 0,
        cronTriggers: 0,
        recentEvents: 0
      }
    };
  }
  /** Debug-gated console logging; no-op unless constructed with debug: true. */
  log(...args) {
    if (this.debug) {
      console.log(...args);
    }
  }
};
|
|
803
|
+
// Module-level singleton managed by startDaemon()/stopDaemon().
var daemonInstance = null;
|
|
804
|
+
/**
 * Start the singleton daemon, creating it on first call.
 * Subsequent calls return the already-running instance.
 *
 * Fix: the original assigned `daemonInstance` before awaiting `start()`,
 * so a failed startup left a broken singleton behind and every later call
 * reported "already running". Now the singleton is cleared when start()
 * throws, allowing a clean retry.
 *
 * @param {object} [config] - RainfallDaemon constructor config.
 * @returns {Promise<RainfallDaemon>} the running daemon instance.
 * @throws {Error} rethrows any startup failure from RainfallDaemon.start().
 */
async function startDaemon(config = {}) {
  if (daemonInstance) {
    console.log("Daemon already running");
    return daemonInstance;
  }
  daemonInstance = new RainfallDaemon(config);
  try {
    await daemonInstance.start();
  } catch (error) {
    // Don't keep a half-started daemon as the singleton.
    daemonInstance = null;
    throw error;
  }
  return daemonInstance;
}
|
|
813
|
+
/**
 * Stop the singleton daemon, if one is running.
 *
 * Fix: the original only cleared `daemonInstance` after a successful
 * stop(), so a failing shutdown left the stale instance registered
 * forever. The singleton is now cleared in a finally block so a new
 * daemon can be started even after a failed teardown.
 *
 * @returns {Promise<void>}
 * @throws {Error} rethrows any failure from RainfallDaemon.stop().
 */
async function stopDaemon() {
  if (!daemonInstance) {
    console.log("Daemon not running");
    return;
  }
  try {
    await daemonInstance.stop();
  } finally {
    // Clear the singleton even when stop() throws.
    daemonInstance = null;
  }
}
|
|
821
|
+
/**
 * Report the running daemon's status snapshot.
 * @returns {object|null} the status object, or null when no daemon is running.
 */
function getDaemonStatus() {
  return daemonInstance === null ? null : daemonInstance.getStatus();
}
|
|
827
|
+
/**
 * Access the daemon singleton directly.
 * @returns {RainfallDaemon|null} the running daemon, or null if none started.
 */
function getDaemonInstance() {
  return daemonInstance;
}
|
|
830
|
+
// Public daemon API: the class itself plus singleton lifecycle helpers.
export {
  RainfallDaemon,
  getDaemonInstance,
  getDaemonStatus,
  startDaemon,
  stopDaemon
};
|