@rainfall-devkit/sdk 0.1.7 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. package/README.md +51 -0
  2. package/dist/chunk-7MRE4ZVI.mjs +662 -0
  3. package/dist/chunk-AQFC7YAX.mjs +27 -0
  4. package/dist/chunk-RA3HDYF4.mjs +778 -0
  5. package/dist/chunk-V5QWJVLC.mjs +662 -0
  6. package/dist/chunk-VDPKDC3R.mjs +869 -0
  7. package/dist/chunk-WOITG5TG.mjs +84 -0
  8. package/dist/cli/index.js +2756 -607
  9. package/dist/cli/index.mjs +404 -46
  10. package/dist/config-DDTQQBN7.mjs +14 -0
  11. package/dist/config-ZKNHII2A.mjs +8 -0
  12. package/dist/daemon/index.d.mts +136 -0
  13. package/dist/daemon/index.d.ts +136 -0
  14. package/dist/daemon/index.js +2473 -0
  15. package/dist/daemon/index.mjs +836 -0
  16. package/dist/errors-BMPseAnM.d.mts +47 -0
  17. package/dist/errors-BMPseAnM.d.ts +47 -0
  18. package/dist/errors-CZdRoYyw.d.ts +332 -0
  19. package/dist/errors-Chjq1Mev.d.mts +332 -0
  20. package/dist/index.d.mts +3 -1
  21. package/dist/index.d.ts +3 -1
  22. package/dist/index.js +762 -5
  23. package/dist/index.mjs +14 -2
  24. package/dist/listeners-BbYIaNCs.d.mts +372 -0
  25. package/dist/listeners-CP2A9J_2.d.ts +372 -0
  26. package/dist/listeners-CTRSofnm.d.mts +372 -0
  27. package/dist/listeners-CYI-YwIF.d.mts +372 -0
  28. package/dist/listeners-QJeEtLbV.d.ts +372 -0
  29. package/dist/listeners-hp0Ib2Ox.d.ts +372 -0
  30. package/dist/mcp.d.mts +3 -2
  31. package/dist/mcp.d.ts +3 -2
  32. package/dist/mcp.js +95 -3
  33. package/dist/mcp.mjs +1 -1
  34. package/dist/sdk-CJ9g5lFo.d.mts +772 -0
  35. package/dist/sdk-CJ9g5lFo.d.ts +772 -0
  36. package/dist/sdk-DD1OeGRJ.d.mts +871 -0
  37. package/dist/sdk-DD1OeGRJ.d.ts +871 -0
  38. package/dist/types-GnRAfH-h.d.mts +489 -0
  39. package/dist/types-GnRAfH-h.d.ts +489 -0
  40. package/package.json +14 -5
@@ -1,41 +1,17 @@
1
1
  #!/usr/bin/env node
2
2
  import {
3
3
  Rainfall
4
- } from "../chunk-PNYIIMJS.mjs";
4
+ } from "../chunk-VDPKDC3R.mjs";
5
+ import {
6
+ loadConfig,
7
+ saveConfig
8
+ } from "../chunk-WOITG5TG.mjs";
5
9
 
6
10
  // src/cli/index.ts
7
- import { readFileSync, existsSync, writeFileSync, mkdirSync } from "fs";
8
- import { join } from "path";
9
- import { homedir } from "os";
10
- var CONFIG_DIR = join(homedir(), ".rainfall");
11
- var CONFIG_FILE = join(CONFIG_DIR, "config.json");
12
- function loadConfig() {
13
- if (!existsSync(CONFIG_FILE)) {
14
- return {};
15
- }
16
- try {
17
- return JSON.parse(readFileSync(CONFIG_FILE, "utf8"));
18
- } catch {
19
- return {};
20
- }
21
- }
22
- function saveConfig(config) {
23
- if (!existsSync(CONFIG_DIR)) {
24
- mkdirSync(CONFIG_DIR, { recursive: true });
25
- }
26
- writeFileSync(CONFIG_FILE, JSON.stringify(config, null, 2));
27
- }
28
- function getRainfall() {
29
- const config = loadConfig();
30
- if (!config.apiKey) {
31
- console.error("Error: No API key configured. Run: rainfall auth login");
32
- process.exit(1);
33
- }
34
- return new Rainfall({
35
- apiKey: config.apiKey,
36
- baseUrl: config.baseUrl
37
- });
38
- }
11
+ import { readFileSync } from "fs";
12
+ import { join, dirname } from "path";
13
+ import { fileURLToPath } from "url";
14
+ import { spawn } from "child_process";
39
15
  function printHelp() {
40
16
  console.log(`
41
17
  Rainfall CLI - 200+ tools, one key
@@ -53,28 +29,66 @@ Commands:
53
29
  tools search <query> Search for tools
54
30
 
55
31
  run <tool> [options] Execute a tool
32
+
33
+ daemon start Start the Rainfall daemon
34
+ daemon stop Stop the Rainfall daemon
35
+ daemon restart Restart the Rainfall daemon
36
+ daemon status Check daemon status
37
+
38
+ workflow new Create a new workflow (interactive)
39
+ workflow run <workflow> Run a saved workflow
56
40
 
57
41
  me Show account info and usage
58
42
 
59
43
  config get [key] Get configuration value
60
44
  config set <key> <value> Set configuration value
45
+ config llm Show LLM configuration
46
+
47
+ version Show version information
48
+ upgrade Upgrade to the latest version
61
49
 
62
50
  help Show this help message
63
51
 
52
+ Configuration keys:
53
+ llm.provider LLM provider (rainfall|openai|anthropic|ollama|local)
54
+ llm.baseUrl Base URL for the LLM API
55
+ llm.apiKey API key for the LLM provider
56
+ llm.model Default model to use
57
+
64
58
  Options for 'run':
65
59
  --params, -p <json> Tool parameters as JSON
66
60
  --file, -f <path> Read parameters from file
67
61
  --raw Output raw JSON
62
+ --<key> <value> Pass individual parameters (e.g., --query "AI news")
63
+
64
+ Options for 'daemon start':
65
+ --port <port> WebSocket port (default: 8765)
66
+ --openai-port <port> OpenAI API port (default: 8787)
67
+ --debug Enable verbose debug logging
68
68
 
69
69
  Examples:
70
70
  rainfall auth login
71
71
  rainfall tools list
72
72
  rainfall tools describe github-create-issue
73
73
  rainfall run exa-web-search -p '{"query": "AI news"}'
74
+ rainfall run exa-web-search --query "AI news"
75
+ rainfall run github-create-issue --owner facebook --repo react --title "Bug"
74
76
  rainfall run article-summarize -f ./article.json
77
+ rainfall daemon start
75
78
  echo '{"query": "hello"}' | rainfall run exa-web-search
76
79
  `);
77
80
  }
81
+ function getRainfall() {
82
+ const config = loadConfig();
83
+ if (!config.apiKey) {
84
+ console.error("Error: No API key configured. Run: rainfall auth login");
85
+ process.exit(1);
86
+ }
87
+ return new Rainfall({
88
+ apiKey: config.apiKey,
89
+ baseUrl: config.baseUrl
90
+ });
91
+ }
78
92
  async function authLogin(args) {
79
93
  const apiKey = args[0] || process.env.RAINFALL_API_KEY;
80
94
  if (!apiKey) {
@@ -229,7 +243,30 @@ async function runTool(args) {
229
243
  console.error("\nUsage: rainfall run <tool-id> [options]");
230
244
  process.exit(1);
231
245
  }
246
+ if (toolId === "--help" || toolId === "-h") {
247
+ console.log(`
248
+ Usage: rainfall run <tool-id> [options]
249
+
250
+ Execute a tool by ID.
251
+
252
+ Options:
253
+ -p, --params <json> Tool parameters as JSON string
254
+ -f, --file <path> Read parameters from JSON file
255
+ --raw Output raw JSON (no formatting)
256
+ --<key> <value> Pass individual parameters (e.g., --query "AI news")
257
+
258
+ Examples:
259
+ rainfall run figma-users-getMe
260
+ rainfall run exa-web-search -p '{"query": "AI news"}'
261
+ rainfall run exa-web-search --query "AI news"
262
+ rainfall run github-create-issue --owner facebook --repo react --title "Bug"
263
+ rainfall run github-create-issue -f ./issue.json
264
+ echo '{"query": "hello"}' | rainfall run exa-web-search
265
+ `);
266
+ return;
267
+ }
232
268
  let params = {};
269
+ const rawArgs = [];
233
270
  for (let i = 1; i < args.length; i++) {
234
271
  const arg = args[i];
235
272
  if (arg === "--params" || arg === "-p") {
@@ -257,22 +294,71 @@ async function runTool(args) {
257
294
  process.exit(1);
258
295
  }
259
296
  } else if (arg === "--raw") {
297
+ } else if (arg.startsWith("--")) {
298
+ const key = arg.slice(2);
299
+ const value = args[++i];
300
+ if (value === void 0) {
301
+ console.error(`Error: ${arg} requires a value`);
302
+ process.exit(1);
303
+ }
304
+ try {
305
+ params[key] = JSON.parse(value);
306
+ } catch {
307
+ params[key] = value;
308
+ }
309
+ } else {
310
+ rawArgs.push(arg);
260
311
  }
261
312
  }
262
313
  if (!process.stdin.isTTY) {
263
- const chunks = [];
264
- for await (const chunk of process.stdin) {
265
- chunks.push(chunk);
266
- }
267
- if (chunks.length > 0) {
268
- try {
269
- const piped = JSON.parse(Buffer.concat(chunks).toString());
270
- params = { ...params, ...piped };
271
- } catch {
314
+ process.stdin.pause();
315
+ const fs = await import("fs");
316
+ try {
317
+ const buffer = Buffer.alloc(1024);
318
+ const bytesRead = await new Promise((resolve) => {
319
+ const timeout = setTimeout(() => resolve(0), 50);
320
+ fs.read(process.stdin.fd, buffer, 0, 1024, null, (err, n) => {
321
+ clearTimeout(timeout);
322
+ resolve(err ? 0 : n);
323
+ });
324
+ });
325
+ if (bytesRead > 0) {
326
+ let data = buffer.toString("utf8", 0, bytesRead);
327
+ while (true) {
328
+ const more = await new Promise((resolve) => {
329
+ fs.read(process.stdin.fd, buffer, 0, 1024, null, (err, n) => {
330
+ resolve(err ? 0 : n);
331
+ });
332
+ });
333
+ if (more === 0) break;
334
+ data += buffer.toString("utf8", 0, more);
335
+ }
336
+ if (data.trim()) {
337
+ try {
338
+ const piped = JSON.parse(data);
339
+ params = { ...params, ...piped };
340
+ } catch {
341
+ }
342
+ }
272
343
  }
344
+ } catch {
273
345
  }
274
346
  }
275
347
  const rainfall = getRainfall();
348
+ if (rawArgs.length === 1 && Object.keys(params).length === 0) {
349
+ try {
350
+ const schema = await rainfall.getToolSchema(toolId);
351
+ if (schema.parameters && typeof schema.parameters === "object") {
352
+ const paramEntries = Object.entries(schema.parameters);
353
+ const requiredParams = paramEntries.filter(([, p]) => !p.optional);
354
+ if (requiredParams.length === 1) {
355
+ const [paramName] = requiredParams[0];
356
+ params = { [paramName]: rawArgs[0] };
357
+ }
358
+ }
359
+ } catch {
360
+ }
361
+ }
276
362
  try {
277
363
  const result = await rainfall.executeTool(toolId, params);
278
364
  if (args.includes("--raw")) {
@@ -299,7 +385,16 @@ function configGet(args) {
299
385
  const key = args[0];
300
386
  const config = loadConfig();
301
387
  if (key) {
302
- console.log(config[key] || "");
388
+ const parts = key.split(".");
389
+ let value = config;
390
+ for (const part of parts) {
391
+ value = value?.[part];
392
+ }
393
+ if (typeof value === "object" && value !== null) {
394
+ console.log(JSON.stringify(value, null, 2));
395
+ } else {
396
+ console.log(value ?? "");
397
+ }
303
398
  } else {
304
399
  console.log(JSON.stringify(config, null, 2));
305
400
  }
@@ -310,13 +405,231 @@ function configSet(args) {
310
405
  if (!key || !value) {
311
406
  console.error("Error: Both key and value required");
312
407
  console.error("\nUsage: rainfall config set <key> <value>");
408
+ console.error("\nExamples:");
409
+ console.error(" rainfall config set llm.provider local");
410
+ console.error(" rainfall config set llm.baseUrl http://localhost:1234/v1");
411
+ console.error(" rainfall config set llm.model llama-3.3-70b-versatile");
313
412
  process.exit(1);
314
413
  }
315
414
  const config = loadConfig();
316
- config[key] = value;
415
+ const parts = key.split(".");
416
+ if (parts.length === 1) {
417
+ config[key] = value;
418
+ } else {
419
+ let target = config;
420
+ for (let i = 0; i < parts.length - 1; i++) {
421
+ if (!target[parts[i]] || typeof target[parts[i]] !== "object") {
422
+ target[parts[i]] = {};
423
+ }
424
+ target = target[parts[i]];
425
+ }
426
+ target[parts[parts.length - 1]] = value;
427
+ }
317
428
  saveConfig(config);
318
429
  console.log(`\u2713 Set ${key} = ${value}`);
319
430
  }
431
+ function configLLM() {
432
+ const config = loadConfig();
433
+ const llm = config.llm || { provider: "rainfall" };
434
+ console.log("LLM Configuration:");
435
+ console.log(` Provider: ${llm.provider}`);
436
+ console.log(` Base URL: ${llm.baseUrl || "(default)"}`);
437
+ console.log(` Model: ${llm.model || "(default)"}`);
438
+ console.log(` API Key: ${llm.apiKey ? "****" + llm.apiKey.slice(-4) : "(none)"}`);
439
+ console.log();
440
+ console.log("Providers:");
441
+ console.log(" rainfall - Use Rainfall backend (default, uses your credits)");
442
+ console.log(" openai - Use OpenAI API directly");
443
+ console.log(" anthropic - Use Anthropic API directly");
444
+ console.log(" ollama - Use local Ollama instance");
445
+ console.log(" local - Use any OpenAI-compatible endpoint (LM Studio, etc.)");
446
+ console.log();
447
+ console.log("Examples:");
448
+ console.log(" rainfall config set llm.provider local");
449
+ console.log(" rainfall config set llm.baseUrl http://localhost:1234/v1");
450
+ console.log(" rainfall config set llm.provider openai");
451
+ console.log(" rainfall config set llm.apiKey sk-...");
452
+ }
453
+ function getPackageJson() {
454
+ try {
455
+ const __filename2 = fileURLToPath(import.meta.url);
456
+ const __dirname2 = dirname(__filename2);
457
+ const packagePath = join(__dirname2, "..", "..", "package.json");
458
+ const content = readFileSync(packagePath, "utf8");
459
+ return JSON.parse(content);
460
+ } catch {
461
+ return { version: "unknown", name: "@rainfall-devkit/sdk" };
462
+ }
463
+ }
464
+ function showVersion() {
465
+ const pkg = getPackageJson();
466
+ console.log(`${pkg.name} v${pkg.version}`);
467
+ }
468
+ async function upgrade() {
469
+ const pkg = getPackageJson();
470
+ console.log(`Upgrading ${pkg.name}...`);
471
+ const execPath = process.argv[0];
472
+ const isBun = execPath.includes("bun");
473
+ let command;
474
+ let args;
475
+ if (isBun) {
476
+ command = "bun";
477
+ args = ["add", "-g", `${pkg.name}@latest`];
478
+ } else {
479
+ command = "npm";
480
+ args = ["i", "-g", `${pkg.name}@latest`];
481
+ }
482
+ console.log(`Running: ${command} ${args.join(" ")}`);
483
+ console.log();
484
+ return new Promise((resolve, reject) => {
485
+ const child = spawn(command, args, {
486
+ stdio: "inherit",
487
+ shell: true
488
+ });
489
+ child.on("close", (code) => {
490
+ if (code === 0) {
491
+ console.log();
492
+ console.log("\u2713 Upgrade complete");
493
+ resolve();
494
+ } else {
495
+ reject(new Error(`Upgrade failed with exit code ${code}`));
496
+ }
497
+ });
498
+ child.on("error", (err) => {
499
+ reject(err);
500
+ });
501
+ });
502
+ }
503
+ async function daemonStart(args) {
504
+ let port;
505
+ let openaiPort;
506
+ let debug = false;
507
+ for (let i = 0; i < args.length; i++) {
508
+ const arg = args[i];
509
+ if (arg === "--port") {
510
+ const val = parseInt(args[++i], 10);
511
+ if (!isNaN(val)) port = val;
512
+ } else if (arg === "--openai-port") {
513
+ const val = parseInt(args[++i], 10);
514
+ if (!isNaN(val)) openaiPort = val;
515
+ } else if (arg === "--debug") {
516
+ debug = true;
517
+ }
518
+ }
519
+ const { startDaemon } = await import("../daemon/index.mjs");
520
+ try {
521
+ await startDaemon({ port, openaiPort, debug });
522
+ process.on("SIGINT", async () => {
523
+ console.log("\n");
524
+ const { stopDaemon } = await import("../daemon/index.mjs");
525
+ await stopDaemon();
526
+ process.exit(0);
527
+ });
528
+ process.on("SIGTERM", async () => {
529
+ const { stopDaemon } = await import("../daemon/index.mjs");
530
+ await stopDaemon();
531
+ process.exit(0);
532
+ });
533
+ } catch (error) {
534
+ console.error("Failed to start daemon:", error instanceof Error ? error.message : error);
535
+ process.exit(1);
536
+ }
537
+ }
538
+ async function daemonStop() {
539
+ const { stopDaemon } = await import("../daemon/index.mjs");
540
+ await stopDaemon();
541
+ }
542
+ async function daemonRestart(args) {
543
+ const { stopDaemon, startDaemon } = await import("../daemon/index.mjs");
544
+ let port;
545
+ let openaiPort;
546
+ let debug = false;
547
+ for (let i = 0; i < args.length; i++) {
548
+ const arg = args[i];
549
+ if (arg === "--port") {
550
+ const val = parseInt(args[++i], 10);
551
+ if (!isNaN(val)) port = val;
552
+ } else if (arg === "--openai-port") {
553
+ const val = parseInt(args[++i], 10);
554
+ if (!isNaN(val)) openaiPort = val;
555
+ } else if (arg === "--debug") {
556
+ debug = true;
557
+ }
558
+ }
559
+ console.log("\u{1F504} Restarting daemon...");
560
+ try {
561
+ await stopDaemon();
562
+ await new Promise((resolve) => setTimeout(resolve, 500));
563
+ await startDaemon({ port, openaiPort, debug });
564
+ process.on("SIGINT", async () => {
565
+ console.log("\n");
566
+ const { stopDaemon: stop } = await import("../daemon/index.mjs");
567
+ await stop();
568
+ process.exit(0);
569
+ });
570
+ process.on("SIGTERM", async () => {
571
+ const { stopDaemon: stop } = await import("../daemon/index.mjs");
572
+ await stop();
573
+ process.exit(0);
574
+ });
575
+ } catch (error) {
576
+ console.error("Failed to restart daemon:", error instanceof Error ? error.message : error);
577
+ process.exit(1);
578
+ }
579
+ }
580
+ async function daemonStatus() {
581
+ const { getDaemonStatus } = await import("../daemon/index.mjs");
582
+ const status = getDaemonStatus();
583
+ if (!status) {
584
+ console.log("Daemon not running");
585
+ console.log("Run: rainfall daemon start");
586
+ return;
587
+ }
588
+ console.log("Daemon status:");
589
+ console.log(` Running: ${status.running ? "yes" : "no"}`);
590
+ console.log(` WebSocket port: ${status.port}`);
591
+ console.log(` OpenAI API port: ${status.openaiPort}`);
592
+ console.log(` Tools loaded: ${status.toolsLoaded}`);
593
+ console.log(` Clients connected: ${status.clientsConnected}`);
594
+ console.log(` Edge Node ID: ${status.edgeNodeId || "local"}`);
595
+ console.log();
596
+ console.log("Context:");
597
+ console.log(` Memories cached: ${status.context.memoriesCached}`);
598
+ console.log(` Active sessions: ${status.context.activeSessions}`);
599
+ console.log(` Current session: ${status.context.currentSession || "none"}`);
600
+ console.log(` Execution history: ${status.context.executionHistorySize}`);
601
+ console.log();
602
+ console.log("Listeners:");
603
+ console.log(` File watchers: ${status.listeners.fileWatchers}`);
604
+ console.log(` Cron triggers: ${status.listeners.cronTriggers}`);
605
+ console.log(` Recent events: ${status.listeners.recentEvents}`);
606
+ }
607
+ async function workflowNew() {
608
+ console.log("\u{1F6A7} Interactive workflow creation coming soon!");
609
+ console.log();
610
+ console.log("For now, create workflows using the SDK:");
611
+ console.log(' import { createFileWatcherWorkflow } from "@rainfall-devkit/sdk/daemon";');
612
+ console.log();
613
+ console.log("Example:");
614
+ console.log(` const workflow = createFileWatcherWorkflow('pdf-processor', '~/Downloads', {`);
615
+ console.log(` pattern: '*.pdf',`);
616
+ console.log(` events: ['create'],`);
617
+ console.log(` workflow: [`);
618
+ console.log(` { toolId: 'ocr-pdf', params: {} },`);
619
+ console.log(` { toolId: 'notion-create-page', params: { parent: '...' } },`);
620
+ console.log(` ],`);
621
+ console.log(` });`);
622
+ }
623
+ async function workflowRun(args) {
624
+ const workflowId = args[0];
625
+ if (!workflowId) {
626
+ console.error("Error: Workflow ID required");
627
+ console.error("\nUsage: rainfall workflow run <workflow-id>");
628
+ process.exit(1);
629
+ }
630
+ console.log(`\u{1F6A7} Running workflow: ${workflowId}`);
631
+ console.log("Workflow execution coming soon!");
632
+ }
320
633
  async function main() {
321
634
  const args = process.argv.slice(2);
322
635
  const command = args[0];
@@ -358,7 +671,41 @@ async function main() {
358
671
  }
359
672
  break;
360
673
  case "run":
361
- await runTool(rest);
674
+ await runTool(args.slice(1));
675
+ break;
676
+ case "daemon":
677
+ switch (subcommand) {
678
+ case "start":
679
+ await daemonStart(rest);
680
+ break;
681
+ case "stop":
682
+ await daemonStop();
683
+ break;
684
+ case "restart":
685
+ await daemonRestart(rest);
686
+ break;
687
+ case "status":
688
+ await daemonStatus();
689
+ break;
690
+ default:
691
+ console.error("Error: Unknown daemon subcommand");
692
+ console.error("\nUsage: rainfall daemon <start|stop|restart|status>");
693
+ process.exit(1);
694
+ }
695
+ break;
696
+ case "workflow":
697
+ switch (subcommand) {
698
+ case "new":
699
+ await workflowNew();
700
+ break;
701
+ case "run":
702
+ await workflowRun(rest);
703
+ break;
704
+ default:
705
+ console.error("Error: Unknown workflow subcommand");
706
+ console.error("\nUsage: rainfall workflow <new|run>");
707
+ process.exit(1);
708
+ }
362
709
  break;
363
710
  case "me":
364
711
  await showMe();
@@ -371,12 +718,23 @@ async function main() {
371
718
  case "set":
372
719
  configSet(rest);
373
720
  break;
721
+ case "llm":
722
+ configLLM();
723
+ break;
374
724
  default:
375
725
  console.error("Error: Unknown config subcommand");
376
- console.error("\nUsage: rainfall config <get|set>");
726
+ console.error("\nUsage: rainfall config <get|set|llm>");
377
727
  process.exit(1);
378
728
  }
379
729
  break;
730
+ case "version":
731
+ case "--version":
732
+ case "-v":
733
+ showVersion();
734
+ break;
735
+ case "upgrade":
736
+ await upgrade();
737
+ break;
380
738
  case "help":
381
739
  case "--help":
382
740
  case "-h":
@@ -0,0 +1,14 @@
1
+ import {
2
+ getLLMConfig,
3
+ getProviderBaseUrl,
4
+ isLocalProvider,
5
+ loadConfig,
6
+ saveConfig
7
+ } from "./chunk-WOITG5TG.mjs";
8
+ export {
9
+ getLLMConfig,
10
+ getProviderBaseUrl,
11
+ isLocalProvider,
12
+ loadConfig,
13
+ saveConfig
14
+ };
@@ -0,0 +1,8 @@
1
+ import {
2
+ loadConfig,
3
+ saveConfig
4
+ } from "./chunk-AQFC7YAX.mjs";
5
+ export {
6
+ loadConfig,
7
+ saveConfig
8
+ };
@@ -0,0 +1,136 @@
1
+ import { e as RainfallConfig } from '../sdk-DD1OeGRJ.mjs';
2
+ import { N as NetworkedExecutorOptions, C as ContextOptions, e as RainfallNetworkedExecutor, R as RainfallDaemonContext, d as RainfallListenerRegistry } from '../listeners-CYI-YwIF.mjs';
3
+
4
+ /**
5
+ * Rainfall Daemon - Local websocket server + OpenAI-compatible proxy
6
+ *
7
+ * Provides:
8
+ * - WebSocket server for MCP clients (Claude, Cursor, etc.)
9
+ * - OpenAI-compatible /v1/chat/completions endpoint
10
+ * - Hot-loaded tools from Rainfall SDK
11
+ * - Networked execution for distributed workflows
12
+ * - Persistent context and memory
13
+ * - Passive listeners (file watchers, cron triggers)
14
+ */
15
+
16
+ interface DaemonConfig {
17
+ port?: number;
18
+ openaiPort?: number;
19
+ rainfallConfig?: RainfallConfig;
20
+ /** Enable debug logging */
21
+ debug?: boolean;
22
+ /** Networked executor options */
23
+ networkedOptions?: NetworkedExecutorOptions;
24
+ /** Context/memory options */
25
+ contextOptions?: ContextOptions;
26
+ }
27
+ interface DaemonStatus {
28
+ running: boolean;
29
+ port?: number;
30
+ openaiPort?: number;
31
+ toolsLoaded: number;
32
+ clientsConnected: number;
33
+ edgeNodeId?: string;
34
+ context: {
35
+ memoriesCached: number;
36
+ activeSessions: number;
37
+ currentSession?: string;
38
+ executionHistorySize: number;
39
+ };
40
+ listeners: {
41
+ fileWatchers: number;
42
+ cronTriggers: number;
43
+ recentEvents: number;
44
+ };
45
+ }
46
+ declare class RainfallDaemon {
47
+ private wss?;
48
+ private openaiApp;
49
+ private rainfall?;
50
+ private port;
51
+ private openaiPort;
52
+ private rainfallConfig?;
53
+ private tools;
54
+ private toolSchemas;
55
+ private clients;
56
+ private debug;
57
+ private networkedExecutor?;
58
+ private context?;
59
+ private listeners?;
60
+ constructor(config?: DaemonConfig);
61
+ start(): Promise<void>;
62
+ stop(): Promise<void>;
63
+ /**
64
+ * Get the networked executor for distributed job management
65
+ */
66
+ getNetworkedExecutor(): RainfallNetworkedExecutor | undefined;
67
+ /**
68
+ * Get the context for memory/session management
69
+ */
70
+ getContext(): RainfallDaemonContext | undefined;
71
+ /**
72
+ * Get the listener registry for passive triggers
73
+ */
74
+ getListenerRegistry(): RainfallListenerRegistry | undefined;
75
+ private initializeRainfall;
76
+ private loadTools;
77
+ private getToolSchema;
78
+ private startWebSocketServer;
79
+ private handleMCPMessage;
80
+ private getMCPTools;
81
+ private executeTool;
82
+ private startOpenAIProxy;
83
+ /**
84
+ * Build a map of local Rainfall tools for quick lookup
85
+ * Maps OpenAI-style underscore names to Rainfall tool IDs
86
+ */
87
+ private buildLocalToolMap;
88
+ /**
89
+ * Find a local Rainfall tool by name (OpenAI underscore format or original)
90
+ */
91
+ private findLocalTool;
92
+ /**
93
+ * Execute a local Rainfall tool
94
+ */
95
+ private executeLocalTool;
96
+ /**
97
+ * Parse XML-style tool calls from model output
98
+ * Handles formats like: <function=name><parameter=key>value</parameter></function>
99
+ */
100
+ private parseXMLToolCalls;
101
+ /**
102
+ * Call the LLM via Rainfall backend, LM Studio, RunPod, or other providers
103
+ *
104
+ * Provider priority:
105
+ * 1. Config file (llm.provider, llm.baseUrl)
106
+ * 2. Environment variables (OPENAI_API_KEY, OLLAMA_HOST, etc.)
107
+ * 3. Default to Rainfall (credits-based)
108
+ */
109
+ private callLLM;
110
+ /**
111
+ * Call external LLM provider (OpenAI, Anthropic) via their OpenAI-compatible APIs
112
+ */
113
+ private callExternalLLM;
114
+ /**
115
+ * Call a local LLM (LM Studio, Ollama, etc.)
116
+ */
117
+ private callLocalLLM;
118
+ /**
119
+ * Stream a response to the client (converts non-streaming to SSE format)
120
+ */
121
+ private streamResponse;
122
+ /**
123
+ * Update context with conversation history
124
+ */
125
+ private updateContext;
126
+ private getOpenAITools;
127
+ private buildResponseContent;
128
+ getStatus(): DaemonStatus;
129
+ private log;
130
+ }
131
+ declare function startDaemon(config?: DaemonConfig): Promise<RainfallDaemon>;
132
+ declare function stopDaemon(): Promise<void>;
133
+ declare function getDaemonStatus(): DaemonStatus | null;
134
+ declare function getDaemonInstance(): RainfallDaemon | null;
135
+
136
+ export { type DaemonConfig, type DaemonStatus, RainfallDaemon, getDaemonInstance, getDaemonStatus, startDaemon, stopDaemon };