openfused 0.3.3 → 0.3.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cli.js CHANGED
@@ -8,7 +8,7 @@ import * as registry from "./registry.js";
8
8
  import { fingerprint } from "./crypto.js";
9
9
  import { resolve } from "node:path";
10
10
  import { readFile } from "node:fs/promises";
11
- const VERSION = "0.3.3";
11
+ const VERSION = "0.3.4";
12
12
  const program = new Command();
13
13
  program
14
14
  .name("openfuse")
package/dist/crypto.js CHANGED
@@ -1,3 +1,8 @@
1
+ // --- Why Ed25519 + age? ---
2
+ // Ed25519: fast, deterministic, no padding oracle attacks, widely supported (SSH, FIDO2, libsodium).
3
+ // age over PGP: simpler API, no config footguns, no Web of Trust baggage — just X25519+ChaCha20-Poly1305.
4
+ // Two separate keypairs because signing (Ed25519) and encryption (X25519) are distinct operations;
5
+ // combining them would violate key-separation best practice.
1
6
  import { generateKeyPairSync, sign, verify, createPrivateKey, createPublicKey, createHash } from "node:crypto";
2
7
  import { readFile, writeFile, mkdir } from "node:fs/promises";
3
8
  import { join } from "node:path";
@@ -27,6 +32,8 @@ export async function hasKeys(storeRoot) {
27
32
  return existsSync(join(storeRoot, KEY_DIR, "private.key"));
28
33
  }
29
34
  // --- Fingerprint ---
35
+ // SHA-256 truncated to 16 bytes, displayed as colon-separated hex pairs (GPG-style).
36
+ // Human-readable so agents can verify identities out-of-band — same UX as SSH fingerprints.
30
37
  export function fingerprint(publicKey) {
31
38
  const hash = createHash("sha256").update(publicKey).digest();
32
39
  const pairs = [];
@@ -64,6 +71,12 @@ export async function signMessage(storeRoot, from, message) {
64
71
  const signature = sign(null, payload, privateKey).toString("base64");
65
72
  return { from, timestamp, message, signature, publicKey, encrypted: false };
66
73
  }
74
+ // --- Encrypt-then-sign ---
75
+ // Encrypt first, then sign the ciphertext. This order matters:
76
+ // 1. Proves WHO sent the ciphertext (non-repudiation on the encrypted blob)
77
+ // 2. Prevents Surreptitious Forwarding — signature covers the encrypted form,
78
+ // so a relay can't strip the signature and re-sign for a different recipient.
79
+ // 3. Signature is verifiable by anyone without needing the decryption key.
67
80
  export async function signAndEncrypt(storeRoot, from, plaintext, recipientAgeKey) {
68
81
  const ciphertext = await ageEncrypt(plaintext, recipientAgeKey);
69
82
  const encoded = Buffer.from(ciphertext).toString("base64");
@@ -104,6 +117,8 @@ async function ageDecrypt(ciphertext, storeRoot) {
104
117
  return await d.decrypt(ciphertext, "text");
105
118
  }
106
119
  // --- Helpers ---
120
+ // XML envelope wrapping — gives LLMs a structured, parseable format with clear
121
+ // trust signals (verified/UNVERIFIED). HTML-escaped to prevent injection into prompts.
107
122
  export function wrapExternalMessage(signed, verified) {
108
123
  const status = verified ? "verified" : "UNVERIFIED";
109
124
  const esc = (s) => s.replace(/&/g, "&amp;").replace(/"/g, "&quot;").replace(/</g, "&lt;").replace(/>/g, "&gt;");
package/dist/mcp.js CHANGED
@@ -1,12 +1,18 @@
1
1
  #!/usr/bin/env node
2
+ // --- MCP server: 13 tools ---
3
+ // Why exactly 13? They map 1:1 to the store's capabilities — no more, no less.
4
+ // CRUD for context (read/write/append), profile (read/write), inbox (list/send),
5
+ // shared files (list/read/write), status, and peer management (list/add).
6
+ // Every tool an LLM needs to be a full participant in the mesh, nothing it doesn't.
7
+ // stdio transport because MCP clients (Claude Desktop, Cursor) expect it.
2
8
  import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
3
9
  import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
4
10
  import { z } from "zod";
5
11
  import { ContextStore } from "./store.js";
6
12
  import { resolve } from "node:path";
7
- /** Reject path traversal in filenames extract basename, block dangerous patterns */
13
+ // LLMs will pass whatever filenames users ask for — including "../../etc/shadow".
14
+ // This is the trust boundary between the AI and the filesystem.
8
15
  function sanitizeFilename(name) {
9
- // Extract basename (strip any directory components)
10
16
  const base = name.split("/").pop().split("\\").pop();
11
17
  if (!base || base === "." || base === ".." || base.includes("..")) {
12
18
  throw new Error(`Invalid filename: ${name}`);
@@ -17,7 +23,7 @@ const storeDir = process.env.OPENFUSE_DIR || process.argv[3] || ".";
17
23
  const store = new ContextStore(resolve(storeDir));
18
24
  const server = new McpServer({
19
25
  name: "openfuse",
20
- version: "0.3.3",
26
+ version: "0.3.4",
21
27
  });
22
28
  // --- Context ---
23
29
  server.tool("context_read", "Read the agent's CONTEXT.md (working memory)", async () => {
package/dist/registry.js CHANGED
@@ -1,3 +1,11 @@
1
+ // --- Registry: DNS + keyserver hybrid ---
2
+ // The registry solves agent discovery without requiring a DHT or blockchain.
3
+ // It's a signed directory: agents register name→endpoint+publicKey mappings,
4
+ // similar to DNS (name resolution) + PGP keyservers (key distribution).
5
+ // Crucially, imported keys are UNTRUSTED by default — the local agent must
6
+ // explicitly `openfuse key trust` after out-of-band verification (fingerprint check).
7
+ // This is TOFU (Trust On First Use) done right: the registry distributes keys,
8
+ // but never asserts trust. Trust is a local decision.
1
9
  import { signMessage, fingerprint } from "./crypto.js";
2
10
  export const DEFAULT_REGISTRY = "https://openfuse-registry.wzmcghee.workers.dev";
3
11
  export function resolveRegistry(flag) {
@@ -16,6 +24,8 @@ export async function register(store, endpoint, registry) {
16
24
  created: new Date().toISOString(),
17
25
  capabilities: ["inbox", "shared", "knowledge"],
18
26
  };
27
+ // Canonical string prevents field-reordering attacks — pipe-delimited, deterministic order.
28
+ // Signature proves the registrant owns the private key (anti-squatting).
19
29
  const canonical = `${manifest.name}|${manifest.endpoint}|${manifest.publicKey}|${manifest.encryptionKey || ""}`;
20
30
  const signed = await signMessage(store.root, manifest.name, canonical);
21
31
  manifest.signature = signed.signature;
@@ -39,6 +49,8 @@ export async function discover(name, registry) {
39
49
  }
40
50
  return (await resp.json());
41
51
  }
52
+ // Revocation is permanent and self-authenticated: the agent signs its own revocation
53
+ // with the key being revoked. No admin needed — if you have the private key, you can kill it.
42
54
  export async function revoke(store, registry) {
43
55
  const config = await store.readConfig();
44
56
  if (!config.publicKey)
@@ -59,6 +71,7 @@ export async function revoke(store, registry) {
59
71
  throw new Error(body.error || `Revocation failed`);
60
72
  }
61
73
  }
74
+ // Non-blocking version check with 2s timeout — never delays the CLI for a slow network.
62
75
  export async function checkUpdate(currentVersion) {
63
76
  try {
64
77
  const controller = new AbortController();
package/dist/store.js CHANGED
@@ -1,3 +1,16 @@
1
+ // --- Store convention ---
2
+ // The context store IS the protocol. Every agent is a directory on disk with a known layout:
3
+ // CONTEXT.md — working memory (mutable, private)
4
+ // PROFILE.md — public address card (replaces SOUL.md: "soul" implied private identity,
5
+ // but this file is shared with peers — "profile" is honest about its visibility)
6
+ // inbox/ — append-only message queue from other agents
7
+ // outbox/ — signed envelopes waiting to be delivered
8
+ // shared/ — files explicitly published to peers
9
+ // history/ — conversation logs
10
+ // knowledge/ — reference docs
11
+ // .keys/ — Ed25519 + age keypairs (gitignored)
12
+ // .mesh.json — config, peer list, keyring
13
+ // No database, no daemon required. `ls` is your status command.
1
14
  import { readFile, writeFile, mkdir, readdir } from "node:fs/promises";
2
15
  import { join, resolve } from "node:path";
3
16
  import { existsSync } from "node:fs";
@@ -44,7 +57,8 @@ export class ContextStore {
44
57
  async readConfig() {
45
58
  const raw = await readFile(this.configPath, "utf-8");
46
59
  const config = JSON.parse(raw);
47
- // Migrate legacy trustedKeys → keyring
60
+ // Migrate legacy trustedKeys → keyring (v0.1 stored bare public keys in a flat array;
61
+ // v0.2+ uses a GPG-style keyring with trust levels, fingerprints, and encryption keys)
48
62
  if (config.trustedKeys && config.trustedKeys.length > 0) {
49
63
  if (!config.keyring)
50
64
  config.keyring = [];
@@ -95,8 +109,10 @@ export class ContextStore {
95
109
  else {
96
110
  signed = await signMessage(this.root, config.id, message);
97
111
  }
112
+ // Envelope filename encodes routing metadata so sync can match outbox files to peers
113
+ // without parsing JSON. Colons/dots replaced to stay filesystem-safe across OS.
98
114
  const timestamp = new Date().toISOString().replace(/[:.]/g, "-");
99
- const filename = `${timestamp}_${peerId}.json`;
115
+ const filename = `${timestamp}_from-${config.name}_to-${peerId}.json`;
100
116
  await writeFile(join(this.root, "outbox", filename), serializeSignedMessage(signed));
101
117
  }
102
118
  async readInbox() {
@@ -159,7 +175,8 @@ export class ContextStore {
159
175
  return readdir(sharedDir);
160
176
  }
161
177
  async share(filename, content) {
162
- // Sanitize: extract basename, reject traversal
178
+ // Path traversal defense: basename extraction + ".." rejection.
179
+ // Critical because MCP tools pass user-supplied filenames directly.
163
180
  const base = filename.split("/").pop().split("\\").pop();
164
181
  if (!base || base === "." || base === ".." || base.includes("..")) {
165
182
  throw new Error(`Invalid filename: ${filename}`);
package/dist/sync.js CHANGED
@@ -1,10 +1,16 @@
1
+ // --- Transport design ---
2
+ // Two transports, one protocol. HTTP for WAN (daemon serves context over the internet),
3
+ // SSH/rsync for LAN (zero config if you already have SSH keys — uses ~/.ssh/config aliases
4
+ // so agents reference hostnames, never raw IPs that change). Both transports do the same
5
+ // thing: pull CONTEXT.md + PROFILE.md + shared/ + knowledge/, push outbox → peer inbox.
1
6
  import { readFile, writeFile, mkdir, readdir, rename } from "node:fs/promises";
2
7
  import { join } from "node:path";
3
8
  import { existsSync } from "node:fs";
4
9
  import { execFile as execFileCb } from "node:child_process";
5
10
  import { promisify } from "node:util";
6
11
  const execFile = promisify(execFileCb);
7
- /** Move delivered message from outbox/ to outbox/.sent/ to prevent re-delivery. */
12
+ // Archive instead of delete: preserves audit trail and lets agents review what was sent.
13
+ // Without this, sync would re-deliver the same message every cycle.
8
14
  async function archiveSent(outboxDir, fname) {
9
15
  const sentDir = join(outboxDir, ".sent");
10
16
  await mkdir(sentDir, { recursive: true });
@@ -21,7 +27,8 @@ function parseUrl(url) {
21
27
  throw new Error("SSH URL must be ssh://host:/path");
22
28
  const host = rest.slice(0, colonIdx);
23
29
  const path = rest.slice(colonIdx + 1);
24
- // Validate: prevent argument injection via rsync
30
+ // Prevent argument injection: rsync treats leading "-" as flags, and shell
31
+ // metacharacters could escape the execFile boundary on some platforms.
25
32
  if (host.startsWith("-") || path.startsWith("-")) {
26
33
  throw new Error("Invalid SSH URL: host/path cannot start with '-'");
27
34
  }
@@ -91,7 +98,8 @@ async function syncHttp(store, peer, baseUrl, peerDir) {
91
98
  for (const f of files) {
92
99
  if (f.is_dir)
93
100
  continue;
94
- // Sanitize remote filename — extract basename, reject traversal
101
+ // Remote peer controls this filename — must sanitize before writing to local disk.
102
+ // Basename extraction blocks "../../../etc/passwd" style traversal from a malicious peer.
95
103
  const safeName = f.name.split("/").pop().split("\\").pop();
96
104
  if (!safeName || safeName.includes(".."))
97
105
  continue;
@@ -112,7 +120,7 @@ async function syncHttp(store, peer, baseUrl, peerDir) {
112
120
  for (const fname of await readdir(outboxDir)) {
113
121
  if (!fname.endsWith(".json"))
114
122
  continue;
115
- if (!fname.includes(peer.name) && !fname.includes(peer.id))
123
+ if (!fname.includes(`_to-${peer.name}`) && !fname.includes(peer.id))
116
124
  continue;
117
125
  try {
118
126
  const body = await readFile(join(outboxDir, fname), "utf-8");
@@ -161,13 +169,35 @@ async function syncSsh(store, peer, host, remotePath, peerDir) {
161
169
  errors.push(`${dir}/: ${e.stderr || e.message}`);
162
170
  }
163
171
  }
164
- // Push outbox
172
+ // Pull peer's outbox for messages addressed to us — peer may be behind NAT
173
+ // and can't push to us, so we grab messages they left in their outbox for us.
174
+ const config = await store.readConfig();
175
+ const myName = config.name;
176
+ const inboxDir = join(store.root, "inbox");
177
+ await mkdir(inboxDir, { recursive: true });
178
+ try {
179
+ await execFile("rsync", [
180
+ "-az", "--ignore-existing",
181
+ "--include", `*_to-${myName}.json`,
182
+ "--include", `*_to-all.json`,
183
+ "--exclude", "*",
184
+ `${host}:${remotePath}/outbox/`,
185
+ `${inboxDir}/`,
186
+ ]);
187
+ pulled.push("outbox→inbox");
188
+ }
189
+ catch (e) {
190
+ if (!String(e.stderr || e.message).includes("No such file")) {
191
+ errors.push(`pull outbox: ${e.stderr || e.message}`);
192
+ }
193
+ }
194
+ // Push our outbox → peer inbox
165
195
  const outboxDir = join(store.root, "outbox");
166
196
  if (existsSync(outboxDir)) {
167
197
  for (const fname of await readdir(outboxDir)) {
168
198
  if (!fname.endsWith(".json"))
169
199
  continue;
170
- if (!fname.includes(peer.name) && !fname.includes(peer.id))
200
+ if (!fname.includes(`_to-${peer.name}`) && !fname.includes(peer.id))
171
201
  continue;
172
202
  try {
173
203
  await execFile("rsync", ["-az", join(outboxDir, fname), `${host}:${remotePath}/inbox/${fname}`]);
package/dist/watch.js CHANGED
@@ -1,3 +1,7 @@
1
+ // --- Watch strategy ---
2
+ // chokidar for local filesystem events (inbox, CONTEXT.md) — instant, inotify-backed on Linux.
3
+ // Polling interval for remote sync (watchSync) — because remote peers are over HTTP/SSH,
4
+ // there's no filesystem event to listen for. Polling is the only option without WebSockets.
1
5
  import { watch } from "chokidar";
2
6
  import { readFile } from "node:fs/promises";
3
7
  import { join, basename } from "node:path";
@@ -25,6 +29,8 @@ export function watchInbox(storeRoot, callback) {
25
29
  }
26
30
  catch { }
27
31
  };
32
+ // awaitWriteFinish: messages are written by sync (multi-step: create + write + close).
33
+ // Without stability threshold, we'd fire on half-written files.
28
34
  const watcher = watch(inboxDir, {
29
35
  ignoreInitial: true,
30
36
  awaitWriteFinish: { stabilityThreshold: 500 },
@@ -55,8 +61,11 @@ export function watchContext(storeRoot, callback) {
55
61
  export function watchSync(store, intervalMs, onSync, onError) {
56
62
  let running = false;
57
63
  const doSync = async () => {
64
+ // Guard against overlapping syncs: if a peer is slow or unreachable, the previous
65
+ // cycle may still be running when the next interval fires. Overlapping syncs could
66
+ // double-deliver outbox messages or corrupt in-flight file writes.
58
67
  if (running)
59
- return; // skip if previous sync still in progress
68
+ return;
60
69
  running = true;
61
70
  try {
62
71
  const results = await syncAll(store);
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "openfused",
3
- "version": "0.3.3",
3
+ "version": "0.3.4",
4
4
  "description": "Decentralized context mesh for AI agents. Encrypted sync, signed messaging, MCP server. The protocol is files.",
5
5
  "license": "MIT",
6
6
  "type": "module",