openfused 0.3.12 → 0.3.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +21 -12
- package/dist/cli.js +96 -13
- package/dist/crypto.d.ts +6 -0
- package/dist/crypto.js +8 -0
- package/dist/mcp.js +3 -2
- package/dist/registry.d.ts +1 -1
- package/dist/registry.js +2 -2
- package/dist/store.d.ts +3 -0
- package/dist/store.js +15 -0
- package/dist/sync.d.ts +2 -0
- package/dist/sync.js +62 -9
- package/dist/validity.d.ts +41 -0
- package/dist/validity.js +117 -0
- package/dist/validity.test.d.ts +1 -0
- package/dist/validity.test.js +199 -0
- package/dist/watch.js +19 -3
- package/package.json +3 -3
package/README.md
CHANGED
|
@@ -10,7 +10,7 @@ No vendor lock-in. No proprietary protocol. Just a directory convention that any
|
|
|
10
10
|
|
|
11
11
|
## Install
|
|
12
12
|
|
|
13
|
-
Review the source at [github.com/
|
|
13
|
+
Review the source at [github.com/openfused/openfused](https://github.com/openfused/openfused) before installing.
|
|
14
14
|
|
|
15
15
|
```bash
|
|
16
16
|
# TypeScript (npm) — package: openfused
|
|
@@ -71,6 +71,11 @@ openfuse context --append "## Update\nFinished the research phase."
|
|
|
71
71
|
# (edit CONTEXT.md, add [DONE] to the header, then:)
|
|
72
72
|
openfuse compact
|
|
73
73
|
|
|
74
|
+
# Add validity windows to time-sensitive context
|
|
75
|
+
# <!-- validity: 6h --> for task state, 1d for sprint, 3d for architecture
|
|
76
|
+
openfuse validate # scan for stale entries
|
|
77
|
+
openfuse compact --prune-stale # archive expired validity windows
|
|
78
|
+
|
|
74
79
|
# Send a message (auto-encrypted if peer's age key is on file)
|
|
75
80
|
openfuse inbox send agent-bob "Check out shared/findings.md"
|
|
76
81
|
|
|
@@ -107,7 +112,7 @@ openfuse key export
|
|
|
107
112
|
# Import a peer's keys
|
|
108
113
|
openfuse key import wisp ./wisp-signing.key \
|
|
109
114
|
--encryption-key "age1xyz..." \
|
|
110
|
-
--address "wisp
|
|
115
|
+
--address "wisp.openfused.net"
|
|
111
116
|
|
|
112
117
|
# Trust a key (verified messages show [VERIFIED])
|
|
113
118
|
openfuse key trust wisp
|
|
@@ -127,7 +132,7 @@ my-agent (self)
|
|
|
127
132
|
encryption: age1r9qd5fpt...
|
|
128
133
|
fingerprint: 0EC3:BE39:C64D:8F15:9DEF:B74C:F448:6645
|
|
129
134
|
|
|
130
|
-
wisp wisp
|
|
135
|
+
wisp wisp.openfused.net [TRUSTED]
|
|
131
136
|
signing: 8904f73e...
|
|
132
137
|
encryption: age1z5wm7l4s...
|
|
133
138
|
fingerprint: 2CC7:8684:42E5:B304:1AC2:D870:7E20:9871
|
|
@@ -150,13 +155,17 @@ Public registry at `registry.openfused.dev`. Any agent can register, discover ot
|
|
|
150
155
|
|
|
151
156
|
```bash
|
|
152
157
|
# Register your agent
|
|
153
|
-
|
|
158
|
+
# Registers as yourname.openfused.net
|
|
159
|
+
openfuse register --endpoint https://your-server.com:2053
|
|
160
|
+
|
|
161
|
+
# Or use your own domain:
|
|
162
|
+
openfuse register --name yourname.company.com --endpoint https://yourname.company.com:2053
|
|
154
163
|
|
|
155
164
|
# Discover an agent
|
|
156
|
-
openfuse discover
|
|
165
|
+
openfuse discover wisp
|
|
157
166
|
|
|
158
167
|
# Send a message (resolves via registry, auto-imports key)
|
|
159
|
-
openfuse send
|
|
168
|
+
openfuse send wisp "hello"
|
|
160
169
|
```
|
|
161
170
|
|
|
162
171
|
- **Signed manifests** — prove you own the name (Ed25519 signature)
|
|
@@ -172,10 +181,10 @@ Pull peer context, pull their outbox for your mail, push your outbox. Two transp
|
|
|
172
181
|
|
|
173
182
|
```bash
|
|
174
183
|
# LAN — rsync over SSH (uses your ~/.ssh/config for host aliases)
|
|
175
|
-
openfuse peer add ssh://
|
|
184
|
+
openfuse peer add ssh://your-server:/home/agent/store --name wisp
|
|
176
185
|
|
|
177
186
|
# WAN — HTTP against the OpenFused daemon
|
|
178
|
-
openfuse peer add
|
|
187
|
+
openfuse peer add https://wisp.openfused.dev --name wisp
|
|
179
188
|
|
|
180
189
|
# Sync all peers
|
|
181
190
|
openfuse sync
|
|
@@ -184,7 +193,7 @@ openfuse sync
|
|
|
184
193
|
openfuse watch
|
|
185
194
|
|
|
186
195
|
# Watch + reverse SSH tunnel (NAT traversal)
|
|
187
|
-
openfuse watch --tunnel
|
|
196
|
+
openfuse watch --tunnel your-server
|
|
188
197
|
```
|
|
189
198
|
|
|
190
199
|
Sync does three things:
|
|
@@ -239,10 +248,10 @@ The daemon has two modes:
|
|
|
239
248
|
|
|
240
249
|
```bash
|
|
241
250
|
# Full mode — serves everything to trusted LAN peers
|
|
242
|
-
openfused serve --store ./my-context --port
|
|
251
|
+
openfused serve --store ./my-context --port 2053
|
|
243
252
|
|
|
244
253
|
# Public mode — PROFILE.md + inbox + outbox pickup (for WAN/tunnels)
|
|
245
|
-
openfused serve --store ./my-context --port
|
|
254
|
+
openfused serve --store ./my-context --port 2053 --public
|
|
246
255
|
```
|
|
247
256
|
|
|
248
257
|
Public mode endpoints:
|
|
@@ -267,7 +276,7 @@ Public mode endpoints:
|
|
|
267
276
|
openfuse watch -d ./store # sync every 60s
|
|
268
277
|
openfuse watch -d ./store --sync-interval 30 # sync every 30s
|
|
269
278
|
openfuse watch -d ./store --sync-interval 0 # local watch only
|
|
270
|
-
openfuse watch -d ./store --tunnel
|
|
279
|
+
openfuse watch -d ./store --tunnel your-server # + reverse SSH tunnel
|
|
271
280
|
```
|
|
272
281
|
|
|
273
282
|
## Reachability
|
package/dist/cli.js
CHANGED
|
@@ -1,14 +1,15 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
2
|
import { Command } from "commander";
|
|
3
3
|
import { nanoid } from "nanoid";
|
|
4
|
-
import { ContextStore } from "./store.js";
|
|
4
|
+
import { ContextStore, validateName } from "./store.js";
|
|
5
5
|
import { watchInbox, watchContext, watchSync } from "./watch.js";
|
|
6
6
|
import { syncAll, syncOne, deliverOne } from "./sync.js";
|
|
7
7
|
import * as registry from "./registry.js";
|
|
8
8
|
import { fingerprint } from "./crypto.js";
|
|
9
9
|
import { resolve, join } from "node:path";
|
|
10
10
|
import { readFile } from "node:fs/promises";
|
|
11
|
-
|
|
11
|
+
import { parseValiditySections, buildValidityReport } from "./validity.js";
|
|
12
|
+
const VERSION = "0.3.13";
|
|
12
13
|
const program = new Command();
|
|
13
14
|
program
|
|
14
15
|
.name("openfuse")
|
|
@@ -28,6 +29,7 @@ program
|
|
|
28
29
|
process.exit(1);
|
|
29
30
|
}
|
|
30
31
|
const id = nanoid(12);
|
|
32
|
+
validateName(opts.name, "Agent name");
|
|
31
33
|
if (opts.workspace) {
|
|
32
34
|
await store.initWorkspace(opts.name, id);
|
|
33
35
|
console.log(`Initialized shared workspace: ${store.root}`);
|
|
@@ -68,7 +70,7 @@ program
|
|
|
68
70
|
console.log(`Shared: ${s.sharedCount} files`);
|
|
69
71
|
const latest = await registry.checkUpdate(VERSION);
|
|
70
72
|
if (latest) {
|
|
71
|
-
console.error(`\n Update available: ${VERSION} → ${latest} — https://github.com/
|
|
73
|
+
console.error(`\n Update available: ${VERSION} → ${latest} — https://github.com/openfused/openfused/releases`);
|
|
72
74
|
}
|
|
73
75
|
});
|
|
74
76
|
// --- context ---
|
|
@@ -208,6 +210,15 @@ program
|
|
|
208
210
|
const { spawn } = await import("node:child_process");
|
|
209
211
|
const tunnelPort = opts.tunnelPort;
|
|
210
212
|
const tunnelHost = opts.tunnel;
|
|
213
|
+
// Prevent SSH option injection: reject values that look like flags
|
|
214
|
+
if (tunnelHost.startsWith("-") || /\s/.test(tunnelHost)) {
|
|
215
|
+
console.error("Invalid --tunnel value: must be a hostname, not flags");
|
|
216
|
+
process.exit(1);
|
|
217
|
+
}
|
|
218
|
+
if (tunnelPort.startsWith("-") || /\s/.test(tunnelPort) || !/^\d+$/.test(tunnelPort)) {
|
|
219
|
+
console.error("Invalid --tunnel-port value: must be a numeric port");
|
|
220
|
+
process.exit(1);
|
|
221
|
+
}
|
|
211
222
|
// Try autossh first, fall back to ssh
|
|
212
223
|
const cmd = await (async () => {
|
|
213
224
|
try {
|
|
@@ -220,8 +231,8 @@ program
|
|
|
220
231
|
}
|
|
221
232
|
})();
|
|
222
233
|
const args = cmd === "autossh"
|
|
223
|
-
? ["-M", "0", "-N", "-R", `${tunnelPort}:localhost:
|
|
224
|
-
: ["-N", "-R", `${tunnelPort}:localhost:
|
|
234
|
+
? ["-M", "0", "-N", "-R", `${tunnelPort}:localhost:2053`, tunnelHost, "-o", "ServerAliveInterval=15", "-o", "ExitOnForwardFailure=yes"]
|
|
235
|
+
: ["-N", "-R", `${tunnelPort}:localhost:2053`, tunnelHost, "-o", "ServerAliveInterval=15", "-o", "ExitOnForwardFailure=yes"];
|
|
225
236
|
const tunnel = spawn(cmd, args, { stdio: "ignore" });
|
|
226
237
|
tunnel.on("error", (e) => console.error(`[tunnel] ${cmd} failed: ${e.message}`));
|
|
227
238
|
tunnel.on("exit", (code) => {
|
|
@@ -229,13 +240,13 @@ program
|
|
|
229
240
|
console.error(`[tunnel] ${cmd} exited with code ${code}`);
|
|
230
241
|
});
|
|
231
242
|
process.on("exit", () => tunnel.kill());
|
|
232
|
-
console.log(`Tunnel: ${cmd} -R ${tunnelPort}:localhost:
|
|
233
|
-
console.log(`Your store is reachable at ssh://${tunnelHost}:${tunnelPort} (via daemon on :
|
|
243
|
+
console.log(`Tunnel: ${cmd} -R ${tunnelPort}:localhost:2053 ${tunnelHost}`);
|
|
244
|
+
console.log(`Your store is reachable at ssh://${tunnelHost}:${tunnelPort} (via daemon on :2053)`);
|
|
234
245
|
}
|
|
235
246
|
// Cloudflared quick tunnel (optional) — gives you a public *.trycloudflare.com URL
|
|
236
247
|
if (opts.cloudflared) {
|
|
237
248
|
const { spawn } = await import("node:child_process");
|
|
238
|
-
const cf = spawn("cloudflared", ["tunnel", "--url", "http://localhost:
|
|
249
|
+
const cf = spawn("cloudflared", ["tunnel", "--url", "http://localhost:2053"], {
|
|
239
250
|
stdio: ["ignore", "pipe", "pipe"],
|
|
240
251
|
});
|
|
241
252
|
cf.on("error", (e) => console.error(`[cloudflared] failed: ${e.message}. Install: https://developers.cloudflare.com/cloudflare-one/connections/connect-networks/downloads/`));
|
|
@@ -278,14 +289,74 @@ program
|
|
|
278
289
|
.command("compact")
|
|
279
290
|
.description("Move [DONE] sections from CONTEXT.md to history/")
|
|
280
291
|
.option("-d, --dir <path>", "Context store directory", ".")
|
|
292
|
+
.option("--prune-stale", "Also archive sections past their <!-- validity: --> window (confidence < 0.1)")
|
|
281
293
|
.action(async (opts) => {
|
|
282
294
|
const store = new ContextStore(resolve(opts.dir));
|
|
295
|
+
let prunedCount = 0;
|
|
296
|
+
if (opts.pruneStale) {
|
|
297
|
+
// Soft-expiry pruning: rewrite CONTEXT.md with stale sections stripped
|
|
298
|
+
const content = await store.readContext();
|
|
299
|
+
const sections = parseValiditySections(content);
|
|
300
|
+
const staleSections = sections.filter((s) => s.expired);
|
|
301
|
+
if (staleSections.length > 0) {
|
|
302
|
+
// Remove stale annotated sections from file
|
|
303
|
+
let updated = content;
|
|
304
|
+
for (const s of staleSections) {
|
|
305
|
+
// Strip the section text from the file (simple text removal)
|
|
306
|
+
updated = updated.replace(s.sectionText, "[STALE — archived by openfuse compact --prune-stale]");
|
|
307
|
+
}
|
|
308
|
+
await store.writeContext(updated);
|
|
309
|
+
prunedCount = staleSections.length;
|
|
310
|
+
}
|
|
311
|
+
}
|
|
283
312
|
const { moved, kept } = await store.compactContext();
|
|
284
|
-
if (moved === 0) {
|
|
313
|
+
if (moved === 0 && prunedCount === 0) {
|
|
285
314
|
console.log("Nothing to compact. Mark sections with [DONE] to archive them.");
|
|
286
315
|
}
|
|
287
316
|
else {
|
|
288
|
-
|
|
317
|
+
const parts = [];
|
|
318
|
+
if (moved > 0)
|
|
319
|
+
parts.push(`${moved} done`);
|
|
320
|
+
if (prunedCount > 0)
|
|
321
|
+
parts.push(`${prunedCount} stale`);
|
|
322
|
+
console.log(`Compacted: ${parts.join(", ")}, ${kept} kept.`);
|
|
323
|
+
}
|
|
324
|
+
});
|
|
325
|
+
// --- validate ---
|
|
326
|
+
program
|
|
327
|
+
.command("validate")
|
|
328
|
+
.description("Scan CONTEXT.md for expired validity windows and report stale entries")
|
|
329
|
+
.option("-d, --dir <path>", "Context store directory", ".")
|
|
330
|
+
.option("--json", "Output as JSON")
|
|
331
|
+
.action(async (opts) => {
|
|
332
|
+
const store = new ContextStore(resolve(opts.dir));
|
|
333
|
+
if (!(await store.exists())) {
|
|
334
|
+
console.error("No context store found. Run `openfuse init` first.");
|
|
335
|
+
process.exit(1);
|
|
336
|
+
}
|
|
337
|
+
const content = await store.readContext();
|
|
338
|
+
const sections = parseValiditySections(content);
|
|
339
|
+
const report = buildValidityReport(sections);
|
|
340
|
+
if (opts.json) {
|
|
341
|
+
console.log(JSON.stringify(report, null, 2));
|
|
342
|
+
return;
|
|
343
|
+
}
|
|
344
|
+
if (report.total === 0) {
|
|
345
|
+
console.log("No validity-annotated sections found.");
|
|
346
|
+
console.log("Add `<!-- validity: 6h -->` before time-sensitive context entries.");
|
|
347
|
+
return;
|
|
348
|
+
}
|
|
349
|
+
console.log(`Validity check: ${report.fresh} fresh, ${report.stale} stale (of ${report.total} annotated)`);
|
|
350
|
+
if (report.stale > 0) {
|
|
351
|
+
console.log("\nStale sections (confidence < 0.1):");
|
|
352
|
+
for (const e of report.entries.filter((e) => e.expired)) {
|
|
353
|
+
const age = e.addedAt ? ` written ${e.addedAt}` : "";
|
|
354
|
+
console.log(` [${e.ttlLabel} TTL${age}] ${e.preview}`);
|
|
355
|
+
}
|
|
356
|
+
console.log("\nRun `openfuse compact --prune-stale` to archive stale sections.");
|
|
357
|
+
}
|
|
358
|
+
else {
|
|
359
|
+
console.log("All annotated sections are within their validity windows. ✓");
|
|
289
360
|
}
|
|
290
361
|
});
|
|
291
362
|
// --- share ---
|
|
@@ -327,14 +398,16 @@ peer
|
|
|
327
398
|
const store = new ContextStore(resolve(opts.dir));
|
|
328
399
|
const config = await store.readConfig();
|
|
329
400
|
const peerId = nanoid(12);
|
|
401
|
+
const peerName = opts.name ?? `peer-${config.peers.length + 1}`;
|
|
402
|
+
validateName(peerName, "Peer name");
|
|
330
403
|
config.peers.push({
|
|
331
404
|
id: peerId,
|
|
332
|
-
name:
|
|
405
|
+
name: peerName,
|
|
333
406
|
url,
|
|
334
407
|
access: opts.access,
|
|
335
408
|
});
|
|
336
409
|
await store.writeConfig(config);
|
|
337
|
-
console.log(`Added peer: ${
|
|
410
|
+
console.log(`Added peer: ${peerName} (${url}) [${opts.access}]`);
|
|
338
411
|
});
|
|
339
412
|
peer
|
|
340
413
|
.command("remove <id>")
|
|
@@ -496,16 +569,23 @@ program
|
|
|
496
569
|
.command("register")
|
|
497
570
|
.description("Register this agent in the public registry")
|
|
498
571
|
.option("-d, --dir <path>", "Context store directory", ".")
|
|
572
|
+
.option("-n, --name <name>", "Full agent name (defaults to {storename}.openfused.net, or set your own domain)")
|
|
499
573
|
.requiredOption("-e, --endpoint <url>", "Endpoint URL where peers can reach you")
|
|
500
574
|
.option("-r, --registry <url>", "Registry URL")
|
|
501
575
|
.action(async (opts) => {
|
|
502
576
|
const store = new ContextStore(resolve(opts.dir));
|
|
503
577
|
const reg = registry.resolveRegistry(opts.registry);
|
|
504
|
-
const
|
|
578
|
+
const config = await store.readConfig();
|
|
579
|
+
const agentName = opts.name || `${config.name}.openfused.net`;
|
|
580
|
+
const manifest = await registry.register(store, opts.endpoint, reg, agentName);
|
|
505
581
|
console.log(`Registered: ${manifest.name} [SIGNED]`);
|
|
506
582
|
console.log(` Endpoint: ${manifest.endpoint}`);
|
|
507
583
|
console.log(` Fingerprint: ${manifest.fingerprint}`);
|
|
584
|
+
console.log(` DNS: _openfuse.${manifest.name}`);
|
|
508
585
|
console.log(` Registry: ${reg}`);
|
|
586
|
+
console.log(`\nOthers can find you with:`);
|
|
587
|
+
console.log(` openfuse discover ${manifest.name}`);
|
|
588
|
+
console.log(` openfuse send ${manifest.name} "hello"`);
|
|
509
589
|
});
|
|
510
590
|
// --- discover ---
|
|
511
591
|
program
|
|
@@ -575,6 +655,9 @@ program
|
|
|
575
655
|
// Try direct HTTP delivery if endpoint is http(s)
|
|
576
656
|
if (manifest.endpoint.startsWith("http")) {
|
|
577
657
|
try {
|
|
658
|
+
// SSRF check: registry endpoints are attacker-controlled
|
|
659
|
+
const { checkSsrf } = await import("./sync.js");
|
|
660
|
+
await checkSsrf(manifest.endpoint);
|
|
578
661
|
const body = await readFile(join(store.root, "outbox", filename), "utf-8");
|
|
579
662
|
const r = await fetch(`${manifest.endpoint.replace(/\/$/, "")}/inbox`, {
|
|
580
663
|
method: "POST",
|
package/dist/crypto.d.ts
CHANGED
|
@@ -22,6 +22,12 @@ export declare function generateKeys(storeRoot: string): Promise<{
|
|
|
22
22
|
export declare function hasKeys(storeRoot: string): Promise<boolean>;
|
|
23
23
|
export declare function fingerprint(publicKey: string): string;
|
|
24
24
|
export declare function loadAgeRecipient(storeRoot: string): Promise<string>;
|
|
25
|
+
/** Sign a raw challenge string — used for outbox authentication.
|
|
26
|
+
* Returns { signature, publicKey } without the full SignedMessage envelope. */
|
|
27
|
+
export declare function signChallenge(storeRoot: string, challenge: string): Promise<{
|
|
28
|
+
signature: string;
|
|
29
|
+
publicKey: string;
|
|
30
|
+
}>;
|
|
25
31
|
export declare function signMessage(storeRoot: string, from: string, message: string): Promise<SignedMessage>;
|
|
26
32
|
export declare function signAndEncrypt(storeRoot: string, from: string, plaintext: string, recipientAgeKey: string): Promise<SignedMessage>;
|
|
27
33
|
export declare function verifyMessage(signed: SignedMessage): boolean;
|
package/dist/crypto.js
CHANGED
|
@@ -63,6 +63,14 @@ export async function loadAgeRecipient(storeRoot) {
|
|
|
63
63
|
async function loadAgeIdentity(storeRoot) {
|
|
64
64
|
return (await readFile(join(storeRoot, KEY_DIR, "age.key"), "utf-8")).trim();
|
|
65
65
|
}
|
|
66
|
+
/** Sign a raw challenge string — used for outbox authentication.
|
|
67
|
+
* Returns { signature, publicKey } without the full SignedMessage envelope. */
|
|
68
|
+
export async function signChallenge(storeRoot, challenge) {
|
|
69
|
+
const privateKey = await loadPrivateKey(storeRoot);
|
|
70
|
+
const publicKey = await loadPublicKeyHex(storeRoot);
|
|
71
|
+
const signature = sign(null, Buffer.from(challenge), privateKey).toString("base64");
|
|
72
|
+
return { signature, publicKey };
|
|
73
|
+
}
|
|
66
74
|
export async function signMessage(storeRoot, from, message) {
|
|
67
75
|
const privateKey = await loadPrivateKey(storeRoot);
|
|
68
76
|
const publicKey = await loadPublicKeyHex(storeRoot);
|
package/dist/mcp.js
CHANGED
|
@@ -8,7 +8,7 @@
|
|
|
8
8
|
import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
|
|
9
9
|
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
|
|
10
10
|
import { z } from "zod";
|
|
11
|
-
import { ContextStore } from "./store.js";
|
|
11
|
+
import { ContextStore, validateName } from "./store.js";
|
|
12
12
|
import { resolve } from "node:path";
|
|
13
13
|
// LLMs will pass whatever filenames users ask for — including "../../etc/shadow".
|
|
14
14
|
// This is the trust boundary between the AI and the filesystem.
|
|
@@ -23,7 +23,7 @@ const storeDir = process.env.OPENFUSE_DIR || process.argv[3] || ".";
|
|
|
23
23
|
const store = new ContextStore(resolve(storeDir));
|
|
24
24
|
const server = new McpServer({
|
|
25
25
|
name: "openfuse",
|
|
26
|
-
version: "0.3.
|
|
26
|
+
version: "0.3.13",
|
|
27
27
|
});
|
|
28
28
|
// --- Context ---
|
|
29
29
|
server.tool("context_read", "Read the agent's CONTEXT.md (working memory)", async () => {
|
|
@@ -115,6 +115,7 @@ server.tool("peer_add", "Add a peer by URL (http:// for WAN, ssh://host:/path fo
|
|
|
115
115
|
name: z.string().describe("Peer name"),
|
|
116
116
|
access: z.enum(["read", "readwrite"]).default("read").describe("Access mode"),
|
|
117
117
|
}, async ({ url, name, access }) => {
|
|
118
|
+
validateName(name, "Peer name");
|
|
118
119
|
const { nanoid } = await import("nanoid");
|
|
119
120
|
const config = await store.readConfig();
|
|
120
121
|
const peerId = nanoid(12);
|
package/dist/registry.d.ts
CHANGED
|
@@ -16,7 +16,7 @@ export interface Manifest {
|
|
|
16
16
|
rotatedFrom?: string;
|
|
17
17
|
}
|
|
18
18
|
export declare function resolveRegistry(flag?: string): string;
|
|
19
|
-
export declare function register(store: ContextStore, endpoint: string, registry: string): Promise<Manifest>;
|
|
19
|
+
export declare function register(store: ContextStore, endpoint: string, registry: string, name?: string): Promise<Manifest>;
|
|
20
20
|
export declare function discover(name: string, registry: string): Promise<Manifest>;
|
|
21
21
|
export declare function revoke(store: ContextStore, registry: string): Promise<void>;
|
|
22
22
|
export declare function checkUpdate(currentVersion: string): Promise<string | null>;
|
package/dist/registry.js
CHANGED
|
@@ -12,12 +12,12 @@ export const DEFAULT_REGISTRY = "https://registry.openfused.dev";
|
|
|
12
12
|
export function resolveRegistry(flag) {
|
|
13
13
|
return flag || process.env.OPENFUSE_REGISTRY || DEFAULT_REGISTRY;
|
|
14
14
|
}
|
|
15
|
-
export async function register(store, endpoint, registry) {
|
|
15
|
+
export async function register(store, endpoint, registry, name) {
|
|
16
16
|
const config = await store.readConfig();
|
|
17
17
|
if (!config.publicKey)
|
|
18
18
|
throw new Error("No signing key — run `openfuse init` first");
|
|
19
19
|
const manifest = {
|
|
20
|
-
name: config.name,
|
|
20
|
+
name: name || config.name,
|
|
21
21
|
endpoint,
|
|
22
22
|
publicKey: config.publicKey,
|
|
23
23
|
encryptionKey: config.encryptionKey,
|
package/dist/store.d.ts
CHANGED
|
@@ -17,6 +17,9 @@ export interface PeerConfig {
|
|
|
17
17
|
access: "read" | "readwrite";
|
|
18
18
|
mountPath?: string;
|
|
19
19
|
}
|
|
20
|
+
/** Validate agent/peer names: alphanumeric + hyphens + underscores + dots, 1-64 chars.
|
|
21
|
+
* Rejects path traversal (../, /, \) and rsync glob chars (*, ?, [). */
|
|
22
|
+
export declare function validateName(name: string, label?: string): string;
|
|
20
23
|
export declare class ContextStore {
|
|
21
24
|
readonly root: string;
|
|
22
25
|
constructor(root: string);
|
package/dist/store.js
CHANGED
|
@@ -16,6 +16,20 @@ import { join, resolve } from "node:path";
|
|
|
16
16
|
import { existsSync } from "node:fs";
|
|
17
17
|
import { generateKeys, signMessage, signAndEncrypt, verifyMessage, decryptMessage, deserializeSignedMessage, serializeSignedMessage, wrapExternalMessage, fingerprint, } from "./crypto.js";
|
|
18
18
|
const STORE_DIRS = ["history", "knowledge", "inbox", "outbox", "shared", ".peers"];
|
|
19
|
+
/** Validate agent/peer names: alphanumeric + hyphens + underscores + dots, 1-64 chars.
|
|
20
|
+
* Rejects path traversal (../, /, \) and rsync glob chars (*, ?, [). */
|
|
21
|
+
export function validateName(name, label = "Name") {
|
|
22
|
+
if (!name || name.length < 1 || name.length > 64) {
|
|
23
|
+
throw new Error(`${label} must be 1-64 characters`);
|
|
24
|
+
}
|
|
25
|
+
if (!/^[a-zA-Z0-9][a-zA-Z0-9._-]*$/.test(name)) {
|
|
26
|
+
throw new Error(`${label} must start with alphanumeric and contain only a-z, 0-9, -, _, .`);
|
|
27
|
+
}
|
|
28
|
+
if (name.includes("..") || name.includes("/") || name.includes("\\")) {
|
|
29
|
+
throw new Error(`${label} contains invalid path characters`);
|
|
30
|
+
}
|
|
31
|
+
return name;
|
|
32
|
+
}
|
|
19
33
|
export class ContextStore {
|
|
20
34
|
root;
|
|
21
35
|
constructor(root) {
|
|
@@ -167,6 +181,7 @@ export class ContextStore {
|
|
|
167
181
|
}
|
|
168
182
|
// --- Inbox ---
|
|
169
183
|
async sendInbox(peerId, message) {
|
|
184
|
+
validateName(peerId, "Recipient name");
|
|
170
185
|
const config = await this.readConfig();
|
|
171
186
|
// Look up peer's encryption key in keyring
|
|
172
187
|
const entry = config.keyring.find((e) => e.name === peerId || e.address.startsWith(`${peerId}@`));
|
package/dist/sync.d.ts
CHANGED
package/dist/sync.js
CHANGED
|
@@ -8,6 +8,29 @@ import { join } from "node:path";
|
|
|
8
8
|
import { existsSync } from "node:fs";
|
|
9
9
|
import { execFile as execFileCb } from "node:child_process";
|
|
10
10
|
import { promisify } from "node:util";
|
|
11
|
+
import dns from "node:dns/promises";
|
|
12
|
+
/** Block SSRF: reject URLs pointing to private/reserved IP ranges. */
|
|
13
|
+
export async function checkSsrf(url) {
|
|
14
|
+
const parsed = new URL(url);
|
|
15
|
+
const hostname = parsed.hostname.replace(/^\[|\]$/g, ""); // strip IPv6 brackets
|
|
16
|
+
try {
|
|
17
|
+
const { address } = await dns.lookup(hostname);
|
|
18
|
+
const parts = address.split(".").map(Number);
|
|
19
|
+
if (address === "127.0.0.1" || address === "::1" || address === "0.0.0.0" ||
|
|
20
|
+
parts[0] === 10 ||
|
|
21
|
+
(parts[0] === 172 && parts[1] >= 16 && parts[1] <= 31) ||
|
|
22
|
+
(parts[0] === 192 && parts[1] === 168) ||
|
|
23
|
+
(parts[0] === 169 && parts[1] === 254) ||
|
|
24
|
+
address.startsWith("fc") || address.startsWith("fd") || address.startsWith("fe80")) {
|
|
25
|
+
throw new Error(`SSRF blocked: ${hostname} resolves to private address ${address}`);
|
|
26
|
+
}
|
|
27
|
+
}
|
|
28
|
+
catch (e) {
|
|
29
|
+
if (e.message.startsWith("SSRF blocked"))
|
|
30
|
+
throw e;
|
|
31
|
+
// DNS resolution failed — allow (could be .local or SSH alias)
|
|
32
|
+
}
|
|
33
|
+
}
|
|
11
34
|
const execFile = promisify(execFileCb);
|
|
12
35
|
// Archive instead of delete: preserves audit trail and lets agents review what was sent.
|
|
13
36
|
// Without this, sync would re-deliver the same message every cycle.
|
|
@@ -32,10 +55,10 @@ function parseUrl(url) {
|
|
|
32
55
|
if (host.startsWith("-") || path.startsWith("-")) {
|
|
33
56
|
throw new Error("Invalid SSH URL: host/path cannot start with '-'");
|
|
34
57
|
}
|
|
35
|
-
if (/[
|
|
58
|
+
if (/[;|`$&(){}\s\n\r]/.test(host)) {
|
|
36
59
|
throw new Error("Invalid SSH URL: host contains shell metacharacters");
|
|
37
60
|
}
|
|
38
|
-
if (/[;|`$&(){}]/.test(path)) {
|
|
61
|
+
if (/[;|`$&(){}\s\n\r]/.test(path)) {
|
|
39
62
|
throw new Error("Invalid SSH URL: path contains shell metacharacters");
|
|
40
63
|
}
|
|
41
64
|
return { type: "ssh", host, path };
|
|
@@ -55,6 +78,7 @@ export async function deliverOne(store, peerName, filename) {
|
|
|
55
78
|
try {
|
|
56
79
|
const transport = parseUrl(peer.url);
|
|
57
80
|
if (transport.type === "http") {
|
|
81
|
+
await checkSsrf(transport.baseUrl);
|
|
58
82
|
const body = await readFile(filePath, "utf-8");
|
|
59
83
|
const r = await fetch(`${transport.baseUrl}/inbox`, {
|
|
60
84
|
method: "POST",
|
|
@@ -114,6 +138,8 @@ async function syncHttp(store, peer, baseUrl, peerDir) {
|
|
|
114
138
|
const pulled = [];
|
|
115
139
|
const pushed = [];
|
|
116
140
|
const errors = [];
|
|
141
|
+
// SSRF check: block requests to private/reserved IPs
|
|
142
|
+
await checkSsrf(baseUrl);
|
|
117
143
|
for (const file of ["CONTEXT.md", "PROFILE.md"]) {
|
|
118
144
|
try {
|
|
119
145
|
const resp = await fetch(`${baseUrl}/read/${file}`);
|
|
@@ -154,29 +180,56 @@ async function syncHttp(store, peer, baseUrl, peerDir) {
|
|
|
154
180
|
}
|
|
155
181
|
}
|
|
156
182
|
// Pull peer's outbox for messages addressed to us (HTTP version).
|
|
157
|
-
//
|
|
158
|
-
//
|
|
159
|
-
// in a filename to prevent path traversal (e.g. from="../../.keys/evil").
|
|
183
|
+
// Authenticated: we sign a challenge proving we own this name, so the daemon
|
|
184
|
+
// only serves outbox to the actual recipient. Prevents metadata enumeration.
|
|
160
185
|
const config = await store.readConfig();
|
|
161
186
|
const myName = config.name;
|
|
162
187
|
const inboxDir = join(store.root, "inbox");
|
|
163
188
|
await mkdir(inboxDir, { recursive: true });
|
|
164
189
|
try {
|
|
165
|
-
const
|
|
190
|
+
const { signChallenge } = await import("./crypto.js");
|
|
191
|
+
const timestamp = new Date().toISOString();
|
|
192
|
+
const challenge = `OUTBOX:${myName}:${timestamp}`;
|
|
193
|
+
const { signature, publicKey } = await signChallenge(store.root, challenge);
|
|
194
|
+
const resp = await fetch(`${baseUrl}/outbox/${myName}`, {
|
|
195
|
+
headers: {
|
|
196
|
+
"X-OpenFuse-PublicKey": publicKey,
|
|
197
|
+
"X-OpenFuse-Signature": signature,
|
|
198
|
+
"X-OpenFuse-Timestamp": timestamp,
|
|
199
|
+
},
|
|
200
|
+
});
|
|
166
201
|
if (resp.ok) {
|
|
167
202
|
const messages = (await resp.json());
|
|
168
203
|
for (const msg of messages) {
|
|
169
204
|
const ts = (msg.timestamp || new Date().toISOString()).replace(/[:.]/g, "-");
|
|
170
205
|
const from = msg.from || "unknown";
|
|
171
|
-
// SECURITY: sanitize remote-controlled values before constructing local filenames.
|
|
172
|
-
// Without this, a malicious "from" like "../../.keys/x" could write outside inbox/.
|
|
173
206
|
const safeFrom = from.replace(/[^a-zA-Z0-9\-_]/g, "");
|
|
174
207
|
const safeTs = ts.replace(/[^a-zA-Z0-9\-_]/g, "");
|
|
175
208
|
const fname = `${safeTs}_from-${safeFrom}_to-${myName}.json`;
|
|
209
|
+
const outboxFile = msg._outboxFile; // filename on sender's outbox
|
|
176
210
|
const dest = join(inboxDir, fname);
|
|
177
211
|
if (!existsSync(dest)) {
|
|
178
|
-
|
|
212
|
+
// Strip the _outboxFile metadata before saving
|
|
213
|
+
const { _outboxFile, ...cleanMsg } = msg;
|
|
214
|
+
await writeFile(dest, JSON.stringify(cleanMsg, null, 2));
|
|
179
215
|
pulled.push(`outbox→${fname}`);
|
|
216
|
+
// ACK: tell sender to move this message to .sent/
|
|
217
|
+
if (outboxFile) {
|
|
218
|
+
try {
|
|
219
|
+
const ackTs = new Date().toISOString();
|
|
220
|
+
const ackChallenge = `ACK:${myName}:${outboxFile}:${ackTs}`;
|
|
221
|
+
const ackSig = await signChallenge(store.root, ackChallenge);
|
|
222
|
+
await fetch(`${baseUrl}/outbox/${myName}/${outboxFile}`, {
|
|
223
|
+
method: "DELETE",
|
|
224
|
+
headers: {
|
|
225
|
+
"X-OpenFuse-PublicKey": ackSig.publicKey,
|
|
226
|
+
"X-OpenFuse-Signature": ackSig.signature,
|
|
227
|
+
"X-OpenFuse-Timestamp": ackTs,
|
|
228
|
+
},
|
|
229
|
+
});
|
|
230
|
+
}
|
|
231
|
+
catch { } // best-effort ACK
|
|
232
|
+
}
|
|
180
233
|
}
|
|
181
234
|
}
|
|
182
235
|
}
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
export interface ValidityAnnotation {
|
|
2
|
+
/** Validity window parsed from <!-- validity: ... --> */
|
|
3
|
+
ttlMs: number;
|
|
4
|
+
/** Written timestamp from <!-- openfuse:added: ISO --> (if present) */
|
|
5
|
+
addedAt?: Date;
|
|
6
|
+
/** True if confidence < 0.1 */
|
|
7
|
+
expired: boolean;
|
|
8
|
+
/** Soft confidence [0, 1]. 1.0 at write time, 0.5 at TTL, asymptotes to 0 */
|
|
9
|
+
confidence: number;
|
|
10
|
+
/** The section text following this annotation (until the next annotation or EOF) */
|
|
11
|
+
sectionText: string;
|
|
12
|
+
}
|
|
13
|
+
export interface ValidityReport {
|
|
14
|
+
total: number;
|
|
15
|
+
stale: number;
|
|
16
|
+
fresh: number;
|
|
17
|
+
entries: Array<{
|
|
18
|
+
preview: string;
|
|
19
|
+
addedAt: string | null;
|
|
20
|
+
ttlLabel: string;
|
|
21
|
+
confidence: number;
|
|
22
|
+
expired: boolean;
|
|
23
|
+
}>;
|
|
24
|
+
}
|
|
25
|
+
/** Parse a TTL label like "6h" or "3d" into milliseconds */
|
|
26
|
+
export declare function parseTtlMs(value: string, unit: string): number;
|
|
27
|
+
/** Soft-expiry confidence. Exponential decay: 1.0 at write, 0.5 at TTL, →0 over time. */
|
|
28
|
+
export declare function computeConfidence(addedAt: Date | undefined, ttlMs: number, now?: Date): number;
|
|
29
|
+
/**
|
|
30
|
+
* Parse CONTEXT.md content for validity-annotated sections.
|
|
31
|
+
* Each `<!-- validity: Xh -->` comment starts a new annotated section.
|
|
32
|
+
* Sections without validity annotations are skipped.
|
|
33
|
+
*/
|
|
34
|
+
export declare function parseValiditySections(content: string, now?: Date): ValidityAnnotation[];
|
|
35
|
+
/** Build a human-readable report of validity window status */
|
|
36
|
+
export declare function buildValidityReport(sections: ValidityAnnotation[]): ValidityReport;
|
|
37
|
+
export declare const DEFAULT_TTL_TIERS: {
|
|
38
|
+
readonly task: number;
|
|
39
|
+
readonly sprint: number;
|
|
40
|
+
readonly architecture: number;
|
|
41
|
+
};
|
package/dist/validity.js
ADDED
|
@@ -0,0 +1,117 @@
|
|
|
1
|
+
// --- Context validity windows ---
// Agents often write context that's only valid for a bounded time window.
// This module parses `<!-- validity: Xh -->` (or `1d`, `3d`) annotations
// from CONTEXT.md, checks freshness against `<!-- openfuse:added: ISO -->`,
// and provides soft-expiry confidence scoring via exponential decay.
//
// Design decisions:
// - Advisory only: agents that don't understand validity annotations read
//   CONTEXT.md normally. No schema enforcement.
// - Decay starts at write time, reaches 0.5 at TTL, asymptotes toward 0.
//   Agents can down-weight uncertain context rather than hard-drop it.
// - "Stale" means confidence < 0.1 (roughly 3× TTL from write time).
// --- Parsing ---
// Matches e.g. `<!-- validity: 6h -->` or `<!-- validity: 3d, component: x -->`.
// Capture 1: integer magnitude; capture 2: unit letter (h or d, case-insensitive).
// A trailing `, ...` clause (extra metadata) is tolerated and ignored.
const VALIDITY_RE = /<!--\s*validity:\s*(\d+)\s*(h|d)\s*(?:,\s*[^>]*)?\s*-->/i;
// Matches `<!-- openfuse:added: <timestamp> -->`.
// Capture 1: the timestamp token (any run of non-space, non-`>` characters);
// validation of the timestamp itself happens at the parse site via `new Date`.
const ADDED_RE = /<!--\s*openfuse:added:\s*([^\s>]+)\s*-->/i;
|
|
16
|
+
/**
 * Convert a TTL label into milliseconds.
 *
 * @param {string} value - Integer magnitude, e.g. "6" or "3".
 * @param {string} unit - "h" (hours) or "d" (days); case-insensitive. Anything
 *   other than "d" is treated as hours.
 * @returns {number} Window length in milliseconds.
 */
export function parseTtlMs(value, unit) {
    const HOUR_MS = 60 * 60 * 1000;
    const amount = parseInt(value, 10);
    const unitMs = unit.toLowerCase() === "d" ? 24 * HOUR_MS : HOUR_MS;
    return amount * unitMs;
}
|
|
23
|
+
/**
 * Soft-expiry confidence for an annotated section.
 * Exponential half-life decay: 1.0 at write time, exactly 0.5 at the TTL,
 * approaching 0 asymptotically afterwards.
 *
 * @param {Date | undefined} addedAt - Write timestamp; undefined means "just written".
 * @param {number} ttlMs - Validity window (half-life) in milliseconds.
 * @param {Date} [now] - Evaluation time; defaults to the current time.
 * @returns {number} Confidence score in (0, 1].
 */
export function computeConfidence(addedAt, ttlMs, now = new Date()) {
    // No timestamp on record — treat the entry as freshly written.
    if (!addedAt)
        return 1.0;
    const elapsedMs = now.getTime() - addedAt.getTime();
    // Future timestamps (clock skew) clamp to full confidence.
    if (elapsedMs <= 0)
        return 1.0;
    // confidence = 0.5 ^ (age / ttl): one half-life per TTL elapsed.
    return Math.pow(0.5, elapsedMs / ttlMs);
}
|
|
35
|
+
/**
 * Parse CONTEXT.md content for validity-annotated sections.
 * Each `<!-- validity: Xh -->` comment starts a new annotated section that
 * runs until the next annotation or EOF. An optional
 * `<!-- openfuse:added: ISO -->` line inside the section supplies the write
 * timestamp (first valid one wins). Text outside any annotated section is
 * ignored.
 *
 * @param {string} content - Raw CONTEXT.md text.
 * @param {Date} [now] - Evaluation time for confidence scoring.
 * @returns {ValidityAnnotation[]} Sections in document order.
 */
export function parseValiditySections(content, now = new Date()) {
    const sections = [];
    // Non-null while we are accumulating an annotated section.
    let active = null; // { ttlMs, addedAt, lines }
    const close = () => {
        if (active === null)
            return;
        const confidence = computeConfidence(active.addedAt, active.ttlMs, now);
        sections.push({
            ttlMs: active.ttlMs,
            addedAt: active.addedAt,
            // "Expired" = confidence below 0.1 (roughly 3.3× TTL after write).
            expired: confidence < 0.1,
            confidence,
            sectionText: active.lines.join("\n").trim(),
        });
        active = null;
    };
    for (const line of content.split("\n")) {
        const validity = VALIDITY_RE.exec(line);
        if (validity) {
            // A new annotation finalizes any section in progress.
            close();
            active = {
                ttlMs: parseTtlMs(validity[1], validity[2]),
                addedAt: undefined,
                lines: [],
            };
            continue;
        }
        const added = ADDED_RE.exec(line);
        if (added && active !== null && !active.addedAt) {
            const parsed = new Date(added[1]);
            if (!isNaN(parsed.getTime())) {
                active.addedAt = parsed;
            }
            // The annotation comment itself is not section body text.
            continue;
        }
        if (active !== null) {
            active.lines.push(line);
        }
    }
    close();
    return sections;
}
|
|
86
|
+
/**
 * Build a human-readable report of validity window status.
 *
 * @param {ValidityAnnotation[]} sections - Parsed validity sections.
 * @returns {ValidityReport} Counts plus one display entry per section.
 */
export function buildValidityReport(sections) {
    const entries = sections.map((s) => {
        // First non-blank line of the section, truncated for display.
        const preview = s.sectionText.split("\n").find((l) => l.trim()) ?? "(empty)";
        const hours = s.ttlMs / (60 * 60 * 1000);
        // Label in days only when the TTL is a whole number of days.
        // Rounding unconditionally mislabeled e.g. a 36h window as "2d"
        // and a 25h window as "1d"; those now stay in hours.
        const ttlLabel = hours >= 24 && hours % 24 === 0
            ? `${hours / 24}d`
            : `${Math.round(hours)}h`;
        return {
            preview: preview.slice(0, 80),
            addedAt: s.addedAt ? s.addedAt.toISOString() : null,
            ttlLabel,
            // Two decimal places is enough resolution for a soft score.
            confidence: Math.round(s.confidence * 100) / 100,
            expired: s.expired,
        };
    });
    return {
        total: sections.length,
        stale: sections.filter((s) => s.expired).length,
        fresh: sections.filter((s) => !s.expired).length,
        entries,
    };
}
|
|
108
|
+
// --- Default TTL tiers (from multi-agent swarm research) ---
// Based on 20-agent, 20-run PDR evaluation dataset:
//   task-state context (what I'm working on right now): ~6h half-life
//   sprint-context (sprint goals, current approach): ~24h half-life
//   project-architecture (design decisions, constraints): ~72h half-life
export const DEFAULT_TTL_TIERS = {
    task: 6 * 3600000, // 6h in ms
    sprint: 24 * 3600000, // 24h in ms
    architecture: 72 * 3600000, // 72h in ms
};
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,199 @@
|
|
|
1
|
+
// Tests for context validity window module
|
|
2
|
+
// Run: node --import tsx/esm src/validity.test.ts
|
|
3
|
+
// (or after build: node dist/validity.test.js)
|
|
4
|
+
import { describe, it } from "node:test";
|
|
5
|
+
import assert from "node:assert/strict";
|
|
6
|
+
import { parseTtlMs, computeConfidence, parseValiditySections, buildValidityReport, DEFAULT_TTL_TIERS, } from "./validity.js";
|
|
7
|
+
// --- parseTtlMs ---
describe("parseTtlMs", () => {
    const HOUR_MS = 60 * 60 * 1000;
    it("parses hours", () => {
        assert.equal(parseTtlMs("6", "h"), 6 * HOUR_MS);
    });
    it("parses days", () => {
        assert.equal(parseTtlMs("3", "d"), 3 * 24 * HOUR_MS);
    });
    it("parses 1d", () => {
        assert.equal(parseTtlMs("1", "d"), 24 * HOUR_MS);
    });
    it("parses 24h", () => {
        assert.equal(parseTtlMs("24", "H"), 24 * HOUR_MS);
    });
});
|
|
22
|
+
// --- computeConfidence ---
describe("computeConfidence", () => {
    const ttl6h = 6 * 60 * 60 * 1000;
    // Shorthand for building fixed evaluation/write times.
    const at = (iso) => new Date(iso);
    it("returns 1.0 when no addedAt (no timestamp)", () => {
        assert.equal(computeConfidence(undefined, ttl6h), 1.0);
    });
    it("returns 1.0 at write time", () => {
        assert.equal(computeConfidence(at("2026-03-21T12:00:00Z"), ttl6h, at("2026-03-21T12:00:00Z")), 1.0);
    });
    it("returns 0.5 at TTL boundary", () => {
        // Evaluation exactly 6h after write.
        const conf = computeConfidence(at("2026-03-21T00:00:00Z"), ttl6h, at("2026-03-21T06:00:00Z"));
        assert.ok(Math.abs(conf - 0.5) < 0.0001, `expected ~0.5, got ${conf}`);
    });
    it("returns ~0.25 at 2× TTL", () => {
        // 12h elapsed against a 6h TTL.
        const conf = computeConfidence(at("2026-03-21T00:00:00Z"), ttl6h, at("2026-03-21T12:00:00Z"));
        assert.ok(Math.abs(conf - 0.25) < 0.0001, `expected ~0.25, got ${conf}`);
    });
    it("returns < 0.1 at 3.32× TTL (expired threshold)", () => {
        // 20h elapsed ≈ 3.33× the 6h TTL.
        const conf = computeConfidence(at("2026-03-21T00:00:00Z"), ttl6h, at("2026-03-21T20:00:00Z"));
        assert.ok(conf < 0.1, `expected < 0.1, got ${conf}`);
    });
    it("handles future addedAt gracefully (returns 1.0)", () => {
        assert.equal(computeConfidence(at("2026-03-22T00:00:00Z"), ttl6h, at("2026-03-21T12:00:00Z")), 1.0);
    });
});
|
|
57
|
+
// --- parseValiditySections ---
describe("parseValiditySections", () => {
    const HOUR_MS = 60 * 60 * 1000;
    // Build a CONTEXT.md fixture from individual lines.
    const doc = (...lines) => lines.join("\n");
    it("returns empty array when no validity annotations", () => {
        const sections = parseValiditySections("# Context\n\nWorking on: auth refactor\n");
        assert.equal(sections.length, 0);
    });
    it("parses a single validity section with addedAt", () => {
        const addedAt = "2026-03-21T10:00:00Z";
        const now = new Date("2026-03-21T13:00:00Z"); // 3h after write
        const sections = parseValiditySections(doc(
            "<!-- validity: 6h -->",
            `<!-- openfuse:added: ${addedAt} -->`,
            "Working on: auth refactor",
            "Blocked on: IAM role",
        ), now);
        assert.equal(sections.length, 1);
        assert.equal(sections[0].ttlMs, 6 * HOUR_MS);
        // toISOString() returns a .000Z suffix; just check the date round-trips
        assert.ok(sections[0].addedAt instanceof Date);
        assert.equal(sections[0].addedAt.getTime(), new Date(addedAt).getTime());
        assert.ok(sections[0].confidence > 0.5, "should be > 0.5 at 3h of 6h TTL");
        assert.equal(sections[0].expired, false);
        assert.ok(sections[0].sectionText.includes("auth refactor"));
    });
    it("parses a section without addedAt (confidence = 1.0)", () => {
        const sections = parseValiditySections(doc(
            "<!-- validity: 1d -->",
            "Architecture: JWT with 15-minute expiry",
        ));
        assert.equal(sections.length, 1);
        assert.equal(sections[0].addedAt, undefined);
        assert.equal(sections[0].confidence, 1.0);
        assert.equal(sections[0].expired, false);
    });
    it("marks expired sections correctly", () => {
        const now = new Date("2026-03-22T00:00:00Z");
        const sections = parseValiditySections(doc(
            "<!-- validity: 6h -->",
            "<!-- openfuse:added: 2026-03-20T00:00:00Z -->", // 48h before now
            "Blocked on: IAM role",
        ), now);
        assert.equal(sections.length, 1);
        assert.equal(sections[0].expired, true);
        assert.ok(sections[0].confidence < 0.1);
    });
    it("parses multiple validity sections independently", () => {
        const now = new Date("2026-03-22T00:00:00Z");
        const sections = parseValiditySections(doc(
            "# Context",
            "",
            "<!-- validity: 6h -->",
            "<!-- openfuse:added: 2026-03-21T23:00:00Z -->", // 1h ago — fresh vs 6h TTL
            "Working on: auth refactor",
            "",
            "<!-- validity: 1d -->",
            "<!-- openfuse:added: 2026-03-18T00:00:00Z -->", // 96h ago — 0.5^4 = 0.0625 < 0.1
            "Sprint goal: ship auth by Friday",
        ), now);
        assert.equal(sections.length, 2);
        assert.equal(sections[0].expired, false);
        assert.equal(sections[1].expired, true);
    });
    it("parses validity with component annotation", () => {
        const sections = parseValiditySections(doc(
            "<!-- validity: 6h, component: auth-gateway -->",
            "Working on: JWT refactor",
        ));
        assert.equal(sections.length, 1);
        assert.equal(sections[0].ttlMs, 6 * HOUR_MS);
    });
    it("parses day-based TTL", () => {
        const sections = parseValiditySections(doc(
            "<!-- validity: 3d -->",
            "Architecture: microservices with shared auth layer",
        ));
        assert.equal(sections.length, 1);
        assert.equal(sections[0].ttlMs, 3 * 24 * HOUR_MS);
    });
});
|
|
146
|
+
// --- buildValidityReport ---
describe("buildValidityReport", () => {
    // Build a CONTEXT.md fixture from individual lines.
    const doc = (...lines) => lines.join("\n");
    it("returns correct counts for mixed fresh/stale", () => {
        const now = new Date("2026-03-22T00:00:00Z");
        const sections = parseValiditySections(doc(
            "<!-- validity: 6h -->",
            "<!-- openfuse:added: 2026-03-21T23:00:00Z -->", // fresh
            "Working on: auth refactor",
            "",
            "<!-- validity: 6h -->",
            "<!-- openfuse:added: 2026-03-20T00:00:00Z -->", // stale
            "Blocked on: IAM role (OLD)",
        ), now);
        const report = buildValidityReport(sections);
        assert.equal(report.total, 2);
        assert.equal(report.fresh, 1);
        assert.equal(report.stale, 1);
    });
    it("formats ttlLabel correctly for hours and days", () => {
        const report = buildValidityReport(parseValiditySections(doc(
            "<!-- validity: 6h -->",
            "Task context",
            "",
            "<!-- validity: 1d -->",
            "Sprint context",
            "",
            "<!-- validity: 3d -->",
            "Architecture context",
        )));
        assert.equal(report.entries[0].ttlLabel, "6h");
        assert.equal(report.entries[1].ttlLabel, "1d");
        assert.equal(report.entries[2].ttlLabel, "3d");
    });
    it("truncates long section previews to 80 chars", () => {
        const content = `<!-- validity: 6h -->\n${"A".repeat(100)}`;
        const report = buildValidityReport(parseValiditySections(content));
        assert.ok(report.entries[0].preview.length <= 80);
    });
});
|
|
192
|
+
// --- DEFAULT_TTL_TIERS ---
describe("DEFAULT_TTL_TIERS", () => {
    it("has expected values", () => {
        const HOUR_MS = 60 * 60 * 1000;
        assert.equal(DEFAULT_TTL_TIERS.task, 6 * HOUR_MS);
        assert.equal(DEFAULT_TTL_TIERS.sprint, 24 * HOUR_MS);
        assert.equal(DEFAULT_TTL_TIERS.architecture, 72 * HOUR_MS);
    });
});
|
package/dist/watch.js
CHANGED
|
@@ -7,8 +7,10 @@ import { readFile } from "node:fs/promises";
|
|
|
7
7
|
import { join, basename } from "node:path";
|
|
8
8
|
import { deserializeSignedMessage, verifyMessage, wrapExternalMessage } from "./crypto.js";
|
|
9
9
|
import { syncAll } from "./sync.js";
|
|
10
|
+
import { ContextStore } from "./store.js";
|
|
10
11
|
export function watchInbox(storeRoot, callback) {
|
|
11
12
|
const inboxDir = join(storeRoot, "inbox");
|
|
13
|
+
const store = new ContextStore(storeRoot);
|
|
12
14
|
const handleFile = async (filePath) => {
|
|
13
15
|
if (!filePath.endsWith(".json") && !filePath.endsWith(".md"))
|
|
14
16
|
return;
|
|
@@ -16,15 +18,29 @@ export function watchInbox(storeRoot, callback) {
|
|
|
16
18
|
const raw = await readFile(filePath, "utf-8");
|
|
17
19
|
const signed = deserializeSignedMessage(raw);
|
|
18
20
|
if (signed) {
|
|
19
|
-
const
|
|
21
|
+
const sigValid = verifyMessage(signed);
|
|
22
|
+
// Check keyring for trust — not just signature math. Without this,
|
|
23
|
+
// any random keypair shows as [VERIFIED] in watch mode output.
|
|
24
|
+
let verified = false;
|
|
25
|
+
if (sigValid) {
|
|
26
|
+
try {
|
|
27
|
+
const config = await store.readConfig();
|
|
28
|
+
const trusted = config.autoTrust
|
|
29
|
+
? config.keyring.some((k) => k.signingKey.trim() === signed.publicKey.trim())
|
|
30
|
+
: config.keyring.some((k) => k.trusted && k.signingKey.trim() === signed.publicKey.trim());
|
|
31
|
+
verified = trusted;
|
|
32
|
+
}
|
|
33
|
+
catch { }
|
|
34
|
+
}
|
|
20
35
|
callback(signed.from, wrapExternalMessage(signed, verified), filePath, verified);
|
|
21
36
|
return;
|
|
22
37
|
}
|
|
23
|
-
// Unsigned fallback
|
|
38
|
+
// Unsigned fallback — escape XML attributes to prevent injection
|
|
24
39
|
const filename = basename(filePath).replace(/\.(md|json)$/, "");
|
|
25
40
|
const parts = filename.split("_");
|
|
26
41
|
const from = parts.slice(1).join("_");
|
|
27
|
-
const
|
|
42
|
+
const esc = (s) => s.replace(/&/g, "&").replace(/"/g, """).replace(/</g, "<").replace(/>/g, ">");
|
|
43
|
+
const wrapped = `<external_message from="${esc(from)}" verified="false" status="UNVERIFIED">\n${esc(raw)}\n</external_message>`;
|
|
28
44
|
callback(from, wrapped, filePath, false);
|
|
29
45
|
}
|
|
30
46
|
catch { }
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "openfused",
|
|
3
|
-
"version": "0.3.
|
|
3
|
+
"version": "0.3.14",
|
|
4
4
|
"description": "The file protocol for AI agent context. Encrypted, signed, peer-to-peer.",
|
|
5
5
|
"license": "MIT",
|
|
6
6
|
"type": "module",
|
|
@@ -40,9 +40,9 @@
|
|
|
40
40
|
},
|
|
41
41
|
"repository": {
|
|
42
42
|
"type": "git",
|
|
43
|
-
"url": "git+https://github.com/
|
|
43
|
+
"url": "git+https://github.com/openfused/openfused.git"
|
|
44
44
|
},
|
|
45
|
-
"homepage": "https://github.com/
|
|
45
|
+
"homepage": "https://github.com/openfused/openfused",
|
|
46
46
|
"keywords": [
|
|
47
47
|
"ai",
|
|
48
48
|
"agent",
|