@wipcomputer/memory-crystal 0.7.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (157)
  1. package/.env.example +20 -0
  2. package/CHANGELOG.md +367 -0
  3. package/LICENSE +21 -0
  4. package/README-ENTERPRISE.md +226 -0
  5. package/README.md +127 -0
  6. package/RELAY.md +199 -0
  7. package/TECHNICAL.md +628 -0
  8. package/_trash/RELEASE-NOTES-v0-7-4.md +64 -0
  9. package/_trash/RELEASE-NOTES-v0-7-5.md +19 -0
  10. package/cloud/README.md +116 -0
  11. package/cloud/docs/gpt-system-instructions.md +69 -0
  12. package/cloud/migrations/0001_init.sql +52 -0
  13. package/dist/bridge.d.ts +7 -0
  14. package/dist/bridge.js +14 -0
  15. package/dist/bulk-copy.d.ts +17 -0
  16. package/dist/bulk-copy.js +90 -0
  17. package/dist/cc-hook.d.ts +8 -0
  18. package/dist/cc-hook.js +368 -0
  19. package/dist/cc-poller.d.ts +1 -0
  20. package/dist/cc-poller.js +550 -0
  21. package/dist/chunk-25LXQJ4Z.js +110 -0
  22. package/dist/chunk-2DRXIRQW.js +97 -0
  23. package/dist/chunk-2ZNH5F6E.js +1281 -0
  24. package/dist/chunk-3G3SFYYI.js +288 -0
  25. package/dist/chunk-3RG5ZIWI.js +10 -0
  26. package/dist/chunk-3S6TI23B.js +97 -0
  27. package/dist/chunk-3VFIJYS4.js +818 -0
  28. package/dist/chunk-52QE3YI3.js +1169 -0
  29. package/dist/chunk-57RP3DIN.js +1205 -0
  30. package/dist/chunk-5HSZ4W2P.js +62 -0
  31. package/dist/chunk-645IPXW3.js +290 -0
  32. package/dist/chunk-7A7ELD4C.js +1205 -0
  33. package/dist/chunk-7FYY4GZM.js +1205 -0
  34. package/dist/chunk-7IUE7ODU.js +254 -0
  35. package/dist/chunk-7RMLKZIS.js +108 -0
  36. package/dist/chunk-AA3OPP4Z.js +432 -0
  37. package/dist/chunk-ASSZDR6I.js +108 -0
  38. package/dist/chunk-AYRJVWUC.js +1205 -0
  39. package/dist/chunk-CCYI5O3D.js +148 -0
  40. package/dist/chunk-D3I3ZSE2.js +411 -0
  41. package/dist/chunk-DACSKLY6.js +219 -0
  42. package/dist/chunk-DW5B4BL7.js +108 -0
  43. package/dist/chunk-EKSACBTJ.js +1070 -0
  44. package/dist/chunk-EXEZZADG.js +248 -0
  45. package/dist/chunk-F3Y7EL7K.js +83 -0
  46. package/dist/chunk-FHRZNOMW.js +1205 -0
  47. package/dist/chunk-IM7N24MT.js +129 -0
  48. package/dist/chunk-IPNYIXFK.js +1178 -0
  49. package/dist/chunk-J7MRSZIO.js +167 -0
  50. package/dist/chunk-JITKI2OI.js +106 -0
  51. package/dist/chunk-JWZXYVET.js +1068 -0
  52. package/dist/chunk-KCQUXVYT.js +108 -0
  53. package/dist/chunk-KOQ43OX6.js +1281 -0
  54. package/dist/chunk-KYVWO6ZM.js +1069 -0
  55. package/dist/chunk-L3VHARQH.js +413 -0
  56. package/dist/chunk-LBWDS6BE.js +288 -0
  57. package/dist/chunk-LOVAHSQV.js +411 -0
  58. package/dist/chunk-LQOYCAGG.js +446 -0
  59. package/dist/chunk-LWAIPJ2W.js +146 -0
  60. package/dist/chunk-M5DHKW7M.js +127 -0
  61. package/dist/chunk-MBKCIJHM.js +1328 -0
  62. package/dist/chunk-MK42FMEG.js +147 -0
  63. package/dist/chunk-MOBMYHKL.js +1205 -0
  64. package/dist/chunk-MPLTNMRG.js +67 -0
  65. package/dist/chunk-NIJCVN3O.js +147 -0
  66. package/dist/chunk-NZCFSZQ7.js +1205 -0
  67. package/dist/chunk-O2UITJGH.js +465 -0
  68. package/dist/chunk-OCRA44AZ.js +108 -0
  69. package/dist/chunk-P3KJR66H.js +117 -0
  70. package/dist/chunk-PEK6JH65.js +432 -0
  71. package/dist/chunk-PJ6FFKEX.js +77 -0
  72. package/dist/chunk-PLUBBZYR.js +800 -0
  73. package/dist/chunk-PNKVD2UK.js +26 -0
  74. package/dist/chunk-PSQZURHO.js +229 -0
  75. package/dist/chunk-SGL6ISBJ.js +1061 -0
  76. package/dist/chunk-SJABZZT5.js +97 -0
  77. package/dist/chunk-TD3P3K32.js +1199 -0
  78. package/dist/chunk-TMDZJJKV.js +288 -0
  79. package/dist/chunk-UNHVZB5G.js +411 -0
  80. package/dist/chunk-VAFTWSTE.js +1061 -0
  81. package/dist/chunk-VNFXFQBB.js +217 -0
  82. package/dist/chunk-X3GVFKSJ.js +1205 -0
  83. package/dist/chunk-XZ3S56RQ.js +1061 -0
  84. package/dist/chunk-Y72C7F6O.js +148 -0
  85. package/dist/chunk-YLICP577.js +1205 -0
  86. package/dist/chunk-YX6AXLVK.js +159 -0
  87. package/dist/chunk-ZCQYHTNU.js +146 -0
  88. package/dist/cli.d.ts +1 -0
  89. package/dist/cli.js +1105 -0
  90. package/dist/cloud-crystal.js +6 -0
  91. package/dist/core.d.ts +232 -0
  92. package/dist/core.js +12 -0
  93. package/dist/crypto.d.ts +20 -0
  94. package/dist/crypto.js +27 -0
  95. package/dist/crystal-capture.sh +29 -0
  96. package/dist/crystal-serve.d.ts +4 -0
  97. package/dist/crystal-serve.js +252 -0
  98. package/dist/dev-update-SZ2Z4WCQ.js +6 -0
  99. package/dist/discover.d.ts +30 -0
  100. package/dist/discover.js +177 -0
  101. package/dist/doctor.d.ts +9 -0
  102. package/dist/doctor.js +334 -0
  103. package/dist/dream-weaver.d.ts +8 -0
  104. package/dist/dream-weaver.js +56 -0
  105. package/dist/file-sync.d.ts +48 -0
  106. package/dist/file-sync.js +18 -0
  107. package/dist/installer.d.ts +61 -0
  108. package/dist/installer.js +618 -0
  109. package/dist/ldm-backup.sh +116 -0
  110. package/dist/ldm.d.ts +50 -0
  111. package/dist/ldm.js +32 -0
  112. package/dist/mcp-server.d.ts +1 -0
  113. package/dist/mcp-server.js +265 -0
  114. package/dist/migrate.d.ts +1 -0
  115. package/dist/migrate.js +89 -0
  116. package/dist/mirror-sync.d.ts +1 -0
  117. package/dist/mirror-sync.js +159 -0
  118. package/dist/oc-backfill.d.ts +19 -0
  119. package/dist/oc-backfill.js +74 -0
  120. package/dist/openclaw.d.ts +5 -0
  121. package/dist/openclaw.js +423 -0
  122. package/dist/pair.d.ts +4 -0
  123. package/dist/pair.js +75 -0
  124. package/dist/poller.d.ts +1 -0
  125. package/dist/poller.js +634 -0
  126. package/dist/role.d.ts +24 -0
  127. package/dist/role.js +13 -0
  128. package/dist/search-pipeline-4K4OJSSS.js +255 -0
  129. package/dist/search-pipeline-4PRS6LI7.js +280 -0
  130. package/dist/search-pipeline-7UJMXPLO.js +280 -0
  131. package/dist/search-pipeline-DQTRLGBH.js +74 -0
  132. package/dist/search-pipeline-HNG37REH.js +282 -0
  133. package/dist/search-pipeline-IZFPLBUB.js +280 -0
  134. package/dist/search-pipeline-MID6F26Q.js +73 -0
  135. package/dist/search-pipeline-N52JZFNN.js +282 -0
  136. package/dist/search-pipeline-OPB2PRQQ.js +280 -0
  137. package/dist/search-pipeline-VXTE5HAD.js +262 -0
  138. package/dist/staging.d.ts +29 -0
  139. package/dist/staging.js +21 -0
  140. package/dist/summarize.d.ts +19 -0
  141. package/dist/summarize.js +10 -0
  142. package/dist/worker-demo.js +186 -0
  143. package/dist/worker-mcp.js +404 -0
  144. package/dist/worker.js +137 -0
  145. package/migrations/0001_init.sql +51 -0
  146. package/migrations/0002_cloud_storage.sql +49 -0
  147. package/openclaw.plugin.json +11 -0
  148. package/package.json +57 -0
  149. package/scripts/crystal-capture 2.sh +29 -0
  150. package/scripts/crystal-capture.sh +29 -0
  151. package/scripts/deploy-cloud 2.sh +153 -0
  152. package/scripts/deploy-cloud.sh +153 -0
  153. package/scripts/ldm-backup.sh +116 -0
  154. package/scripts/migrate-lance-to-sqlite.mjs +217 -0
  155. package/skills/memory/SKILL.md +427 -0
  156. package/wrangler-demo.toml +8 -0
  157. package/wrangler-mcp.toml +24 -0
package/dist/worker.js ADDED
@@ -0,0 +1,137 @@
1
// src/worker.ts
// Resolve the caller's agent identity from the configured bearer tokens.
// Returns { agentId } on success, or a JSON error Response (401 missing
// header, 403 unknown token).
//
// NOTE: the previous implementation looked tokens up in a plain object
// literal, so prototype keys ("constructor", "hasOwnProperty", ...) resolved
// to truthy values and authenticated the caller. Explicit comparisons below
// close that hole; only tokens actually set in the environment can match.
function authenticate(request, env) {
  const header = request.headers.get("Authorization");
  if (!header?.startsWith("Bearer ")) {
    return json({ error: "Missing Authorization header" }, 401);
  }
  const presented = header.slice(7);
  const known = [
    [env.AUTH_TOKEN_CC_AIR, "cc-air"],
    [env.AUTH_TOKEN_CC_MINI, "cc-mini"],
    [env.AUTH_TOKEN_LESA, "lesa-mini"],
  ];
  for (const [token, agentId] of known) {
    if (token && token === presented) {
      return { agentId };
    }
  }
  return json({ error: "Invalid token" }, 403);
}
18
// Channel whitelist shared by drop/pickup/confirm routing and the
// scheduled TTL sweep.
var VALID_CHANNELS = [
  "conversations",
  "mirror",
  "commands",
  "files",
  "chatgpt",
  "chatgpt-attachments",
];
// True when `channel` is one of the whitelisted relay channels.
function isValidChannel(channel) {
  return VALID_CHANNELS.indexOf(channel) !== -1;
}
22
// POST /drop/:channel — store the raw request body in the RELAY bucket
// under a fresh UUID key, tagged with the dropping agent and timestamp.
// Rejects empty payloads (400) and payloads over 100MB (413).
async function handleDrop(request, env, agentId, channel) {
  if (!isValidChannel(channel)) {
    return json({ error: `Invalid channel: ${channel}. Valid: ${VALID_CHANNELS.join(", ")}` }, 400);
  }
  const payload = await request.arrayBuffer();
  const size = payload.byteLength;
  if (size === 0) {
    return json({ error: "Empty payload" }, 400);
  }
  const maxBytes = 100 * 1024 * 1024;
  if (size > maxBytes) {
    return json({ error: "Payload too large (max 100MB)" }, 413);
  }
  const id = crypto.randomUUID();
  const droppedAt = new Date().toISOString();
  await env.RELAY.put(`${channel}/${id}`, payload, {
    customMetadata: {
      agent_id: agentId,
      dropped_at: droppedAt,
      size: String(size),
    },
  });
  return json({ ok: true, id, channel, size, dropped_at: droppedAt });
}
45
// GET /pickup/:channel — list every pending blob in the channel.
// R2's list() returns at most one page of objects per call (with
// `truncated` + `cursor` for continuation); the previous version read only
// the first page, silently hiding older blobs in a backlogged channel.
async function handlePickupList(env, channel) {
  if (!isValidChannel(channel)) {
    return json({ error: `Invalid channel: ${channel}` }, 400);
  }
  const blobs = [];
  let cursor;
  do {
    const listed = await env.RELAY.list({ prefix: `${channel}/`, cursor });
    for (const obj of listed.objects) {
      blobs.push({
        id: obj.key.split("/")[1],
        size: obj.size,
        dropped_at: obj.customMetadata?.dropped_at || obj.uploaded.toISOString(),
        agent_id: obj.customMetadata?.agent_id || "unknown",
      });
    }
    cursor = listed.truncated ? listed.cursor : undefined;
  } while (cursor);
  return json({ channel, count: blobs.length, blobs });
}
58
// GET /pickup/:channel/:id — stream a single blob back to the caller,
// echoing its drop metadata in response headers. 404 when the blob is
// gone (already picked up, confirmed, or swept by the TTL cleanup).
async function handlePickup(env, channel, id) {
  if (!isValidChannel(channel)) {
    return json({ error: `Invalid channel: ${channel}` }, 400);
  }
  const stored = await env.RELAY.get(`${channel}/${id}`);
  if (!stored) {
    return json({ error: "Blob not found (already picked up or expired)" }, 404);
  }
  const meta = stored.customMetadata;
  const headers = {
    "Content-Type": "application/octet-stream",
    "X-Agent-Id": meta?.agent_id || "unknown",
    "X-Dropped-At": meta?.dropped_at || "",
  };
  return new Response(stored.body, { headers });
}
75
// DELETE /confirm/:channel/:id — acknowledge receipt and delete the blob.
// head() first so the caller gets an honest 404 instead of a silent
// no-op delete when the blob is already gone.
async function handleConfirm(env, channel, id) {
  if (!isValidChannel(channel)) {
    return json({ error: `Invalid channel: ${channel}` }, 400);
  }
  const key = `${channel}/${id}`;
  const exists = await env.RELAY.head(key);
  if (!exists) {
    return json({ error: "Blob not found (already confirmed or expired)" }, 404);
  }
  await env.RELAY.delete(key);
  return json({ ok: true, deleted: key });
}
87
// Serialize `data` as an application/json Response with the given status.
function json(data, status = 200) {
  const body = JSON.stringify(data);
  const headers = { "Content-Type": "application/json" };
  return new Response(body, { status, headers });
}
93
var worker_default = {
  // HTTP entry point. Routes:
  //   GET    /health               — liveness probe (unauthenticated)
  //   POST   /drop/:channel        — store a blob
  //   GET    /pickup/:channel      — list pending blobs
  //   GET    /pickup/:channel/:id  — fetch one blob
  //   DELETE /confirm/:channel/:id — delete after receipt
  //
  // Handlers are `return await`-ed: returning a bare promise from inside a
  // try block lets its rejection escape the catch, so handler failures
  // previously bypassed the 500 response entirely.
  async fetch(request, env) {
    const url = new URL(request.url);
    const parts = url.pathname.split("/").filter(Boolean);
    if (parts[0] === "health" && request.method === "GET") {
      return json({ ok: true, service: "memory-crystal-relay", mode: "ephemeral" });
    }
    const authResult = authenticate(request, env);
    if (authResult instanceof Response) return authResult;
    const { agentId } = authResult;
    try {
      if (parts[0] === "drop" && parts[1] && request.method === "POST") {
        return await handleDrop(request, env, agentId, parts[1]);
      }
      if (parts[0] === "pickup" && parts[1] && request.method === "GET") {
        if (parts[2]) {
          return await handlePickup(env, parts[1], parts[2]);
        }
        return await handlePickupList(env, parts[1]);
      }
      if (parts[0] === "confirm" && parts[1] && parts[2] && request.method === "DELETE") {
        return await handleConfirm(env, parts[1], parts[2]);
      }
      return json({ error: "Not found" }, 404);
    } catch (err) {
      return json({ error: err.message || "Internal error" }, 500);
    }
  },
  // Scheduled cleanup: delete blobs older than 24h (TTL safety net).
  // Follows R2 list pagination (`truncated`/`cursor`) so backlogs beyond a
  // single list page are still swept; the previous version only examined
  // the first page per channel.
  async scheduled(event, env) {
    const cutoff = Date.now() - 24 * 60 * 60 * 1e3;
    for (const channel of VALID_CHANNELS) {
      let cursor;
      do {
        const listed = await env.RELAY.list({ prefix: `${channel}/`, cursor });
        for (const obj of listed.objects) {
          const droppedAt = obj.customMetadata?.dropped_at;
          if (droppedAt && new Date(droppedAt).getTime() < cutoff) {
            await env.RELAY.delete(obj.key);
          }
        }
        cursor = listed.truncated ? listed.cursor : undefined;
      } while (cursor);
    }
  }
};
export {
  worker_default as default
};
@@ -0,0 +1,51 @@
1
-- Memory Crystal Cloud: OAuth + user tables.
-- Applied to D1 database: memory-crystal-cloud

-- Dynamically registered OAuth clients.
CREATE TABLE IF NOT EXISTS oauth_clients (
  client_id TEXT PRIMARY KEY,
  client_secret_hash TEXT,
  redirect_uris TEXT NOT NULL,
  client_name TEXT,
  created_at TEXT NOT NULL DEFAULT (datetime('now')),
  last_used_at TEXT
);

-- Short-lived authorization codes for the PKCE flow.
CREATE TABLE IF NOT EXISTS authorization_codes (
  code TEXT PRIMARY KEY,
  client_id TEXT NOT NULL,
  user_id TEXT NOT NULL,
  code_challenge TEXT NOT NULL,
  code_challenge_method TEXT NOT NULL DEFAULT 'S256',
  redirect_uri TEXT NOT NULL,
  scope TEXT,
  expires_at TEXT NOT NULL,
  used INTEGER DEFAULT 0,
  FOREIGN KEY (client_id) REFERENCES oauth_clients(client_id)
);

-- Access tokens; only the hash is stored, never the raw token.
CREATE TABLE IF NOT EXISTS access_tokens (
  token_hash TEXT PRIMARY KEY,
  client_id TEXT NOT NULL,
  user_id TEXT NOT NULL,
  scope TEXT,
  tier TEXT NOT NULL DEFAULT 'sovereign',
  expires_at TEXT NOT NULL,
  created_at TEXT NOT NULL DEFAULT (datetime('now')),
  FOREIGN KEY (client_id) REFERENCES oauth_clients(client_id)
);

-- User accounts.
CREATE TABLE IF NOT EXISTS users (
  user_id TEXT PRIMARY KEY,
  email TEXT NOT NULL UNIQUE,
  tier TEXT NOT NULL DEFAULT 'sovereign',
  relay_token TEXT,
  created_at TEXT NOT NULL DEFAULT (datetime('now'))
);

-- Expiry-scan indexes (token cleanup / validation sweeps).
CREATE INDEX IF NOT EXISTS idx_access_tokens_expires ON access_tokens(expires_at);
CREATE INDEX IF NOT EXISTS idx_authorization_codes_expires ON authorization_codes(expires_at);
@@ -0,0 +1,49 @@
1
-- Memory Crystal Cloud: Tier 2 cloud storage tables.
-- Chunks and memories for cloud search (D1 + Vectorize).

CREATE TABLE IF NOT EXISTS chunks (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  user_id TEXT NOT NULL,
  text TEXT NOT NULL,
  role TEXT NOT NULL DEFAULT 'user',
  source_type TEXT NOT NULL DEFAULT 'chatgpt',
  source_id TEXT NOT NULL DEFAULT '',
  agent_id TEXT NOT NULL DEFAULT 'gpt',
  token_count INTEGER DEFAULT 0,
  created_at TEXT NOT NULL DEFAULT (datetime('now'))
);

CREATE TABLE IF NOT EXISTS memories (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  user_id TEXT NOT NULL,
  text TEXT NOT NULL,
  category TEXT NOT NULL DEFAULT 'fact',
  confidence REAL NOT NULL DEFAULT 1.0,
  source_ids TEXT DEFAULT '[]',
  status TEXT NOT NULL DEFAULT 'active',
  created_at TEXT NOT NULL DEFAULT (datetime('now')),
  updated_at TEXT NOT NULL DEFAULT (datetime('now'))
);

-- FTS5 external-content index for BM25 text search over chunks.
CREATE VIRTUAL TABLE IF NOT EXISTS chunks_fts USING fts5(
  text,
  content='chunks',
  content_rowid='id'
);

-- Triggers to keep FTS in sync with the content table. External-content
-- FTS5 tables require all three (insert/delete/update); without the UPDATE
-- trigger an edited chunks.text row would keep its stale index entry.
CREATE TRIGGER IF NOT EXISTS chunks_ai AFTER INSERT ON chunks BEGIN
  INSERT INTO chunks_fts(rowid, text) VALUES (new.id, new.text);
END;

CREATE TRIGGER IF NOT EXISTS chunks_ad AFTER DELETE ON chunks BEGIN
  INSERT INTO chunks_fts(chunks_fts, rowid, text) VALUES ('delete', old.id, old.text);
END;

CREATE TRIGGER IF NOT EXISTS chunks_au AFTER UPDATE ON chunks BEGIN
  INSERT INTO chunks_fts(chunks_fts, rowid, text) VALUES ('delete', old.id, old.text);
  INSERT INTO chunks_fts(rowid, text) VALUES (new.id, new.text);
END;

-- Indexes
CREATE INDEX IF NOT EXISTS idx_chunks_user ON chunks(user_id);
CREATE INDEX IF NOT EXISTS idx_chunks_agent ON chunks(agent_id);
CREATE INDEX IF NOT EXISTS idx_chunks_created ON chunks(created_at);
CREATE INDEX IF NOT EXISTS idx_memories_user ON memories(user_id);
CREATE INDEX IF NOT EXISTS idx_memories_status ON memories(status);
@@ -0,0 +1,11 @@
1
+ {
2
+ "id": "memory-crystal",
3
+ "name": "Memory Crystal",
4
+ "description": "Sovereign memory system — search, remember, forget across all agent conversations and files.",
5
+ "skills": ["./skills"],
6
+ "configSchema": {
7
+ "type": "object",
8
+ "additionalProperties": false,
9
+ "properties": {}
10
+ }
11
+ }
package/package.json ADDED
@@ -0,0 +1,57 @@
1
+ {
2
+ "name": "@wipcomputer/memory-crystal",
3
+ "version": "0.7.10",
4
+ "description": "Sovereign memory system — local-first with ephemeral encrypted relay. Your memory, your machine, your rules.",
5
+ "type": "module",
6
+ "main": "dist/core.js",
7
+ "openclaw": {
8
+ "extensions": [
9
+ "./dist/openclaw.js"
10
+ ]
11
+ },
12
+ "bin": {
13
+ "crystal": "dist/cli.js",
14
+ "crystal-mcp": "dist/mcp-server.js"
15
+ },
16
+ "scripts": {
17
+ "build": "tsup src/core.ts src/cli.ts src/mcp-server.ts src/openclaw.ts src/migrate.ts src/cc-hook.ts src/cc-poller.ts src/crypto.ts src/pair.ts src/poller.ts src/mirror-sync.ts src/file-sync.ts src/ldm.ts src/summarize.ts src/role.ts src/doctor.ts src/bridge.ts src/discover.ts src/bulk-copy.ts src/oc-backfill.ts src/dream-weaver.ts src/crystal-serve.ts src/staging.ts src/installer.ts --format esm --dts --outDir dist && tsup src/worker.ts --format esm --outDir dist --no-dts && cp scripts/crystal-capture.sh scripts/ldm-backup.sh dist/",
18
+ "build:local": "tsup src/core.ts src/cli.ts src/mcp-server.ts src/openclaw.ts src/migrate.ts src/cc-hook.ts src/cc-poller.ts src/crypto.ts src/pair.ts src/poller.ts src/mirror-sync.ts src/file-sync.ts src/ldm.ts src/summarize.ts src/role.ts src/doctor.ts src/bridge.ts src/discover.ts src/bulk-copy.ts src/oc-backfill.ts src/dream-weaver.ts src/crystal-serve.ts src/staging.ts src/installer.ts --format esm --dts --outDir dist",
19
+ "build:worker": "tsup src/worker.ts --format esm --outDir dist --no-dts",
20
+ "build:cloud": "tsup src/worker-mcp.ts src/cloud-crystal.ts --format esm --outDir dist --no-dts",
21
+ "deploy:cloud": "bash -c 'git diff --quiet HEAD -- src/ wrangler-mcp.toml || (echo \"ERROR: uncommitted changes. commit before deploying.\" && exit 1)' && wrangler deploy --config wrangler-mcp.toml",
22
+ "build:demo": "tsup src/worker-demo.ts --format esm --outDir dist --no-dts",
23
+ "dev:demo": "wrangler dev --config wrangler-demo.toml",
24
+ "deploy:demo": "bash -c 'git diff --quiet HEAD -- src/ wrangler-demo.toml || (echo \"ERROR: uncommitted changes. commit before deploying.\" && exit 1)' && wrangler deploy --config wrangler-demo.toml",
25
+ "dev": "tsup src/core.ts src/cli.ts src/mcp-server.ts src/openclaw.ts src/migrate.ts src/cc-hook.ts src/crypto.ts --format esm --watch --outDir dist",
26
+ "check": "node dist/cli.js status",
27
+ "search": "node dist/cli.js search",
28
+ "migrate": "node dist/migrate.js",
29
+ "cloud:dev": "cd cloud && npx wrangler dev",
30
+ "cloud:deploy": "cd cloud && npx wrangler deploy",
31
+ "cloud:db:migrate": "cd cloud && npx wrangler d1 migrations apply memory-crystal-cloud --local",
32
+ "cloud:db:migrate:remote": "cd cloud && npx wrangler d1 migrations apply memory-crystal-cloud --remote"
33
+ },
34
+ "dependencies": {
35
+ "dream-weaver-protocol": "file:../dream-weaver-protocol-private",
36
+ "@lancedb/lancedb": "^0.15.0",
37
+ "@modelcontextprotocol/sdk": "^1.12.1",
38
+ "agents": "^0.7.2",
39
+ "apache-arrow": "^18.1.0",
40
+ "better-sqlite3": "^11.8.1",
41
+ "qrcode-terminal": "^0.12.0",
42
+ "sqlite-vec": "^0.1.7-alpha.2",
43
+ "zod": "^4.3.6"
44
+ },
45
+ "optionalDependencies": {
46
+ "sqlite-vec-darwin-arm64": "^0.1.7-alpha.2"
47
+ },
48
+ "devDependencies": {
49
+ "@cloudflare/workers-types": "^4.20260228.1",
50
+ "@types/better-sqlite3": "^7.6.13",
51
+ "@types/node": "^22.0.0",
52
+ "@types/qrcode-terminal": "^0.12.2",
53
+ "tsup": "^8.0.0",
54
+ "typescript": "^5.7.0",
55
+ "wrangler": "^3.95.0"
56
+ }
57
+ }
@@ -0,0 +1,29 @@
1
#!/bin/bash
# Job: crystal-capture
# Continuous capture for Claude Code sessions.
# Reads JSONL files on disk, ingests into Crystal, exports MD sessions, writes daily logs.
# Primary capture path. Runs every minute via cron.
# The Stop hook (cc-hook.ts) is a redundancy check only.
#
# Source of truth: memory-crystal-private/scripts/crystal-capture.sh
# Deployed to: ~/.ldm/bin/crystal-capture.sh (via crystal init)
# Cron entry: * * * * * ~/.ldm/bin/crystal-capture.sh >> /tmp/ldm-dev-tools/crystal-capture.log 2>&1
#
# The Node poller fetches the OpenAI API key internally via opRead() in core.ts.
# opRead uses: op read "op://Agent Secrets/OpenAI API/api key" with the SA token from
# ~/.openclaw/secrets/op-sa-token. Do NOT call op from this shell script... it triggers
# macOS TCC popups when run from cron.

set -u

# Cron provides minimal PATH. Ensure Homebrew binaries (node, op) are findable.
export PATH="/opt/homebrew/bin:$PATH"

POLLER="$HOME/.ldm/extensions/memory-crystal/dist/cc-poller.js"
NODE="/opt/homebrew/bin/node"

if [ ! -f "$POLLER" ]; then
  # Diagnostics go to stderr; cron merges both streams into the log anyway.
  echo "ERROR: cc-poller not found at $POLLER" >&2
  exit 1
fi

if [ ! -x "$NODE" ]; then
  echo "ERROR: node not found or not executable at $NODE" >&2
  exit 1
fi

# Single run: scan all sessions, ingest new turns, export MD, exit.
"$NODE" "$POLLER" 2>&1
@@ -0,0 +1,29 @@
1
#!/bin/bash
# Job: crystal-capture
# Continuous capture for Claude Code sessions.
# Reads JSONL files on disk, ingests into Crystal, exports MD sessions, writes daily logs.
# Primary capture path. Runs every minute via cron.
# The Stop hook (cc-hook.ts) is a redundancy check only.
#
# Source of truth: memory-crystal-private/scripts/crystal-capture.sh
# Deployed to: ~/.ldm/bin/crystal-capture.sh (via crystal init)
# Cron entry: * * * * * ~/.ldm/bin/crystal-capture.sh >> /tmp/ldm-dev-tools/crystal-capture.log 2>&1
#
# The Node poller fetches the OpenAI API key internally via opRead() in core.ts.
# opRead uses: op read "op://Agent Secrets/OpenAI API/api key" with the SA token from
# ~/.openclaw/secrets/op-sa-token. Do NOT call op from this shell script... it triggers
# macOS TCC popups when run from cron.

set -u

# Cron provides minimal PATH. Ensure Homebrew binaries (node, op) are findable.
export PATH="/opt/homebrew/bin:$PATH"

POLLER="$HOME/.ldm/extensions/memory-crystal/dist/cc-poller.js"
NODE="/opt/homebrew/bin/node"

if [ ! -f "$POLLER" ]; then
  # Diagnostics go to stderr; cron merges both streams into the log anyway.
  echo "ERROR: cc-poller not found at $POLLER" >&2
  exit 1
fi

if [ ! -x "$NODE" ]; then
  echo "ERROR: node not found or not executable at $NODE" >&2
  exit 1
fi

# Single run: scan all sessions, ingest new turns, export MD, exit.
"$NODE" "$POLLER" 2>&1
@@ -0,0 +1,153 @@
1
#!/usr/bin/env bash
#
# deploy-cloud.sh — Deploy Memory Crystal Cloud MCP server to Cloudflare.
# Pulls all credentials from 1Password. No keys in env files.
#
# Usage:
#   bash scripts/deploy-cloud.sh          # full setup (first time)
#   bash scripts/deploy-cloud.sh deploy   # just redeploy Worker code
#
# Prerequisites:
#   - wrangler CLI installed (npm install -g wrangler)
#   - 1Password items populated:
#       "Parker - Cloudflare Memory Crystal Keys" (api-token, account-id)
#       "OpenAI API" (api key)

set -euo pipefail

REPO_DIR="$(cd "$(dirname "$0")/.." && pwd)"
cd "$REPO_DIR"

# ── Pull credentials from 1Password ──

echo "Pulling credentials from 1Password..."

OP_TOKEN_FILE="$HOME/.openclaw/secrets/op-sa-token"
if [[ ! -r "$OP_TOKEN_FILE" ]]; then
  echo "Error: 1Password service-account token not readable at $OP_TOKEN_FILE" >&2
  exit 1
fi
OP_TOKEN=$(cat "$OP_TOKEN_FILE")

CF_API_TOKEN=$(OP_SERVICE_ACCOUNT_TOKEN="$OP_TOKEN" op item get "Parker - Cloudflare Memory Crystal Keys" \
  --vault="Agent Secrets" --fields label=api-token --reveal)

CF_ACCOUNT_ID=$(OP_SERVICE_ACCOUNT_TOKEN="$OP_TOKEN" op item get "Parker - Cloudflare Memory Crystal Keys" \
  --vault="Agent Secrets" --fields label=account-id --reveal)

OPENAI_API_KEY=$(OP_SERVICE_ACCOUNT_TOKEN="$OP_TOKEN" op item get "OpenAI API" \
  --vault="Agent Secrets" --fields label="api key" --reveal)

if [[ "$CF_API_TOKEN" == "REPLACE_WITH_CLOUDFLARE_API_TOKEN" || "$CF_ACCOUNT_ID" == "REPLACE_WITH_CLOUDFLARE_ACCOUNT_ID" ]]; then
  echo "Error: Cloudflare credentials not yet filled in 1Password." >&2
  echo "Update 'Parker - Cloudflare Memory Crystal Keys' in Agent Secrets vault." >&2
  exit 1
fi

export CLOUDFLARE_API_TOKEN="$CF_API_TOKEN"
export CLOUDFLARE_ACCOUNT_ID="$CF_ACCOUNT_ID"

# Print short fingerprints only — never the full secrets.
echo "  Cloudflare Account ID: ${CF_ACCOUNT_ID:0:8}..."
echo "  Cloudflare API Token: ${CF_API_TOKEN:0:8}..."
echo "  OpenAI API Key: ${OPENAI_API_KEY:0:8}..."

# ── Deploy only? ──

if [[ "${1:-}" == "deploy" ]]; then
  echo ""
  echo "Building and deploying Worker..."
  npm run build:cloud
  npx wrangler deploy --config wrangler-mcp.toml
  echo "Done. Worker deployed."
  exit 0
fi

# ── Full setup (first time) ──

echo ""
echo "=== Step 1: Create D1 database ==="

# Check if database already exists.
DB_ID=$(npx wrangler d1 list --json 2>/dev/null | python3 -c "
import sys, json
dbs = json.load(sys.stdin)
for db in dbs:
    if db['name'] == 'memory-crystal-cloud':
        print(db['uuid'])
        break
" 2>/dev/null || echo "")

if [[ -z "$DB_ID" ]]; then
  echo "Creating D1 database: memory-crystal-cloud"
  DB_OUTPUT=$(npx wrangler d1 create memory-crystal-cloud 2>&1)
  # '|| true': under pipefail a no-match grep would abort the whole script
  # here, before the explicit empty-ID check below can print a useful error.
  DB_ID=$(echo "$DB_OUTPUT" | grep -oE '[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}' | head -1 || true)
  echo "  Created: $DB_ID"
else
  echo "  Already exists: $DB_ID"
fi

if [[ -z "$DB_ID" ]]; then
  echo "Error: Could not get D1 database ID" >&2
  exit 1
fi

# Update wrangler-mcp.toml with database ID (only if still unset).
if grep -q 'database_id = ""' wrangler-mcp.toml; then
  sed -i.bak "s/database_id = \"\"/database_id = \"$DB_ID\"/" wrangler-mcp.toml
  rm -f wrangler-mcp.toml.bak
  echo "  Updated wrangler-mcp.toml with database_id"
fi

echo ""
echo "=== Step 2: Create Vectorize index ==="

VEC_EXISTS=$(npx wrangler vectorize list --json 2>/dev/null | python3 -c "
import sys, json
indexes = json.load(sys.stdin)
for idx in indexes:
    if idx['name'] == 'memory-crystal-chunks':
        print('yes')
        break
" 2>/dev/null || echo "")

if [[ "$VEC_EXISTS" != "yes" ]]; then
  echo "Creating Vectorize index: memory-crystal-chunks (1024 dims, cosine)"
  npx wrangler vectorize create memory-crystal-chunks --dimensions 1024 --metric cosine
  echo "  Created."
else
  echo "  Already exists."
fi

echo ""
echo "=== Step 3: Run D1 migrations ==="

npx wrangler d1 migrations apply memory-crystal-cloud --config wrangler-mcp.toml
echo "  Migrations applied."

echo ""
echo "=== Step 4: Set Worker secrets ==="

# Secrets travel via stdin, never via argv where 'ps' could expose them.
# printf '%s\n' matches echo's output byte-for-byte but is safe for values
# that start with '-' or contain backslashes.
printf '%s\n' "$OPENAI_API_KEY" | npx wrangler secret put OPENAI_API_KEY --config wrangler-mcp.toml
echo "  OPENAI_API_KEY set."

# Generate signing key for OAuth tokens.
MCP_SIGNING_KEY=$(openssl rand -hex 32)
printf '%s\n' "$MCP_SIGNING_KEY" | npx wrangler secret put MCP_SIGNING_KEY --config wrangler-mcp.toml
echo "  MCP_SIGNING_KEY set (generated)."

# Generate relay encryption key (base64, 32 bytes).
RELAY_KEY=$(openssl rand -base64 32)
printf '%s\n' "$RELAY_KEY" | npx wrangler secret put RELAY_ENCRYPTION_KEY --config wrangler-mcp.toml
echo "  RELAY_ENCRYPTION_KEY set (generated)."

echo ""
echo "=== Step 5: Build and deploy ==="

npm run build:cloud
npx wrangler deploy --config wrangler-mcp.toml

echo ""
echo "=== Done ==="
echo ""
echo "Memory Crystal Cloud MCP server deployed."
echo "Worker URL: https://memory-crystal-cloud.<your-subdomain>.workers.dev"
echo ""
echo "Next steps:"
echo "  1. Test: curl https://memory-crystal-cloud.<subdomain>.workers.dev/health"
echo "  2. Test OAuth: GET /.well-known/oauth-authorization-server"
echo "  3. Connect from ChatGPT or Claude"
@@ -0,0 +1,153 @@
1
#!/usr/bin/env bash
#
# deploy-cloud.sh — Deploy Memory Crystal Cloud MCP server to Cloudflare.
# Pulls all credentials from 1Password. No keys in env files.
#
# Usage:
#   bash scripts/deploy-cloud.sh          # full setup (first time)
#   bash scripts/deploy-cloud.sh deploy   # just redeploy Worker code
#
# Prerequisites:
#   - wrangler CLI installed (npm install -g wrangler)
#   - 1Password items populated:
#       "Parker - Cloudflare Memory Crystal Keys" (api-token, account-id)
#       "OpenAI API" (api key)

set -euo pipefail

REPO_DIR="$(cd "$(dirname "$0")/.." && pwd)"
cd "$REPO_DIR"

# ── Pull credentials from 1Password ──

echo "Pulling credentials from 1Password..."

OP_TOKEN_FILE="$HOME/.openclaw/secrets/op-sa-token"
if [[ ! -r "$OP_TOKEN_FILE" ]]; then
  echo "Error: 1Password service-account token not readable at $OP_TOKEN_FILE" >&2
  exit 1
fi
OP_TOKEN=$(cat "$OP_TOKEN_FILE")

CF_API_TOKEN=$(OP_SERVICE_ACCOUNT_TOKEN="$OP_TOKEN" op item get "Parker - Cloudflare Memory Crystal Keys" \
  --vault="Agent Secrets" --fields label=api-token --reveal)

CF_ACCOUNT_ID=$(OP_SERVICE_ACCOUNT_TOKEN="$OP_TOKEN" op item get "Parker - Cloudflare Memory Crystal Keys" \
  --vault="Agent Secrets" --fields label=account-id --reveal)

OPENAI_API_KEY=$(OP_SERVICE_ACCOUNT_TOKEN="$OP_TOKEN" op item get "OpenAI API" \
  --vault="Agent Secrets" --fields label="api key" --reveal)

if [[ "$CF_API_TOKEN" == "REPLACE_WITH_CLOUDFLARE_API_TOKEN" || "$CF_ACCOUNT_ID" == "REPLACE_WITH_CLOUDFLARE_ACCOUNT_ID" ]]; then
  echo "Error: Cloudflare credentials not yet filled in 1Password." >&2
  echo "Update 'Parker - Cloudflare Memory Crystal Keys' in Agent Secrets vault." >&2
  exit 1
fi

export CLOUDFLARE_API_TOKEN="$CF_API_TOKEN"
export CLOUDFLARE_ACCOUNT_ID="$CF_ACCOUNT_ID"

# Print short fingerprints only — never the full secrets.
echo "  Cloudflare Account ID: ${CF_ACCOUNT_ID:0:8}..."
echo "  Cloudflare API Token: ${CF_API_TOKEN:0:8}..."
echo "  OpenAI API Key: ${OPENAI_API_KEY:0:8}..."

# ── Deploy only? ──

if [[ "${1:-}" == "deploy" ]]; then
  echo ""
  echo "Building and deploying Worker..."
  npm run build:cloud
  npx wrangler deploy --config wrangler-mcp.toml
  echo "Done. Worker deployed."
  exit 0
fi

# ── Full setup (first time) ──

echo ""
echo "=== Step 1: Create D1 database ==="

# Check if database already exists.
DB_ID=$(npx wrangler d1 list --json 2>/dev/null | python3 -c "
import sys, json
dbs = json.load(sys.stdin)
for db in dbs:
    if db['name'] == 'memory-crystal-cloud':
        print(db['uuid'])
        break
" 2>/dev/null || echo "")

if [[ -z "$DB_ID" ]]; then
  echo "Creating D1 database: memory-crystal-cloud"
  DB_OUTPUT=$(npx wrangler d1 create memory-crystal-cloud 2>&1)
  # '|| true': under pipefail a no-match grep would abort the whole script
  # here, before the explicit empty-ID check below can print a useful error.
  DB_ID=$(echo "$DB_OUTPUT" | grep -oE '[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}' | head -1 || true)
  echo "  Created: $DB_ID"
else
  echo "  Already exists: $DB_ID"
fi

if [[ -z "$DB_ID" ]]; then
  echo "Error: Could not get D1 database ID" >&2
  exit 1
fi

# Update wrangler-mcp.toml with database ID (only if still unset).
if grep -q 'database_id = ""' wrangler-mcp.toml; then
  sed -i.bak "s/database_id = \"\"/database_id = \"$DB_ID\"/" wrangler-mcp.toml
  rm -f wrangler-mcp.toml.bak
  echo "  Updated wrangler-mcp.toml with database_id"
fi

echo ""
echo "=== Step 2: Create Vectorize index ==="

VEC_EXISTS=$(npx wrangler vectorize list --json 2>/dev/null | python3 -c "
import sys, json
indexes = json.load(sys.stdin)
for idx in indexes:
    if idx['name'] == 'memory-crystal-chunks':
        print('yes')
        break
" 2>/dev/null || echo "")

if [[ "$VEC_EXISTS" != "yes" ]]; then
  echo "Creating Vectorize index: memory-crystal-chunks (1024 dims, cosine)"
  npx wrangler vectorize create memory-crystal-chunks --dimensions 1024 --metric cosine
  echo "  Created."
else
  echo "  Already exists."
fi

echo ""
echo "=== Step 3: Run D1 migrations ==="

npx wrangler d1 migrations apply memory-crystal-cloud --config wrangler-mcp.toml
echo "  Migrations applied."

echo ""
echo "=== Step 4: Set Worker secrets ==="

# Secrets travel via stdin, never via argv where 'ps' could expose them.
# printf '%s\n' matches echo's output byte-for-byte but is safe for values
# that start with '-' or contain backslashes.
printf '%s\n' "$OPENAI_API_KEY" | npx wrangler secret put OPENAI_API_KEY --config wrangler-mcp.toml
echo "  OPENAI_API_KEY set."

# Generate signing key for OAuth tokens.
MCP_SIGNING_KEY=$(openssl rand -hex 32)
printf '%s\n' "$MCP_SIGNING_KEY" | npx wrangler secret put MCP_SIGNING_KEY --config wrangler-mcp.toml
echo "  MCP_SIGNING_KEY set (generated)."

# Generate relay encryption key (base64, 32 bytes).
RELAY_KEY=$(openssl rand -base64 32)
printf '%s\n' "$RELAY_KEY" | npx wrangler secret put RELAY_ENCRYPTION_KEY --config wrangler-mcp.toml
echo "  RELAY_ENCRYPTION_KEY set (generated)."

echo ""
echo "=== Step 5: Build and deploy ==="

npm run build:cloud
npx wrangler deploy --config wrangler-mcp.toml

echo ""
echo "=== Done ==="
echo ""
echo "Memory Crystal Cloud MCP server deployed."
echo "Worker URL: https://memory-crystal-cloud.<your-subdomain>.workers.dev"
echo ""
echo "Next steps:"
echo "  1. Test: curl https://memory-crystal-cloud.<subdomain>.workers.dev/health"
echo "  2. Test OAuth: GET /.well-known/oauth-authorization-server"
echo "  3. Connect from ChatGPT or Claude"