@hir4ta/memoria 0.14.0 → 0.14.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "memoria",
3
3
  "description": "A plugin that provides long-term memory for Claude Code. It automatically saves context lost during auto-compact, offering features for session restoration, recording technical decisions, and learning developer patterns.",
4
- "version": "0.14.0",
4
+ "version": "0.14.2",
5
5
  "author": {
6
6
  "name": "hir4ta"
7
7
  },
package/bin/memoria.js CHANGED
@@ -5,6 +5,22 @@ import fs from "node:fs";
5
5
  import path from "node:path";
6
6
  import { fileURLToPath } from "node:url";
7
7
 
8
+ // Suppress Node.js SQLite experimental warning (must be before dynamic import)
9
+ const originalEmit = process.emit;
10
+ process.emit = function (name, data, ...args) {
11
+ if (
12
+ name === "warning" &&
13
+ data?.name === "ExperimentalWarning" &&
14
+ data?.message?.includes("SQLite")
15
+ ) {
16
+ return false;
17
+ }
18
+ return originalEmit.call(process, name, data, ...args);
19
+ };
20
+
21
+ // Dynamic import to ensure warning suppression is active
22
+ const { DatabaseSync } = await import("node:sqlite");
23
+
8
24
  const __filename = fileURLToPath(import.meta.url);
9
25
  const __dirname = path.dirname(__filename);
10
26
 
@@ -45,6 +61,7 @@ function initMemoria() {
45
61
  const rulesDir = path.join(memoriaDir, "rules");
46
62
  const patternsDir = path.join(memoriaDir, "patterns");
47
63
  const tagsPath = path.join(memoriaDir, "tags.json");
64
+ const dbPath = path.join(memoriaDir, "local.db");
48
65
 
49
66
  // Check if already initialized
50
67
  if (fs.existsSync(memoriaDir)) {
@@ -82,6 +99,22 @@ function initMemoria() {
82
99
  );
83
100
  fs.writeFileSync(path.join(rulesDir, "dev-rules.json"), rulesTemplate);
84
101
 
102
+ // Initialize SQLite database
103
+ const schemaPath = path.join(packageDir, "lib", "schema.sql");
104
+ try {
105
+ const db = new DatabaseSync(dbPath);
106
+ db.exec("PRAGMA journal_mode = WAL");
107
+ if (fs.existsSync(schemaPath)) {
108
+ const schema = fs.readFileSync(schemaPath, "utf-8");
109
+ db.exec(schema);
110
+ }
111
+ db.close();
112
+ } catch (error) {
113
+ console.error(
114
+ `Warning: Failed to initialize SQLite database: ${error.message}`,
115
+ );
116
+ }
117
+
85
118
  console.log(`memoria initialized: ${memoriaDir}`);
86
119
  console.log(`
87
120
  Created:
@@ -91,6 +124,7 @@ Created:
91
124
  ${tagsPath}
92
125
  ${rulesDir}/review-guidelines.json
93
126
  ${rulesDir}/dev-rules.json
127
+ ${dbPath}
94
128
 
95
129
  You can now use memoria with Claude Code in this project.
96
130
  `);
package/dist/lib/db.js CHANGED
@@ -3,7 +3,14 @@ import { execSync } from "node:child_process";
3
3
  import { existsSync, readFileSync } from "node:fs";
4
4
  import { dirname, join } from "node:path";
5
5
  import { fileURLToPath } from "node:url";
6
- import Database from "better-sqlite3";
6
+ var originalEmit = process.emit;
7
+ process.emit = function(name, data, ...args) {
8
+ if (name === "warning" && typeof data === "object" && data?.name === "ExperimentalWarning" && data?.message?.includes("SQLite")) {
9
+ return false;
10
+ }
11
+ return originalEmit.call(process, name, data, ...args);
12
+ };
13
+ var { DatabaseSync } = await import("node:sqlite");
7
14
  var __filename = fileURLToPath(import.meta.url);
8
15
  var __dirname = dirname(__filename);
9
16
  function getCurrentUser() {
@@ -22,8 +29,8 @@ function getDbPath(memoriaDir) {
22
29
  }
23
30
  function initDatabase(memoriaDir) {
24
31
  const dbPath = getDbPath(memoriaDir);
25
- const db = new Database(dbPath);
26
- db.pragma("journal_mode = WAL");
32
+ const db = new DatabaseSync(dbPath);
33
+ db.exec("PRAGMA journal_mode = WAL");
27
34
  const schemaPath = join(__dirname, "schema.sql");
28
35
  if (existsSync(schemaPath)) {
29
36
  const schema = readFileSync(schemaPath, "utf-8");
@@ -36,30 +43,34 @@ function openDatabase(memoriaDir) {
36
43
  if (!existsSync(dbPath)) {
37
44
  return null;
38
45
  }
39
- const db = new Database(dbPath);
40
- db.pragma("journal_mode = WAL");
46
+ const db = new DatabaseSync(dbPath);
47
+ db.exec("PRAGMA journal_mode = WAL");
41
48
  return db;
42
49
  }
43
50
  function insertInteractions(db, interactions) {
44
51
  const insert = db.prepare(`
45
52
  INSERT INTO interactions (session_id, owner, role, content, thinking, tool_calls, timestamp, is_compact_summary)
46
- VALUES (@session_id, @owner, @role, @content, @thinking, @tool_calls, @timestamp, @is_compact_summary)
53
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?)
47
54
  `);
48
- const insertMany = db.transaction((items) => {
49
- for (const item of items) {
50
- insert.run({
51
- session_id: item.session_id,
52
- owner: item.owner,
53
- role: item.role,
54
- content: item.content,
55
- thinking: item.thinking || null,
56
- tool_calls: item.tool_calls || null,
57
- timestamp: item.timestamp,
58
- is_compact_summary: item.is_compact_summary || 0
59
- });
55
+ db.exec("BEGIN TRANSACTION");
56
+ try {
57
+ for (const item of interactions) {
58
+ insert.run(
59
+ item.session_id,
60
+ item.owner,
61
+ item.role,
62
+ item.content,
63
+ item.thinking || null,
64
+ item.tool_calls || null,
65
+ item.timestamp,
66
+ item.is_compact_summary || 0
67
+ );
60
68
  }
61
- });
62
- insertMany(interactions);
69
+ db.exec("COMMIT");
70
+ } catch (error) {
71
+ db.exec("ROLLBACK");
72
+ throw error;
73
+ }
63
74
  }
64
75
  function getInteractions(db, sessionId) {
65
76
  const stmt = db.prepare(`
@@ -77,6 +88,42 @@ function getInteractionsByOwner(db, sessionId, owner) {
77
88
  `);
78
89
  return stmt.all(sessionId, owner);
79
90
  }
91
+ function getInteractionsBySessionIds(db, sessionIds) {
92
+ if (sessionIds.length === 0) {
93
+ return [];
94
+ }
95
+ const placeholders = sessionIds.map(() => "?").join(", ");
96
+ const stmt = db.prepare(`
97
+ SELECT * FROM interactions
98
+ WHERE session_id IN (${placeholders})
99
+ ORDER BY timestamp ASC, session_id ASC, role ASC
100
+ `);
101
+ return stmt.all(...sessionIds);
102
+ }
103
+ function getInteractionsBySessionIdsAndOwner(db, sessionIds, owner) {
104
+ if (sessionIds.length === 0) {
105
+ return [];
106
+ }
107
+ const placeholders = sessionIds.map(() => "?").join(", ");
108
+ const stmt = db.prepare(`
109
+ SELECT * FROM interactions
110
+ WHERE session_id IN (${placeholders}) AND owner = ?
111
+ ORDER BY timestamp ASC, session_id ASC, role ASC
112
+ `);
113
+ return stmt.all(...sessionIds, owner);
114
+ }
115
+ function hasInteractionsForSessionIds(db, sessionIds, owner) {
116
+ if (sessionIds.length === 0) {
117
+ return false;
118
+ }
119
+ const placeholders = sessionIds.map(() => "?").join(", ");
120
+ const stmt = db.prepare(`
121
+ SELECT COUNT(*) as count FROM interactions
122
+ WHERE session_id IN (${placeholders}) AND owner = ?
123
+ `);
124
+ const result = stmt.get(...sessionIds, owner);
125
+ return result.count > 0;
126
+ }
80
127
  function hasInteractions(db, sessionId, owner) {
81
128
  const stmt = db.prepare(`
82
129
  SELECT COUNT(*) as count FROM interactions
@@ -88,9 +135,9 @@ function hasInteractions(db, sessionId, owner) {
88
135
  function insertPreCompactBackup(db, backup) {
89
136
  const stmt = db.prepare(`
90
137
  INSERT INTO pre_compact_backups (session_id, owner, interactions)
91
- VALUES (@session_id, @owner, @interactions)
138
+ VALUES (?, ?, ?)
92
139
  `);
93
- stmt.run(backup);
140
+ stmt.run(backup.session_id, backup.owner, backup.interactions);
94
141
  }
95
142
  function getLatestBackup(db, sessionId) {
96
143
  const stmt = db.prepare(`
@@ -146,8 +193,11 @@ export {
146
193
  getDbStats,
147
194
  getInteractions,
148
195
  getInteractionsByOwner,
196
+ getInteractionsBySessionIds,
197
+ getInteractionsBySessionIdsAndOwner,
149
198
  getLatestBackup,
150
199
  hasInteractions,
200
+ hasInteractionsForSessionIds,
151
201
  initDatabase,
152
202
  insertInteractions,
153
203
  insertPreCompactBackup,
package/dist/server.js CHANGED
@@ -2882,7 +2882,14 @@ import { execSync } from "node:child_process";
2882
2882
  import { existsSync as existsSync2, readFileSync } from "node:fs";
2883
2883
  import { dirname, join as join2 } from "node:path";
2884
2884
  import { fileURLToPath } from "node:url";
2885
- import Database from "better-sqlite3";
2885
+ var originalEmit = process.emit;
2886
+ process.emit = function(name, data, ...args) {
2887
+ if (name === "warning" && typeof data === "object" && data?.name === "ExperimentalWarning" && data?.message?.includes("SQLite")) {
2888
+ return false;
2889
+ }
2890
+ return originalEmit.call(process, name, data, ...args);
2891
+ };
2892
+ var { DatabaseSync } = await import("node:sqlite");
2886
2893
  var __filename = fileURLToPath(import.meta.url);
2887
2894
  var __dirname = dirname(__filename);
2888
2895
  function getCurrentUser() {
@@ -2904,24 +2911,32 @@ function openDatabase(memoriaDir2) {
2904
2911
  if (!existsSync2(dbPath)) {
2905
2912
  return null;
2906
2913
  }
2907
- const db = new Database(dbPath);
2908
- db.pragma("journal_mode = WAL");
2914
+ const db = new DatabaseSync(dbPath);
2915
+ db.exec("PRAGMA journal_mode = WAL");
2909
2916
  return db;
2910
2917
  }
2911
- function getInteractions(db, sessionId) {
2918
+ function getInteractionsBySessionIdsAndOwner(db, sessionIds, owner) {
2919
+ if (sessionIds.length === 0) {
2920
+ return [];
2921
+ }
2922
+ const placeholders = sessionIds.map(() => "?").join(", ");
2912
2923
  const stmt = db.prepare(`
2913
2924
  SELECT * FROM interactions
2914
- WHERE session_id = ?
2915
- ORDER BY timestamp ASC
2925
+ WHERE session_id IN (${placeholders}) AND owner = ?
2926
+ ORDER BY timestamp ASC, session_id ASC, role ASC
2916
2927
  `);
2917
- return stmt.all(sessionId);
2928
+ return stmt.all(...sessionIds, owner);
2918
2929
  }
2919
- function hasInteractions(db, sessionId, owner) {
2930
+ function hasInteractionsForSessionIds(db, sessionIds, owner) {
2931
+ if (sessionIds.length === 0) {
2932
+ return false;
2933
+ }
2934
+ const placeholders = sessionIds.map(() => "?").join(", ");
2920
2935
  const stmt = db.prepare(`
2921
2936
  SELECT COUNT(*) as count FROM interactions
2922
- WHERE session_id = ? AND owner = ?
2937
+ WHERE session_id IN (${placeholders}) AND owner = ?
2923
2938
  `);
2924
- const result = stmt.get(sessionId, owner);
2939
+ const result = stmt.get(...sessionIds, owner);
2925
2940
  return result.count > 0;
2926
2941
  }
2927
2942
 
@@ -3546,13 +3561,59 @@ app.get("/api/current-user", async (c) => {
3546
3561
  app.get("/api/sessions/:id/interactions", async (c) => {
3547
3562
  const id = sanitizeId(c.req.param("id"));
3548
3563
  const memoriaDir2 = getMemoriaDir();
3564
+ const sessionLinksDir = path3.join(memoriaDir2, "session-links");
3565
+ const sessionsDir = path3.join(memoriaDir2, "sessions");
3549
3566
  try {
3550
3567
  const currentUser = getCurrentUser();
3551
3568
  const db = openDatabase(memoriaDir2);
3552
3569
  if (!db) {
3553
3570
  return c.json({ interactions: [], count: 0, isOwner: false });
3554
3571
  }
3555
- const isOwner = hasInteractions(db, id, currentUser);
3572
+ let masterId = id;
3573
+ const myLinkFile = path3.join(sessionLinksDir, `${id}.json`);
3574
+ if (fs4.existsSync(myLinkFile)) {
3575
+ try {
3576
+ const myLinkData = JSON.parse(fs4.readFileSync(myLinkFile, "utf-8"));
3577
+ if (myLinkData.masterSessionId) {
3578
+ masterId = myLinkData.masterSessionId;
3579
+ }
3580
+ } catch {
3581
+ }
3582
+ }
3583
+ const sessionIds = [masterId];
3584
+ if (masterId !== id) {
3585
+ sessionIds.push(id);
3586
+ }
3587
+ if (fs4.existsSync(sessionLinksDir)) {
3588
+ const linkFiles = fs4.readdirSync(sessionLinksDir);
3589
+ for (const linkFile of linkFiles) {
3590
+ if (!linkFile.endsWith(".json")) continue;
3591
+ const linkPath = path3.join(sessionLinksDir, linkFile);
3592
+ try {
3593
+ const linkData = JSON.parse(fs4.readFileSync(linkPath, "utf-8"));
3594
+ if (linkData.masterSessionId === masterId) {
3595
+ const childId = linkFile.replace(".json", "");
3596
+ if (!sessionIds.includes(childId)) {
3597
+ sessionIds.push(childId);
3598
+ }
3599
+ }
3600
+ } catch {
3601
+ }
3602
+ }
3603
+ }
3604
+ const sessionFiles = listDatedJsonFiles(sessionsDir);
3605
+ for (const sessionFile of sessionFiles) {
3606
+ try {
3607
+ const sessionData = JSON.parse(fs4.readFileSync(sessionFile, "utf-8"));
3608
+ if (sessionData.resumedFrom === masterId && sessionData.id !== masterId) {
3609
+ if (!sessionIds.includes(sessionData.id)) {
3610
+ sessionIds.push(sessionData.id);
3611
+ }
3612
+ }
3613
+ } catch {
3614
+ }
3615
+ }
3616
+ const isOwner = hasInteractionsForSessionIds(db, sessionIds, currentUser);
3556
3617
  if (!isOwner) {
3557
3618
  db.close();
3558
3619
  return c.json(
@@ -3560,7 +3621,11 @@ app.get("/api/sessions/:id/interactions", async (c) => {
3560
3621
  403
3561
3622
  );
3562
3623
  }
3563
- const interactions = getInteractions(db, id);
3624
+ const interactions = getInteractionsBySessionIdsAndOwner(
3625
+ db,
3626
+ sessionIds,
3627
+ currentUser
3628
+ );
3564
3629
  db.close();
3565
3630
  const groupedInteractions = [];
3566
3631
  let currentInteraction = null;
@@ -38,6 +38,7 @@ fi
38
38
  cwd=$(cd "$cwd" 2>/dev/null && pwd || echo "$cwd")
39
39
  memoria_dir="${cwd}/.memoria"
40
40
  sessions_dir="${memoria_dir}/sessions"
41
+ session_links_dir="${memoria_dir}/session-links"
41
42
  db_path="${memoria_dir}/local.db"
42
43
 
43
44
  # Find session file
@@ -274,4 +275,32 @@ else
274
275
  echo "[memoria] Session completed (no transcript): ${session_file}" >&2
275
276
  fi
276
277
 
278
+ # ============================================
279
+ # Update master session workPeriods.endedAt (if linked)
280
+ # ============================================
281
+ session_link_file="${session_links_dir}/${session_short_id}.json"
282
+ if [ -f "$session_link_file" ]; then
283
+ master_session_id=$(jq -r '.masterSessionId // empty' "$session_link_file" 2>/dev/null || echo "")
284
+ if [ -n "$master_session_id" ]; then
285
+ master_session_path=$(find "$sessions_dir" -name "${master_session_id}.json" -type f 2>/dev/null | head -1)
286
+ if [ -n "$master_session_path" ] && [ -f "$master_session_path" ]; then
287
+ end_now=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
288
+ claude_session_id="${session_id}"
289
+ # Update the workPeriod entry with matching claudeSessionId
290
+ jq --arg claudeSessionId "$claude_session_id" \
291
+ --arg endedAt "$end_now" '
292
+ .workPeriods = [.workPeriods[]? |
293
+ if .claudeSessionId == $claudeSessionId and .endedAt == null
294
+ then .endedAt = $endedAt
295
+ else .
296
+ end
297
+ ] |
298
+ .updatedAt = $endedAt
299
+ ' "$master_session_path" > "${master_session_path}.tmp" \
300
+ && mv "${master_session_path}.tmp" "$master_session_path"
301
+ echo "[memoria] Master session workPeriods.endedAt updated: ${master_session_path}" >&2
302
+ fi
303
+ fi
304
+ fi
305
+
277
306
  exit 0
@@ -42,6 +42,7 @@ memoria_dir="${cwd}/.memoria"
42
42
  sessions_dir="${memoria_dir}/sessions"
43
43
  rules_dir="${memoria_dir}/rules"
44
44
  patterns_dir="${memoria_dir}/patterns"
45
+ session_links_dir="${memoria_dir}/session-links"
45
46
 
46
47
  # Check if memoria is initialized
47
48
  if [ ! -d "$memoria_dir" ]; then
@@ -92,6 +93,22 @@ if git -C "$cwd" rev-parse --git-dir &> /dev/null 2>&1; then
92
93
  fi
93
94
  fi
94
95
 
96
+ # ============================================
97
+ # Check session-links for master session
98
+ # ============================================
99
+ master_session_id=""
100
+ master_session_path=""
101
+ session_link_file="${session_links_dir}/${file_id}.json"
102
+
103
+ if [ -f "$session_link_file" ]; then
104
+ master_session_id=$(jq -r '.masterSessionId // empty' "$session_link_file" 2>/dev/null || echo "")
105
+ if [ -n "$master_session_id" ]; then
106
+ # Find master session file
107
+ master_session_path=$(find "$sessions_dir" -name "${master_session_id}.json" -type f 2>/dev/null | head -1)
108
+ echo "[memoria] Session linked to master: ${master_session_id}" >&2
109
+ fi
110
+ fi
111
+
95
112
  # ============================================
96
113
  # Find existing session file or create new one
97
114
  # ============================================
@@ -194,6 +211,34 @@ else
194
211
  echo "[memoria] Session initialized: ${session_path}" >&2
195
212
  fi
196
213
 
214
+ # ============================================
215
+ # Update master session workPeriods (if linked)
216
+ # ============================================
217
+ if [ -n "$master_session_id" ] && [ -n "$master_session_path" ] && [ -f "$master_session_path" ]; then
218
+ # Use full session_id for consistency with session-end.sh
219
+ claude_session_id="${session_id:-$session_short_id}"
220
+
221
+ # Check if workPeriod already exists for this claudeSessionId (prevent duplicates on clear/compact)
222
+ existing_period=$(jq --arg cid "$claude_session_id" '.workPeriods // [] | map(select(.claudeSessionId == $cid and .endedAt == null)) | length' "$master_session_path" 2>/dev/null || echo "0")
223
+
224
+ if [ "$existing_period" = "0" ]; then
225
+ # Add new workPeriod entry to master session
226
+ jq --arg claudeSessionId "$claude_session_id" \
227
+ --arg startedAt "$now" '
228
+ .workPeriods = ((.workPeriods // []) + [{
229
+ claudeSessionId: $claudeSessionId,
230
+ startedAt: $startedAt,
231
+ endedAt: null
232
+ }]) |
233
+ .updatedAt = $startedAt
234
+ ' "$master_session_path" > "${master_session_path}.tmp" \
235
+ && mv "${master_session_path}.tmp" "$master_session_path"
236
+ echo "[memoria] Master session workPeriods updated: ${master_session_path}" >&2
237
+ else
238
+ echo "[memoria] Master session workPeriod already exists for this Claude session" >&2
239
+ fi
240
+ fi
241
+
197
242
  # Get relative path for additionalContext
198
243
  # Extract year/month from session_path
199
244
  session_relative_path="${session_path#$cwd/}"
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@hir4ta/memoria",
3
- "version": "0.14.0",
3
+ "version": "0.14.2",
4
4
  "description": "Long-term memory plugin for Claude Code - automated session saving, recording technical decisions, and web dashboard",
5
5
  "keywords": [
6
6
  "claude",
@@ -23,8 +23,8 @@
23
23
  "dev": "vite",
24
24
  "dev:server": "tsx watch dashboard/server/index.ts",
25
25
  "build": "vite build && npm run build:server && npm run build:lib",
26
- "build:server": "esbuild dashboard/server/index.ts --bundle --platform=node --outfile=dist/server.js --format=esm --external:fsevents --external:better-sqlite3",
27
- "build:lib": "esbuild lib/*.ts --bundle --platform=node --outdir=dist/lib --format=esm --external:fsevents --external:better-sqlite3",
26
+ "build:server": "esbuild dashboard/server/index.ts --bundle --platform=node --outfile=dist/server.js --format=esm --external:fsevents",
27
+ "build:lib": "esbuild lib/*.ts --bundle --platform=node --outdir=dist/lib --format=esm --external:fsevents",
28
28
  "preview": "npm run build && node dist/server.js",
29
29
  "lint": "biome check .",
30
30
  "format": "biome format . --write",
@@ -41,12 +41,10 @@
41
41
  "@radix-ui/react-tooltip": "^1.2.8",
42
42
  "@tailwindcss/vite": "^4.0.7",
43
43
  "@tanstack/react-query": "^5.90.20",
44
- "@types/better-sqlite3": "^7.6.13",
45
44
  "@types/node": "^22",
46
45
  "@types/react": "^18",
47
46
  "@types/react-dom": "^18",
48
47
  "@vitejs/plugin-react": "^4.3.4",
49
- "better-sqlite3": "^12.6.2",
50
48
  "class-variance-authority": "^0.7.1",
51
49
  "clsx": "^2.1.1",
52
50
  "esbuild": "^0.24.2",
@@ -18,6 +18,7 @@ Create the `.memoria` directory structure in the current project.
18
18
  4. Create empty rules files:
19
19
  - `.memoria/rules/dev-rules.json`
20
20
  - `.memoria/rules/review-guidelines.json`
21
+ 5. Initialize SQLite database `.memoria/local.db` with the schema
21
22
 
22
23
  Use this JSON template for the rules files:
23
24
  ```json
@@ -29,5 +30,39 @@ Use this JSON template for the rules files:
29
30
  }
30
31
  ```
31
32
 
33
+ For SQLite initialization, run:
34
+ ```bash
35
+ sqlite3 .memoria/local.db < /path/to/memoria/lib/schema.sql
36
+ ```
37
+
38
+ Or if schema.sql is not available, create minimal schema:
39
+ ```bash
40
+ sqlite3 .memoria/local.db "
41
+ CREATE TABLE IF NOT EXISTS interactions (
42
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
43
+ session_id TEXT NOT NULL,
44
+ owner TEXT NOT NULL,
45
+ role TEXT NOT NULL,
46
+ content TEXT NOT NULL,
47
+ thinking TEXT,
48
+ tool_calls TEXT,
49
+ timestamp TEXT NOT NULL,
50
+ is_compact_summary INTEGER DEFAULT 0,
51
+ created_at TEXT DEFAULT (datetime('now'))
52
+ );
53
+ CREATE INDEX IF NOT EXISTS idx_interactions_session ON interactions(session_id);
54
+ CREATE INDEX IF NOT EXISTS idx_interactions_owner ON interactions(owner);
55
+
56
+ CREATE TABLE IF NOT EXISTS pre_compact_backups (
57
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
58
+ session_id TEXT NOT NULL,
59
+ owner TEXT NOT NULL,
60
+ interactions TEXT NOT NULL,
61
+ created_at TEXT DEFAULT (datetime('now'))
62
+ );
63
+ CREATE INDEX IF NOT EXISTS idx_backups_session ON pre_compact_backups(session_id);
64
+ "
65
+ ```
66
+
32
67
  After creation, confirm success and explain that memoria will now track sessions in this project.
33
68
  </instructions>
@@ -55,8 +55,10 @@ Multiple filters can be combined:
55
55
  3. Sort by `createdAt` descending (most recent first)
56
56
  4. Display filtered session list
57
57
  5. If session ID specified, read the JSON file and get details
58
- 6. **Update current session JSON with `resumedFrom` field**
59
- 7. Load session context to resume work
58
+ 6. **Create session-link file** (new master session support)
59
+ 7. **Update master session JSON with `workPeriods` entry**
60
+ 8. **Update current session JSON with `resumedFrom` field** (legacy, for backwards compatibility)
61
+ 9. Load session context to resume work
60
62
 
61
63
  ### File Operations
62
64
 
@@ -70,7 +72,16 @@ Read: .memoria/sessions/{year}/{month}/{id}.json
70
72
  # Get interactions from SQLite (private, local only)
71
73
  sqlite3 .memoria/local.db "SELECT * FROM interactions WHERE session_id = '{id}' ORDER BY timestamp;"
72
74
 
73
- # Update CURRENT session with resumedFrom
75
+ # Create session-link file (NEW - master session support)
76
+ # This links current Claude session to the master memoria session
77
+ Write: .memoria/session-links/{current_session_short_id}.json
78
+ → {"masterSessionId": "{resumed_session_id}", "claudeSessionId": "{current_full_session_id}", "linkedAt": "{now}"}
79
+
80
+ # Update MASTER session with workPeriods entry (NEW)
81
+ Edit: .memoria/sessions/{master_year}/{master_month}/{master_id}.json
82
+ → Add entry to workPeriods array: {"claudeSessionId": "{current_full_session_id}", "startedAt": "{now}", "endedAt": null}
83
+
84
+ # Update CURRENT session with resumedFrom (legacy, for backwards compatibility)
74
85
  Edit: .memoria/sessions/{current_year}/{current_month}/{current_id}.json
75
86
  → Add "resumedFrom": "{resumed_session_id}"
76
87
 
@@ -198,19 +209,65 @@ If you're resuming a session created by another team member, interactions won't
198
209
  - SQLite contains interactions (local, private)
199
210
  - Always update the CURRENT session's JSON with `resumedFrom` to track session chains.
200
211
 
201
- ## Session Chain Tracking
212
+ ## Session Chain Tracking (Master Session Support)
213
+
214
+ When resuming session `abc123` (master) in a new Claude session `xyz789`:
215
+
216
+ ### Step 1: Create session-link file
217
+
218
+ ```bash
219
+ # Create .memoria/session-links/ directory if not exists
220
+ mkdir -p .memoria/session-links/
221
+
222
+ # Write session-link file
223
+ Write: .memoria/session-links/xyz78901.json
224
+ ```
225
+
226
+ ```json
227
+ {
228
+ "masterSessionId": "abc12345",
229
+ "claudeSessionId": "xyz78901-38e9-464d-9b7c-a9cdca203b5e",
230
+ "linkedAt": "2026-01-27T09:10:00Z"
231
+ }
232
+ ```
233
+
234
+ ### Step 2: Update master session workPeriods
235
+
236
+ ```bash
237
+ Edit: .memoria/sessions/{year}/{month}/abc12345.json
238
+ ```
239
+
240
+ Add to `workPeriods` array:
241
+ ```json
242
+ {
243
+ "workPeriods": [
244
+ {"claudeSessionId": "abc12345-...", "startedAt": "...", "endedAt": "..."},
245
+ {"claudeSessionId": "xyz78901-...", "startedAt": "2026-01-27T09:10:00Z", "endedAt": null}
246
+ ]
247
+ }
248
+ ```
249
+
250
+ ### Step 3: Update current session (legacy, backwards compatibility)
251
+
252
+ ```bash
253
+ Edit: .memoria/sessions/{year}/{month}/xyz78901.json
254
+ ```
255
+
256
+ ```json
257
+ {
258
+ "id": "xyz78901",
259
+ "resumedFrom": "abc12345",
260
+ ...
261
+ }
262
+ ```
202
263
 
203
- When resuming session `abc123` in a new session `xyz789`:
264
+ ### Result
204
265
 
205
- 1. Read current session path from additionalContext
206
- 2. Update current session JSON:
207
- ```json
208
- {
209
- "id": "xyz789",
210
- "resumedFrom": "abc123",
211
- ...
212
- }
213
- ```
214
- 3. This creates a chain: `xyz789 ← abc123`
266
+ - **session-link file**: Links Claude session to memoria master session
267
+ - **workPeriods**: Tracks all work periods in the master session
268
+ - **resumedFrom**: Legacy chain tracking (backwards compatible)
215
269
 
216
- The chain allows tracking related sessions over time.
270
+ This design allows:
271
+ 1. Multiple Claude sessions to contribute to one logical memoria session
272
+ 2. `/memoria:save` to merge all data into the master session
273
+ 3. Dashboard to show unified conversation history
@@ -37,16 +37,79 @@ Extract and save all meaningful data from the current session.
37
37
  <phases>
38
38
  Execute all phases in order. Each phase builds on the previous.
39
39
 
40
- - Phase 0: Interactions - Merge preCompactBackups with current conversation
41
- - Phase 1: Summary - Extract session metadata
42
- - Phase 2: Decisions - Save to decisions/
43
- - Phase 3: Patterns - Save to patterns/
44
- - Phase 4: Rules - Extract development standards
40
+ - Phase 0: Master Session - Identify master and merge child sessions
41
+ - Phase 1: Interactions - Merge preCompactBackups with current conversation
42
+ - Phase 2: Summary - Extract session metadata (considering ALL interactions)
43
+ - Phase 3: Decisions - Save to decisions/
44
+ - Phase 4: Patterns - Save to patterns/
45
+ - Phase 5: Rules - Extract development standards
45
46
  </phases>
46
47
 
47
- ### Phase 0: Save Conversation History (interactions)
48
+ ### Phase 0: Identify Master Session and Merge Children
48
49
 
49
- Execute this phase first. Interactions are stored in SQLite (`local.db`) for privacy.
50
+ **Purpose:** Support multiple Claude sessions contributing to one logical memoria session.
51
+
52
+ 1. Get current session path from additionalContext (e.g., `.memoria/sessions/2026/01/xyz78901.json`)
53
+ 2. Get session ID from the path (e.g., `xyz78901`)
54
+
55
+ 3. **Check for session-link file:**
56
+ ```bash
57
+ Read: .memoria/session-links/xyz78901.json
58
+ ```
59
+ If exists, extract `masterSessionId`. If not, current session IS the master.
60
+
61
+ 4. **Find master session file:**
62
+ ```bash
63
+ Glob: .memoria/sessions/**/{masterSessionId}.json
64
+ ```
65
+
66
+ 5. **Find all child sessions linked to this master:**
67
+ ```bash
68
+ # Read all session-link files
69
+ Glob: .memoria/session-links/*.json
70
+
71
+ # Filter by masterSessionId
72
+ for each link:
73
+ if link.masterSessionId == masterSessionId:
74
+ childSessionIds.push(link file's session ID)
75
+ ```
76
+
77
+ 6. **Also check legacy `resumedFrom` chains:**
78
+ ```bash
79
+ # Find sessions where resumedFrom points to master or any child
80
+ Glob: .memoria/sessions/**/*.json
81
+ for each session:
82
+ if session.resumedFrom == masterSessionId or session.resumedFrom in childSessionIds:
83
+ childSessionIds.push(session.id)
84
+ ```
85
+
86
+ 7. **Merge child session data into master:**
87
+ For each child session JSON:
88
+ - Merge `workPeriods` (add any missing entries)
89
+ - Merge `files` (union, deduplicate by path)
90
+ - Merge `discussions` (append unique items)
91
+ - Merge `errors` (append unique items)
92
+ - Merge `metrics.toolUsage` (combine counts)
93
+ - Update `metrics.userMessages` (will be recalculated from SQLite)
94
+
95
+ 8. **Mark child sessions as merged:**
96
+ ```bash
97
+ Edit: .memoria/sessions/{year}/{month}/{childId}.json
98
+ ```
99
+ ```json
100
+ {
101
+ "status": "merged",
102
+ "mergedAt": "2026-01-27T12:00:00Z",
103
+ "masterSessionId": "abc12345"
104
+ }
105
+ ```
106
+
107
+ **Important:** After this phase, all subsequent operations work on the MASTER session.
108
+
109
+ ### Phase 1: Save Conversation History (interactions)
110
+
111
+ Execute this phase after identifying the master session.
112
+ Interactions are stored in SQLite (`local.db`) for privacy.
50
113
  If auto-compact occurred, `pre_compact_backups` table contains earlier conversations.
51
114
 
52
115
  **Storage Location:**
@@ -61,16 +124,16 @@ Without merge: Only 8 interactions saved (data loss)
61
124
  With merge: All 24 interactions saved in SQLite
62
125
  ```
63
126
 
64
- 1. Get session path from additionalContext (e.g., `.memoria/sessions/2026/01/abc12345.json`)
65
- 2. Get session ID from the path (e.g., `abc12345`)
127
+ 1. Use master session ID from Phase 0 (e.g., `abc12345`)
128
+ 2. Collect all related session IDs: `[masterSessionId] + childSessionIds`
66
129
 
67
- 3. **Check for existing data in SQLite**:
130
+ 3. **Check for existing data in SQLite (all related sessions)**:
68
131
  ```bash
69
- # Check for pre_compact_backups (most complete source)
70
- sqlite3 .memoria/local.db "SELECT interactions FROM pre_compact_backups WHERE session_id = 'abc12345' ORDER BY created_at DESC LIMIT 1;"
132
+ # Check for pre_compact_backups from ALL related sessions
133
+ sqlite3 .memoria/local.db "SELECT session_id, interactions FROM pre_compact_backups WHERE session_id IN ('abc12345', 'xyz78901') ORDER BY created_at DESC;"
71
134
 
72
- # Check existing interactions count
73
- sqlite3 .memoria/local.db "SELECT COUNT(*) FROM interactions WHERE session_id = 'abc12345';"
135
+ # Get interactions from ALL related sessions
136
+ sqlite3 .memoria/local.db "SELECT * FROM interactions WHERE session_id IN ('abc12345', 'xyz78901') ORDER BY timestamp ASC;"
74
137
  ```
75
138
 
76
139
  4. **Determine the most complete source**:
@@ -112,10 +175,10 @@ With merge: All 24 interactions saved in SQLite
112
175
 
113
176
  **Note:** Interactions are stored in SQLite for privacy. JSON contains only metadata.
114
177
 
115
- ### Phase 1: Extract Session Data
178
+ ### Phase 2: Extract Session Data
116
179
 
117
- 1. Get session path from additionalContext
118
- 2. Read current session file (already updated with interactions in Phase 0)
180
+ 1. Use master session from Phase 0
181
+ 2. Read master session file (already updated with merged data from Phase 0-1)
119
182
  3. **Scan entire conversation** (including long sessions) to extract:
120
183
 
121
184
  #### Summary
@@ -166,7 +229,7 @@ With merge: All 24 interactions saved in SQLite
166
229
  - **handoff**: stoppedReason, notes, nextSteps
167
230
  - **references**: URLs and files referenced
168
231
 
169
- ### Phase 2: Save to decisions/
232
+ ### Phase 3: Save to decisions/
170
233
 
171
234
  **For each discussion with a clear decision:**
172
235
 
@@ -200,7 +263,7 @@ With merge: All 24 interactions saved in SQLite
200
263
  - No clear decision was made (just discussion)
201
264
  - Similar decision already exists (check by title/topic)
202
265
 
203
- ### Phase 3: Save to patterns/
266
+ ### Phase 4: Save to patterns/
204
267
 
205
268
  **For each error that was solved:**
206
269
 
@@ -242,7 +305,7 @@ With merge: All 24 interactions saved in SQLite
242
305
  - No root cause was identified
243
306
  - Error was environment-specific
244
307
 
245
- ### Phase 4: Extract Rules
308
+ ### Phase 5: Extract Rules
246
309
 
247
310
  Scan conversation for development standards. These include both explicit user
248
311
  instructions and implicit standards from technical discussions.
@@ -362,11 +425,20 @@ Report each phase result:
362
425
  ---
363
426
  **Session saved.**
364
427
 
365
- **Session ID:** abc12345
428
+ **Master Session ID:** abc12345
366
429
  **Path:** .memoria/sessions/2026/01/abc12345.json
367
430
 
368
- **Phase 0 - Interactions:** 15 saved to SQLite (8 from pre_compact_backups + 7 new)
369
- **Phase 1 - Summary:**
431
+ **Phase 0 - Master Session:**
432
+ Master: abc12345
433
+ Children merged: xyz78901, def45678
434
+ Work periods: 3
435
+
436
+ **Phase 1 - Interactions:** 42 saved to SQLite
437
+ - From abc12345: 15 interactions
438
+ - From xyz78901: 18 interactions
439
+ - From def45678: 9 interactions
440
+
441
+ **Phase 2 - Summary:**
370
442
  | Field | Value |
371
443
  |-------|-------|
372
444
  | Title | JWT authentication implementation |
@@ -374,14 +446,14 @@ Report each phase result:
374
446
  | Outcome | success |
375
447
  | Type | implementation |
376
448
 
377
- **Phase 2 - Decisions (2):**
449
+ **Phase 3 - Decisions (2):**
378
450
  - `[jwt-auth-001]` Authentication method selection → decisions/2026/01/
379
451
  - `[token-expiry-001]` Token expiry strategy → decisions/2026/01/
380
452
 
381
- **Phase 3 - Patterns (1):**
453
+ **Phase 4 - Patterns (1):**
382
454
  - `[error-solution]` secretOrPrivateKey must be asymmetric → patterns/user.json
383
455
 
384
- **Phase 4 - Rules:**
456
+ **Phase 5 - Rules:**
385
457
  dev-rules.json:
386
458
  + [code-style] Use early return pattern
387
459
  ~ [architecture] Avoid circular dependencies (skipped: similar exists)
@@ -392,7 +464,7 @@ Report each phase result:
392
464
 
393
465
  If no rules are found, report what was scanned:
394
466
  ```
395
- **Phase 4 - Rules:**
467
+ **Phase 5 - Rules:**
396
468
  Scanned for: user instructions, technical standards from Codex review, security requirements
397
469
  Result: No new rules identified
398
470
  ```