@bunkercache/opencode-memoir 0.2.2 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +114 -20
  2. package/package.json +1 -1
package/dist/index.js CHANGED
@@ -161,12 +161,16 @@ function resolveStoragePaths(worktree, projectId, config) {
     const relativePath = getGitignoreRelativePath(memoryDb.path, worktree);
     if (relativePath) {
       gitignorePaths.push(relativePath);
+      gitignorePaths.push(`${relativePath}-shm`);
+      gitignorePaths.push(`${relativePath}-wal`);
     }
   }
   if (historyDb?.isLocal && historyDb.manageGitignore && !sharedDatabase) {
     const relativePath = getGitignoreRelativePath(historyDb.path, worktree);
     if (relativePath && !gitignorePaths.includes(relativePath)) {
       gitignorePaths.push(relativePath);
+      gitignorePaths.push(`${relativePath}-shm`);
+      gitignorePaths.push(`${relativePath}-wal`);
     }
   }
   return {
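The new push calls account for SQLite's write-ahead-log mode: when a database runs with `journal_mode = WAL`, SQLite keeps a `<db>-wal` log and a `<db>-shm` shared-memory file next to the main database file while it is open, so all three names need to be ignored together. A minimal sketch of how the entries pair up (the `.memoir/memory.db` path is an illustrative placeholder, not taken from the package):

```js
// Sketch: the .gitignore entries that belong together for one SQLite database
// running in WAL mode — the main file plus its -shm and -wal sidecars.
function gitignoreEntriesFor(relativePath) {
  return [relativePath, `${relativePath}-shm`, `${relativePath}-wal`];
}

console.log(gitignoreEntriesFor(".memoir/memory.db"));
// [ ".memoir/memory.db", ".memoir/memory.db-shm", ".memoir/memory.db-wal" ]
```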
@@ -448,6 +452,7 @@ class Logger {
     this.log("error", message, extra);
   }
 }
+
 // src/db/index.ts
 import { existsSync as existsSync4 } from "fs";
 
@@ -1615,6 +1620,37 @@ class ChunkService {
   getRecentSummaryChunks(limit = 5) {
     return this.repository.getRecentSummaryChunks(limit);
   }
+  getRecentChunks(options) {
+    const limit = options?.limit ?? 10;
+    let sql = `
+      SELECT id, session_id, parent_id, depth, child_refs, content, summary,
+             status, created_at, finalized_at, compacted_at, embedding
+      FROM chunks
+    `;
+    const params = [];
+    if (options?.sessionId) {
+      sql += " WHERE session_id = ?";
+      params.push(options.sessionId);
+    }
+    sql += " ORDER BY created_at DESC LIMIT ?";
+    params.push(limit);
+    const stmt = this.db.prepare(sql);
+    const rows = stmt.all(...params);
+    return rows.map((row) => ({
+      id: row.id,
+      sessionId: row.session_id,
+      parentId: row.parent_id,
+      depth: row.depth,
+      childRefs: row.child_refs ? JSON.parse(row.child_refs) : null,
+      content: JSON.parse(row.content),
+      summary: row.summary,
+      status: row.status,
+      createdAt: row.created_at,
+      finalizedAt: row.finalized_at,
+      compactedAt: row.compacted_at,
+      embedding: row.embedding
+    }));
+  }
 }
 var chunkService = null;
 function initializeChunkService(db, config) {
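`getRecentChunks` queries the `chunks` table directly with a parameterized statement, optionally filtering by session and always ordering newest-first. A usage sketch, assuming a `ChunkService` instance is already in hand (the session ID below is made up):

```js
// List the ten most recent chunks for one session, newest first.
const recent = chunkService.getRecentChunks({ sessionId: "ses_example", limit: 10 });
for (const chunk of recent) {
  // summary may be null for chunks that have not been summarized yet
  console.log(chunk.id, chunk.depth, chunk.summary ?? `${chunk.content.messages.length} messages`);
}
```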
@@ -14555,28 +14591,89 @@ function estimateTokens2(text) {
   return Math.ceil(text.length / CHARS_PER_TOKEN2);
 }
 var historyTool = tool({
-  description: `Search session history for past work. Returns compact chunk summaries with IDs that can be expanded.
+  description: `Browse or search session history. Returns chunk summaries with IDs for memoir_expand.
+
+DEFAULTS: Searches current session only. Returns recent chunks if no query provided.
 
-CONTEXT BUDGET: Results are summaries (~50-200 tokens each). To view full chunk content, use memoir_expand. For deep exploration of multiple chunks, consider delegating to a subagent.`,
+OPTIONS:
+- query: Search text (omit to list recent chunks)
+- all_sessions: true to include past sessions
+- limit: Max results (default 10)
+
+Use memoir_expand({ chunk_id }) to see full content of any chunk.`,
   args: {
-    query: tool.schema.string().describe("Search query for finding relevant history"),
-    session_id: tool.schema.string().optional().describe("Limit search to a specific session"),
-    depth: tool.schema.number().optional().describe("Minimum depth to search (0 = original chunks, higher = summaries)"),
-    limit: tool.schema.number().optional().describe("Maximum number of results to return (default: 10, max recommended: 20)")
+    query: tool.schema.string().optional().describe("Search query (omit to list recent chunks without searching)"),
+    all_sessions: tool.schema.boolean().optional().describe("Include past sessions (default: current session only)"),
+    session_ids: tool.schema.array(tool.schema.string()).optional().describe("Search specific session IDs"),
+    depth: tool.schema.number().optional().describe("Minimum chunk depth (0=original, 1+=summaries)"),
+    limit: tool.schema.number().optional().describe("Max results (default: 10)")
   },
-  async execute(args) {
+  async execute(args, context) {
     const chunkService2 = getChunkService();
-    const results = chunkService2.search(args.query, {
-      sessionId: args.session_id,
+    let sessionId;
+    let searchScope;
+    if (args.session_ids && args.session_ids.length > 0) {
+      sessionId = args.session_ids[0];
+      searchScope = `sessions: ${args.session_ids.join(", ")}`;
+    } else if (args.all_sessions) {
+      sessionId = undefined;
+      searchScope = "all sessions";
+    } else {
+      sessionId = context.sessionID;
+      searchScope = "current session";
+    }
+    const limit = args.limit ?? 10;
+    const query = args.query?.trim();
+    if (!query) {
+      const recentChunks = chunkService2.getRecentChunks({
+        sessionId,
+        limit
+      });
+      if (recentChunks.length === 0) {
+        return JSON.stringify({
+          success: true,
+          count: 0,
+          scope: searchScope,
+          mode: "recent",
+          message: `No chunks found in ${searchScope}`,
+          hint: args.all_sessions ? undefined : "Try with all_sessions: true to see past sessions"
+        });
+      }
+      const formatted2 = recentChunks.map((c) => ({
+        id: c.id,
+        sessionId: c.sessionId,
+        depth: c.depth,
+        status: c.status,
+        summary: c.summary || `${c.content.messages.length} messages`,
+        created: new Date(c.createdAt * 1000).toISOString(),
+        stats: {
+          messages: c.content.messages.length,
+          files_modified: c.content.metadata.files_modified?.length || 0
+        }
+      }));
+      return JSON.stringify({
+        success: true,
+        count: formatted2.length,
+        scope: searchScope,
+        mode: "recent",
+        chunks: formatted2,
+        hint: 'Use memoir_expand({ chunk_id: "ch_xxx" }) to see full content.'
+      });
+    }
+    const results = chunkService2.search(query, {
+      sessionId,
       depth: args.depth,
-      limit: args.limit
+      limit
     });
     if (results.length === 0) {
       return JSON.stringify({
         success: true,
         count: 0,
-        estimated_tokens: 50,
-        message: "No matching chunks found"
+        scope: searchScope,
+        mode: "search",
+        query,
+        message: `No matches for "${query}" in ${searchScope}`,
+        hint: args.all_sessions ? undefined : "Try with all_sessions: true to search past sessions"
       });
     }
     const formatted = results.map((r) => {
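The rewritten `execute` has two modes: with no `query` it lists recent chunks via `getRecentChunks`, and with a `query` it falls through to `chunkService2.search`. Scope resolution prefers explicit `session_ids`, then `all_sessions`, then the calling session. Example argument payloads, using the option names from the schema above (the query text and session IDs are invented):

```js
// Browse mode: no query, so the tool returns recent chunks from the current session.
const browseCurrent = {};

// Browse mode across every session, capped at 5 results.
const browseAll = { all_sessions: true, limit: 5 };

// Search mode restricted to specific sessions.
// Note: only the first ID is used for filtering in this implementation.
const searchSpecific = { query: "wal checkpoint", session_ids: ["ses_a", "ses_b"] };
```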
@@ -14607,10 +14704,13 @@ CONTEXT BUDGET: Results are summaries (~50-200 tokens each). To view full chunk
     const response = {
       success: true,
       count: results.length,
+      scope: searchScope,
+      mode: "search",
+      query,
       estimated_tokens: estimatedTokens,
       estimated_expanded_tokens: estimatedExpandedTokens,
       chunks: formatted,
-      hint: 'Use memoir_expand({ chunk_id: "ch_xxx", preview_only: true }) to check size before full expansion.'
+      hint: 'Use memoir_expand({ chunk_id: "ch_xxx" }) to see full content.'
     };
     if (estimatedExpandedTokens > LARGE_RESULT_TOKEN_THRESHOLD && results.length > 3) {
       response.warning = `Expanding all ${results.length} results would use ~${estimatedExpandedTokens} tokens. Consider using memoir_expand with preview_only=true first, or delegate detailed analysis to a subagent.`;
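A non-empty search response now carries `scope`, `mode`, and `query` alongside the existing token estimates. A sketch of what a caller might receive (field names come from the code above; the concrete values are illustrative):

```js
// Example of the JSON.stringify-ed search response, shown as an object literal.
const exampleResponse = {
  success: true,
  count: 2,
  scope: "current session",
  mode: "search",
  query: "wal checkpoint",
  estimated_tokens: 240,
  estimated_expanded_tokens: 3100,
  chunks: [/* one summary object per result */],
  hint: 'Use memoir_expand({ chunk_id: "ch_xxx" }) to see full content.'
  // a `warning` string is added when expanding every result would exceed the token threshold
};
```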
@@ -14678,12 +14778,6 @@ var MemoirPlugin = async (ctx) => {
 };
 var src_default = MemoirPlugin;
 export {
-  getMessageTracker,
-  getMemoryService,
-  getChunkService,
   src_default as default,
-  MemoirPlugin,
-  Logger,
-  DatabaseService,
-  ConfigService
+  MemoirPlugin
 };
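With the named service and utility exports dropped, the bundle's public surface is just the plugin itself. Assuming the package's entry point resolves to this bundle (an assumption; the full package.json is not shown here), a consumer import would look like:

```js
// Either binding resolves to the same plugin function.
import MemoirPlugin from "@bunkercache/opencode-memoir";
import { MemoirPlugin as MemoirPluginNamed } from "@bunkercache/opencode-memoir";
```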
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@bunkercache/opencode-memoir",
-  "version": "0.2.2",
+  "version": "0.3.0",
   "description": "A smart memory management plugin for OpenCode that supports nested aggregation of memory, summaries, and file changes that compact in layers with upstream references for lookups.",
   "author": {
     "name": "Chris Tunbridge",