@psiclawops/hypermem 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (94) hide show
  1. package/ARCHITECTURE.md +296 -0
  2. package/LICENSE +190 -0
  3. package/README.md +243 -0
  4. package/dist/background-indexer.d.ts +117 -0
  5. package/dist/background-indexer.d.ts.map +1 -0
  6. package/dist/background-indexer.js +732 -0
  7. package/dist/compaction-fence.d.ts +89 -0
  8. package/dist/compaction-fence.d.ts.map +1 -0
  9. package/dist/compaction-fence.js +153 -0
  10. package/dist/compositor.d.ts +139 -0
  11. package/dist/compositor.d.ts.map +1 -0
  12. package/dist/compositor.js +1109 -0
  13. package/dist/cross-agent.d.ts +57 -0
  14. package/dist/cross-agent.d.ts.map +1 -0
  15. package/dist/cross-agent.js +254 -0
  16. package/dist/db.d.ts +131 -0
  17. package/dist/db.d.ts.map +1 -0
  18. package/dist/db.js +398 -0
  19. package/dist/desired-state-store.d.ts +100 -0
  20. package/dist/desired-state-store.d.ts.map +1 -0
  21. package/dist/desired-state-store.js +212 -0
  22. package/dist/doc-chunk-store.d.ts +115 -0
  23. package/dist/doc-chunk-store.d.ts.map +1 -0
  24. package/dist/doc-chunk-store.js +278 -0
  25. package/dist/doc-chunker.d.ts +99 -0
  26. package/dist/doc-chunker.d.ts.map +1 -0
  27. package/dist/doc-chunker.js +324 -0
  28. package/dist/episode-store.d.ts +48 -0
  29. package/dist/episode-store.d.ts.map +1 -0
  30. package/dist/episode-store.js +135 -0
  31. package/dist/fact-store.d.ts +57 -0
  32. package/dist/fact-store.d.ts.map +1 -0
  33. package/dist/fact-store.js +175 -0
  34. package/dist/fleet-store.d.ts +144 -0
  35. package/dist/fleet-store.d.ts.map +1 -0
  36. package/dist/fleet-store.js +276 -0
  37. package/dist/hybrid-retrieval.d.ts +60 -0
  38. package/dist/hybrid-retrieval.d.ts.map +1 -0
  39. package/dist/hybrid-retrieval.js +340 -0
  40. package/dist/index.d.ts +611 -0
  41. package/dist/index.d.ts.map +1 -0
  42. package/dist/index.js +1042 -0
  43. package/dist/knowledge-graph.d.ts +110 -0
  44. package/dist/knowledge-graph.d.ts.map +1 -0
  45. package/dist/knowledge-graph.js +305 -0
  46. package/dist/knowledge-store.d.ts +72 -0
  47. package/dist/knowledge-store.d.ts.map +1 -0
  48. package/dist/knowledge-store.js +241 -0
  49. package/dist/library-schema.d.ts +22 -0
  50. package/dist/library-schema.d.ts.map +1 -0
  51. package/dist/library-schema.js +717 -0
  52. package/dist/message-store.d.ts +76 -0
  53. package/dist/message-store.d.ts.map +1 -0
  54. package/dist/message-store.js +273 -0
  55. package/dist/preference-store.d.ts +54 -0
  56. package/dist/preference-store.d.ts.map +1 -0
  57. package/dist/preference-store.js +109 -0
  58. package/dist/preservation-gate.d.ts +82 -0
  59. package/dist/preservation-gate.d.ts.map +1 -0
  60. package/dist/preservation-gate.js +150 -0
  61. package/dist/provider-translator.d.ts +40 -0
  62. package/dist/provider-translator.d.ts.map +1 -0
  63. package/dist/provider-translator.js +349 -0
  64. package/dist/rate-limiter.d.ts +76 -0
  65. package/dist/rate-limiter.d.ts.map +1 -0
  66. package/dist/rate-limiter.js +179 -0
  67. package/dist/redis.d.ts +188 -0
  68. package/dist/redis.d.ts.map +1 -0
  69. package/dist/redis.js +534 -0
  70. package/dist/schema.d.ts +15 -0
  71. package/dist/schema.d.ts.map +1 -0
  72. package/dist/schema.js +203 -0
  73. package/dist/secret-scanner.d.ts +51 -0
  74. package/dist/secret-scanner.d.ts.map +1 -0
  75. package/dist/secret-scanner.js +248 -0
  76. package/dist/seed.d.ts +108 -0
  77. package/dist/seed.d.ts.map +1 -0
  78. package/dist/seed.js +177 -0
  79. package/dist/system-store.d.ts +73 -0
  80. package/dist/system-store.d.ts.map +1 -0
  81. package/dist/system-store.js +182 -0
  82. package/dist/topic-store.d.ts +45 -0
  83. package/dist/topic-store.d.ts.map +1 -0
  84. package/dist/topic-store.js +136 -0
  85. package/dist/types.d.ts +329 -0
  86. package/dist/types.d.ts.map +1 -0
  87. package/dist/types.js +9 -0
  88. package/dist/vector-store.d.ts +132 -0
  89. package/dist/vector-store.d.ts.map +1 -0
  90. package/dist/vector-store.js +498 -0
  91. package/dist/work-store.d.ts +112 -0
  92. package/dist/work-store.d.ts.map +1 -0
  93. package/dist/work-store.js +273 -0
  94. package/package.json +57 -0
@@ -0,0 +1,99 @@
1
+ /**
2
+ * HyperMem Document Chunker
3
+ *
4
+ * Splits markdown documents into semantically coherent chunks for L3 indexing.
5
+ *
6
+ * Design principles:
7
+ * - Chunk by logical section (## / ###), NOT by token count
8
+ * - Each chunk is a self-contained policy/operational unit
9
+ * - Preserve section hierarchy for context assembly
10
+ * - Track source file hash for atomic re-indexing
11
+ * - Idempotent: same source produces same chunks (deterministic IDs)
12
+ *
13
+ * Collections (as defined in ACA offload spec):
14
+ * governance/policy — POLICY.md, shared-fleet
15
+ * governance/charter — CHARTER.md, per-tier (council/director)
16
+ * governance/comms — COMMS.md, shared-fleet
17
+ * operations/agents — AGENTS.md, per-tier
18
+ * operations/tools — TOOLS.md, per-agent
19
+ * memory/decisions — MEMORY.md, per-agent
20
+ * memory/daily — memory/YYYY-MM-DD.md, per-agent
21
+ * identity/soul — SOUL.md, per-agent (always-loaded kernel, but still indexed)
22
+ * identity/job — JOB.md, per-agent (demand-loaded during deliberation)
23
+ */
24
/** A single indexed chunk of a markdown source document. */
export interface DocChunk {
    /** Unique deterministic ID: sha256(collection + sourcePath + sectionPath + sourceHash), truncated to 16 hex chars */
    id: string;
    /** Collection path: governance/policy, operations/tools, etc. */
    collection: string;
    /** Full section path: "§3 > Naming > Single-Name Rule" */
    sectionPath: string;
    /** Section depth (0=root/preamble, 1=#, 2=##, 3=###) */
    depth: number;
    /** The actual text content of this chunk */
    content: string;
    /** Token estimate (rough: chars / 4) */
    tokenEstimate: number;
    /** SHA-256 of the source file at time of chunking */
    sourceHash: string;
    /** Source file path (relative to workspace) */
    sourcePath: string;
    /** Scope: shared-fleet | per-tier | per-agent */
    scope: 'shared-fleet' | 'per-tier' | 'per-agent';
    /** Tier filter (for per-tier scope): council | director | all */
    tier?: string;
    /** Agent ID (for per-agent scope) */
    agentId?: string;
    /** Parent section path (for hierarchy context) */
    parentPath?: string;
}
/** Options controlling how one source document is chunked. */
export interface ChunkOptions {
    collection: string;
    sourcePath: string;
    scope: DocChunk['scope'];
    tier?: string;
    agentId?: string;
    /** Minimum content length to emit a chunk (avoids empty section headers) */
    minContentLen?: number;
    /** Whether to include parent context prefix in chunk content */
    includeParentContext?: boolean;
}
/**
 * Hash a string with SHA-256. Returns the hex-encoded digest.
 */
export declare function hashContent(content: string): string;
/**
 * Chunk a markdown document into semantic sections.
 *
 * Approach:
 * - Level 1 (#) headings become top-level section anchors
 * - Level 2 (##) headings become primary chunks
 * - Level 3 (###) headings become sub-chunks under their parent
 * - Content before the first heading becomes a "preamble" chunk
 * - Empty sections (heading only, no content) are skipped unless minContentLen=0
 *
 * For documents with deeply nested content, we group level-3 sections under
 * their parent level-2 section. This keeps related policy sections together.
 */
export declare function chunkMarkdown(content: string, opts: ChunkOptions): DocChunk[];
/**
 * Chunk a file from disk. The file path becomes the chunk sourcePath.
 */
export declare function chunkFile(filePath: string, opts: Omit<ChunkOptions, 'sourcePath'>): DocChunk[];
/**
 * Standard collection definitions for ACA workspace files.
 * Maps file names to collection paths and scope metadata.
 */
export interface CollectionDef {
    collection: string;
    scope: DocChunk['scope'];
    tier?: string;
    description: string;
}
export declare const ACA_COLLECTIONS: Record<string, CollectionDef>;
/**
 * Infer the collection definition for a file based on its name.
 * Returns undefined if the file is not a known ACA file.
 */
export declare function inferCollection(fileName: string, agentId?: string): CollectionDef | undefined;
99
+ //# sourceMappingURL=doc-chunker.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"doc-chunker.d.ts","sourceRoot":"","sources":["../src/doc-chunker.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;;GAsBG;AAOH,MAAM,WAAW,QAAQ;IACvB,6EAA6E;IAC7E,EAAE,EAAE,MAAM,CAAC;IACX,iEAAiE;IACjE,UAAU,EAAE,MAAM,CAAC;IACnB,0DAA0D;IAC1D,WAAW,EAAE,MAAM,CAAC;IACpB,+CAA+C;IAC/C,KAAK,EAAE,MAAM,CAAC;IACd,4CAA4C;IAC5C,OAAO,EAAE,MAAM,CAAC;IAChB,wCAAwC;IACxC,aAAa,EAAE,MAAM,CAAC;IACtB,qDAAqD;IACrD,UAAU,EAAE,MAAM,CAAC;IACnB,+CAA+C;IAC/C,UAAU,EAAE,MAAM,CAAC;IACnB,iDAAiD;IACjD,KAAK,EAAE,cAAc,GAAG,UAAU,GAAG,WAAW,CAAC;IACjD,iEAAiE;IACjE,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,qCAAqC;IACrC,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,kDAAkD;IAClD,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB;AAED,MAAM,WAAW,YAAY;IAC3B,UAAU,EAAE,MAAM,CAAC;IACnB,UAAU,EAAE,MAAM,CAAC;IACnB,KAAK,EAAE,QAAQ,CAAC,OAAO,CAAC,CAAC;IACzB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,4EAA4E;IAC5E,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,gEAAgE;IAChE,oBAAoB,CAAC,EAAE,OAAO,CAAC;CAChC;AAID;;GAEG;AACH,wBAAgB,WAAW,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,CAEnD;AAgFD;;;;;;;;;;;;GAYG;AACH,wBAAgB,aAAa,CAAC,OAAO,EAAE,MAAM,EAAE,IAAI,EAAE,YAAY,GAAG,QAAQ,EAAE,CA+I7E;AAED;;GAEG;AACH,wBAAgB,SAAS,CAAC,QAAQ,EAAE,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,YAAY,EAAE,YAAY,CAAC,GAAG,QAAQ,EAAE,CAG9F;AAID;;;GAGG;AACH,MAAM,WAAW,aAAa;IAC5B,UAAU,EAAE,MAAM,CAAC;IACnB,KAAK,EAAE,QAAQ,CAAC,OAAO,CAAC,CAAC;IACzB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,WAAW,EAAE,MAAM,CAAC;CACrB;AAED,eAAO,MAAM,eAAe,EAAE,MAAM,CAAC,MAAM,EAAE,aAAa,CA8CzD,CAAC;AAEF;;;GAGG;AACH,wBAAgB,eAAe,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,MAAM,GAAG,aAAa,GAAG,SAAS,CAc7F"}
@@ -0,0 +1,324 @@
1
+ /**
2
+ * HyperMem Document Chunker
3
+ *
4
+ * Splits markdown documents into semantically coherent chunks for L3 indexing.
5
+ *
6
+ * Design principles:
7
+ * - Chunk by logical section (## / ###), NOT by token count
8
+ * - Each chunk is a self-contained policy/operational unit
9
+ * - Preserve section hierarchy for context assembly
10
+ * - Track source file hash for atomic re-indexing
11
+ * - Idempotent: same source produces same chunks (deterministic IDs)
12
+ *
13
+ * Collections (as defined in ACA offload spec):
14
+ * governance/policy — POLICY.md, shared-fleet
15
+ * governance/charter — CHARTER.md, per-tier (council/director)
16
+ * governance/comms — COMMS.md, shared-fleet
17
+ * operations/agents — AGENTS.md, per-tier
18
+ * operations/tools — TOOLS.md, per-agent
19
+ * memory/decisions — MEMORY.md, per-agent
20
+ * memory/daily — memory/YYYY-MM-DD.md, per-agent
21
+ * identity/soul — SOUL.md, per-agent (always-loaded kernel, but still indexed)
22
+ * identity/job — JOB.md, per-agent (demand-loaded during deliberation)
23
+ */
24
+ import { createHash } from 'node:crypto';
25
+ import { readFileSync } from 'node:fs';
26
+ // ─── Core chunker ───────────────────────────────────────────────
27
/**
 * Compute the hex-encoded SHA-256 digest of a string.
 */
export function hashContent(content) {
    const hasher = createHash('sha256');
    hasher.update(content);
    return hasher.digest('hex');
}
33
/**
 * Estimate token count from character length.
 * Rough heuristic: 1 token ≈ 4 chars for English prose.
 */
function estimateTokens(text) {
    const APPROX_CHARS_PER_TOKEN = 4;
    return Math.ceil(text.length / APPROX_CHARS_PER_TOKEN);
}
40
/**
 * Split raw markdown into a flat list of sections.
 *
 * Each emitted section records its heading text, heading level (1-3), the raw
 * heading line, and the trimmed body content that followed it. Content before
 * the first heading is emitted as a level-0 section. Level-4+ headings are
 * treated as plain content (the regex requires 1-3 '#' followed by a space).
 *
 * Fix: lines inside fenced code blocks (``` or ~~~) are no longer mistaken
 * for section headings — fence state is tracked and fenced lines are kept as
 * plain content.
 */
function parseMarkdownSections(markdown) {
    const lines = markdown.split('\n');
    const sections = [];
    let currentLevel = 0;
    let currentHeading = '';
    let currentRaw = '';
    let contentLines = [];
    let inFence = false;
    // Emit the section accumulated so far (skips the empty initial state).
    function flush() {
        if (currentHeading || contentLines.length > 0) {
            sections.push({
                heading: currentHeading,
                level: currentLevel,
                content: contentLines.join('\n').trim(),
                rawHeading: currentRaw,
            });
        }
    }
    for (const line of lines) {
        // Toggle fence state on ```/~~~ so code-block contents never start
        // a new section.
        if (/^(```|~~~)/.test(line)) {
            inFence = !inFence;
            contentLines.push(line);
            continue;
        }
        if (inFence) {
            contentLines.push(line);
            continue;
        }
        const headingMatch = line.match(/^(#{1,3}) (.+)$/);
        if (headingMatch) {
            flush();
            currentRaw = line;
            currentLevel = headingMatch[1].length;
            currentHeading = headingMatch[2].trim();
            contentLines = [];
        }
        else {
            contentLines.push(line);
        }
    }
    flush();
    return sections;
}
85
/**
 * Join a stack of section headings into a " > "-delimited path,
 * dropping empty entries.
 * e.g., ["POLICY.md", "§3 Escalation", "Triggers"] → "POLICY.md > §3 Escalation > Triggers"
 */
function buildSectionPath(stack) {
    const parts = [];
    for (const entry of stack) {
        if (entry) {
            parts.push(entry);
        }
    }
    return parts.join(' > ');
}
92
/**
 * Deterministic 16-hex-char chunk ID derived from collection, source path,
 * section path, and source hash. The source path is part of the key so that
 * identically structured documents in different locations never collide.
 */
function chunkId(collection, sectionPath, sourceHash, sourcePath) {
    const fingerprint = [collection, sourcePath, sectionPath, sourceHash].join('::');
    return createHash('sha256').update(fingerprint).digest('hex').slice(0, 16);
}
101
/**
 * Chunk a markdown document into semantic sections.
 *
 * Strategy:
 * - `#` headings act as top-level anchors; substantial body text directly
 *   under them becomes an "(intro)" chunk.
 * - `##` headings produce the primary chunks; `###` sub-sections are folded
 *   into their parent `##` chunk so related policy text stays together.
 * - A `###` heading with no open `##` parent is emitted standalone.
 * - Text before the first heading becomes a "(preamble)" chunk.
 * - Chunks shorter than opts.minContentLen (default 50 chars) are dropped;
 *   with includeParentContext set, the heading context counts toward the
 *   length check.
 */
export function chunkMarkdown(content, opts) {
    const minLen = opts.minContentLen ?? 50;
    const sourceHash = hashContent(content);
    const parsed = parseMarkdownSections(content);
    const out = [];
    let topHeading = '';   // current # heading
    let subHeading = '';   // current ## heading
    let subBuffer = [];    // accumulated ## body (including folded ### text)
    let collectingSub = false;
    let preambleText = ''; // content seen before the first heading

    const resetSub = () => {
        collectingSub = false;
        subBuffer = [];
    };

    // Emit the accumulated ## chunk, if it clears the length threshold.
    const flushSub = () => {
        if (!collectingSub) {
            return;
        }
        const body = subBuffer.join('\n').trim();
        if (body.length < minLen && !opts.includeParentContext) {
            resetSub();
            return;
        }
        const sectionPath = buildSectionPath([topHeading, subHeading].filter(Boolean));
        const pieces = [
            topHeading ? `# ${topHeading}` : '',
            `## ${subHeading}`,
            '',
            body,
        ];
        const chunkContent = pieces.filter(p => p !== '' || body).join('\n').trim();
        if (chunkContent.length >= minLen) {
            out.push({
                id: chunkId(opts.collection, sectionPath, sourceHash, opts.sourcePath),
                collection: opts.collection,
                sectionPath,
                depth: 2,
                content: chunkContent,
                tokenEstimate: estimateTokens(chunkContent),
                sourceHash,
                sourcePath: opts.sourcePath,
                scope: opts.scope,
                tier: opts.tier,
                agentId: opts.agentId,
                parentPath: topHeading || undefined,
            });
        }
        resetSub();
    };

    for (const section of parsed) {
        switch (section.level) {
            case 1: {
                flushSub();
                topHeading = section.heading;
                subHeading = '';
                // Body text directly under a # heading becomes an intro chunk.
                if (section.content.length >= minLen) {
                    const sectionPath = buildSectionPath([topHeading]);
                    const chunkContent = [`# ${topHeading}`, '', section.content].join('\n').trim();
                    out.push({
                        id: chunkId(opts.collection, sectionPath + '::intro', sourceHash, opts.sourcePath),
                        collection: opts.collection,
                        sectionPath: sectionPath + ' (intro)',
                        depth: 1,
                        content: chunkContent,
                        tokenEstimate: estimateTokens(chunkContent),
                        sourceHash,
                        sourcePath: opts.sourcePath,
                        scope: opts.scope,
                        tier: opts.tier,
                        agentId: opts.agentId,
                    });
                }
                break;
            }
            case 2: {
                flushSub();
                subHeading = section.heading;
                collectingSub = true;
                subBuffer = [];
                if (section.content) {
                    subBuffer.push(section.content);
                }
                break;
            }
            case 3: {
                if (collectingSub) {
                    // Fold ### sub-sections into the open ## chunk.
                    subBuffer.push(`\n### ${section.heading}`);
                    if (section.content) {
                        subBuffer.push(section.content);
                    }
                    break;
                }
                // Orphan ### (no enclosing ##) — emit standalone.
                const sectionPath = buildSectionPath([topHeading, section.heading].filter(Boolean));
                const pieces = [
                    topHeading ? `# ${topHeading}` : '',
                    `### ${section.heading}`,
                    '',
                    section.content,
                ];
                const chunkContent = pieces.filter(p => p !== '' || section.content).join('\n').trim();
                if (chunkContent.length >= minLen) {
                    out.push({
                        id: chunkId(opts.collection, sectionPath, sourceHash, opts.sourcePath),
                        collection: opts.collection,
                        sectionPath,
                        depth: 3,
                        content: chunkContent,
                        tokenEstimate: estimateTokens(chunkContent),
                        sourceHash,
                        sourcePath: opts.sourcePath,
                        scope: opts.scope,
                        tier: opts.tier,
                        agentId: opts.agentId,
                        parentPath: topHeading || undefined,
                    });
                }
                break;
            }
            default:
                // Level 0: content that appeared before the first heading.
                preambleText = section.content.trim();
        }
    }
    flushSub();

    // Emit the preamble (prepended) only if substantial.
    if (preambleText.length >= minLen) {
        out.unshift({
            id: chunkId(opts.collection, '(preamble)', sourceHash, opts.sourcePath),
            collection: opts.collection,
            sectionPath: '(preamble)',
            depth: 0,
            content: preambleText,
            tokenEstimate: estimateTokens(preambleText),
            sourceHash,
            sourcePath: opts.sourcePath,
            scope: opts.scope,
            tier: opts.tier,
            agentId: opts.agentId,
        });
    }
    return out;
}
253
/**
 * Read a file from disk and chunk its contents.
 * The file's path is recorded as the chunk sourcePath.
 */
export function chunkFile(filePath, opts) {
    const markdown = readFileSync(filePath, 'utf-8');
    const fullOpts = { ...opts, sourcePath: filePath };
    return chunkMarkdown(markdown, fullOpts);
}
260
/**
 * Standard collection definitions for ACA workspace files, keyed by exact
 * file name. Scope is one of: shared-fleet | per-tier | per-agent.
 * Daily memory logs (memory/YYYY-MM-DD.md) are matched by pattern in
 * inferCollection() rather than listed here.
 */
export const ACA_COLLECTIONS = {
    'POLICY.md': {
        collection: 'governance/policy',
        scope: 'shared-fleet',
        description: 'Governance policy: escalation triggers, decision states, council procedures, naming rules',
    },
    'CHARTER.md': {
        collection: 'governance/charter',
        scope: 'per-tier',
        description: 'Org charter: mission, director structure, boundaries, escalation, work queue',
    },
    'COMMS.md': {
        collection: 'governance/comms',
        scope: 'shared-fleet',
        description: 'Communications protocol: inter-agent tiers, delegation, platform formatting',
    },
    'AGENTS.md': {
        collection: 'operations/agents',
        scope: 'per-tier',
        description: 'Agent operational guide: boot sequence, identity, memory, messaging, group chats',
    },
    'TOOLS.md': {
        collection: 'operations/tools',
        scope: 'per-agent',
        description: 'Tool and runtime configuration: workspace path, model, key paths, quick commands',
    },
    'SOUL.md': {
        collection: 'identity/soul',
        scope: 'per-agent',
        description: 'Agent soul: core principles, personality, tone, continuity',
    },
    'JOB.md': {
        collection: 'identity/job',
        scope: 'per-agent',
        description: 'Job performance criteria: duties, response contract, council mode, output discipline',
    },
    'MOTIVATIONS.md': {
        collection: 'identity/motivations',
        scope: 'per-agent',
        description: 'Agent motivations: drives, fears, tensions that shape perspective',
    },
    'MEMORY.md': {
        collection: 'memory/decisions',
        scope: 'per-agent',
        description: 'Long-term curated memory: key decisions, lessons, context',
    },
};
307
/**
 * Infer the collection definition for a file based on its name.
 *
 * Daily memory logs (YYYY-MM-DD.md) are recognized by pattern; every other
 * known ACA file is looked up by exact basename in ACA_COLLECTIONS.
 * Returns undefined if the file is not a known ACA file.
 *
 * @param fileName File name or path; both '/' and '\' separators accepted.
 * @param agentId Optional agent ID, used only to label daily-log descriptions.
 */
export function inferCollection(fileName, agentId) {
    // Strip any directory prefix. Splitting on both separators makes
    // Windows-style paths resolve to the same basename as POSIX paths.
    const segments = fileName.split(/[\\/]/);
    const base = segments.pop() ?? fileName;
    // Daily memory files: memory/YYYY-MM-DD.md
    if (/^\d{4}-\d{2}-\d{2}\.md$/.test(base)) {
        return {
            collection: 'memory/daily',
            scope: 'per-agent',
            description: `Daily memory log for ${agentId || 'agent'}`,
        };
    }
    return ACA_COLLECTIONS[base];
}
324
+ //# sourceMappingURL=doc-chunker.js.map
@@ -0,0 +1,48 @@
1
+ /**
2
+ * HyperMem Episode Store
3
+ *
4
+ * Significant events in an agent's lifetime.
5
+ * Lives in the central library DB.
6
+ * Replaces daily log files with structured, queryable episodes.
7
+ */
8
+ import type { DatabaseSync } from 'node:sqlite';
9
+ import type { Episode, EpisodeType } from './types.js';
10
/**
 * Store for significant events ("episodes") in an agent's lifetime,
 * backed by the central library SQLite database.
 */
export declare class EpisodeStore {
    private readonly db;
    constructor(db: DatabaseSync);
    /**
     * Record a new episode. A shared visibility that fails the secret scan
     * is downgraded to 'private' rather than throwing.
     */
    record(agentId: string, eventType: EpisodeType, summary: string, opts?: {
        significance?: number;
        visibility?: string;
        participants?: string[];
        sessionKey?: string;
    }): Episode;
    /**
     * Get recent episodes for an agent, most recent first.
     * Heavily decayed episodes (decay_score >= 0.8) are excluded.
     */
    getRecent(agentId: string, opts?: {
        eventType?: EpisodeType;
        minSignificance?: number;
        limit?: number;
        since?: string;
    }): Episode[];
    /**
     * Get the most significant episodes (across all time).
     * Only lightly decayed episodes (decay_score < 0.5) are considered.
     */
    getMostSignificant(agentId: string, limit?: number): Episode[];
    /**
     * Decay all episodes. Returns the number of rows updated.
     */
    decay(agentId: string, decayRate?: number): number;
    /**
     * Prune fully decayed episodes. Returns the number of rows deleted.
     */
    prune(agentId: string): number;
    /**
     * Get episode summary for a time range (one UTC calendar day,
     * date = 'YYYY-MM-DD'), most significant first.
     */
    getDailySummary(agentId: string, date: string): Episode[];
}
48
+ //# sourceMappingURL=episode-store.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"episode-store.d.ts","sourceRoot":"","sources":["../src/episode-store.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,aAAa,CAAC;AAChD,OAAO,KAAK,EAAE,OAAO,EAAE,WAAW,EAAE,MAAM,YAAY,CAAC;AAsBvD,qBAAa,YAAY;IACX,OAAO,CAAC,QAAQ,CAAC,EAAE;gBAAF,EAAE,EAAE,YAAY;IAE7C;;OAEG;IACH,MAAM,CACJ,OAAO,EAAE,MAAM,EACf,SAAS,EAAE,WAAW,EACtB,OAAO,EAAE,MAAM,EACf,IAAI,CAAC,EAAE;QACL,YAAY,CAAC,EAAE,MAAM,CAAC;QACtB,UAAU,CAAC,EAAE,MAAM,CAAC;QACpB,YAAY,CAAC,EAAE,MAAM,EAAE,CAAC;QACxB,UAAU,CAAC,EAAE,MAAM,CAAC;KACrB,GACA,OAAO;IA0CV;;OAEG;IACH,SAAS,CACP,OAAO,EAAE,MAAM,EACf,IAAI,CAAC,EAAE;QACL,SAAS,CAAC,EAAE,WAAW,CAAC;QACxB,eAAe,CAAC,EAAE,MAAM,CAAC;QACzB,KAAK,CAAC,EAAE,MAAM,CAAC;QACf,KAAK,CAAC,EAAE,MAAM,CAAC;KAChB,GACA,OAAO,EAAE;IA4BZ;;OAEG;IACH,kBAAkB,CAAC,OAAO,EAAE,MAAM,EAAE,KAAK,GAAE,MAAW,GAAG,OAAO,EAAE;IAWlE;;OAEG;IACH,KAAK,CAAC,OAAO,EAAE,MAAM,EAAE,SAAS,GAAE,MAAc,GAAG,MAAM;IAUzD;;OAEG;IACH,KAAK,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM;IAQ9B;;OAEG;IACH,eAAe,CAAC,OAAO,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,GAAG,OAAO,EAAE;CAa1D"}
@@ -0,0 +1,135 @@
1
+ /**
2
+ * HyperMem Episode Store
3
+ *
4
+ * Significant events in an agent's lifetime.
5
+ * Lives in the central library DB.
6
+ * Replaces daily log files with structured, queryable episodes.
7
+ */
8
+ import { isSafeForSharedVisibility, requiresScan } from './secret-scanner.js';
9
/** Current timestamp as an ISO-8601 (UTC) string. */
function nowIso() {
    const now = new Date();
    return now.toISOString();
}
12
/**
 * Map a raw episodes table row (snake_case columns) onto the camelCase
 * Episode shape. Missing visibility defaults to 'org'; participants are
 * stored as a JSON array string and decoded here (null when absent).
 */
function parseEpisodeRow(row) {
    const participants = row.participants ? JSON.parse(row.participants) : null;
    return {
        id: row.id,
        agentId: row.agent_id,
        eventType: row.event_type,
        summary: row.summary,
        significance: row.significance,
        visibility: row.visibility || 'org',
        participants,
        sessionKey: row.session_key || null,
        createdAt: row.created_at,
        decayScore: row.decay_score,
    };
}
26
/**
 * Store for significant events ("episodes") in an agent's lifetime,
 * backed by the central library SQLite database.
 */
export class EpisodeStore {
    db;
    /** @param db Open handle to the library database (node:sqlite DatabaseSync). */
    constructor(db) {
        this.db = db;
    }
    /**
     * Record a new episode.
     *
     * @param agentId Owning agent.
     * @param eventType Episode type tag.
     * @param summary Human-readable event summary.
     * @param opts.significance Weight; defaults to 0.5. An explicit 0 is honored.
     * @param opts.visibility Requested visibility; defaults to 'org'. If it
     *   requires a secret scan and the summary fails, the episode is silently
     *   downgraded to 'private' — better to lose the share than leak a secret.
     * @param opts.participants Participant list (stored as a JSON string).
     * @param opts.sessionKey Originating session, if any.
     * @returns The stored episode, including its new row ID.
     */
    record(agentId, eventType, summary, opts) {
        const now = nowIso();
        // '??' (not '||') so an explicit significance of 0 is not clobbered to 0.5.
        const significance = opts?.significance ?? 0.5;
        // Secret gate: if requested visibility is shared, verify content is clean.
        // Downgrade to 'private' rather than throw.
        let resolvedVisibility = opts?.visibility || 'org';
        if (requiresScan(resolvedVisibility) && !isSafeForSharedVisibility(summary)) {
            resolvedVisibility = 'private';
        }
        const result = this.db.prepare(`
      INSERT INTO episodes (agent_id, event_type, summary, significance,
                            visibility, participants, session_key, created_at, decay_score)
      VALUES (?, ?, ?, ?, ?, ?, ?, ?, 0.0)
    `).run(agentId, eventType, summary, significance, resolvedVisibility, opts?.participants ? JSON.stringify(opts.participants) : null, opts?.sessionKey || null, now);
        const id = Number(result.lastInsertRowid);
        return {
            id,
            agentId,
            eventType,
            summary,
            significance,
            visibility: resolvedVisibility,
            participants: opts?.participants || null,
            sessionKey: opts?.sessionKey || null,
            createdAt: now,
            decayScore: 0,
        };
    }
    /**
     * Get recent episodes for an agent, most recent first.
     * Episodes with decay_score >= 0.8 are excluded.
     *
     * @param opts.minSignificance Inclusive lower bound; an explicit 0 is applied.
     * @param opts.since ISO timestamp; only episodes created strictly after it.
     */
    getRecent(agentId, opts) {
        let sql = 'SELECT * FROM episodes WHERE agent_id = ? AND decay_score < 0.8';
        const params = [agentId];
        if (opts?.eventType) {
            sql += ' AND event_type = ?';
            params.push(opts.eventType);
        }
        // '!= null' (not truthiness) so minSignificance: 0 still adds the filter.
        if (opts?.minSignificance != null) {
            sql += ' AND significance >= ?';
            params.push(opts.minSignificance);
        }
        if (opts?.since) {
            sql += ' AND created_at > ?';
            params.push(opts.since);
        }
        sql += ' ORDER BY created_at DESC';
        if (opts?.limit) {
            sql += ' LIMIT ?';
            params.push(opts.limit);
        }
        const rows = this.db.prepare(sql).all(...params);
        return rows.map(parseEpisodeRow);
    }
    /**
     * Get the most significant episodes (across all time).
     * Only lightly decayed episodes (decay_score < 0.5) are considered.
     */
    getMostSignificant(agentId, limit = 10) {
        const rows = this.db.prepare(`
      SELECT * FROM episodes
      WHERE agent_id = ? AND decay_score < 0.5
      ORDER BY significance DESC, created_at DESC
      LIMIT ?
    `).all(agentId, limit);
        return rows.map(parseEpisodeRow);
    }
    /**
     * Decay all of an agent's episodes by decayRate, capped at 1.0.
     * @returns Number of rows updated.
     */
    decay(agentId, decayRate = 0.005) {
        const result = this.db.prepare(`
      UPDATE episodes
      SET decay_score = MIN(decay_score + ?, 1.0)
      WHERE agent_id = ? AND decay_score < 1.0
    `).run(decayRate, agentId);
        return result.changes;
    }
    /**
     * Prune fully decayed episodes (decay_score >= 1.0).
     * @returns Number of rows deleted.
     */
    prune(agentId) {
        const result = this.db.prepare(`
      DELETE FROM episodes WHERE agent_id = ? AND decay_score >= 1.0
    `).run(agentId);
        return result.changes;
    }
    /**
     * Get all episodes for one UTC calendar day (date = 'YYYY-MM-DD'),
     * most significant first.
     */
    getDailySummary(agentId, date) {
        const startOfDay = `${date}T00:00:00.000Z`;
        const endOfDay = `${date}T23:59:59.999Z`;
        const rows = this.db.prepare(`
      SELECT * FROM episodes
      WHERE agent_id = ?
        AND created_at >= ? AND created_at <= ?
      ORDER BY significance DESC, created_at ASC
    `).all(agentId, startOfDay, endOfDay);
        return rows.map(parseEpisodeRow);
    }
}
135
+ //# sourceMappingURL=episode-store.js.map
@@ -0,0 +1,57 @@
1
+ /**
2
+ * HyperMem Fact Store
3
+ *
4
+ * CRUD operations for facts (extracted knowledge that spans sessions).
5
+ * Facts live in the central library DB, tagged by agent_id.
6
+ * Facts have scope (agent/session/user), confidence, and decay.
7
+ */
8
+ import type { DatabaseSync } from 'node:sqlite';
9
+ import type { Fact, FactScope } from './types.js';
10
/**
 * CRUD store for facts — extracted knowledge that spans sessions.
 * Backed by the central library SQLite database; rows are tagged by agent_id.
 * Facts carry scope (agent/session/user), confidence, and decay.
 */
export declare class FactStore {
    private readonly db;
    constructor(db: DatabaseSync);
    /**
     * Add a new fact. Checks for duplicates by content.
     */
    addFact(agentId: string, content: string, opts?: {
        scope?: FactScope;
        domain?: string;
        confidence?: number;
        visibility?: string;
        sourceType?: string;
        sourceSessionKey?: string;
        sourceRef?: string;
        expiresAt?: string;
    }): Fact;
    /**
     * Get active (non-expired, non-decayed) facts for an agent.
     */
    getActiveFacts(agentId: string, opts?: {
        scope?: FactScope;
        domain?: string;
        limit?: number;
        minConfidence?: number;
    }): Fact[];
    /**
     * Full-text search facts.
     */
    searchFacts(query: string, opts?: {
        agentId?: string;
        domain?: string;
        visibility?: string;
        limit?: number;
    }): Fact[];
    /**
     * Decay all facts by a fixed rate.
     */
    decayFacts(agentId: string, decayRate?: number): number;
    /**
     * Remove expired and fully decayed facts.
     */
    pruneFacts(agentId: string): number;
    /**
     * Get fact count for an agent.
     */
    getFactCount(agentId: string): number;
}
56
+ }
57
+ //# sourceMappingURL=fact-store.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"fact-store.d.ts","sourceRoot":"","sources":["../src/fact-store.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,aAAa,CAAC;AAChD,OAAO,KAAK,EAAE,IAAI,EAAE,SAAS,EAAE,MAAM,YAAY,CAAC;AA0BlD,qBAAa,SAAS;IACR,OAAO,CAAC,QAAQ,CAAC,EAAE;gBAAF,EAAE,EAAE,YAAY;IAE7C;;OAEG;IACH,OAAO,CACL,OAAO,EAAE,MAAM,EACf,OAAO,EAAE,MAAM,EACf,IAAI,CAAC,EAAE;QACL,KAAK,CAAC,EAAE,SAAS,CAAC;QAClB,MAAM,CAAC,EAAE,MAAM,CAAC;QAChB,UAAU,CAAC,EAAE,MAAM,CAAC;QACpB,UAAU,CAAC,EAAE,MAAM,CAAC;QACpB,UAAU,CAAC,EAAE,MAAM,CAAC;QACpB,gBAAgB,CAAC,EAAE,MAAM,CAAC;QAC1B,SAAS,CAAC,EAAE,MAAM,CAAC;QACnB,SAAS,CAAC,EAAE,MAAM,CAAC;KACpB,GACA,IAAI;IA0DP;;OAEG;IACH,cAAc,CACZ,OAAO,EAAE,MAAM,EACf,IAAI,CAAC,EAAE;QACL,KAAK,CAAC,EAAE,SAAS,CAAC;QAClB,MAAM,CAAC,EAAE,MAAM,CAAC;QAChB,KAAK,CAAC,EAAE,MAAM,CAAC;QACf,aAAa,CAAC,EAAE,MAAM,CAAC;KACxB,GACA,IAAI,EAAE;IAkCT;;OAEG;IACH,WAAW,CAAC,KAAK,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE;QAChC,OAAO,CAAC,EAAE,MAAM,CAAC;QACjB,MAAM,CAAC,EAAE,MAAM,CAAC;QAChB,UAAU,CAAC,EAAE,MAAM,CAAC;QACpB,KAAK,CAAC,EAAE,MAAM,CAAC;KAChB,GAAG,IAAI,EAAE;IAoCV;;OAEG;IACH,UAAU,CAAC,OAAO,EAAE,MAAM,EAAE,SAAS,GAAE,MAAa,GAAG,MAAM;IAU7D;;OAEG;IACH,UAAU,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM;IAanC;;OAEG;IACH,YAAY,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM;CAMtC"}