ahok-skill 1.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (141) hide show
  1. package/.prettierrc +8 -0
  2. package/Dockerfile +59 -0
  3. package/RAW_SKILL.md +219 -0
  4. package/README.md +277 -0
  5. package/SKILL.md +58 -0
  6. package/bin/opm.js +268 -0
  7. package/data/openmemory.sqlite +0 -0
  8. package/data/openmemory.sqlite-shm +0 -0
  9. package/data/openmemory.sqlite-wal +0 -0
  10. package/dist/ai/graph.js +293 -0
  11. package/dist/ai/mcp.js +397 -0
  12. package/dist/cli.js +78 -0
  13. package/dist/core/cfg.js +87 -0
  14. package/dist/core/db.js +636 -0
  15. package/dist/core/memory.js +116 -0
  16. package/dist/core/migrate.js +227 -0
  17. package/dist/core/models.js +105 -0
  18. package/dist/core/telemetry.js +57 -0
  19. package/dist/core/types.js +2 -0
  20. package/dist/core/vector/postgres.js +52 -0
  21. package/dist/core/vector/valkey.js +246 -0
  22. package/dist/core/vector_store.js +2 -0
  23. package/dist/index.js +44 -0
  24. package/dist/memory/decay.js +301 -0
  25. package/dist/memory/embed.js +675 -0
  26. package/dist/memory/hsg.js +959 -0
  27. package/dist/memory/reflect.js +131 -0
  28. package/dist/memory/user_summary.js +99 -0
  29. package/dist/migrate.js +9 -0
  30. package/dist/ops/compress.js +255 -0
  31. package/dist/ops/dynamics.js +189 -0
  32. package/dist/ops/extract.js +333 -0
  33. package/dist/ops/ingest.js +214 -0
  34. package/dist/server/index.js +109 -0
  35. package/dist/server/middleware/auth.js +137 -0
  36. package/dist/server/routes/auth.js +186 -0
  37. package/dist/server/routes/compression.js +108 -0
  38. package/dist/server/routes/dashboard.js +399 -0
  39. package/dist/server/routes/docs.js +241 -0
  40. package/dist/server/routes/dynamics.js +312 -0
  41. package/dist/server/routes/ide.js +280 -0
  42. package/dist/server/routes/index.js +33 -0
  43. package/dist/server/routes/keys.js +132 -0
  44. package/dist/server/routes/langgraph.js +61 -0
  45. package/dist/server/routes/memory.js +213 -0
  46. package/dist/server/routes/sources.js +140 -0
  47. package/dist/server/routes/system.js +63 -0
  48. package/dist/server/routes/temporal.js +293 -0
  49. package/dist/server/routes/users.js +101 -0
  50. package/dist/server/routes/vercel.js +57 -0
  51. package/dist/server/server.js +211 -0
  52. package/dist/server.js +3 -0
  53. package/dist/sources/base.js +223 -0
  54. package/dist/sources/github.js +171 -0
  55. package/dist/sources/google_drive.js +166 -0
  56. package/dist/sources/google_sheets.js +112 -0
  57. package/dist/sources/google_slides.js +139 -0
  58. package/dist/sources/index.js +34 -0
  59. package/dist/sources/notion.js +165 -0
  60. package/dist/sources/onedrive.js +143 -0
  61. package/dist/sources/web_crawler.js +166 -0
  62. package/dist/temporal_graph/index.js +20 -0
  63. package/dist/temporal_graph/query.js +240 -0
  64. package/dist/temporal_graph/store.js +116 -0
  65. package/dist/temporal_graph/timeline.js +241 -0
  66. package/dist/temporal_graph/types.js +2 -0
  67. package/dist/utils/chunking.js +60 -0
  68. package/dist/utils/index.js +31 -0
  69. package/dist/utils/keyword.js +94 -0
  70. package/dist/utils/text.js +120 -0
  71. package/nodemon.json +7 -0
  72. package/package.json +50 -0
  73. package/references/api_reference.md +66 -0
  74. package/references/examples.md +45 -0
  75. package/src/ai/graph.ts +363 -0
  76. package/src/ai/mcp.ts +494 -0
  77. package/src/cli.ts +94 -0
  78. package/src/core/cfg.ts +110 -0
  79. package/src/core/db.ts +1052 -0
  80. package/src/core/memory.ts +99 -0
  81. package/src/core/migrate.ts +302 -0
  82. package/src/core/models.ts +107 -0
  83. package/src/core/telemetry.ts +47 -0
  84. package/src/core/types.ts +130 -0
  85. package/src/core/vector/postgres.ts +61 -0
  86. package/src/core/vector/valkey.ts +261 -0
  87. package/src/core/vector_store.ts +9 -0
  88. package/src/index.ts +5 -0
  89. package/src/memory/decay.ts +427 -0
  90. package/src/memory/embed.ts +707 -0
  91. package/src/memory/hsg.ts +1245 -0
  92. package/src/memory/reflect.ts +158 -0
  93. package/src/memory/user_summary.ts +110 -0
  94. package/src/migrate.ts +8 -0
  95. package/src/ops/compress.ts +296 -0
  96. package/src/ops/dynamics.ts +272 -0
  97. package/src/ops/extract.ts +360 -0
  98. package/src/ops/ingest.ts +286 -0
  99. package/src/server/index.ts +159 -0
  100. package/src/server/middleware/auth.ts +156 -0
  101. package/src/server/routes/auth.ts +223 -0
  102. package/src/server/routes/compression.ts +106 -0
  103. package/src/server/routes/dashboard.ts +420 -0
  104. package/src/server/routes/docs.ts +380 -0
  105. package/src/server/routes/dynamics.ts +516 -0
  106. package/src/server/routes/ide.ts +283 -0
  107. package/src/server/routes/index.ts +32 -0
  108. package/src/server/routes/keys.ts +131 -0
  109. package/src/server/routes/langgraph.ts +71 -0
  110. package/src/server/routes/memory.ts +440 -0
  111. package/src/server/routes/sources.ts +111 -0
  112. package/src/server/routes/system.ts +68 -0
  113. package/src/server/routes/temporal.ts +335 -0
  114. package/src/server/routes/users.ts +111 -0
  115. package/src/server/routes/vercel.ts +55 -0
  116. package/src/server/server.js +215 -0
  117. package/src/server.ts +1 -0
  118. package/src/sources/base.ts +257 -0
  119. package/src/sources/github.ts +156 -0
  120. package/src/sources/google_drive.ts +144 -0
  121. package/src/sources/google_sheets.ts +85 -0
  122. package/src/sources/google_slides.ts +115 -0
  123. package/src/sources/index.ts +19 -0
  124. package/src/sources/notion.ts +148 -0
  125. package/src/sources/onedrive.ts +131 -0
  126. package/src/sources/web_crawler.ts +161 -0
  127. package/src/temporal_graph/index.ts +4 -0
  128. package/src/temporal_graph/query.ts +299 -0
  129. package/src/temporal_graph/store.ts +156 -0
  130. package/src/temporal_graph/timeline.ts +319 -0
  131. package/src/temporal_graph/types.ts +41 -0
  132. package/src/utils/chunking.ts +66 -0
  133. package/src/utils/index.ts +25 -0
  134. package/src/utils/keyword.ts +137 -0
  135. package/src/utils/text.ts +115 -0
  136. package/tests/test_api_workspace_management.ts +413 -0
  137. package/tests/test_bulk_delete.ts +267 -0
  138. package/tests/test_omnibus.ts +166 -0
  139. package/tests/test_workspace_management.ts +278 -0
  140. package/tests/verify.ts +104 -0
  141. package/tsconfig.json +15 -0
@@ -0,0 +1,959 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.sector_relationships = exports.reinforcement = exports.hybrid_params = exports.scoring_weights = exports.sectors = exports.sector_configs = void 0;
7
+ exports.classify_content = classify_content;
8
+ exports.calc_decay = calc_decay;
9
+ exports.calc_recency_score = calc_recency_score;
10
+ exports.boosted_sim = boosted_sim;
11
+ exports.compute_simhash = compute_simhash;
12
+ exports.hamming_dist = hamming_dist;
13
+ exports.sigmoid = sigmoid;
14
+ exports.extract_essence = extract_essence;
15
+ exports.compute_token_overlap = compute_token_overlap;
16
+ exports.compute_hybrid_score = compute_hybrid_score;
17
+ exports.create_cross_sector_waypoints = create_cross_sector_waypoints;
18
+ exports.calc_mean_vec = calc_mean_vec;
19
+ exports.create_single_waypoint = create_single_waypoint;
20
+ exports.create_inter_mem_waypoints = create_inter_mem_waypoints;
21
+ exports.create_contextual_waypoints = create_contextual_waypoints;
22
+ exports.expand_via_waypoints = expand_via_waypoints;
23
+ exports.reinforce_waypoints = reinforce_waypoints;
24
+ exports.prune_weak_waypoints = prune_weak_waypoints;
25
+ exports.calc_multi_vec_fusion_score = calc_multi_vec_fusion_score;
26
+ exports.hsg_query = hsg_query;
27
+ exports.run_decay_process = run_decay_process;
28
+ exports.add_hsg_memory = add_hsg_memory;
29
+ exports.reinforce_memory = reinforce_memory;
30
+ exports.update_memory = update_memory;
31
+ const node_crypto_1 = __importDefault(require("node:crypto"));
32
+ const text_1 = require("../utils/text");
33
+ const decay_1 = require("./decay");
34
+ const cfg_1 = require("../core/cfg");
35
+ const index_1 = require("../utils/index");
36
+ exports.sector_configs = {
37
+ episodic: {
38
+ model: "episodic-optimized",
39
+ decay_lambda: 0.015,
40
+ weight: 1.2,
41
+ patterns: [
42
+ /\b(today|yesterday|tomorrow|last\s+(week|month|year)|next\s+(week|month|year))\b/i,
43
+ /\b(remember\s+when|recall|that\s+time|when\s+I|I\s+was|we\s+were)\b/i,
44
+ /\b(went|saw|met|felt|heard|visited|attended|participated)\b/i,
45
+ /\b(at\s+\d{1,2}:\d{2}|on\s+(monday|tuesday|wednesday|thursday|friday|saturday|sunday))\b/i,
46
+ /\b(event|moment|experience|incident|occurrence|happened)\b/i,
47
+ /\bI\s+'?m\s+going\s+to\b/i,
48
+ ],
49
+ },
50
+ semantic: {
51
+ model: "semantic-optimized",
52
+ decay_lambda: 0.005,
53
+ weight: 1.0,
54
+ patterns: [
55
+ /\b(is\s+a|represents|means|stands\s+for|defined\s+as)\b/i,
56
+ /\b(concept|theory|principle|law|hypothesis|theorem|axiom)\b/i,
57
+ /\b(fact|statistic|data|evidence|proof|research|study|report)\b/i,
58
+ /\b(capital|population|distance|weight|height|width|depth)\b/i,
59
+ /\b(history|science|geography|math|physics|biology|chemistry)\b/i,
60
+ /\b(know|understand|learn|read|write|speak)\b/i,
61
+ ],
62
+ },
63
+ procedural: {
64
+ model: "procedural-optimized",
65
+ decay_lambda: 0.008,
66
+ weight: 1.1,
67
+ patterns: [
68
+ /\b(how\s+to|step\s+by\s+step|guide|tutorial|manual|instructions)\b/i,
69
+ /\b(first|second|then|next|finally|afterwards|lastly)\b/i,
70
+ /\b(install|run|execute|compile|build|deploy|configure|setup)\b/i,
71
+ /\b(click|press|type|enter|select|drag|drop|scroll)\b/i,
72
+ /\b(method|function|class|algorithm|routine|recipie)\b/i,
73
+ /\b(to\s+do|to\s+make|to\s+build|to\s+create)\b/i,
74
+ ],
75
+ },
76
+ emotional: {
77
+ model: "emotional-optimized",
78
+ decay_lambda: 0.02,
79
+ weight: 1.3,
80
+ patterns: [
81
+ /\b(feel|feeling|felt|emotions?|mood|vibe)\b/i,
82
+ /\b(happy|sad|angry|mad|excited|scared|anxious|nervous|depressed)\b/i,
83
+ /\b(love|hate|like|dislike|adore|detest|enjoy|loathe)\b/i,
84
+ /\b(amazing|terrible|awesome|awful|wonderful|horrible|great|bad)\b/i,
85
+ /\b(frustrated|confused|overwhelmed|stressed|relaxed|calm)\b/i,
86
+ /\b(wow|omg|yay|nooo|ugh|sigh)\b/i,
87
+ /[!]{2,}/,
88
+ ],
89
+ },
90
+ reflective: {
91
+ model: "reflective-optimized",
92
+ decay_lambda: 0.001,
93
+ weight: 0.8,
94
+ patterns: [
95
+ /\b(realize|realized|realization|insight|epiphany)\b/i,
96
+ /\b(think|thought|thinking|ponder|contemplate|reflect)\b/i,
97
+ /\b(understand|understood|understanding|grasp|comprehend)\b/i,
98
+ /\b(pattern|trend|connection|link|relationship|correlation)\b/i,
99
+ /\b(lesson|moral|takeaway|conclusion|summary|implication)\b/i,
100
+ /\b(feedback|review|analysis|evaluation|assessment)\b/i,
101
+ /\b(improve|grow|change|adapt|evolve)\b/i,
102
+ ],
103
+ },
104
+ };
105
// Sector name list, in sector_configs declaration order.
exports.sectors = Object.keys(exports.sector_configs);
// Relative weights of each retrieval signal in compute_hybrid_score.
exports.scoring_weights = {
    similarity: 0.35,
    overlap: 0.20,
    waypoint: 0.15,
    recency: 0.10,
    tag_match: 0.20,
};
// Tuning constants for hybrid scoring, decay, and co-activation learning.
exports.hybrid_params = {
    tau: 3, // similarity boost sharpness (see boosted_sim)
    beta: 2, // softmax temperature for mean-vector fusion (calc_mean_vec)
    eta: 0.1, // co-activation learning rate (background setInterval worker)
    gamma: 0.2, // NOTE(review): not referenced in this file — confirm external use
    alpha_reinforce: 0.08, // reinforcement floor magnitude in calc_decay
    t_days: 7, // recency e-folding time, in days (calc_recency_score)
    t_max_days: 60, // recency hard horizon, in days
    tau_hours: 1, // co-activation temporal window, in hours
    epsilon: 1e-8, // numeric stabilizer for vector normalization
};
// Caps and thresholds for salience/waypoint reinforcement and pruning.
exports.reinforcement = {
    salience_boost: 0.1,
    waypoint_boost: 0.05,
    max_salience: 1.0,
    max_waypoint_weight: 1.0,
    prune_threshold: 0.05, // waypoints below this weight are pruned
};
// Sector relationship matrix for cross-sector retrieval.
// Higher values = stronger relationship = less penalty.
exports.sector_relationships = {
    semantic: { procedural: 0.8, episodic: 0.6, reflective: 0.7, emotional: 0.4 },
    procedural: { semantic: 0.8, episodic: 0.6, reflective: 0.6, emotional: 0.3 },
    episodic: { reflective: 0.8, semantic: 0.6, procedural: 0.6, emotional: 0.7 },
    reflective: { episodic: 0.8, semantic: 0.7, procedural: 0.6, emotional: 0.6 },
    emotional: { episodic: 0.7, reflective: 0.6, semantic: 0.4, procedural: 0.3 },
};
140
// Detect explicit temporal markers in a query (relative day words,
// ISO/compact dates, month-day phrases, or "what did I do"-style
// questions). A hit widens retrieval to all sectors.
function has_temporal_markers(text) {
    const markers = [
        /\b(today|yesterday|tomorrow|this\s+week|last\s+week|this\s+morning)\b/i,
        /\b\d{4}-\d{2}-\d{2}\b/, // ISO date such as 2025-11-20
        /\b20\d{2}[/-]?(0[1-9]|1[0-2])[/-]?(0[1-9]|[12]\d|3[01])\b/, // compact date
        /\b(january|february|march|april|may|june|july|august|september|october|november|december)\s+\d{1,2}/i,
        /\bwhat\s+(did|have)\s+(i|we)\s+(do|done)\b/i,
    ];
    for (const re of markers) {
        if (re.test(text))
            return true;
    }
    return false;
}
151
// Score how well a memory's stored tags overlap the query tokens.
// Exact tag hits count double; substring overlaps (either direction)
// count once each. Normalized into [0, 1] against 2 points per tag.
// Returns 0 for missing/malformed tags.
async function compute_tag_match_score(memory_id, query_tokens) {
    const mem = await db_1.q.get_mem.get(memory_id);
    if (!mem?.tags)
        return 0;
    try {
        const parsed = JSON.parse(mem.tags);
        if (!Array.isArray(parsed))
            return 0;
        let hits = 0;
        for (const raw_tag of parsed) {
            const tag = String(raw_tag).toLowerCase();
            if (query_tokens.has(tag)) {
                hits += 2; // exact match bonus
                continue;
            }
            // Fall back to partial (substring) matching against each token.
            for (const tok of query_tokens) {
                if (tag.includes(tok) || tok.includes(tag))
                    hits += 1;
            }
        }
        return Math.min(1.0, hits / Math.max(1, parsed.length * 2));
    }
    catch {
        // tags column held invalid JSON — treat as no match
        return 0;
    }
}
182
// Down-sample a vector to `target_dim` by mean-pooling contiguous
// buckets, then L2-normalize. Vectors already at or below the target
// dimension are returned untouched (same array instance).
const compress_vec_for_storage = (vec, target_dim) => {
    if (vec.length <= target_dim)
        return vec;
    const pooled = new Float32Array(target_dim);
    const stride = vec.length / target_dim;
    for (let i = 0; i < target_dim; i++) {
        const lo = Math.floor(i * stride);
        const hi = Math.floor((i + 1) * stride);
        let acc = 0;
        let n = 0;
        for (let j = lo; j < hi && j < vec.length; j++) {
            acc += vec[j];
            n++;
        }
        pooled[i] = n > 0 ? acc / n : 0;
    }
    // L2-normalize in place (skip when the vector is all zeros).
    let mag = 0;
    for (let i = 0; i < target_dim; i++)
        mag += pooled[i] * pooled[i];
    mag = Math.sqrt(mag);
    if (mag > 0) {
        for (let i = 0; i < target_dim; i++)
            pooled[i] /= mag;
    }
    return Array.from(pooled);
};
206
// Classify content into a primary memory sector plus any runner-up
// sectors scoring at least 30% of the leader. An explicit, recognized
// metadata.sector short-circuits pattern matching at full confidence.
// Falls back to "semantic" (confidence 0.2) when nothing matches.
function classify_content(content, metadata) {
    if (metadata?.sector && exports.sectors.includes(metadata.sector)) {
        return { primary: metadata.sector, additional: [], confidence: 1.0 };
    }
    // Score each sector: weight points per pattern that matches.
    const scores = {};
    for (const [name, cfg] of Object.entries(exports.sector_configs)) {
        let total = 0;
        for (const re of cfg.patterns) {
            const m = content.match(re);
            if (m)
                total += m.length * cfg.weight;
        }
        scores[name] = total;
    }
    const ranked = Object.entries(scores).sort(([, a], [, b]) => b - a);
    const [top_name, top_score] = ranked[0];
    const cutoff = Math.max(1, top_score * 0.3);
    const additional = ranked
        .slice(1)
        .filter(([, s]) => s > 0 && s >= cutoff)
        .map(([name]) => name);
    // Confidence compares the leader to the runner-up, with +1 smoothing.
    const confidence = top_score > 0
        ? Math.min(1.0, top_score / (top_score + (ranked[1]?.[1] || 0) + 1))
        : 0.2;
    return {
        primary: top_score > 0 ? top_name : "semantic",
        additional,
        confidence,
    };
}
243
// Exponential salience decay for a sector. Later segments decay slower
// (sqrt ramp on seg_idx/max_seg), and a small reinforcement floor grows
// toward alpha_reinforce over time. Result is clamped into [0, 1];
// unknown sectors return the initial salience unchanged.
function calc_decay(sec, init_sal, days_since, seg_idx, max_seg) {
    const cfg = exports.sector_configs[sec];
    if (!cfg)
        return init_sal;
    let lambda = cfg.decay_lambda;
    if (seg_idx !== undefined && max_seg !== undefined && max_seg > 0) {
        // Deeper segments get a proportionally slower decay rate.
        lambda *= 1 - Math.sqrt(seg_idx / max_seg);
    }
    const decay_term = init_sal * Math.exp(-lambda * days_since);
    const floor_term = exports.hybrid_params.alpha_reinforce * (1 - Math.exp(-lambda * days_since));
    return Math.max(0, Math.min(1, decay_term + floor_term));
}
256
// Recency score: exponential freshness over t_days, linearly damped by
// t_max_days. NOTE(review): the score goes negative once the age
// exceeds t_max_days (60d), acting as an active penalty in the hybrid
// score — confirm this is intentional.
function calc_recency_score(last_seen) {
    const age_days = (Date.now() - last_seen) / 86400000; // ms per day
    const { t_days, t_max_days } = exports.hybrid_params;
    return Math.exp(-age_days / t_days) * (1 - age_days / t_max_days);
}
263
// Saturating similarity boost: maps raw similarity s through
// 1 - e^(-tau*s), compressing high similarities toward 1.
function boosted_sim(s) {
    return 1 - Math.exp(-exports.hybrid_params.tau * s);
}
266
// 64-bit SimHash over the canonical token set, returned as a 16-char
// hex string. Each token is hashed with a 32-bit djb2-style hash.
// NOTE(review): `1 << i` wraps at i = 32 in JS, so bit tests for
// i >= 32 repeat bits 0..31 and the upper half of the hash mirrors the
// lower half. Changing this would invalidate hashes already stored, so
// the behavior is preserved exactly.
function compute_simhash(text) {
    const tokens = (0, text_1.canonical_token_set)(text);
    const counts = new Array(64).fill(0);
    for (const tok of tokens) {
        // 32-bit rolling hash of the token.
        let h = 0;
        for (let i = 0; i < tok.length; i++) {
            h = (h << 5) - h + tok.charCodeAt(i);
            h = h & h; // coerce to 32-bit integer
        }
        // Vote each bit position up or down.
        for (let bit = 0; bit < 64; bit++) {
            counts[bit] += h & (1 << bit) ? 1 : -1;
        }
    }
    // Fold the 64 votes into 16 hex nibbles (positive vote => 1 bit).
    let hex = "";
    for (let i = 0; i < 64; i += 4) {
        const nibble = (counts[i] > 0 ? 8 : 0) +
            (counts[i + 1] > 0 ? 4 : 0) +
            (counts[i + 2] > 0 ? 2 : 0) +
            (counts[i + 3] > 0 ? 1 : 0);
        hex += nibble.toString(16);
    }
    return hex;
}
295
// Hamming distance between two equal-length hex-string hashes,
// counted bit-by-bit within each hex digit.
function hamming_dist(hash1, hash2) {
    let bits = 0;
    for (let i = 0; i < hash1.length; i++) {
        let x = parseInt(hash1[i], 16) ^ parseInt(hash2[i], 16);
        // Count set bits of the 4-bit xor value.
        while (x) {
            bits += x & 1;
            x >>= 1;
        }
    }
    return bits;
}
307
// Logistic squashing function: maps any real x into (0, 1).
function sigmoid(x) {
    return 1 / (1 + Math.exp(-x));
}
310
// Compress raw content down to max_len characters by keeping the
// highest-value sentences (headers, dates, quantities, action verbs,
// proper nouns, …) and re-emitting them in original order. Active only
// when the use_summary_only flag is set; short content passes through.
// `sec` is currently unused by the heuristic.
function extract_essence(raw, sec, max_len) {
    if (!cfg_1.env.use_summary_only || raw.length <= max_len)
        return raw;
    // Split on terminal punctuation + whitespace so filenames like
    // "a.b.txt" are not broken apart; drop trivially short fragments.
    const sentences = raw
        .split(/(?<=[.!?])\s+/)
        .map((s) => s.trim())
        .filter((s) => s.length > 10);
    if (sentences.length === 0)
        return raw.slice(0, max_len);
    // Heuristic per-sentence value score.
    const rate = (s, idx) => {
        let pts = 0;
        if (idx === 0)
            pts += 10; // opening sentence: usually the title/header
        if (idx === 1)
            pts += 5; // second sentence often carries key context
        if (/^#+\s/.test(s) || /^[A-Z][A-Z\s]+:/.test(s))
            pts += 8; // markdown header or ALL-CAPS label
        if (/^[A-Z][a-z]+:/i.test(s))
            pts += 6; // "Problem:" / "Solution:" style label
        if (/\d{4}-\d{2}-\d{2}/.test(s))
            pts += 7; // ISO date
        if (/\b(january|february|march|april|may|june|july|august|september|october|november|december)\s+\d+/i.test(s))
            pts += 5; // month-day phrase
        if (/\$\d+|\d+\s*(miles|dollars|years|months|km)/.test(s))
            pts += 4; // money or quantities
        if (/\b[A-Z][a-z]+(?:\s+[A-Z][a-z]+)+/.test(s))
            pts += 3; // proper-noun phrase
        if (/\b(bought|purchased|serviced|visited|went|got|received|paid|earned|learned|discovered|found|saw|met|completed|finished|fixed|implemented|created|updated|added|removed|resolved)\b/i.test(s))
            pts += 4; // concrete action verb
        if (/\b(who|what|when|where|why|how)\b/i.test(s))
            pts += 2;
        if (s.length < 80)
            pts += 2; // short sentences are cheap to keep
        if (/\b(I|my|me)\b/.test(s))
            pts += 1; // first-person detail
        return pts;
    };
    const ranked = sentences.map((text, idx) => ({ text, score: rate(text, idx), idx }));
    ranked.sort((a, b) => b.score - a.score);
    const kept = [];
    let used = 0;
    // The opening sentence is always kept when it fits on its own.
    const opener = ranked.find((s) => s.idx === 0);
    if (opener && opener.text.length < max_len) {
        kept.push(opener);
        used += opener.text.length;
    }
    for (const cand of ranked) {
        if (cand.idx === 0)
            continue; // opener handled above
        if (used + cand.text.length + 2 <= max_len) {
            kept.push(cand);
            used += cand.text.length + 2; // +2 budget for the separator
        }
    }
    // Restore original document order to preserve narrative flow.
    kept.sort((a, b) => a.idx - b.idx);
    return kept.map((s) => s.text).join(" ");
}
377
// Fraction of query tokens that also appear in the memory's token set.
// An empty query token set scores 0.
function compute_token_overlap(q_toks, mem_toks) {
    if (q_toks.size === 0)
        return 0;
    let shared = 0;
    for (const tok of q_toks) {
        if (mem_toks.has(tok))
            shared += 1;
    }
    return shared / q_toks.size;
}
387
// Blend the retrieval signals into one (0, 1) score: boosted cosine
// similarity, token overlap, waypoint weight, recency, and tag match
// are weighted per scoring_weights; keyword_score is added unweighted,
// and the linear sum is squashed through a sigmoid.
function compute_hybrid_score(sim, tok_ov, wp_wt, rec_sc, keyword_score = 0, tag_match = 0) {
    const w = exports.scoring_weights;
    const linear = w.similarity * boosted_sim(sim)
        + w.overlap * tok_ov
        + w.waypoint * wp_wt
        + w.recency * rec_sc
        + w.tag_match * tag_match
        + keyword_score;
    return sigmoid(linear);
}
397
+ const db_1 = require("../core/db");
398
// Link a memory to synthetic "<id>:<sector>" nodes for each additional
// sector it classified into, in both directions, at a fixed 0.5 weight.
// NOTE(review): the ":<sector>" ids are not real memory rows — confirm
// downstream waypoint consumers expect these synthetic nodes.
async function create_cross_sector_waypoints(prim_id, prim_sec, add_secs, user_id) {
    const ts = Date.now();
    const owner = user_id || "anonymous";
    const weight = 0.5;
    for (const sec of add_secs) {
        const shadow_id = `${prim_id}:${sec}`;
        await db_1.q.ins_waypoint.run(prim_id, shadow_id, owner, weight, ts, ts);
        await db_1.q.ins_waypoint.run(shadow_id, prim_id, owner, weight, ts, ts);
    }
}
406
// Softmax-weighted mean of the per-sector embedding vectors, then
// L2-normalized (epsilon-stabilized). The sector's configured weight
// acts as the softmax logit (temperature beta). `secs` is unused.
function calc_mean_vec(emb_res, secs) {
    const dim = emb_res[0].vector.length;
    const beta = exports.hybrid_params.beta;
    // Softmax denominator over each result's sector weight.
    let denom = 0;
    for (const r of emb_res) {
        denom += Math.exp(beta * (exports.sector_configs[r.sector]?.weight || 1.0));
    }
    const acc = new Array(dim).fill(0);
    for (const r of emb_res) {
        const logit = exports.sector_configs[r.sector]?.weight || 1.0;
        const share = Math.exp(beta * logit) / denom;
        for (let i = 0; i < dim; i++)
            acc[i] += r.vector[i] * share;
    }
    const norm = Math.sqrt(acc.reduce((s, v) => s + v * v, 0)) +
        exports.hybrid_params.epsilon;
    return acc.map((v) => v / norm);
}
426
// Attach a new memory to its nearest existing neighbor by cosine
// similarity over stored mean vectors (first 1000 rows, optionally
// scoped to the user), or self-link when no candidate exists.
// NOTE(review): the original declared a 0.75 threshold but never
// applied it — the best match is linked regardless of similarity;
// that behavior is preserved here.
async function create_single_waypoint(new_id, new_mean, ts, user_id) {
    const rows = user_id
        ? await db_1.q.all_mem_by_user.all(user_id, 1000, 0)
        : await db_1.q.all_mem.all(1000, 0);
    const probe = new Float32Array(new_mean);
    let best = null;
    for (const row of rows) {
        if (row.id === new_id || !row.mean_vec)
            continue;
        const sim = (0, index_1.cos_sim)(probe, (0, index_1.buf_to_vec)(row.mean_vec));
        if (!best || sim > best.similarity)
            best = { id: row.id, similarity: sim };
    }
    const owner = user_id || "anonymous";
    if (best) {
        await db_1.q.ins_waypoint.run(new_id, best.id, owner, best.similarity, ts, ts);
    }
    else {
        // No other memories yet: self-link keeps the graph connected.
        await db_1.q.ins_waypoint.run(new_id, new_id, owner, 1.0, ts, ts);
    }
}
448
// Create bidirectional waypoints between a new memory and every vector
// in the same sector whose cosine similarity clears 0.75, at a fixed
// initial weight of 0.5.
async function create_inter_mem_waypoints(new_id, prim_sec, new_vec, ts, user_id) {
    const min_sim = 0.75;
    const init_wt = 0.5;
    const owner = user_id || "anonymous";
    const probe = new Float32Array(new_vec);
    const candidates = await db_1.vector_store.getVectorsBySector(prim_sec);
    for (const cand of candidates) {
        if (cand.id === new_id)
            continue;
        const sim = (0, index_1.cos_sim)(probe, new Float32Array(cand.vector));
        if (sim >= min_sim) {
            await db_1.q.ins_waypoint.run(new_id, cand.id, owner, init_wt, ts, ts);
            await db_1.q.ins_waypoint.run(cand.id, new_id, owner, init_wt, ts, ts);
        }
    }
}
463
// Link a memory to a set of related ids: an existing waypoint gets its
// weight bumped by 0.1 (capped at 1.0); a missing one is created at
// base_wt. Self-links are skipped.
async function create_contextual_waypoints(mem_id, rel_ids, base_wt = 0.3, user_id) {
    const ts = Date.now();
    for (const rel_id of rel_ids) {
        if (mem_id === rel_id)
            continue;
        const existing = await db_1.q.get_waypoint.get(mem_id, rel_id);
        if (existing) {
            const bumped = Math.min(1.0, existing.weight + 0.1);
            await db_1.q.upd_waypoint.run(mem_id, bumped, ts, rel_id);
        }
        else {
            await db_1.q.ins_waypoint.run(mem_id, rel_id, user_id || "anonymous", base_wt, ts, ts);
        }
    }
}
478
// Breadth-first expansion of an initial result set across the waypoint
// graph. Each hop multiplies the accumulated weight by the (clamped)
// edge weight and a 0.8 damping factor; hops below 0.1 weight are
// dropped, and at most max_exp new nodes are added in total. Returns
// all nodes (seeds + expansions) with their weight and traversal path.
async function expand_via_waypoints(init_res, max_exp = 10) {
    const found = [];
    const seen = new Set();
    for (const id of init_res) {
        found.push({ id, weight: 1.0, path: [id] });
        seen.add(id);
    }
    const frontier = [...found];
    let added = 0;
    while (frontier.length > 0 && added < max_exp) {
        const node = frontier.shift();
        const edges = await db_1.q.get_neighbors.all(node.id);
        for (const edge of edges) {
            if (seen.has(edge.dst_id))
                continue;
            // Clamp stored edge weight into [0, 1] to survive bad data.
            const edge_wt = Math.min(1.0, Math.max(0, edge.weight || 0));
            const hop_wt = node.weight * edge_wt * 0.8;
            if (hop_wt < 0.1)
                continue; // too faint to matter
            const next = {
                id: edge.dst_id,
                weight: hop_wt,
                path: [...node.path, edge.dst_id],
            };
            found.push(next);
            seen.add(edge.dst_id);
            frontier.push(next);
            added++;
        }
    }
    return found;
}
511
// Strengthen every edge along a traversal path by waypoint_boost,
// capped at max_waypoint_weight. Edges missing from the DB are skipped.
async function reinforce_waypoints(trav_path) {
    const ts = Date.now();
    for (let i = 0; i + 1 < trav_path.length; i++) {
        const src = trav_path[i];
        const dst = trav_path[i + 1];
        const edge = await db_1.q.get_waypoint.get(src, dst);
        if (!edge)
            continue;
        const boosted = Math.min(exports.reinforcement.max_waypoint_weight, edge.weight + exports.reinforcement.waypoint_boost);
        await db_1.q.upd_waypoint.run(src, boosted, ts, dst);
    }
}
523
// Delete all waypoints below the configured prune threshold.
// NOTE(review): always returns 0 — the statement's row count is not
// surfaced here; confirm callers do not rely on a real count.
async function prune_weak_waypoints() {
    await db_1.q.prune_waypoints.run(exports.reinforcement.prune_threshold);
    return 0;
}
527
+ const embed_1 = require("./embed");
528
+ const chunking_1 = require("../utils/chunking");
529
+ const utils_1 = require("../utils");
530
+ const keyword_1 = require("../utils/keyword");
531
+ const dynamics_1 = require("../ops/dynamics");
532
// Weighted average of per-sector cosine similarities between a memory's
// stored vectors and the query embeddings `qe`, using the query-derived
// dimension weights `w` (0.5 for unknown sectors). Sectors without a
// query embedding are skipped; returns 0 when nothing overlaps.
async function calc_multi_vec_fusion_score(mid, qe, w) {
    const stored = await db_1.vector_store.getVectorsById(mid);
    const sector_wt = {
        semantic: w.semantic_dimension_weight,
        emotional: w.emotional_dimension_weight,
        procedural: w.procedural_dimension_weight,
        episodic: w.temporal_dimension_weight,
        reflective: w.reflective_dimension_weight,
    };
    let weighted_sum = 0;
    let weight_total = 0;
    for (const entry of stored) {
        const query_vec = qe[entry.sector];
        if (!query_vec)
            continue; // no query embedding for this sector
        const sim = (0, embed_1.cosineSimilarity)(query_vec, entry.vector);
        const wt = sector_wt[entry.sector] || 0.5;
        weighted_sum += sim * wt;
        weight_total += wt;
    }
    return weight_total > 0 ? weighted_sum / weight_total : 0;
}
554
// Query-result cache: key "<query>:<k>:<filters-json>" -> { r: results, t: timestamp }.
const cache = new Map();
// Salience cache: memory id -> { s: salience, t: timestamp } (see get_sal).
const sal_cache = new Map();
// vec_cache removed
// Segment cache: segment key -> memory rows, bounded by env.cache_segments.
const seg_cache = new Map();
// Pending co-activation [a, b] id pairs, drained by the interval worker below.
const coact_buf = [];
// Cache entry time-to-live, in milliseconds.
const TTL = 60000;
// Cap left over from the removed vector cache; appears unused now — TODO confirm and drop.
const VEC_CACHE_MAX = 1000;
// Number of in-flight hsg_query calls, used for rate limiting.
let active_queries = 0;
// get_vec removed
563
// Fetch (and memoize) all memory rows for a segment. The cache is
// bounded by env.cache_segments; on overflow the oldest insertion
// (Map iteration order) is evicted.
const get_segment = async (seg) => {
    if (seg_cache.has(seg))
        return seg_cache.get(seg);
    const rows = await db_1.q.get_mem_by_segment.all(seg);
    seg_cache.set(seg, rows);
    if (seg_cache.size > cfg_1.env.cache_segments) {
        const oldest = seg_cache.keys().next().value;
        if (oldest !== undefined)
            seg_cache.delete(oldest);
    }
    return rows;
};
575
// Background co-activation learner: once per second, drain up to 50
// queued memory-id pairs and strengthen (or create) the waypoint
// between them. The update moves the weight toward 1 by eta, scaled by
// a temporal factor that decays with the gap between the two memories'
// last_seen_at timestamps (time constant tau_hours).
// NOTE(review): this interval is never cleared or unref'd, so it keeps
// the process alive — confirm that is intended for library consumers.
setInterval(async () => {
    if (!coact_buf.length)
        return;
    const pairs = coact_buf.splice(0, 50);
    const now = Date.now();
    const tau_ms = exports.hybrid_params.tau_hours * 3600000;
    for (const [a, b] of pairs) {
        try {
            const [memA, memB] = await Promise.all([
                db_1.q.get_mem.get(a),
                db_1.q.get_mem.get(b),
            ]);
            if (!memA || !memB)
                continue;
            // Pairs seen close together in time reinforce more strongly.
            const time_diff = Math.abs(memA.last_seen_at - memB.last_seen_at);
            const temp_fact = Math.exp(-time_diff / tau_ms);
            const wp = await db_1.q.get_waypoint.get(a, b);
            const cur_wt = wp?.weight || 0;
            const new_wt = Math.min(1, cur_wt + exports.hybrid_params.eta * (1 - cur_wt) * temp_fact);
            const user_id = wp?.user_id || memA?.user_id || memB?.user_id || "anonymous";
            await db_1.q.ins_waypoint.run(a, b, user_id, new_wt, wp?.created_at || now, now);
        }
        // Best-effort: a failed pair is dropped silently by design.
        catch (e) { }
    }
}, 1000);
600
// Read a memory's salience through a short-lived cache (TTL ms),
// falling back to def_sal when the row is missing.
const get_sal = async (id, def_sal) => {
    const hit = sal_cache.get(id);
    if (hit && Date.now() - hit.t < TTL)
        return hit.s;
    const row = await db_1.q.get_mem.get(id);
    const sal = row?.salience ?? def_sal;
    sal_cache.set(id, { s: sal, t: Date.now() });
    return sal;
};
609
+ async function hsg_query(qt, k = 10, f) {
610
+ if (active_queries >= cfg_1.env.max_active) {
615
+ throw new Error(`Rate limit: ${active_queries} active queries (max ${cfg_1.env.max_active})`);
616
+ }
617
+ active_queries++;
618
+ (0, decay_1.inc_q)();
619
+ try {
620
+ const h = `${qt}:${k}:${JSON.stringify(f || {})}`;
621
+ const cached = cache.get(h);
622
+ if (cached && Date.now() - cached.t < TTL)
623
+ return cached.r;
624
+ const qc = classify_content(qt);
625
+ const is_temporal = has_temporal_markers(qt);
626
+ const qtk = (0, text_1.canonical_token_set)(qt);
627
+ // Store primary sectors for scoring purposes
628
+ const primary_sectors = [qc.primary, ...qc.additional];
629
+ // Determine which sectors to search
630
+ let ss;
631
+ if (f?.sectors?.length) {
632
+ // User explicitly requested specific sectors
633
+ ss = f.sectors;
634
+ }
635
+ else {
636
+ // IMPORTANT: Search ALL sectors to enable cross-sector retrieval
637
+ // The sector relationship penalty will down-weight less relevant sectors
638
+ ss = [...exports.sectors];
639
+ }
640
+ if (!ss.length)
641
+ ss.push("semantic");
642
+ // Batch embed all sectors in one API call for faster queries
643
+ const qe = await (0, embed_1.embedQueryForAllSectors)(qt, ss);
644
+ const w = {
645
+ semantic_dimension_weight: qc.primary === "semantic" ? 1.2 : 0.8,
646
+ emotional_dimension_weight: qc.primary === "emotional" ? 1.5 : 0.6,
647
+ procedural_dimension_weight: qc.primary === "procedural" ? 1.3 : 0.7,
648
+ temporal_dimension_weight: qc.primary === "episodic" ? 1.4 : 0.7,
649
+ reflective_dimension_weight: qc.primary === "reflective" ? 1.1 : 0.5,
650
+ };
651
+ const sr = {};
652
+ for (const s of ss) {
653
+ const qv = qe[s];
654
+ const results = await db_1.vector_store.searchSimilar(s, qv, k * 3);
655
+ sr[s] = results.map(r => ({ id: r.id, similarity: r.score }));
656
+ }
657
+ const all_sims = Object.values(sr).flatMap((r) => r.slice(0, 8).map((x) => x.similarity));
658
+ const avg_top = all_sims.length
659
+ ? all_sims.reduce((a, b) => a + b, 0) / all_sims.length
660
+ : 0;
661
+ const adapt_exp = Math.ceil(0.3 * k * (1 - avg_top));
662
+ const eff_k = k + adapt_exp;
663
+ const high_conf = avg_top >= 0.55;
664
+ const ids = new Set();
665
+ for (const r of Object.values(sr))
666
+ for (const x of r)
667
+ ids.add(x.id);
668
+ const exp = high_conf
669
+ ? []
670
+ : await expand_via_waypoints(Array.from(ids), k * 2);
671
+ for (const e of exp)
672
+ ids.add(e.id);
673
+ let keyword_scores = new Map();
674
+ if (cfg_1.tier === "hybrid") {
675
+ const all_mems = await Promise.all(Array.from(ids).map(async (id) => {
676
+ const m = await db_1.q.get_mem.get(id);
677
+ return m ? { id, content: m.content } : null;
678
+ }));
679
+ const valid_mems = all_mems.filter((m) => m !== null);
680
+ keyword_scores = await (0, keyword_1.keyword_filter_memories)(qt, valid_mems, 0.05);
681
+ }
682
+ const res = [];
683
+ for (const mid of Array.from(ids)) {
684
+ const m = await db_1.q.get_mem.get(mid);
685
+ if (!m || (f?.minSalience && m.salience < f.minSalience))
686
+ continue;
687
+ if (f?.user_id && m.user_id !== f.user_id)
688
+ continue;
689
+ if (f?.startTime && m.created_at < f.startTime)
690
+ continue;
691
+ if (f?.endTime && m.created_at > f.endTime)
692
+ continue;
693
+ const mvf = await calc_multi_vec_fusion_score(mid, qe, w);
694
+ const csr = await (0, dynamics_1.calculateCrossSectorResonanceScore)(m.primary_sector, qc.primary, mvf);
695
+ let bs = csr, bsec = m.primary_sector;
696
+ for (const [sec, rr] of Object.entries(sr)) {
697
+ const mat = rr.find((r) => r.id === mid);
698
+ if (mat && mat.similarity > bs) {
699
+ bs = mat.similarity;
700
+ bsec = sec;
701
+ }
702
+ }
703
+ // Apply sector relationship penalty for cross-sector results
704
+ const mem_sector = m.primary_sector;
705
+ const query_sector = qc.primary;
706
+ let sector_penalty = 1.0;
707
+ if (mem_sector !== query_sector && !primary_sectors.includes(mem_sector)) {
708
+ // Apply penalty based on sector relationship strength
709
+ sector_penalty = exports.sector_relationships[query_sector]?.[mem_sector] || 0.3;
710
+ }
711
+ const adjusted_sim = bs * sector_penalty;
712
+ const em = exp.find((e) => e.id === mid);
713
+ // Clamp waypoint weight to valid range [0, 1] - protect against corrupted data
714
+ const ww = Math.min(1.0, Math.max(0, em?.weight || 0));
715
+ const ds = (Date.now() - m.last_seen_at) / 86400000;
716
+ const sal = calc_decay(m.primary_sector, m.salience, ds);
717
+ const mtk = (0, text_1.canonical_token_set)(m.content);
718
+ const tok_ov = compute_token_overlap(qtk, mtk);
719
+ const rec_sc = calc_recency_score(m.last_seen_at);
720
+ // Calculate tag match score
721
+ const tag_match = await compute_tag_match_score(mid, qtk);
722
+ const keyword_boost = cfg_1.tier === "hybrid"
723
+ ? (keyword_scores.get(mid) || 0) * cfg_1.env.keyword_boost
724
+ : 0;
725
+ const fs = compute_hybrid_score(adjusted_sim, tok_ov, ww, rec_sc, keyword_boost, tag_match);
726
+ const msec = await db_1.vector_store.getVectorsById(mid);
727
+ const sl = msec.map((v) => v.sector);
728
+ res.push({
729
+ id: mid,
730
+ content: m.content,
731
+ score: fs,
732
+ sectors: sl,
733
+ primary_sector: m.primary_sector,
734
+ path: em?.path || [mid],
735
+ salience: sal,
736
+ last_seen_at: m.last_seen_at,
737
+ tags: typeof m.tags === 'string' ? JSON.parse(m.tags) : (m.tags || []),
738
+ meta: typeof m.meta === 'string' ? JSON.parse(m.meta) : (m.meta || {}),
739
+ });
740
+ }
741
+ res.sort((a, b) => b.score - a.score);
742
+ const top_cands = res.slice(0, eff_k);
743
+ if (top_cands.length > 0) {
744
+ const scores = top_cands.map((r) => r.score);
745
+ const mean = scores.reduce((a, b) => a + b, 0) / scores.length;
746
+ const variance = scores.reduce((sum, s) => sum + Math.pow(s - mean, 2), 0) /
747
+ scores.length;
748
+ const stdDev = Math.sqrt(variance);
749
+ for (const r of top_cands) {
750
+ r.score = (r.score - mean) / (stdDev + exports.hybrid_params.epsilon);
751
+ }
752
+ top_cands.sort((a, b) => b.score - a.score);
753
+ }
754
+ const top = top_cands.slice(0, k);
755
+ const tids = top.map((r) => r.id);
756
+ // Update feedback scores for returned memories (simple learning)
757
+ for (const r of top) {
758
+ const cur_fb = (await db_1.q.get_mem.get(r.id))?.feedback_score || 0;
759
+ const new_fb = cur_fb * 0.9 + r.score * 0.1; // Exponential moving average
760
+ await db_1.q.upd_feedback.run(r.id, new_fb);
761
+ }
762
+ for (let i = 0; i < tids.length; i++) {
763
+ for (let j = i + 1; j < tids.length; j++) {
764
+ const [a, b] = [tids[i], tids[j]].sort();
765
+ coact_buf.push([a, b]);
766
+ }
767
+ }
768
+ for (const r of top) {
769
+ const rsal = await (0, dynamics_1.applyRetrievalTraceReinforcementToMemory)(r.id, r.salience);
770
+ await db_1.q.upd_seen.run(r.id, Date.now(), rsal, Date.now());
771
+ if (r.path.length > 1) {
772
+ await reinforce_waypoints(r.path);
773
+ const wps = await db_1.q.get_waypoints_by_src.all(r.id);
774
+ const lns = wps.map((wp) => ({
775
+ target_id: wp.dst_id,
776
+ weight: wp.weight,
777
+ }));
778
+ const pru = await (0, dynamics_1.propagateAssociativeReinforcementToLinkedNodes)(r.id, rsal, lns);
779
+ for (const u of pru) {
780
+ const linked_mem = await db_1.q.get_mem.get(u.node_id);
781
+ if (linked_mem) {
782
+ const time_diff = (Date.now() - linked_mem.last_seen_at) / 86400000;
783
+ const decay_fact = Math.exp(-0.02 * time_diff);
784
+ const ctx_boost = exports.hybrid_params.gamma *
785
+ (rsal - linked_mem.salience) *
786
+ decay_fact;
787
+ const new_sal = Math.max(0, Math.min(1, linked_mem.salience + ctx_boost));
788
+ await db_1.q.upd_seen.run(u.node_id, Date.now(), new_sal, Date.now());
789
+ }
790
+ }
791
+ }
792
+ }
793
+ for (const r of top) {
794
+ (0, decay_1.on_query_hit)(r.id, r.primary_sector, (text) => (0, embed_1.embedForSector)(text, r.primary_sector)).catch(() => { });
795
+ }
796
+ cache.set(h, { r: top, t: Date.now() });
797
+ return top;
798
+ }
799
+ finally {
800
+ active_queries--;
801
+ (0, decay_1.dec_q)();
802
+ }
803
+ }
804
/**
 * Batch maintenance pass: recompute time-decayed salience for up to 10k
 * memories and persist any value that actually changed.
 *
 * @returns {Promise<{processed: number, decayed: number}>} counts of rows
 *   examined and rows whose salience was updated.
 */
async function run_decay_process() {
    const batch = await db_1.q.all_mem.all(10000, 0);
    let processed = 0;
    let decayed = 0;
    for (const mem of batch) {
        processed++;
        // Age in days since the memory was last retrieved.
        const days_idle = (Date.now() - mem.last_seen_at) / 86400000;
        const next_sal = calc_decay(mem.primary_sector, mem.salience, days_idle);
        if (next_sal === mem.salience)
            continue; // unchanged — skip the write
        // Note: last_seen_at is preserved; only salience + updated_at change.
        await db_1.q.upd_seen.run(mem.id, mem.last_seen_at, next_sal, Date.now());
        decayed++;
    }
    if (decayed > 0)
        await (0, db_1.log_maint_op)("decay", decayed);
    return { processed, decayed };
}
820
/**
 * Make sure a row for `user_id` exists in the users table, inserting a
 * default profile when absent. Errors are logged and swallowed on purpose
 * (legacy behavior): memory creation must proceed even if this fails.
 *
 * @param {string} user_id - id to look up / create.
 * @returns {Promise<void>}
 */
async function ensure_user_exists(user_id) {
    try {
        const found = await db_1.q.get_user.get(user_id);
        if (found)
            return; // already provisioned — nothing to do
        await db_1.q.ins_user.run(user_id, null, // clerk_id
        null, // api_key
        null, // stripe_customer_id
        null, // stripe_subscription_id
        1000, // capacity
        0, // usage
        "User profile initializing...", // Initial summary
        0, // Reflection count
        Date.now(), Date.now());
    }
    catch (error) {
        // Deliberately non-fatal — see docstring.
        console.error(`[HSG] Failed to ensure user ${user_id} exists:`, error);
    }
}
841
/**
 * Insert a new memory into the HSG store, or reinforce an existing
 * near-duplicate instead of inserting.
 *
 * Flow: simhash dedup check -> (optional) user provisioning -> chunk +
 * classify -> transactional insert of the memory row, per-sector vectors,
 * mean vector, optional compressed vector, waypoint, and usage counter.
 *
 * @param {string} content   - raw memory text.
 * @param {*} tags           - stored as-is (falls back to null).
 * @param {*} metadata       - JSON-stringified into the row (falls back to {}).
 * @param {string} [user_id] - owner; "anonymous" when absent.
 * @param {*} [key_id]       - API key id attribution (nullable).
 * @returns {Promise<object>} id/primary_sector/sectors/chunks, plus
 *   `deduplicated: true` when an existing memory was reinforced instead.
 * @throws rethrows any error raised inside the transaction (after rollback).
 */
async function add_hsg_memory(content, tags, metadata, user_id, key_id) {
    const simhash = compute_simhash(content);
    const existing = await db_1.q.get_mem_by_simhash.get(simhash);
    // Near-duplicate (Hamming distance <= 3): bump the existing memory's
    // salience by 0.15 (capped at 1) and return it — no new row is created.
    if (existing && hamming_dist(simhash, existing.simhash) <= 3) {
        const now = Date.now();
        const boosted_sal = Math.min(1, existing.salience + 0.15);
        await db_1.q.upd_seen.run(existing.id, now, boosted_sal, now);
        return {
            id: existing.id,
            primary_sector: existing.primary_sector,
            sectors: [existing.primary_sector],
            chunks: 1,
            deduplicated: true,
        };
    }
    const id = node_crypto_1.default.randomUUID();
    const now = Date.now();
    // Ensure user exists in the users table (best-effort; never throws).
    if (user_id) {
        await ensure_user_exists(user_id);
    }
    const chunks = (0, chunking_1.chunk_text)(content);
    const use_chunking = chunks.length > 1;
    const classification = classify_content(content, metadata);
    // Primary sector first, then any additional sectors the classifier found.
    const all_sectors = [classification.primary, ...classification.additional];
    await db_1.transaction.begin();
    try {
        // Segment rotation: when the newest segment is full (>= seg_size
        // memories), start writing into the next segment id.
        const max_seg_res = await db_1.q.get_max_segment.get();
        let cur_seg = max_seg_res?.max_seg ?? 0;
        const seg_cnt_res = await db_1.q.get_segment_count.get(cur_seg);
        const seg_cnt = seg_cnt_res?.c ?? 0;
        if (seg_cnt >= cfg_1.env.seg_size) {
            cur_seg++;
            // Use stderr for debug output to avoid breaking MCP JSON-RPC protocol
            console.error(`[HSG] Rotated to segment ${cur_seg} (previous segment full: ${seg_cnt} memories)`);
        }
        // The stored row keeps a (possibly summarized) essence of the content;
        // embeddings below are computed from the FULL original content.
        const stored_content = extract_essence(content, classification.primary, cfg_1.env.summary_max_length);
        const sec_cfg = exports.sector_configs[classification.primary];
        // Initial salience: 0.4 base + 0.1 per extra sector, clamped to [0, 1].
        const init_sal = Math.max(0, Math.min(1, 0.4 + 0.1 * classification.additional.length));
        await db_1.q.ins_mem.run(id, user_id || "anonymous", cur_seg, stored_content, simhash, classification.primary, tags || null, JSON.stringify(metadata || {}), now, now, now, init_sal, sec_cfg.decay_lambda, 1, null, null, null, // compressed_vec
        0, // feedback_score
        key_id || null);
        // One embedding per sector (chunked variant when content split into >1 chunk).
        const emb_res = await (0, embed_1.embedMultiSector)(id, content, all_sectors, use_chunking ? chunks : undefined);
        for (const result of emb_res) {
            await db_1.vector_store.storeVector(id, result.sector, result.vector, result.dim, user_id || "anonymous");
        }
        // Mean vector across sectors is persisted on the row for fast scoring.
        const mean_vec = calc_mean_vec(emb_res, all_sectors);
        const mean_vec_buf = (0, embed_1.vectorToBuffer)(mean_vec);
        await db_1.q.upd_mean_vec.run(id, mean_vec.length, mean_vec_buf);
        // Store compressed vector for smart tier (for future query optimization)
        if (cfg_1.tier === "smart" && mean_vec.length > 128) {
            const comp = compress_vec_for_storage(mean_vec, 128);
            const comp_buf = (0, embed_1.vectorToBuffer)(comp);
            await db_1.q.upd_compressed_vec.run(comp_buf, id);
        }
        // Link the new memory into the waypoint graph.
        await create_single_waypoint(id, mean_vec, now, user_id);
        if (user_id && user_id !== "anonymous") {
            await db_1.q.inc_user_usage.run(user_id, 1);
        }
        await db_1.transaction.commit();
        return {
            id,
            primary_sector: classification.primary,
            sectors: all_sectors,
            chunks: chunks.length,
        };
    }
    catch (error) {
        // Roll back the whole insert (row, vectors, waypoint, usage) on any failure.
        await db_1.transaction.rollback();
        throw error;
    }
}
913
/**
 * Manually boost a memory's salience and refresh its last-seen timestamp.
 *
 * @param {string} id      - memory id to reinforce.
 * @param {number} [boost] - salience delta (default 0.1; may be negative to
 *   weaken a memory).
 * @returns {Promise<void>}
 * @throws {Error} when no memory with `id` exists.
 */
async function reinforce_memory(id, boost = 0.1) {
    const mem = await db_1.q.get_mem.get(id);
    if (!mem)
        throw new Error(`Memory ${id} not found`);
    // Clamp to [0, max_salience]: without the lower bound a negative boost
    // could drive salience below zero, which every other salience write in
    // this module prevents via Math.max(0, Math.min(..., ...)).
    const new_sal = Math.max(0, Math.min(exports.reinforcement.max_salience, mem.salience + boost));
    await db_1.q.upd_seen.run(id, Date.now(), new_sal, Date.now());
    // Highly salient memories are flagged for consolidation bookkeeping.
    if (new_sal > 0.8)
        await (0, db_1.log_maint_op)("consolidate", 1);
}
922
/**
 * Update a memory's content, tags, and/or metadata. When the content text
 * actually changes, the memory is re-chunked, re-classified, and all of its
 * sector vectors are deleted and re-embedded inside a transaction; otherwise
 * only the row fields are rewritten.
 *
 * @param {string} id         - memory id to update.
 * @param {string} [content]  - new content (undefined keeps the existing text).
 * @param {*} [tags]          - new tags (undefined keeps existing; serialized via utils_1.j).
 * @param {*} [metadata]      - new metadata (undefined keeps existing; serialized via utils_1.j).
 * @returns {Promise<{id: string, updated: boolean}>}
 * @throws {Error} when no memory with `id` exists; rethrows any transaction
 *   error after rollback.
 */
async function update_memory(id, content, tags, metadata) {
    const mem = await db_1.q.get_mem.get(id);
    if (!mem)
        throw new Error(`Memory ${id} not found`);
    // undefined means "keep"; fall back to stored values (or empty JSON).
    const new_content = content !== undefined ? content : mem.content;
    const new_tags = tags !== undefined ? (0, utils_1.j)(tags) : mem.tags || "[]";
    const new_meta = metadata !== undefined ? (0, utils_1.j)(metadata) : mem.meta || "{}";
    await db_1.transaction.begin();
    try {
        // Only re-embed when the text truly changed — embedding is expensive.
        if (content !== undefined && content !== mem.content) {
            const chunks = (0, chunking_1.chunk_text)(new_content);
            const use_chunking = chunks.length > 1;
            const classification = classify_content(new_content, metadata);
            const all_sectors = [
                classification.primary,
                ...classification.additional,
            ];
            // Drop stale vectors before storing the fresh per-sector embeddings.
            await db_1.vector_store.deleteVectors(id);
            const emb_res = await (0, embed_1.embedMultiSector)(id, new_content, all_sectors, use_chunking ? chunks : undefined);
            for (const result of emb_res) {
                await db_1.vector_store.storeVector(id, result.sector, result.vector, result.dim, mem.user_id || "anonymous");
            }
            // Refresh the row-level mean vector used for fast scoring.
            const mean_vec = calc_mean_vec(emb_res, all_sectors);
            const mean_vec_buf = (0, embed_1.vectorToBuffer)(mean_vec);
            await db_1.q.upd_mean_vec.run(id, mean_vec.length, mean_vec_buf);
            // Content change may move the memory to a different primary sector.
            await db_1.q.upd_mem_with_sector.run(new_content, classification.primary, new_tags, new_meta, Date.now(), id);
        }
        else {
            // Tags/metadata-only update: sector and vectors stay untouched.
            await db_1.q.upd_mem.run(new_content, new_tags, new_meta, Date.now(), id);
        }
        await db_1.transaction.commit();
        return { id, updated: true };
    }
    catch (error) {
        // Restore row + vectors to their pre-update state on any failure.
        await db_1.transaction.rollback();
        throw error;
    }
}