@pencil-agent/nano-pencil 1.11.18 → 1.11.19

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42) hide show
  1. package/dist/packages/mem-core/cli.d.ts +9 -0
  2. package/dist/packages/mem-core/config.d.ts +78 -0
  3. package/dist/packages/mem-core/config.js +81 -0
  4. package/dist/packages/mem-core/consolidate-v2.d.ts +15 -0
  5. package/dist/packages/mem-core/consolidation.d.ts +12 -0
  6. package/dist/packages/mem-core/consolidation.js +182 -0
  7. package/dist/packages/mem-core/dedup.d.ts +18 -0
  8. package/dist/packages/mem-core/dedup.js +85 -0
  9. package/dist/packages/mem-core/dream-lock.d.ts +11 -0
  10. package/dist/packages/mem-core/dream-lock.js +84 -0
  11. package/dist/packages/mem-core/eviction.d.ts +14 -0
  12. package/dist/packages/mem-core/eviction.js +26 -0
  13. package/dist/packages/mem-core/extension.d.ts +12 -0
  14. package/dist/packages/mem-core/extraction.d.ts +11 -0
  15. package/dist/packages/mem-core/extraction.js +188 -0
  16. package/dist/packages/mem-core/full-insights-html.d.ts +9 -0
  17. package/dist/packages/mem-core/full-insights-html.js +422 -0
  18. package/dist/packages/mem-core/full-insights.d.ts +22 -0
  19. package/dist/packages/mem-core/full-insights.js +329 -0
  20. package/dist/packages/mem-core/hash-embedding.d.ts +9 -0
  21. package/dist/packages/mem-core/human-insights.d.ts +28 -0
  22. package/dist/packages/mem-core/human-insights.js +260 -0
  23. package/dist/packages/mem-core/i18n.d.ts +63 -0
  24. package/dist/packages/mem-core/i18n.js +361 -0
  25. package/dist/packages/mem-core/insights-html.d.ts +9 -0
  26. package/dist/packages/mem-core/insights-html.js +432 -0
  27. package/dist/packages/mem-core/linking.d.ts +22 -0
  28. package/dist/packages/mem-core/linking.js +192 -0
  29. package/dist/packages/mem-core/privacy.d.ts +17 -0
  30. package/dist/packages/mem-core/privacy.js +53 -0
  31. package/dist/packages/mem-core/procedural-v2.d.ts +9 -0
  32. package/dist/packages/mem-core/reconsolidate-v2.d.ts +19 -0
  33. package/dist/packages/mem-core/scoring.d.ts +32 -0
  34. package/dist/packages/mem-core/scoring.js +108 -0
  35. package/dist/packages/mem-core/store-v2.d.ts +35 -0
  36. package/dist/packages/mem-core/store.d.ts +22 -0
  37. package/dist/packages/mem-core/store.js +107 -0
  38. package/dist/packages/mem-core/types.d.ts +320 -0
  39. package/dist/packages/mem-core/types.js +8 -0
  40. package/dist/packages/mem-core/update.d.ts +31 -0
  41. package/dist/packages/mem-core/update.js +250 -0
  42. package/package.json +1 -1
@@ -0,0 +1,9 @@
1
+ #!/usr/bin/env node
2
+ /**
3
+ * [UPSTREAM]: Depends on node:fs, engine, insights
4
+ * [SURFACE]: NanoMem CLI - stats, search, forget, export, insights commands
5
+ * [LOCUS]: packages/mem-core/src/cli.ts - NanoMem standalone CLI
6
+ * [COVENANT]: Change CLI → update this header
7
+ */
8
+ export {};
9
+ //# sourceMappingURL=cli.d.ts.map
@@ -0,0 +1,78 @@
1
+ /**
2
+ * [UPSTREAM]: Depends on node:os, node:path, ./types.js
3
+ * [SURFACE]: ProgressiveRecallConfig, EmbeddingConfig, NanomemConfig, getConfig
4
+ * [LOCUS]: packages/mem-core/src/config.ts - configuration management for memory system
5
+ * [COVENANT]: Change config structure → update this header and verify against packages/mem-core/CLAUDE.md
6
+ */
7
import type { MemoryScope } from "./types.js";
/** Tiered-injection settings for progressive recall (Active vs Cue tiers). */
export interface ProgressiveRecallConfig {
    /** Score threshold for Active tier — inject full detail */
    thresholdActive: number;
    /** Score threshold for Cue tier — inject name + summary + id */
    thresholdCue: number;
    /** Budget ratio for Active tier (default 0.15 = 15%) */
    budgetActive: number;
    /** Budget ratio for Cue tier (default 0.70 = 70%) */
    budgetCue: number;
    /** Force entries created within this many hours to Active tier */
    forceRecentHours: number;
    /** Force entries with importance >= this value to Active tier */
    forceImportanceMin: number;
}
/** Embedding settings for semantic episodic/procedural recall. */
export interface EmbeddingConfig {
    /** Whether embedding-based recall is enabled */
    enabled: boolean;
    /** Embedding model identifier (default "local-hash-v1") */
    model: string;
    /** Embedding vector dimensionality (default 256) */
    dim: number;
    /** Sync embeddings automatically — exact trigger defined by the embedding engine */
    autoSync: boolean;
}
/** Root configuration for the NanoMem memory system. */
export interface NanomemConfig {
    /** Directory where memory stores are persisted */
    memoryDir: string;
    /** Total token budget for injected memories */
    tokenBudget: number;
    /** Per-store share of the token budget, as ratios of tokenBudget */
    budget: {
        lessons: number;
        knowledge: number;
        events: number;
        episodes: number;
        preferences: number;
        work: number;
        facets: number;
    };
    /** Decay half-life per memory type, in days */
    halfLife: Record<string, number>;
    /** Hard cap on stored entries per store */
    maxEntries: {
        knowledge: number;
        lessons: number;
        events: number;
        preferences: number;
        work: number;
        facets: number;
    };
    /** Number of unconsolidated episodes that triggers consolidation */
    consolidationThreshold: number;
    /** Stanford-style retrieval scoring weights */
    scoreWeights: {
        recency: number;
        importance: number;
        relevance: number;
    };
    /** Utility-weighted eviction: access frequency vs base impact */
    evictionWeights: {
        accessFrequency: number;
        baseImpact: number;
    };
    /** Default scope for all operations */
    defaultScope?: MemoryScope;
    /** Locale for LLM prompts and injection templates */
    locale: "en" | "zh";
    /** Strength growth factor on each successful recall (spaced repetition) */
    strengthGrowthFactor: number;
    /** Progressive recall injection configuration */
    progressiveRecall: ProgressiveRecallConfig;
    /** Default forgetting windows for less important memories */
    forgetting: {
        ambientTtlDays: number;
        workTtlDays: number;
    };
    /** Embedding configuration for semantic episodic/procedural recall */
    embeddings: EmbeddingConfig;
}
/** Build the effective configuration from defaults, environment variables, and `overrides`. */
export declare function getConfig(overrides?: Partial<NanomemConfig>): NanomemConfig;
78
+ //# sourceMappingURL=config.d.ts.map
@@ -0,0 +1,81 @@
1
+ /**
2
+ * [UPSTREAM]: Depends on node:os, node:path, ./types.js
3
+ * [SURFACE]: ProgressiveRecallConfig, EmbeddingConfig, NanomemConfig, getConfig
4
+ * [LOCUS]: packages/mem-core/src/config.ts - configuration management for memory system
5
+ * [COVENANT]: Change config structure → update this header and verify against packages/mem-core/CLAUDE.md
6
+ */
7
+ import { homedir } from "node:os";
8
+ import { join } from "node:path";
9
// Default per-store token budget ratios (fractions of tokenBudget; the
// values sum to ~1.10, so stores may slightly oversubscribe the budget).
const DEFAULT_BUDGET = {
    lessons: 0.18,
    knowledge: 0.17,
    events: 0.15,
    episodes: 0.18,
    preferences: 0.1,
    work: 0.2,
    facets: 0.12,
};
// Default decay half-life per memory type, in days (used as initial strength).
const DEFAULT_HALF_LIFE = {
    lesson: 90,
    fact: 60,
    episode: 14,
    preference: 120,
    decision: 45,
    entity: 30,
    event: 180,
    work: 45,
    pattern: 180,
    struggle: 120,
};
// Hard caps on entries kept per store.
const DEFAULT_MAX_ENTRIES = { knowledge: 1000, lessons: 500, events: 300, preferences: 200, work: 400, facets: 400 };
// Stanford-style retrieval scoring: equal weights by default.
const DEFAULT_SCORE_WEIGHTS = { recency: 1, importance: 1, relevance: 1 };
// Eviction utility: base impact weighted slightly above access frequency.
const DEFAULT_EVICTION_WEIGHTS = { accessFrequency: 0.4, baseImpact: 0.6 };
// Progressive recall tiers: Active (full detail) vs Cue (name + summary + id).
const DEFAULT_PROGRESSIVE_RECALL = {
    thresholdActive: 0.7,
    thresholdCue: 0.35,
    budgetActive: 0.15,
    budgetCue: 0.70,
    forceRecentHours: 24,
    forceImportanceMin: 9,
};
// Default TTLs (days) for low-importance "ambient" and work memories.
const DEFAULT_FORGETTING = {
    ambientTtlDays: 45,
    workTtlDays: 21,
};
// Local hash-based embedding defaults (no external model required).
const DEFAULT_EMBEDDINGS = {
    enabled: true,
    model: "local-hash-v1",
    dim: 256,
    autoSync: true,
};
/**
 * Build the effective NanomemConfig.
 *
 * Precedence (kept from the original implementation; note the asymmetry):
 *  - memoryDir and locale: environment variable wins over `overrides`;
 *  - tokenBudget and all other fields: `overrides` wins over env/defaults.
 *
 * Fix: locale is now validated against the declared "en" | "zh" union —
 * any other value (e.g. NANOMEM_LOCALE=fr) falls back to "en" instead of
 * leaking an out-of-contract string into NanomemConfig.locale.
 *
 * @param overrides optional Partial<NanomemConfig> merged over defaults
 * @returns fully-populated NanomemConfig
 */
export function getConfig(overrides) {
    const tokenBudget = Number(process.env.NANOMEM_TOKEN_BUDGET) || 6000;
    const memoryDir = process.env.NANOMEM_MEMORY_DIR || overrides?.memoryDir || join(homedir(), ".nanomem", "memory");
    // Clamp to the supported locales; unknown values degrade to English.
    const rawLocale = process.env.NANOMEM_LOCALE || overrides?.locale || "en";
    const locale = rawLocale === "zh" ? "zh" : "en";
    const embeddingsEnabled = process.env.NANOMEM_EMBEDDINGS_ENABLED;
    const embeddingsDim = Number(process.env.NANOMEM_EMBEDDINGS_DIM) || DEFAULT_EMBEDDINGS.dim;
    const embeddingsModel = process.env.NANOMEM_EMBEDDINGS_MODEL || DEFAULT_EMBEDDINGS.model;
    return {
        memoryDir,
        tokenBudget: overrides?.tokenBudget ?? tokenBudget,
        // Spread defaults so callers can mutate their copy without
        // corrupting the module-level constants.
        budget: overrides?.budget ?? { ...DEFAULT_BUDGET },
        halfLife: overrides?.halfLife ?? { ...DEFAULT_HALF_LIFE },
        maxEntries: overrides?.maxEntries ?? { ...DEFAULT_MAX_ENTRIES },
        consolidationThreshold: overrides?.consolidationThreshold ?? 10,
        scoreWeights: overrides?.scoreWeights ?? { ...DEFAULT_SCORE_WEIGHTS },
        evictionWeights: overrides?.evictionWeights ?? { ...DEFAULT_EVICTION_WEIGHTS },
        defaultScope: overrides?.defaultScope,
        locale,
        strengthGrowthFactor: overrides?.strengthGrowthFactor ?? 1.5,
        progressiveRecall: overrides?.progressiveRecall ?? { ...DEFAULT_PROGRESSIVE_RECALL },
        forgetting: overrides?.forgetting ?? { ...DEFAULT_FORGETTING },
        // An explicit override takes the embeddings block wholesale; otherwise
        // env vars tweak individual fields ("false", any case, disables).
        embeddings: overrides?.embeddings ??
            {
                enabled: embeddingsEnabled ? embeddingsEnabled.toLowerCase() !== "false" : DEFAULT_EMBEDDINGS.enabled,
                model: embeddingsModel,
                dim: embeddingsDim,
                autoSync: DEFAULT_EMBEDDINGS.autoSync,
            },
    };
}
81
+ //# sourceMappingURL=config.js.map
@@ -0,0 +1,15 @@
1
+ /**
2
+ * [UPSTREAM]: Depends on ./config.js, ./scoring.js, ./store.js, ./types.js, ./types-v2.js
3
+ * [SURFACE]: consolidateV2Memories
4
+ * [LOCUS]: packages/mem-core/src/consolidate-v2.ts - v2 episodic/procedural to semantic consolidation
5
+ * [COVENANT]: Change v2 semantic promotion rules → update this header and keep compatibility with MemoryEntry
6
+ */
7
import type { NanomemConfig } from "./config.js";
import type { MemoryEntry } from "./types.js";
import type { EpisodeFacet, EpisodeMemory, ProceduralMemory, SemanticMemory } from "./types-v2.js";
/**
 * Consolidate v2 episodic/facet/procedural memories into semantic memory.
 * Returns the promoted MemoryEntry list, the new SemanticMemory records,
 * and episodeSemanticMap — NOTE(review): presumably episode id → derived
 * semantic ids; confirm the map direction against the implementation.
 */
export declare function consolidateV2Memories(episodes: EpisodeMemory[], facets: EpisodeFacet[], procedural: ProceduralMemory[], cfg: NanomemConfig): {
    entries: MemoryEntry[];
    semantic: SemanticMemory[];
    episodeSemanticMap: Map<string, string[]>;
};
15
+ //# sourceMappingURL=consolidate-v2.d.ts.map
@@ -0,0 +1,12 @@
1
+ /**
2
+ * [UPSTREAM]: Depends on ./config.js, ./i18n.js, ./scoring.js, ./store.js, ./types.js
3
+ * [SURFACE]: consolidateEpisodes
4
+ * [LOCUS]: packages/mem-core/src/consolidation.ts - episodic→semantic consolidation, heart of multi-store memory model
5
+ * [COVENANT]: Change consolidation logic → update this header and verify against packages/mem-core/CLAUDE.md
6
+ */
7
import type { NanomemConfig } from "./config.js";
import type { Episode, LlmFn, MemoryEntry } from "./types.js";
/**
 * Consolidate unprocessed episodes into semantic MemoryEntry records.
 * Returns [] until at least cfg.consolidationThreshold unconsolidated
 * episodes have accumulated; uses `llmFn` when provided, heuristics
 * otherwise. `options.signal` aborts by throwing an Error("AbortError").
 */
export declare function consolidateEpisodes(episodes: Episode[], cfg: NanomemConfig, llmFn?: LlmFn, options?: {
    signal?: AbortSignal;
}): Promise<MemoryEntry[]>;
12
+ //# sourceMappingURL=consolidation.d.ts.map
@@ -0,0 +1,182 @@
1
+ /**
2
+ * [UPSTREAM]: Depends on ./config.js, ./i18n.js, ./scoring.js, ./store.js, ./types.js
3
+ * [SURFACE]: consolidateEpisodes
4
+ * [LOCUS]: packages/mem-core/src/consolidation.ts - episodic→semantic consolidation, heart of multi-store memory model
5
+ * [COVENANT]: Change consolidation logic → update this header and verify against packages/mem-core/CLAUDE.md
6
+ */
7
+ import { PROMPTS } from "./i18n.js";
8
+ import { extractTags } from "./scoring.js";
9
+ import { deriveNameFromContent, deriveSummaryFromContent } from "./store.js";
10
// Generate a practically unique id: millisecond timestamp plus a short
// base-36 random suffix (up to 6 characters).
function makeId() {
    const stamp = Date.now();
    const suffix = Math.random().toString(36).slice(2, 8);
    return `${stamp}-${suffix}`;
}
13
/**
 * Consolidate unprocessed episodes into semantic memory entries.
 * Runs only once at least `cfg.consolidationThreshold` unconsolidated
 * episodes have accumulated; takes the LLM path when `llmFn` is provided,
 * otherwise falls back to heuristics. Marks each processed episode as
 * consolidated (mutates the input) and honors `options.signal` aborts by
 * throwing an Error("AbortError").
 */
export async function consolidateEpisodes(episodes, cfg, llmFn, options) {
    const throwIfAborted = () => {
        if (options?.signal?.aborted)
            throw new Error("AbortError");
    };
    const pending = episodes.filter((ep) => !ep.consolidated);
    if (pending.length < cfg.consolidationThreshold)
        return [];
    throwIfAborted();
    const produced = llmFn
        ? await llmConsolidation(pending, cfg, llmFn, options?.signal)
        : heuristicConsolidation(pending, cfg);
    throwIfAborted();
    for (const ep of pending)
        ep.consolidated = true;
    return produced;
}
34
/**
 * LLM-backed consolidation: render the episode batch into one prompt, ask
 * the model for structured items, and map each item onto a MemoryEntry.
 * Falls back to heuristicConsolidation when the model output is not valid
 * JSON (or not an array — .map would throw into the same catch).
 */
async function llmConsolidation(episodes, cfg, llmFn, signal) {
    // Locale-specific prompt set; English is the fallback.
    const p = PROMPTS[cfg.locale] ?? PROMPTS.en;
    // One paragraph per episode: date, project, summary, files, errors.
    const summary = episodes
        .map((ep) => `[${ep.date}] ${ep.project}: ${ep.summary}\nFiles: ${ep.filesModified.join(", ")}\nErrors: ${ep.errors.join("; ") || "none"}`)
        .join("\n\n");
    if (signal?.aborted)
        throw new Error("AbortError");
    const raw = await llmFn(p.consolidationSystem, summary);
    if (signal?.aborted)
        throw new Error("AbortError");
    try {
        // NOTE(review): assumes the model returns a bare JSON array; a fenced
        // ```json block would land in the catch and trigger the heuristic path.
        const items = JSON.parse(raw);
        const now = new Date().toISOString();
        return items.map((item) => {
            // Only "lesson"/"event" are honored; any other type becomes "fact".
            const type = item.type === "lesson" ? "lesson" : item.type === "event" ? "event" : "fact";
            const detail = item.detail || item.content || "";
            const name = item.name || deriveNameFromContent(detail);
            const summary = item.summary || deriveSummaryFromContent(detail);
            return {
                id: makeId(),
                type,
                name,
                summary,
                detail,
                content: detail,
                tags: extractTags(`${name} ${summary} ${detail}`),
                // Project taken from the first episode of the batch.
                project: episodes[0]?.project ?? "unknown",
                importance: item.importance ?? 6,
                // Initial strength = the type's half-life in days.
                strength: cfg.halfLife[type] ?? 30,
                created: now,
                eventTime: now,
                accessCount: 0,
                relatedIds: [],
                // Events and lessons are retained as key events; facts are ambient.
                retention: type === "event" ? "key-event" : type === "lesson" ? "key-event" : "ambient",
                // Events get a salience floor of 8; others use plain importance.
                salience: type === "event" ? Math.max(8, item.importance ?? 8) : item.importance ?? 6,
                stability: type === "event" ? "stable" : type === "lesson" ? "stable" : "situational",
                scope: cfg.defaultScope,
            };
        });
    }
    catch {
        // Unparseable model output — degrade gracefully to heuristics.
        return heuristicConsolidation(episodes, cfg);
    }
}
78
/**
 * Heuristic (no-LLM) consolidation of an episode batch. Produces up to
 * three kinds of entries:
 *  1. a "fact" listing hot files (modified in >= 3 episodes, top 5);
 *  2. a "lesson" listing recurring errors (first 5 distinct messages);
 *  3. up to three "event" entries for the most significant episodes
 *     (importance >= 8, or at least 2 errors).
 */
function heuristicConsolidation(episodes, cfg) {
    const now = new Date().toISOString();
    const result = [];
    // --- 1. Hot-file fact -------------------------------------------------
    const fileCounts = new Map();
    for (const ep of episodes) {
        for (const f of ep.filesModified)
            fileCounts.set(f, (fileCounts.get(f) ?? 0) + 1);
    }
    const hotFiles = [...fileCounts.entries()]
        .filter(([, c]) => c >= 3)
        .sort((a, b) => b[1] - a[1])
        .slice(0, 5);
    if (hotFiles.length) {
        const detail = `Frequently modified files: ${hotFiles.map(([f, c]) => `${f} (${c}x)`).join(", ")}`;
        const name = deriveNameFromContent(detail);
        const summary = deriveSummaryFromContent(detail);
        result.push({
            id: makeId(),
            type: "fact",
            name,
            summary,
            detail,
            content: detail,
            tags: extractTags(detail),
            // Project taken from the first episode of the batch.
            project: episodes[0]?.project ?? "unknown",
            importance: 5,
            strength: cfg.halfLife.fact ?? 60,
            created: now,
            eventTime: now,
            accessCount: 0,
            relatedIds: [],
            stability: "situational",
            scope: cfg.defaultScope,
        });
    }
    // --- 2. Recurring-errors lesson ----------------------------------------
    const allErrors = episodes.flatMap((ep) => ep.errors).filter(Boolean);
    if (allErrors.length) {
        const errorSet = [...new Set(allErrors)].slice(0, 5);
        const detail = `Recurring issues: ${errorSet.join("; ")}`;
        const name = deriveNameFromContent(detail);
        const summary = deriveSummaryFromContent(detail);
        result.push({
            id: makeId(),
            type: "lesson",
            name,
            summary,
            detail,
            content: detail,
            tags: extractTags(detail),
            project: episodes[0]?.project ?? "unknown",
            importance: 7,
            strength: cfg.halfLife.lesson ?? 90,
            created: now,
            eventTime: now,
            accessCount: 0,
            relatedIds: [],
            retention: "key-event",
            salience: 8,
            stability: "stable",
            scope: cfg.defaultScope,
        });
    }
    // --- 3. Significant-episode events --------------------------------------
    const significantEpisodes = [...episodes]
        .filter((ep) => ep.importance >= 8 || ep.errors.length >= 2)
        .sort((a, b) => b.importance - a.importance)
        .slice(0, 3);
    for (const ep of significantEpisodes) {
        const detailParts = [
            ep.summary,
            ep.userGoal ? `Goal: ${ep.userGoal}` : "",
            ep.errors.length ? `Errors: ${ep.errors.join("; ")}` : "",
            ep.keyObservations.length ? `Observations: ${ep.keyObservations.slice(0, 4).join("; ")}` : "",
        ].filter(Boolean);
        const detail = detailParts.join("\n");
        const name = ep.userGoal ? deriveNameFromContent(ep.userGoal) : deriveNameFromContent(ep.summary);
        const summary = deriveSummaryFromContent(ep.summary || ep.userGoal || detail);
        result.push({
            id: makeId(),
            type: "event",
            name,
            summary,
            detail,
            content: detail,
            tags: extractTags(`${ep.project} ${ep.summary} ${ep.userGoal || ""} ${ep.errors.join(" ")}`),
            project: ep.project || "unknown",
            // Events are clamped into the 8..10 importance band.
            importance: Math.min(10, Math.max(8, ep.importance)),
            strength: cfg.halfLife.event ?? 180,
            created: now,
            eventTime: ep.date ? new Date(ep.date).toISOString() : now,
            accessCount: 0,
            relatedIds: [],
            retention: "key-event",
            // Errors raise salience above the episode's own importance.
            salience: Math.min(10, Math.max(8, ep.importance + ep.errors.length)),
            stability: ep.errors.length ? "situational" : "stable",
            eventData: {
                kind: ep.errors.length ? "incident" : "milestone",
                outcome: ep.errors.length ? "Captured from a high-friction session" : "Captured from a high-importance session",
                emotionalWeight: Math.min(10, ep.importance),
            },
            scope: cfg.defaultScope,
        });
    }
    return result;
}
182
+ //# sourceMappingURL=consolidation.js.map
@@ -0,0 +1,18 @@
1
+ /**
2
+ * [UPSTREAM]: Depends on ./scoring.js, ./types.js, ./update.js
3
+ * [SURFACE]: deduplicateMemoryEntries, deduplicateWorkEntries
4
+ * [LOCUS]: packages/mem-core/src/dedup.ts - batch deduplication and cross-entry merge logic
5
+ * [COVENANT]: Change dedup algorithm → update this header and verify against packages/mem-core/CLAUDE.md
6
+ */
7
import type { MemoryEntry, WorkEntry } from "./types.js";
/** Deduplicate MemoryEntry array. Keeps best of each duplicate group, merges relatedIds. */
export declare function deduplicateMemoryEntries(entries: MemoryEntry[]): {
    /** Surviving entries — the highest-quality keeper of each duplicate group */
    deduped: MemoryEntry[];
    /** Number of entries that were merged away */
    removedCount: number;
};
/** Deduplicate WorkEntry array. */
export declare function deduplicateWorkEntries(entries: WorkEntry[]): {
    /** Surviving work entries */
    deduped: WorkEntry[];
    /** Number of work entries that were merged away */
    removedCount: number;
};
18
+ //# sourceMappingURL=dedup.d.ts.map
@@ -0,0 +1,85 @@
1
+ /**
2
+ * [UPSTREAM]: Depends on ./scoring.js, ./types.js, ./update.js
3
+ * [SURFACE]: deduplicateMemoryEntries, deduplicateWorkEntries
4
+ * [LOCUS]: packages/mem-core/src/dedup.ts - batch deduplication and cross-entry merge logic
5
+ * [COVENANT]: Change dedup algorithm → update this header and verify against packages/mem-core/CLAUDE.md
6
+ */
7
+ import { tagOverlap } from "./scoring.js";
8
+ import { contentSimilarity } from "./update.js";
9
// Similarity thresholds above which two entries count as duplicates.
const NOOP_TAG_THRESHOLD = 0.75;
const NOOP_CONTENT_THRESHOLD = 0.8;
/**
 * Two memory entries are duplicates when their types match and either the
 * tag overlap or the name+summary+content text similarity clears the
 * corresponding threshold.
 */
function isMemoryDuplicate(a, b) {
    if (a.type !== b.type)
        return false;
    const textOf = (e) => `${e.name || ""} ${e.summary || ""} ${e.content || ""}`.trim();
    if (tagOverlap(a.tags, b.tags) >= NOOP_TAG_THRESHOLD)
        return true;
    return contentSimilarity(textOf(a), textOf(b)) >= NOOP_CONTENT_THRESHOLD;
}
20
/**
 * Work entries are duplicates when tag overlap or goal+summary text
 * similarity clears the shared thresholds (no type gate — all work
 * entries are comparable).
 */
function isWorkDuplicate(a, b) {
    const textOf = (w) => `${w.goal} ${w.summary}`.trim();
    if (tagOverlap(a.tags, b.tags) >= NOOP_TAG_THRESHOLD)
        return true;
    return contentSimilarity(textOf(a), textOf(b)) >= NOOP_CONTENT_THRESHOLD;
}
27
/**
 * Fold `duplicate` into `keeper` in place: union relatedIds (plus the
 * duplicate's own id), keep the higher importance and accessCount, and the
 * most recent lastAccessed (ISO strings compare lexicographically).
 */
function mergeMemoryInto(keeper, duplicate) {
    const merged = new Set(keeper.relatedIds || []);
    for (const id of duplicate.relatedIds || [])
        merged.add(id);
    merged.add(duplicate.id);
    keeper.relatedIds = [...merged];
    keeper.importance = Math.max(keeper.importance, duplicate.importance);
    keeper.accessCount = Math.max(keeper.accessCount, duplicate.accessCount);
    if ((duplicate.lastAccessed || "") > (keeper.lastAccessed || ""))
        keeper.lastAccessed = duplicate.lastAccessed;
}
36
/** Work-entry variant of mergeMemoryInto: same union/max/latest rules. */
function mergeWorkInto(keeper, duplicate) {
    const combined = [...(keeper.relatedIds ?? []), ...(duplicate.relatedIds ?? []), duplicate.id];
    keeper.relatedIds = [...new Set(combined)];
    if (duplicate.importance > keeper.importance)
        keeper.importance = duplicate.importance;
    if (duplicate.accessCount > keeper.accessCount)
        keeper.accessCount = duplicate.accessCount;
    if ((duplicate.lastAccessed || "") > (keeper.lastAccessed || ""))
        keeper.lastAccessed = duplicate.lastAccessed;
}
44
/**
 * Quality heuristic used to choose the keeper of a duplicate group:
 * importance scaled by access count (+1 so never-accessed entries still
 * rank by importance alone).
 */
function memoryQuality(e) {
    return e.importance * (e.accessCount + 1);
}
/** Same quality heuristic for work entries. */
function workQuality(w) {
    return w.importance * (w.accessCount + 1);
}
51
/** Deduplicate MemoryEntry array. Keeps best of each duplicate group, merges relatedIds. */
export function deduplicateMemoryEntries(entries) {
    // Process best-first so the highest-quality entry becomes the keeper.
    const ranked = [...entries].sort((x, y) => memoryQuality(y) - memoryQuality(x));
    const deduped = [];
    let removedCount = 0;
    for (const candidate of ranked) {
        const keeper = deduped.find((k) => isMemoryDuplicate(k, candidate));
        if (!keeper) {
            deduped.push(candidate);
            continue;
        }
        mergeMemoryInto(keeper, candidate);
        removedCount += 1;
    }
    return { deduped, removedCount };
}
68
/** Deduplicate WorkEntry array. */
export function deduplicateWorkEntries(entries) {
    // Process best-first so the highest-quality entry becomes the keeper.
    const ranked = [...entries].sort((x, y) => workQuality(y) - workQuality(x));
    const deduped = [];
    let removedCount = 0;
    for (const candidate of ranked) {
        const keeper = deduped.find((k) => isWorkDuplicate(k, candidate));
        if (!keeper) {
            deduped.push(candidate);
            continue;
        }
        mergeWorkInto(keeper, candidate);
        removedCount += 1;
    }
    return { deduped, removedCount };
}
85
+ //# sourceMappingURL=dedup.js.map
@@ -0,0 +1,11 @@
1
+ /**
2
+ * [UPSTREAM]: Depends on node:fs, node:fs/promises
3
+ * [SURFACE]: readDreamLockMtimeMs, tryAcquireDreamLock, rollbackDreamLock, stampDreamLock
4
+ * [LOCUS]: packages/mem-core/src/dream-lock.ts - file-based mutex for background consolidation (dream) process
5
+ * [COVENANT]: Change locking mechanism → update this header and verify against packages/mem-core/CLAUDE.md
6
+ */
7
/** Read the lock file's mtime in epoch ms; 0 when missing or unreadable. */
export declare function readDreamLockMtimeMs(lockPath: string): Promise<number>;
/** Try to take the lock; resolves the prior mtime (for rollback; 0 = no prior file) or null when held/failed. */
export declare function tryAcquireDreamLock(lockPath: string, holderStaleMs: number): Promise<number | null>;
/** Restore the lock file to its pre-acquisition state (deletes it when priorMtimeMs is 0). Best effort. */
export declare function rollbackDreamLock(lockPath: string, priorMtimeMs: number): Promise<void>;
/** Rewrite our pid into the lock and refresh its mtime to now. Best effort. */
export declare function stampDreamLock(lockPath: string): Promise<void>;
11
+ //# sourceMappingURL=dream-lock.d.ts.map
@@ -0,0 +1,84 @@
1
+ /**
2
+ * [UPSTREAM]: Depends on node:fs, node:fs/promises
3
+ * [SURFACE]: readDreamLockMtimeMs, tryAcquireDreamLock, rollbackDreamLock, stampDreamLock
4
+ * [LOCUS]: packages/mem-core/src/dream-lock.ts - file-based mutex for background consolidation (dream) process
5
+ * [COVENANT]: Change locking mechanism → update this header and verify against packages/mem-core/CLAUDE.md
6
+ */
7
+ import { existsSync } from "node:fs";
8
+ import { readFile, rm, stat, utimes, writeFile } from "node:fs/promises";
9
/**
 * Best-effort liveness probe for a pid using `process.kill(pid, 0)`
 * (signal 0 performs existence/permission checks without delivering a
 * signal). Non-positive or non-finite pids are rejected up front.
 *
 * Fix: an EPERM failure means the process exists but belongs to another
 * user, so it is reported as alive; the original treated any throw as
 * "dead", which could let a stale-lock check steal a live holder's lock.
 */
function isPidAlive(pid) {
    if (!Number.isFinite(pid) || pid <= 0)
        return false;
    try {
        process.kill(pid, 0);
        return true;
    }
    catch (err) {
        // EPERM: the target exists but we lack permission to signal it.
        return err?.code === "EPERM";
    }
}
20
/**
 * Read the lock file's mtime in epoch milliseconds.
 * Returns 0 when the file is missing or unreadable ("no lock").
 */
export async function readDreamLockMtimeMs(lockPath) {
    if (!existsSync(lockPath))
        return 0;
    try {
        const info = await stat(lockPath);
        return info.mtimeMs || 0;
    }
    catch {
        return 0;
    }
}
31
/**
 * Attempt to take the dream (background consolidation) lock.
 *
 * The lock is a plain file holding the holder's pid. It counts as held
 * only when the file is fresher than `holderStaleMs` AND the recorded pid
 * is still alive; otherwise it is stale and claimable. On success,
 * resolves the file's previous mtime in ms (0 when the file did not
 * exist) so the caller can undo via rollbackDreamLock; resolves null when
 * the lock is held by someone else or any filesystem step fails.
 *
 * NOTE(review): the write + read-back verification below is not atomic
 * (no "wx" exclusive create), so two racing processes can both believe
 * they acquired the lock in a narrow window — confirm this is acceptable
 * for the dream process.
 */
export async function tryAcquireDreamLock(lockPath, holderStaleMs) {
    const now = Date.now();
    let priorMtimeMs = 0;
    try {
        if (existsSync(lockPath)) {
            const s = await stat(lockPath);
            priorMtimeMs = s.mtimeMs || 0;
            const body = await readFile(lockPath, "utf-8").catch(() => "");
            // An unparseable body yields NaN, which isPidAlive treats as dead.
            const holderPid = Number.parseInt(body.trim(), 10);
            const isFresh = now - priorMtimeMs < holderStaleMs;
            if (isFresh && isPidAlive(holderPid))
                return null;
        }
    }
    catch {
        // treat as missing/claimable
    }
    try {
        await writeFile(lockPath, String(process.pid), "utf-8");
        await utimes(lockPath, new Date(now), new Date(now));
        // Read back to detect a racing writer that clobbered our claim.
        const verify = await readFile(lockPath, "utf-8").catch(() => "");
        if (verify.trim() !== String(process.pid))
            return null;
        return priorMtimeMs;
    }
    catch {
        return null;
    }
}
60
/**
 * Undo a lock acquisition: when there was no lock file before
 * (priorMtimeMs === 0) delete the file; otherwise blank it and restore
 * the prior mtime so concurrent staleness checks see the pre-acquisition
 * state. Best effort — all filesystem errors are swallowed.
 */
export async function rollbackDreamLock(lockPath, priorMtimeMs) {
    try {
        if (priorMtimeMs === 0) {
            await rm(lockPath, { force: true });
        }
        else {
            await writeFile(lockPath, "", "utf-8");
            const restored = new Date(priorMtimeMs);
            await utimes(lockPath, restored, restored);
        }
    }
    catch {
        // best-effort rollback
    }
}
74
/**
 * Refresh the lock: (re)write our pid and bump the mtime to "now" so
 * other processes keep seeing the lock as fresh. Best effort — errors
 * are swallowed.
 */
export async function stampDreamLock(lockPath) {
    const stampTime = new Date();
    try {
        await writeFile(lockPath, String(process.pid), "utf-8");
        await utimes(lockPath, stampTime, stampTime);
    }
    catch {
        // best-effort stamp
    }
}
84
+ //# sourceMappingURL=dream-lock.js.map
@@ -0,0 +1,14 @@
1
+ /**
2
+ * [UPSTREAM]: Depends on ./scoring.js, ./types.js
3
+ * [SURFACE]: EvictionWeights, utilityEntry, utilityWork
4
+ * [LOCUS]: packages/mem-core/src/eviction.ts - utility-weighted memory eviction (Ebbinghaus-aligned decay)
5
+ * [COVENANT]: Change eviction formula → update this header and verify against packages/mem-core/CLAUDE.md
6
+ */
7
import type { MemoryEntry, WorkEntry } from "./types.js";
/** Relative weights for the eviction utility blend. */
export interface EvictionWeights {
    /** Weight on normalized access frequency */
    accessFrequency: number;
    /** Weight on normalized importance (base impact) */
    baseImpact: number;
}
/** Utility score of a MemoryEntry (higher = keep): access/impact/salience blend times retention and stability multipliers, decayed by age. */
export declare function utilityEntry(e: MemoryEntry, defaultHalfLife: Record<string, number>, w: EvictionWeights): number;
/** Utility score of a WorkEntry: access/impact blend decayed by age. */
export declare function utilityWork(w: WorkEntry, defaultHalfLife: Record<string, number>, ew: EvictionWeights): number;
14
+ //# sourceMappingURL=eviction.d.ts.map
@@ -0,0 +1,26 @@
1
+ /**
2
+ * [UPSTREAM]: Depends on ./scoring.js, ./types.js
3
+ * [SURFACE]: EvictionWeights, utilityEntry, utilityWork
4
+ * [LOCUS]: packages/mem-core/src/eviction.ts - utility-weighted memory eviction (Ebbinghaus-aligned decay)
5
+ * [COVENANT]: Change eviction formula → update this header and verify against packages/mem-core/CLAUDE.md
6
+ */
7
+ import { daysSince, decay } from "./scoring.js";
8
/**
 * Utility score for a MemoryEntry: a weighted blend of normalized access
 * frequency, importance, and salience (salience has a fixed 0.2 weight),
 * scaled by retention and stability multipliers and by exponential decay
 * over the entry's age.
 */
export function utilityEntry(e, defaultHalfLife, w) {
    const strength = e.strength || defaultHalfLife[e.type] || 30;
    const accessNorm = Math.min(1, (e.accessCount ?? 0) / 10);
    const impactNorm = Math.min(1, e.importance / 10);
    const salienceNorm = Math.min(1, (e.salience ?? e.importance) / 10);
    // key-event > core > everything else.
    let retentionMultiplier;
    if (e.retention === "core")
        retentionMultiplier = 1.25;
    else if (e.retention === "key-event")
        retentionMultiplier = 1.35;
    else
        retentionMultiplier = 0.9;
    const stabilityMultiplier = e.stability === "stable" ? 1.08 : 0.78;
    const blended = w.accessFrequency * accessNorm + w.baseImpact * impactNorm + salienceNorm * 0.2;
    return blended * retentionMultiplier * stabilityMultiplier * decay(daysSince(e.created), strength);
}
20
/** Utility score for a WorkEntry: access/impact blend times age decay. */
export function utilityWork(w, defaultHalfLife, ew) {
    const strength = w.strength || defaultHalfLife.work || 45;
    const frequency = Math.min(1, (w.accessCount ?? 0) / 10);
    const impact = Math.min(1, w.importance / 10);
    const blend = ew.accessFrequency * frequency + ew.baseImpact * impact;
    return blend * decay(daysSince(w.created), strength);
}
26
+ //# sourceMappingURL=eviction.js.map
@@ -0,0 +1,12 @@
1
+ /**
2
+ * [UPSTREAM]: Depends on node:fs, node:fs/promises, node:path, @sinclair/typebox, @pencil-agent/nano-pencil
3
+ * [SURFACE]: default export (Extension), nanomem extension for NanoPencil integration
4
+ * [LOCUS]: packages/mem-core/src/extension.ts - thin adapter bridging NanoPencil events to host-agnostic NanoMemEngine
5
+ * [COVENANT]: Change extension hooks → update this header and verify against packages/mem-core/CLAUDE.md
6
+ *
7
+ * NOTE: This is the ONLY module that depends on @pencil-agent/nano-pencil types.
8
+ * For non-NanoPencil hosts, import from the package root (index.ts) instead.
9
+ */
10
import type { ExtensionAPI } from "@pencil-agent/nano-pencil";
/** NanoPencil extension entry point — thin adapter bridging NanoPencil events to the host-agnostic engine (per module header). */
export default function nanomemExtension(pi: ExtensionAPI): void;
12
+ //# sourceMappingURL=extension.d.ts.map
@@ -0,0 +1,11 @@
1
+ /**
2
+ * [UPSTREAM]: Depends on ./config.js, ./i18n.js, ./store.js, ./types.js
3
+ * [SURFACE]: extractMemories, extractWork, extractState
4
+ * [LOCUS]: packages/mem-core/src/extraction.ts - dual-path extraction (LLM when available, regex heuristics fallback)
5
+ * [COVENANT]: Change extraction patterns → update this header and verify against packages/mem-core/CLAUDE.md
6
+ */
7
+ import type { NanomemConfig } from "./config.js";
8
+ import type { ExtractedItem, ExtractedWork, LlmFn } from "./types.js";
9
+ export declare function extractMemories(conversation: string, cfg: NanomemConfig, llmFn?: LlmFn): Promise<ExtractedItem[]>;
10
+ export declare function extractWork(conversation: string, cfg: NanomemConfig, llmFn?: LlmFn): Promise<ExtractedWork | null>;
11
+ //# sourceMappingURL=extraction.d.ts.map