stellar-memory 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +362 -0
- package/dist/api/routes/analytics.d.ts +15 -0
- package/dist/api/routes/analytics.js +131 -0
- package/dist/api/routes/analytics.js.map +1 -0
- package/dist/api/routes/conflicts.d.ts +12 -0
- package/dist/api/routes/conflicts.js +67 -0
- package/dist/api/routes/conflicts.js.map +1 -0
- package/dist/api/routes/consolidation.d.ts +11 -0
- package/dist/api/routes/consolidation.js +63 -0
- package/dist/api/routes/consolidation.js.map +1 -0
- package/dist/api/routes/constellation.d.ts +4 -0
- package/dist/api/routes/constellation.js +84 -0
- package/dist/api/routes/constellation.js.map +1 -0
- package/dist/api/routes/memories.d.ts +4 -0
- package/dist/api/routes/memories.js +219 -0
- package/dist/api/routes/memories.js.map +1 -0
- package/dist/api/routes/observations.d.ts +10 -0
- package/dist/api/routes/observations.js +42 -0
- package/dist/api/routes/observations.js.map +1 -0
- package/dist/api/routes/orbit.d.ts +4 -0
- package/dist/api/routes/orbit.js +71 -0
- package/dist/api/routes/orbit.js.map +1 -0
- package/dist/api/routes/projects.d.ts +15 -0
- package/dist/api/routes/projects.js +121 -0
- package/dist/api/routes/projects.js.map +1 -0
- package/dist/api/routes/scan.d.ts +4 -0
- package/dist/api/routes/scan.js +403 -0
- package/dist/api/routes/scan.js.map +1 -0
- package/dist/api/routes/sun.d.ts +4 -0
- package/dist/api/routes/sun.js +43 -0
- package/dist/api/routes/sun.js.map +1 -0
- package/dist/api/routes/system.d.ts +4 -0
- package/dist/api/routes/system.js +70 -0
- package/dist/api/routes/system.js.map +1 -0
- package/dist/api/routes/temporal.d.ts +13 -0
- package/dist/api/routes/temporal.js +82 -0
- package/dist/api/routes/temporal.js.map +1 -0
- package/dist/api/server.d.ts +2 -0
- package/dist/api/server.js +99 -0
- package/dist/api/server.js.map +1 -0
- package/dist/api/websocket.d.ts +53 -0
- package/dist/api/websocket.js +168 -0
- package/dist/api/websocket.js.map +1 -0
- package/dist/cli/index.d.ts +12 -0
- package/dist/cli/index.js +35 -0
- package/dist/cli/index.js.map +1 -0
- package/dist/cli/init.d.ts +10 -0
- package/dist/cli/init.js +163 -0
- package/dist/cli/init.js.map +1 -0
- package/dist/engine/analytics.d.ts +93 -0
- package/dist/engine/analytics.js +437 -0
- package/dist/engine/analytics.js.map +1 -0
- package/dist/engine/conflict.d.ts +54 -0
- package/dist/engine/conflict.js +322 -0
- package/dist/engine/conflict.js.map +1 -0
- package/dist/engine/consolidation.d.ts +83 -0
- package/dist/engine/consolidation.js +368 -0
- package/dist/engine/consolidation.js.map +1 -0
- package/dist/engine/constellation.d.ts +66 -0
- package/dist/engine/constellation.js +382 -0
- package/dist/engine/constellation.js.map +1 -0
- package/dist/engine/corona.d.ts +53 -0
- package/dist/engine/corona.js +181 -0
- package/dist/engine/corona.js.map +1 -0
- package/dist/engine/embedding.d.ts +44 -0
- package/dist/engine/embedding.js +168 -0
- package/dist/engine/embedding.js.map +1 -0
- package/dist/engine/gravity.d.ts +63 -0
- package/dist/engine/gravity.js +121 -0
- package/dist/engine/gravity.js.map +1 -0
- package/dist/engine/multiproject.d.ts +75 -0
- package/dist/engine/multiproject.js +241 -0
- package/dist/engine/multiproject.js.map +1 -0
- package/dist/engine/observation.d.ts +82 -0
- package/dist/engine/observation.js +357 -0
- package/dist/engine/observation.js.map +1 -0
- package/dist/engine/orbit.d.ts +91 -0
- package/dist/engine/orbit.js +249 -0
- package/dist/engine/orbit.js.map +1 -0
- package/dist/engine/planet.d.ts +64 -0
- package/dist/engine/planet.js +432 -0
- package/dist/engine/planet.js.map +1 -0
- package/dist/engine/procedural.d.ts +71 -0
- package/dist/engine/procedural.js +259 -0
- package/dist/engine/procedural.js.map +1 -0
- package/dist/engine/quality.d.ts +48 -0
- package/dist/engine/quality.js +245 -0
- package/dist/engine/quality.js.map +1 -0
- package/dist/engine/repository.d.ts +79 -0
- package/dist/engine/repository.js +13 -0
- package/dist/engine/repository.js.map +1 -0
- package/dist/engine/sun.d.ts +61 -0
- package/dist/engine/sun.js +240 -0
- package/dist/engine/sun.js.map +1 -0
- package/dist/engine/temporal.d.ts +67 -0
- package/dist/engine/temporal.js +283 -0
- package/dist/engine/temporal.js.map +1 -0
- package/dist/engine/types.d.ts +179 -0
- package/dist/engine/types.js +27 -0
- package/dist/engine/types.js.map +1 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +60 -0
- package/dist/index.js.map +1 -0
- package/dist/mcp/connector-registry.d.ts +20 -0
- package/dist/mcp/connector-registry.js +35 -0
- package/dist/mcp/connector-registry.js.map +1 -0
- package/dist/mcp/server.d.ts +13 -0
- package/dist/mcp/server.js +242 -0
- package/dist/mcp/server.js.map +1 -0
- package/dist/mcp/tools/daemon-tool.d.ts +16 -0
- package/dist/mcp/tools/daemon-tool.js +58 -0
- package/dist/mcp/tools/daemon-tool.js.map +1 -0
- package/dist/mcp/tools/ingestion-tools.d.ts +20 -0
- package/dist/mcp/tools/ingestion-tools.js +34 -0
- package/dist/mcp/tools/ingestion-tools.js.map +1 -0
- package/dist/mcp/tools/memory-tools.d.ts +122 -0
- package/dist/mcp/tools/memory-tools.js +1037 -0
- package/dist/mcp/tools/memory-tools.js.map +1 -0
- package/dist/scanner/cloud/github.d.ts +34 -0
- package/dist/scanner/cloud/github.js +260 -0
- package/dist/scanner/cloud/github.js.map +1 -0
- package/dist/scanner/cloud/google-drive.d.ts +30 -0
- package/dist/scanner/cloud/google-drive.js +289 -0
- package/dist/scanner/cloud/google-drive.js.map +1 -0
- package/dist/scanner/cloud/notion.d.ts +33 -0
- package/dist/scanner/cloud/notion.js +231 -0
- package/dist/scanner/cloud/notion.js.map +1 -0
- package/dist/scanner/cloud/slack.d.ts +38 -0
- package/dist/scanner/cloud/slack.js +282 -0
- package/dist/scanner/cloud/slack.js.map +1 -0
- package/dist/scanner/cloud/types.d.ts +73 -0
- package/dist/scanner/cloud/types.js +9 -0
- package/dist/scanner/cloud/types.js.map +1 -0
- package/dist/scanner/index.d.ts +35 -0
- package/dist/scanner/index.js +420 -0
- package/dist/scanner/index.js.map +1 -0
- package/dist/scanner/local/filesystem.d.ts +33 -0
- package/dist/scanner/local/filesystem.js +203 -0
- package/dist/scanner/local/filesystem.js.map +1 -0
- package/dist/scanner/local/git.d.ts +24 -0
- package/dist/scanner/local/git.js +161 -0
- package/dist/scanner/local/git.js.map +1 -0
- package/dist/scanner/local/parsers/code.d.ts +3 -0
- package/dist/scanner/local/parsers/code.js +127 -0
- package/dist/scanner/local/parsers/code.js.map +1 -0
- package/dist/scanner/local/parsers/index.d.ts +11 -0
- package/dist/scanner/local/parsers/index.js +24 -0
- package/dist/scanner/local/parsers/index.js.map +1 -0
- package/dist/scanner/local/parsers/json-parser.d.ts +3 -0
- package/dist/scanner/local/parsers/json-parser.js +117 -0
- package/dist/scanner/local/parsers/json-parser.js.map +1 -0
- package/dist/scanner/local/parsers/markdown.d.ts +3 -0
- package/dist/scanner/local/parsers/markdown.js +120 -0
- package/dist/scanner/local/parsers/markdown.js.map +1 -0
- package/dist/scanner/local/parsers/text.d.ts +3 -0
- package/dist/scanner/local/parsers/text.js +41 -0
- package/dist/scanner/local/parsers/text.js.map +1 -0
- package/dist/scanner/metadata-scanner.d.ts +67 -0
- package/dist/scanner/metadata-scanner.js +356 -0
- package/dist/scanner/metadata-scanner.js.map +1 -0
- package/dist/scanner/types.d.ts +47 -0
- package/dist/scanner/types.js +19 -0
- package/dist/scanner/types.js.map +1 -0
- package/dist/service/daemon.d.ts +23 -0
- package/dist/service/daemon.js +105 -0
- package/dist/service/daemon.js.map +1 -0
- package/dist/service/scheduler.d.ts +73 -0
- package/dist/service/scheduler.js +281 -0
- package/dist/service/scheduler.js.map +1 -0
- package/dist/storage/database.d.ts +10 -0
- package/dist/storage/database.js +265 -0
- package/dist/storage/database.js.map +1 -0
- package/dist/storage/queries.d.ts +85 -0
- package/dist/storage/queries.js +865 -0
- package/dist/storage/queries.js.map +1 -0
- package/dist/storage/sqlite-repository.d.ts +32 -0
- package/dist/storage/sqlite-repository.js +68 -0
- package/dist/storage/sqlite-repository.js.map +1 -0
- package/dist/storage/vec.d.ts +62 -0
- package/dist/storage/vec.js +111 -0
- package/dist/storage/vec.js.map +1 -0
- package/dist/utils/config.d.ts +5 -0
- package/dist/utils/config.js +60 -0
- package/dist/utils/config.js.map +1 -0
- package/dist/utils/logger.d.ts +36 -0
- package/dist/utils/logger.js +86 -0
- package/dist/utils/logger.js.map +1 -0
- package/dist/utils/time.d.ts +21 -0
- package/dist/utils/time.js +42 -0
- package/dist/utils/time.js.map +1 -0
- package/dist/utils/tokenizer.d.ts +13 -0
- package/dist/utils/tokenizer.js +46 -0
- package/dist/utils/tokenizer.js.map +1 -0
- package/package.json +77 -0
- package/scripts/check-node.mjs +36 -0
- package/scripts/setup.mjs +157 -0
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* embedding.ts — Local text embedding using Transformers.js
|
|
3
|
+
*
|
|
4
|
+
* Uses the all-MiniLM-L6-v2 model (384 dimensions) for generating dense
|
|
5
|
+
* vector representations of memory text. The model runs entirely in-process
|
|
6
|
+
* via @xenova/transformers — no API key or network call required after the
|
|
7
|
+
* initial model download (~90 MB, cached in ~/.cache/huggingface).
|
|
8
|
+
*
|
|
9
|
+
* Design:
|
|
10
|
+
* - Singleton pipeline: the model is loaded once and reused.
|
|
11
|
+
* - generateEmbedding() returns a normalized Float32Array.
|
|
12
|
+
* - Supports Korean + English mixed text (multilingual tokenizer).
|
|
13
|
+
* - Input is capped at MAX_CHARS to avoid excessive tokenization.
|
|
14
|
+
*/
|
|
15
|
+
/**
|
|
16
|
+
* Reset the singleton pipeline (for testing purposes).
|
|
17
|
+
* Allows tests to inject a mock pipeline via _setPipelineForTest().
|
|
18
|
+
*/
|
|
19
|
+
export declare function _resetPipeline(): void;
|
|
20
|
+
/**
|
|
21
|
+
* Inject a mock pipeline (for unit tests that don't want to download the model).
|
|
22
|
+
* The mock must be callable: mock(text, options) → { data: Float32Array }
|
|
23
|
+
*/
|
|
24
|
+
export declare function _setPipelineForTest(mock: unknown): void;
|
|
25
|
+
/**
|
|
26
|
+
* Generate a 384-dimensional embedding vector for the given text.
|
|
27
|
+
*
|
|
28
|
+
* The returned Float32Array is L2-normalized so that cosine similarity
|
|
29
|
+
* reduces to a simple dot product.
|
|
30
|
+
*
|
|
31
|
+
* Text preprocessing:
|
|
32
|
+
* - Trims whitespace
|
|
33
|
+
* - Caps at MAX_CHARS characters to stay within the tokenizer limit
|
|
34
|
+
* - Collapses excessive whitespace runs
|
|
35
|
+
*/
|
|
36
|
+
export declare function generateEmbedding(text: string): Promise<Float32Array>;
|
|
37
|
+
/**
|
|
38
|
+
* Preprocess text before tokenization:
|
|
39
|
+
* 1. Trim
|
|
40
|
+
* 2. Collapse internal whitespace runs to a single space
|
|
41
|
+
* 3. Truncate to MAX_CHARS
|
|
42
|
+
*/
|
|
43
|
+
export declare function preprocessText(text: string): string;
|
|
44
|
+
//# sourceMappingURL=embedding.d.ts.map
|
|
@@ -0,0 +1,168 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* embedding.ts — Local text embedding using Transformers.js
|
|
3
|
+
*
|
|
4
|
+
* Uses the all-MiniLM-L6-v2 model (384 dimensions) for generating dense
|
|
5
|
+
* vector representations of memory text. The model runs entirely in-process
|
|
6
|
+
* via @xenova/transformers — no API key or network call required after the
|
|
7
|
+
* initial model download (~90 MB, cached in ~/.cache/huggingface).
|
|
8
|
+
*
|
|
9
|
+
* Design:
|
|
10
|
+
* - Singleton pipeline: the model is loaded once and reused.
|
|
11
|
+
* - generateEmbedding() returns a normalized Float32Array.
|
|
12
|
+
* - Supports Korean + English mixed text (multilingual tokenizer).
|
|
13
|
+
* - Input is capped at MAX_CHARS to avoid excessive tokenization.
|
|
14
|
+
*/
|
|
15
|
+
// @xenova/transformers uses a dynamic import pattern that is CJS-compatible
|
|
16
|
+
// when called from Node.js ESM. We import the type for the pipeline function
|
|
17
|
+
// and load it lazily.
|
|
18
|
+
const MODEL_NAME = 'Xenova/all-MiniLM-L6-v2';
|
|
19
|
+
const MAX_CHARS = 2000; // roughly 512 tokens for mixed Korean/English text
|
|
20
|
+
// Lazily resolved singleton pipeline instance.
|
|
21
|
+
// Typed as `unknown` to avoid pulling in the full @xenova/transformers types
|
|
22
|
+
// at compile time (the package has non-standard type layouts that vary by version).
|
|
23
|
+
let _pipeline = null;
|
|
24
|
+
let _loading = null;
|
|
25
|
+
// ── Progress tracking ────────────────────────────────────────────────────────
|
|
26
|
+
let _downloadStartTime = 0;
|
|
27
|
+
let _lastProgressPct = -1;
|
|
28
|
+
let _currentFile = '';
|
|
29
|
+
let _loggedFiles = new Set();
|
|
30
|
+
function _formatBytes(bytes) {
|
|
31
|
+
if (bytes < 1024)
|
|
32
|
+
return `${bytes} B`;
|
|
33
|
+
if (bytes < 1024 * 1024)
|
|
34
|
+
return `${(bytes / 1024).toFixed(1)} KB`;
|
|
35
|
+
return `${(bytes / (1024 * 1024)).toFixed(1)} MB`;
|
|
36
|
+
}
|
|
37
|
+
function _formatTime(seconds) {
|
|
38
|
+
if (seconds < 60)
|
|
39
|
+
return `${Math.round(seconds)}s`;
|
|
40
|
+
return `${Math.floor(seconds / 60)}m ${Math.round(seconds % 60)}s`;
|
|
41
|
+
}
|
|
42
|
+
function _onDownloadProgress(info) {
|
|
43
|
+
const { status, file, progress, loaded, total } = info;
|
|
44
|
+
const fileName = (file ?? '').split('/').pop() ?? '';
|
|
45
|
+
if (status === 'initiate') {
|
|
46
|
+
if (_downloadStartTime === 0) {
|
|
47
|
+
_downloadStartTime = Date.now();
|
|
48
|
+
console.error('[stellar-memory] Downloading embedding model (~90 MB, first run only)...');
|
|
49
|
+
console.error('[stellar-memory] This will be cached for future sessions.');
|
|
50
|
+
}
|
|
51
|
+
// Reset per-file tracking when a new file starts
|
|
52
|
+
if (fileName && fileName !== _currentFile) {
|
|
53
|
+
_currentFile = fileName;
|
|
54
|
+
_lastProgressPct = -1;
|
|
55
|
+
}
|
|
56
|
+
}
|
|
57
|
+
if (status === 'progress' && progress != null && fileName) {
|
|
58
|
+
// Only show detailed progress for the main model file (onnx)
|
|
59
|
+
const isModelFile = fileName.endsWith('.onnx');
|
|
60
|
+
if (!isModelFile)
|
|
61
|
+
return;
|
|
62
|
+
const pct = Math.min(100, Math.round(progress));
|
|
63
|
+
if (pct > _lastProgressPct + 10 || (pct === 100 && _lastProgressPct < 100)) {
|
|
64
|
+
_lastProgressPct = pct;
|
|
65
|
+
const elapsed = (Date.now() - _downloadStartTime) / 1000;
|
|
66
|
+
const eta = pct > 0 ? (elapsed / pct) * (100 - pct) : 0;
|
|
67
|
+
const sizeInfo = loaded != null && total != null
|
|
68
|
+
? ` ${_formatBytes(loaded)} / ${_formatBytes(total)}`
|
|
69
|
+
: '';
|
|
70
|
+
const etaInfo = eta > 1 ? ` ETA: ${_formatTime(eta)}` : '';
|
|
71
|
+
console.error(`[stellar-memory] Downloading [${fileName}]: ${pct}%${sizeInfo}${etaInfo}`);
|
|
72
|
+
}
|
|
73
|
+
}
|
|
74
|
+
if (status === 'done' && fileName && !_loggedFiles.has(fileName)) {
|
|
75
|
+
_loggedFiles.add(fileName);
|
|
76
|
+
}
|
|
77
|
+
if (status === 'ready') {
|
|
78
|
+
const elapsed = _downloadStartTime > 0
|
|
79
|
+
? ` in ${((Date.now() - _downloadStartTime) / 1000).toFixed(1)}s`
|
|
80
|
+
: '';
|
|
81
|
+
console.error(`[stellar-memory] Embedding model ready${elapsed}`);
|
|
82
|
+
_downloadStartTime = 0;
|
|
83
|
+
_lastProgressPct = -1;
|
|
84
|
+
_currentFile = '';
|
|
85
|
+
_loggedFiles = new Set();
|
|
86
|
+
}
|
|
87
|
+
}
|
|
88
|
+
/**
|
|
89
|
+
* Load and cache the feature-extraction pipeline.
|
|
90
|
+
* Subsequent calls return the same promise / resolved value.
|
|
91
|
+
*/
|
|
92
|
+
async function getPipeline() {
|
|
93
|
+
if (_pipeline)
|
|
94
|
+
return _pipeline;
|
|
95
|
+
if (!_loading) {
|
|
96
|
+
_loading = (async () => {
|
|
97
|
+
// Dynamic import keeps this from failing at require()-time in environments
|
|
98
|
+
// where the model is not yet available (e.g., during unit tests that mock it).
|
|
99
|
+
const { pipeline, env } = await import('@xenova/transformers');
|
|
100
|
+
// Allow the model to be stored in the default HuggingFace cache directory.
|
|
101
|
+
// In CI/test environments TRANSFORMERS_CACHE can be set to override this.
|
|
102
|
+
if (process.env['TRANSFORMERS_CACHE']) {
|
|
103
|
+
env.cacheDir = process.env['TRANSFORMERS_CACHE'];
|
|
104
|
+
}
|
|
105
|
+
const pipe = await pipeline('feature-extraction', MODEL_NAME, {
|
|
106
|
+
quantized: true, // use the int8-quantized model for faster inference
|
|
107
|
+
progress_callback: _onDownloadProgress,
|
|
108
|
+
});
|
|
109
|
+
_pipeline = pipe;
|
|
110
|
+
return pipe;
|
|
111
|
+
})();
|
|
112
|
+
}
|
|
113
|
+
return _loading;
|
|
114
|
+
}
|
|
115
|
+
/**
|
|
116
|
+
* Reset the singleton pipeline (for testing purposes).
|
|
117
|
+
* Allows tests to inject a mock pipeline via _setPipelineForTest().
|
|
118
|
+
*/
|
|
119
|
+
export function _resetPipeline() {
|
|
120
|
+
_pipeline = null;
|
|
121
|
+
_loading = null;
|
|
122
|
+
}
|
|
123
|
+
/**
|
|
124
|
+
* Inject a mock pipeline (for unit tests that don't want to download the model).
|
|
125
|
+
* The mock must be callable: mock(text, options) → { data: Float32Array }
|
|
126
|
+
*/
|
|
127
|
+
export function _setPipelineForTest(mock) {
|
|
128
|
+
_pipeline = mock;
|
|
129
|
+
_loading = null;
|
|
130
|
+
}
|
|
131
|
+
// ---------------------------------------------------------------------------
|
|
132
|
+
// Public API
|
|
133
|
+
// ---------------------------------------------------------------------------
|
|
134
|
+
/**
|
|
135
|
+
* Generate a 384-dimensional embedding vector for the given text.
|
|
136
|
+
*
|
|
137
|
+
* The returned Float32Array is L2-normalized so that cosine similarity
|
|
138
|
+
* reduces to a simple dot product.
|
|
139
|
+
*
|
|
140
|
+
* Text preprocessing:
|
|
141
|
+
* - Trims whitespace
|
|
142
|
+
* - Caps at MAX_CHARS characters to stay within the tokenizer limit
|
|
143
|
+
* - Collapses excessive whitespace runs
|
|
144
|
+
*/
|
|
145
|
+
export async function generateEmbedding(text) {
|
|
146
|
+
const input = preprocessText(text);
|
|
147
|
+
const pipe = await getPipeline();
|
|
148
|
+
// @xenova/transformers returns a Tensor-like object. We use mean pooling
|
|
149
|
+
// (pooling_mode: 'mean') and then L2-normalize to get a unit vector.
|
|
150
|
+
const output = await pipe(input, {
|
|
151
|
+
pooling: 'mean',
|
|
152
|
+
normalize: true,
|
|
153
|
+
});
|
|
154
|
+
return new Float32Array(output.data);
|
|
155
|
+
}
|
|
156
|
+
/**
|
|
157
|
+
* Preprocess text before tokenization:
|
|
158
|
+
* 1. Trim
|
|
159
|
+
* 2. Collapse internal whitespace runs to a single space
|
|
160
|
+
* 3. Truncate to MAX_CHARS
|
|
161
|
+
*/
|
|
162
|
+
export function preprocessText(text) {
|
|
163
|
+
return text
|
|
164
|
+
.trim()
|
|
165
|
+
.replace(/\s+/g, ' ')
|
|
166
|
+
.slice(0, MAX_CHARS);
|
|
167
|
+
}
|
|
168
|
+
//# sourceMappingURL=embedding.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"embedding.js","sourceRoot":"","sources":["../../src/engine/embedding.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;GAaG;AAEH,4EAA4E;AAC5E,6EAA6E;AAC7E,sBAAsB;AAEtB,MAAM,UAAU,GAAG,yBAAyB,CAAC;AAC7C,MAAM,SAAS,GAAI,IAAI,CAAC,CAAC,mDAAmD;AAE5E,+CAA+C;AAC/C,6EAA6E;AAC7E,oFAAoF;AACpF,IAAI,SAAS,GAAY,IAAI,CAAC;AAC9B,IAAI,QAAQ,GAA4B,IAAI,CAAC;AAE7C,gFAAgF;AAEhF,IAAI,kBAAkB,GAAG,CAAC,CAAC;AAC3B,IAAI,gBAAgB,GAAK,CAAC,CAAC,CAAC;AAC5B,IAAI,YAAY,GAAS,EAAE,CAAC;AAC5B,IAAI,YAAY,GAAS,IAAI,GAAG,EAAU,CAAC;AAE3C,SAAS,YAAY,CAAC,KAAa;IACjC,IAAI,KAAK,GAAG,IAAI;QAAE,OAAO,GAAG,KAAK,IAAI,CAAC;IACtC,IAAI,KAAK,GAAG,IAAI,GAAG,IAAI;QAAE,OAAO,GAAG,CAAC,KAAK,GAAG,IAAI,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,KAAK,CAAC;IAClE,OAAO,GAAG,CAAC,KAAK,GAAG,CAAC,IAAI,GAAG,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,KAAK,CAAC;AACpD,CAAC;AAED,SAAS,WAAW,CAAC,OAAe;IAClC,IAAI,OAAO,GAAG,EAAE;QAAE,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC;IACnD,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,OAAO,GAAG,EAAE,CAAC,KAAK,IAAI,CAAC,KAAK,CAAC,OAAO,GAAG,EAAE,CAAC,GAAG,CAAC;AACrE,CAAC;AAWD,SAAS,mBAAmB,CAAC,IAAkB;IAC7C,MAAM,EAAE,MAAM,EAAE,IAAI,EAAE,QAAQ,EAAE,MAAM,EAAE,KAAK,EAAE,GAAG,IAAI,CAAC;IACvD,MAAM,QAAQ,GAAG,CAAC,IAAI,IAAI,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,IAAI,EAAE,CAAC;IAErD,IAAI,MAAM,KAAK,UAAU,EAAE,CAAC;QAC1B,IAAI,kBAAkB,KAAK,CAAC,EAAE,CAAC;YAC7B,kBAAkB,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;YAChC,OAAO,CAAC,KAAK,CAAC,0EAA0E,CAAC,CAAC;YAC1F,OAAO,CAAC,KAAK,CAAC,2DAA2D,CAAC,CAAC;QAC7E,CAAC;QACD,iDAAiD;QACjD,IAAI,QAAQ,IAAI,QAAQ,KAAK,YAAY,EAAE,CAAC;YAC1C,YAAY,GAAG,QAAQ,CAAC;YACxB,gBAAgB,GAAG,CAAC,CAAC,CAAC;QACxB,CAAC;IACH,CAAC;IAED,IAAI,MAAM,KAAK,UAAU,IAAI,QAAQ,IAAI,IAAI,IAAI,QAAQ,EAAE,CAAC;QAC1D,6DAA6D;QAC7D,MAAM,WAAW,GAAG,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;QAC/C,IAAI,CAAC,WAAW;YAAE,OAAO;QAEzB,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC;QAChD,IAAI,GAAG,GAAG,gBAAgB,GAAG,EAAE,IAAI,CAAC,GAAG,KAAK,GAAG,IAAI,gBAAgB,GAAG,GAAG,CAAC,EAAE,CAAC;YAC3E,gBAAgB,GAAG,GAAG,CAAC;YACvB,MAAM,OAA
O,GAAI,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,kBAAkB,CAAC,GAAG,IAAI,CAAC;YAC1D,MAAM,GAAG,GAAQ,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,GAAG,GAAG,CAAC,GAAG,CAAC,GAAG,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;YAC7D,MAAM,QAAQ,GAAG,MAAM,IAAI,IAAI,IAAI,KAAK,IAAI,IAAI;gBAC9C,CAAC,CAAC,KAAK,YAAY,CAAC,MAAM,CAAC,MAAM,YAAY,CAAC,KAAK,CAAC,EAAE;gBACtD,CAAC,CAAC,EAAE,CAAC;YACP,MAAM,OAAO,GAAI,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC,UAAU,WAAW,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;YAC7D,OAAO,CAAC,KAAK,CAAC,iCAAiC,QAAQ,MAAM,GAAG,IAAI,QAAQ,GAAG,OAAO,EAAE,CAAC,CAAC;QAC5F,CAAC;IACH,CAAC;IAED,IAAI,MAAM,KAAK,MAAM,IAAI,QAAQ,IAAI,CAAC,YAAY,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC;QACjE,YAAY,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC;IAC7B,CAAC;IAED,IAAI,MAAM,KAAK,OAAO,EAAE,CAAC;QACvB,MAAM,OAAO,GAAG,kBAAkB,GAAG,CAAC;YACpC,CAAC,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,kBAAkB,CAAC,GAAG,IAAI,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG;YACjE,CAAC,CAAC,EAAE,CAAC;QACP,OAAO,CAAC,KAAK,CAAC,yCAAyC,OAAO,EAAE,CAAC,CAAC;QAClE,kBAAkB,GAAG,CAAC,CAAC;QACvB,gBAAgB,GAAK,CAAC,CAAC,CAAC;QACxB,YAAY,GAAS,EAAE,CAAC;QACxB,YAAY,GAAS,IAAI,GAAG,EAAE,CAAC;IACjC,CAAC;AACH,CAAC;AAED;;;GAGG;AACH,KAAK,UAAU,WAAW;IACxB,IAAI,SAAS;QAAE,OAAO,SAAS,CAAC;IAEhC,IAAI,CAAC,QAAQ,EAAE,CAAC;QACd,QAAQ,GAAG,CAAC,KAAK,IAAI,EAAE;YACrB,2EAA2E;YAC3E,+EAA+E;YAC/E,MAAM,EAAE,QAAQ,EAAE,GAAG,EAAE,GAAG,MAAM,MAAM,CAAC,sBAAsB,CAAC,CAAC;YAE/D,2EAA2E;YAC3E,0EAA0E;YAC1E,IAAI,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,CAAC;gBACtC,GAAG,CAAC,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,CAAC;YACnD,CAAC;YAED,MAAM,IAAI,GAAG,MAAM,QAAQ,CAAC,oBAAoB,EAAE,UAAU,EAAE;gBAC5D,SAAS,EAAE,IAAI,EAAW,oDAAoD;gBAC9E,iBAAiB,EAAE,mBAAmB;aACvC,CAAC,CAAC;YAEH,SAAS,GAAG,IAAI,CAAC;YACjB,OAAO,IAAI,CAAC;QACd,CAAC,CAAC,EAAE,CAAC;IACP,CAAC;IAED,OAAO,QAAQ,CAAC;AAClB,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,cAAc;IAC5B,SAAS,GAAG,IAAI,CAAC;IACjB,QAAQ,GAAI,IAAI,CAAC;AACnB,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,mBAAmB,CAAC,IAAa;IAC/C,SAAS,GAAG,IAAI,CAAC;IACjB,QAAQ,GAAI,IAAI,CAAC;AACnB,CAAC;AAED,8EAA8E;AAC9E,aAAa;AACb,8EAA8E;AAE9E;;;;;;;;;;GAUG;AACH,MA
AM,CAAC,KAAK,UAAU,iBAAiB,CAAC,IAAY;IAClD,MAAM,KAAK,GAAG,cAAc,CAAC,IAAI,CAAC,CAAC;IAEnC,MAAM,IAAI,GAAG,MAAM,WAAW,EAAE,CAAC;IAEjC,yEAAyE;IACzE,qEAAqE;IACrE,MAAM,MAAM,GAAG,MAAO,IAGe,CAAC,KAAK,EAAE;QAC3C,OAAO,EAAE,MAAM;QACf,SAAS,EAAE,IAAI;KAChB,CAAC,CAAC;IAEH,OAAO,IAAI,YAAY,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;AACvC,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,cAAc,CAAC,IAAY;IACzC,OAAO,IAAI;SACR,IAAI,EAAE;SACN,OAAO,CAAC,MAAM,EAAE,GAAG,CAAC;SACpB,KAAK,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC;AACzB,CAAC"}
|
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* gravity.ts — Relevance calculation (keyword + vector)
|
|
3
|
+
*
|
|
4
|
+
* Phase 1: keyword overlap between memory content and sun context.
|
|
5
|
+
* Phase 2: cosine vector similarity + hybrid score combining both.
|
|
6
|
+
*
|
|
7
|
+
* Hybrid formula: 0.7 × vectorRelevance + 0.3 × keywordRelevance
|
|
8
|
+
*
|
|
9
|
+
* The keyword score is retained as a fast fallback when embeddings are not
|
|
10
|
+
* available (e.g., during unit tests or before the model has loaded).
|
|
11
|
+
*/
|
|
12
|
+
/**
|
|
13
|
+
* Tokenize text: split by whitespace, lowercase, strip punctuation, filter
|
|
14
|
+
* words shorter than 2 characters.
|
|
15
|
+
*/
|
|
16
|
+
export declare function tokenize(text: string): string[];
|
|
17
|
+
/**
|
|
18
|
+
* Calculate keyword relevance between a memory and the current sun context.
|
|
19
|
+
*
|
|
20
|
+
* Algorithm:
|
|
21
|
+
* 1. Tokenize both texts.
|
|
22
|
+
* 2. Build a Set from sun tokens.
|
|
23
|
+
* 3. Count how many memory tokens appear in the sun set.
|
|
24
|
+
* 4. Score = min(1.0, overlap / max(3, sunTokens.length * 0.3))
|
|
25
|
+
*
|
|
26
|
+
* Returns a score between 0 and 1.
|
|
27
|
+
* Returns 0 if sunText is empty.
|
|
28
|
+
*/
|
|
29
|
+
export declare function keywordRelevance(memoryText: string, sunText: string): number;
|
|
30
|
+
/**
|
|
31
|
+
* Cosine similarity between two Float32Arrays of equal length.
|
|
32
|
+
*
|
|
33
|
+
* Formula: dot(a, b) / (‖a‖ × ‖b‖)
|
|
34
|
+
*
|
|
35
|
+
* Returns a value in [-1, 1]; in practice [0, 1] for embeddings produced
|
|
36
|
+
* by all-MiniLM-L6-v2 (which are L2-normalized, so norm(a) = norm(b) = 1).
|
|
37
|
+
*
|
|
38
|
+
* Returns 0 for zero-length or mismatched vectors.
|
|
39
|
+
*/
|
|
40
|
+
export declare function cosineSimilarity(a: Float32Array, b: Float32Array): number;
|
|
41
|
+
/**
|
|
42
|
+
* Vector relevance between a memory embedding and the sun (context) embedding.
|
|
43
|
+
*
|
|
44
|
+
* Wraps cosineSimilarity and converts the [-1, 1] cosine distance into a
|
|
45
|
+
* [0, 1] relevance score: score = (similarity + 1) / 2
|
|
46
|
+
*
|
|
47
|
+
* For normalized embeddings (all-MiniLM-L6-v2), similarity is already [0, 1]
|
|
48
|
+
* in practice, but the rescaling ensures the contract is always satisfied.
|
|
49
|
+
*
|
|
50
|
+
* Returns 0 when either embedding is null/undefined.
|
|
51
|
+
*/
|
|
52
|
+
export declare function vectorRelevance(memoryEmbedding: Float32Array | null | undefined, sunEmbedding: Float32Array | null | undefined): number;
|
|
53
|
+
/**
|
|
54
|
+
* Hybrid relevance: combines vector semantic similarity with keyword overlap.
|
|
55
|
+
*
|
|
56
|
+
* score = VECTOR_WEIGHT × vectorRelevance + KEYWORD_WEIGHT × keywordRelevance
|
|
57
|
+
*
|
|
58
|
+
* Falls back gracefully:
|
|
59
|
+
* - If embeddings are absent → pure keyword score.
|
|
60
|
+
* - If sunText is empty → pure vector score (if embedding available).
|
|
61
|
+
*/
|
|
62
|
+
export declare function hybridRelevance(memoryText: string, sunText: string, memoryEmbedding: Float32Array | null | undefined, sunEmbedding: Float32Array | null | undefined): number;
|
|
63
|
+
//# sourceMappingURL=gravity.d.ts.map
|
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* gravity.ts — Relevance calculation (keyword + vector)
|
|
3
|
+
*
|
|
4
|
+
* Phase 1: keyword overlap between memory content and sun context.
|
|
5
|
+
* Phase 2: cosine vector similarity + hybrid score combining both.
|
|
6
|
+
*
|
|
7
|
+
* Hybrid formula: 0.7 × vectorRelevance + 0.3 × keywordRelevance
|
|
8
|
+
*
|
|
9
|
+
* The keyword score is retained as a fast fallback when embeddings are not
|
|
10
|
+
* available (e.g., during unit tests or before the model has loaded).
|
|
11
|
+
*/
|
|
12
|
+
// Hybrid weighting constants
|
|
13
|
+
const VECTOR_WEIGHT = 0.7;
|
|
14
|
+
const KEYWORD_WEIGHT = 0.3;
|
|
15
|
+
/**
|
|
16
|
+
* Tokenize text: split by whitespace, lowercase, strip punctuation, filter
|
|
17
|
+
* words shorter than 2 characters.
|
|
18
|
+
*/
|
|
19
|
+
export function tokenize(text) {
|
|
20
|
+
return text
|
|
21
|
+
.toLowerCase()
|
|
22
|
+
.split(/\s+/)
|
|
23
|
+
.map(word => word.replace(/[^\w]/g, ''))
|
|
24
|
+
.filter(word => word.length >= 2);
|
|
25
|
+
}
|
|
26
|
+
/**
|
|
27
|
+
* Calculate keyword relevance between a memory and the current sun context.
|
|
28
|
+
*
|
|
29
|
+
* Algorithm:
|
|
30
|
+
* 1. Tokenize both texts.
|
|
31
|
+
* 2. Build a Set from sun tokens.
|
|
32
|
+
* 3. Count how many memory tokens appear in the sun set.
|
|
33
|
+
* 4. Score = min(1.0, overlap / max(3, sunTokens.length * 0.3))
|
|
34
|
+
*
|
|
35
|
+
* Returns a score between 0 and 1.
|
|
36
|
+
* Returns 0 if sunText is empty.
|
|
37
|
+
*/
|
|
38
|
+
export function keywordRelevance(memoryText, sunText) {
|
|
39
|
+
if (!sunText || sunText.trim().length === 0) {
|
|
40
|
+
return 0;
|
|
41
|
+
}
|
|
42
|
+
const sunTokens = tokenize(sunText);
|
|
43
|
+
if (sunTokens.length === 0) {
|
|
44
|
+
return 0;
|
|
45
|
+
}
|
|
46
|
+
const sunSet = new Set(sunTokens);
|
|
47
|
+
const memoryTokens = tokenize(memoryText);
|
|
48
|
+
const memorySet = new Set(memoryTokens);
|
|
49
|
+
let overlap = 0;
|
|
50
|
+
for (const token of memorySet) {
|
|
51
|
+
if (sunSet.has(token)) {
|
|
52
|
+
overlap++;
|
|
53
|
+
}
|
|
54
|
+
}
|
|
55
|
+
const denominator = Math.max(5, sunTokens.length * 0.3);
|
|
56
|
+
return Math.min(1.0, overlap / denominator);
|
|
57
|
+
}
|
|
58
|
+
// ---------------------------------------------------------------------------
|
|
59
|
+
// Vector relevance (Phase 2)
|
|
60
|
+
// ---------------------------------------------------------------------------
|
|
61
|
+
/**
|
|
62
|
+
* Cosine similarity between two Float32Arrays of equal length.
|
|
63
|
+
*
|
|
64
|
+
* Formula: dot(a, b) / (‖a‖ × ‖b‖)
|
|
65
|
+
*
|
|
66
|
+
* Returns a value in [-1, 1]; in practice [0, 1] for embeddings produced
|
|
67
|
+
* by all-MiniLM-L6-v2 (which are L2-normalized, so norm(a) = norm(b) = 1).
|
|
68
|
+
*
|
|
69
|
+
* Returns 0 for zero-length or mismatched vectors.
|
|
70
|
+
*/
|
|
71
|
+
export function cosineSimilarity(a, b) {
|
|
72
|
+
if (a.length === 0 || a.length !== b.length)
|
|
73
|
+
return 0;
|
|
74
|
+
let dot = 0;
|
|
75
|
+
let normA = 0;
|
|
76
|
+
let normB = 0;
|
|
77
|
+
for (let i = 0; i < a.length; i++) {
|
|
78
|
+
dot += a[i] * b[i];
|
|
79
|
+
normA += a[i] * a[i];
|
|
80
|
+
normB += b[i] * b[i];
|
|
81
|
+
}
|
|
82
|
+
const denom = Math.sqrt(normA) * Math.sqrt(normB);
|
|
83
|
+
if (denom === 0)
|
|
84
|
+
return 0;
|
|
85
|
+
// Clamp to [0, 1] — cosine can be slightly negative due to float rounding.
|
|
86
|
+
return Math.max(0, Math.min(1, dot / denom));
|
|
87
|
+
}
|
|
88
|
+
/**
|
|
89
|
+
* Vector relevance between a memory embedding and the sun (context) embedding.
|
|
90
|
+
*
|
|
91
|
+
* Wraps cosineSimilarity and converts the [-1, 1] cosine distance into a
|
|
92
|
+
* [0, 1] relevance score: score = (similarity + 1) / 2
|
|
93
|
+
*
|
|
94
|
+
* For normalized embeddings (all-MiniLM-L6-v2), similarity is already [0, 1]
|
|
95
|
+
* in practice, but the rescaling ensures the contract is always satisfied.
|
|
96
|
+
*
|
|
97
|
+
* Returns 0 when either embedding is null/undefined.
|
|
98
|
+
*/
|
|
99
|
+
export function vectorRelevance(memoryEmbedding, sunEmbedding) {
|
|
100
|
+
if (!memoryEmbedding || !sunEmbedding)
|
|
101
|
+
return 0;
|
|
102
|
+
return Math.max(0, cosineSimilarity(memoryEmbedding, sunEmbedding));
|
|
103
|
+
}
|
|
104
|
+
/**
 * Hybrid relevance: combines vector semantic similarity with keyword overlap.
 *
 * score = VECTOR_WEIGHT × vectorRelevance + KEYWORD_WEIGHT × keywordRelevance
 *
 * Falls back gracefully:
 * - If embeddings are absent → pure keyword score.
 * - If sunText is empty → pure vector score (if embedding available).
 *
 * @param memoryText      text content of the memory
 * @param sunText         text of the current context ("sun")
 * @param memoryEmbedding embedding of the memory, or null/undefined
 * @param sunEmbedding    embedding of the context, or null/undefined
 * @returns relevance in [0, 1]
 */
export function hybridRelevance(memoryText, sunText, memoryEmbedding, sunEmbedding) {
    // If we have no vector signal, fall back entirely to keywords.
    if (!memoryEmbedding || !sunEmbedding)
        return keywordRelevance(memoryText, sunText);
    const vecScore = vectorRelevance(memoryEmbedding, sunEmbedding);
    // Empty context text would force the keyword score to 0 and silently
    // dampen the blend to VECTOR_WEIGHT × vecScore; honour the documented
    // contract instead and return the pure vector score.
    if (!sunText || sunText.trim().length === 0)
        return vecScore;
    const kwScore = keywordRelevance(memoryText, sunText);
    return Math.min(1.0, VECTOR_WEIGHT * vecScore + KEYWORD_WEIGHT * kwScore);
}
|
|
121
|
+
//# sourceMappingURL=gravity.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"gravity.js","sourceRoot":"","sources":["../../src/engine/gravity.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;GAUG;AAEH,6BAA6B;AAC7B,MAAM,aAAa,GAAI,GAAG,CAAC;AAC3B,MAAM,cAAc,GAAG,GAAG,CAAC;AAE3B;;;GAGG;AACH,MAAM,UAAU,QAAQ,CAAC,IAAY;IACnC,OAAO,IAAI;SACR,WAAW,EAAE;SACb,KAAK,CAAC,KAAK,CAAC;SACZ,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ,EAAE,EAAE,CAAC,CAAC;SACvC,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,CAAC,CAAC;AACtC,CAAC;AAED;;;;;;;;;;;GAWG;AACH,MAAM,UAAU,gBAAgB,CAAC,UAAkB,EAAE,OAAe;IAClE,IAAI,CAAC,OAAO,IAAI,OAAO,CAAC,IAAI,EAAE,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QAC5C,OAAO,CAAC,CAAC;IACX,CAAC;IAED,MAAM,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,CAAC;IACpC,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QAC3B,OAAO,CAAC,CAAC;IACX,CAAC;IAED,MAAM,MAAM,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC;IAClC,MAAM,YAAY,GAAG,QAAQ,CAAC,UAAU,CAAC,CAAC;IAE1C,MAAM,SAAS,GAAG,IAAI,GAAG,CAAC,YAAY,CAAC,CAAC;IACxC,IAAI,OAAO,GAAG,CAAC,CAAC;IAChB,KAAK,MAAM,KAAK,IAAI,SAAS,EAAE,CAAC;QAC9B,IAAI,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC;YACtB,OAAO,EAAE,CAAC;QACZ,CAAC;IACH,CAAC;IAED,MAAM,WAAW,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,SAAS,CAAC,MAAM,GAAG,GAAG,CAAC,CAAC;IACxD,OAAO,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,OAAO,GAAG,WAAW,CAAC,CAAC;AAC9C,CAAC;AAED,8EAA8E;AAC9E,6BAA6B;AAC7B,8EAA8E;AAE9E;;;;;;;;;GASG;AACH,MAAM,UAAU,gBAAgB,CAAC,CAAe,EAAE,CAAe;IAC/D,IAAI,CAAC,CAAC,MAAM,KAAK,CAAC,IAAI,CAAC,CAAC,MAAM,KAAK,CAAC,CAAC,MAAM;QAAE,OAAO,CAAC,CAAC;IAEtD,IAAI,GAAG,GAAI,CAAC,CAAC;IACb,IAAI,KAAK,GAAG,CAAC,CAAC;IACd,IAAI,KAAK,GAAG,CAAC,CAAC;IAEd,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;QAClC,GAAG,IAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;QACrB,KAAK,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;QACrB,KAAK,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;IACvB,CAAC;IAED,MAAM,KAAK,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;IAClD,IAAI,KAAK,KAAK,CAAC;QAAE,OAAO,CAAC,CAAC;IAE1B,2EAA2E;IAC3E,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC,EA
AE,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,GAAG,GAAG,KAAK,CAAC,CAAC,CAAC;AAC/C,CAAC;AAED;;;;;;;;;;GAUG;AACH,MAAM,UAAU,eAAe,CAC7B,eAAgD,EAChD,YAA+C;IAE/C,IAAI,CAAC,eAAe,IAAI,CAAC,YAAY;QAAE,OAAO,CAAC,CAAC;IAChD,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,gBAAgB,CAAC,eAAe,EAAE,YAAY,CAAC,CAAC,CAAC;AACtE,CAAC;AAED;;;;;;;;GAQG;AACH,MAAM,UAAU,eAAe,CAC7B,UAAkB,EAClB,OAAe,EACf,eAAgD,EAChD,YAA+C;IAE/C,MAAM,QAAQ,GAAG,eAAe,CAAC,eAAe,EAAE,YAAY,CAAC,CAAC;IAChE,MAAM,OAAO,GAAI,gBAAgB,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;IAEvD,+DAA+D;IAC/D,IAAI,CAAC,eAAe,IAAI,CAAC,YAAY;QAAE,OAAO,OAAO,CAAC;IAEtD,OAAO,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,aAAa,GAAG,QAAQ,GAAG,cAAc,GAAG,OAAO,CAAC,CAAC;AAC5E,CAAC"}
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
/**
 * multiproject.ts — Multi-Project Galaxy management
 *
 * Manages runtime project switching and cross-project knowledge sharing.
 * Universal memories are memories marked to appear in ALL project recall results.
 *
 * State: currentProject is the one mutable singleton in this module.
 * All other functions are pure queries or side-effectful writes to the DB.
 */
import type { Memory } from './types.js';
/**
 * Return the currently active project name.
 *
 * @returns the name of the project currently receiving reads/writes
 */
export declare function getCurrentProject(): string;
/**
 * Switch the active project at runtime without restarting.
 *
 * Returns info about the transition so callers can report it to the user.
 *
 * @param project name of the project to activate
 * @returns `previous` and `current` project names, plus `memoryCount` —
 *          presumably the memory count of the newly-active project
 *          (NOTE(review): confirm against the implementation)
 */
export declare function switchProject(project: string): {
    previous: string;
    current: string;
    memoryCount: number;
};
/**
 * Create a new project by seeding a sun_state entry for it.
 * If the project already has a sun_state, returns created=false.
 *
 * @param name project name to create
 * @returns the project name and whether a new sun_state row was created
 */
export declare function createProject(name: string): {
    project: string;
    created: boolean;
};
/**
 * List all projects with basic statistics.
 * Always includes 'default' even if it has no memories yet.
 *
 * @returns one entry per project; `lastUpdated` is a timestamp string
 *          (format not visible from this declaration — verify in source)
 */
export declare function listAllProjects(): Array<{
    project: string;
    memoryCount: number;
    lastUpdated: string;
    hasUniversal: boolean;
}>;
/**
 * Mark (or unmark) a memory as universal.
 * Universal memories surface in recall results for all projects.
 *
 * @param memoryId    id of the memory to flag
 * @param isUniversal true to mark universal, false to clear the flag
 */
export declare function markUniversal(memoryId: string, isUniversal: boolean): void;
/**
 * Get universal memories relevant to a project.
 * Filters out memories that already belong to the current project
 * (no need to surface them twice) and sorts by importance.
 *
 * @param project project whose own memories are excluded from the result
 * @param limit   optional cap on the number of memories returned
 * @returns universal memories from other projects, most important first
 */
export declare function getUniversalContext(project: string, limit?: number): Memory[];
/**
 * Detect memories that are strong candidates to become universal.
 *
 * Criteria:
 * - type = 'procedural' (behavioral rules are generally universal)
 * - type = 'context' with generic technical content (no project-specific terms)
 * - importance > 0.8 (highly important knowledge tends to be broadly applicable)
 * - not already marked universal
 *
 * @param project project whose memories are scanned for candidates
 * @returns memories matching the criteria above
 */
export declare function detectUniversalCandidates(project: string): Memory[];
/**
 * Detailed statistics for a single project.
 *
 * @param project project to summarize
 * @returns counts plus zone/type breakdowns; `oldestMemory`/`newestMemory`
 *          are timestamp strings (exact format not visible here)
 */
export declare function getProjectStats(project: string): {
    memoryCount: number;
    zoneDistribution: Record<string, number>;
    typeDistribution: Record<string, number>;
    universalCount: number;
    oldestMemory: string;
    newestMemory: string;
};
//# sourceMappingURL=multiproject.d.ts.map
|