@agents-eco/agentic-memory 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 agents.eco
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,430 @@
1
+ <p align="center">
2
+ <img src="icon.png" alt="Agentic Memory" width="160" />
3
+ </p>
4
+
5
+ <h1 align="center">Agentic Memory</h1>
6
+
7
+ <p align="center">
8
+ <strong>Graph-based agent memory — short-term and long-term memory with local or Voyage AI backends.</strong><br/>
9
+ Built by <a href="https://agents.eco">agents.eco</a> — the decentralized AI agent economy.
10
+ </p>
11
+
12
+ <p align="center">
13
+ <a href="https://www.npmjs.com/package/@agents-eco/agentic-memory"><img src="https://img.shields.io/npm/v/@agents-eco/agentic-memory?style=flat-square" alt="npm" /></a>
14
+ <a href="https://github.com/agents-eco/agentic-memory/blob/main/LICENSE"><img src="https://img.shields.io/badge/license-MIT-blue?style=flat-square" alt="MIT License" /></a>
15
+ <a href="https://github.com/agents-eco/agentic-memory"><img src="https://img.shields.io/github/stars/agents-eco/agentic-memory?style=flat-square" alt="GitHub stars" /></a>
16
+ </p>
17
+
18
+ ```bash
19
+ npm install @agents-eco/agentic-memory
20
+ ```
21
+
22
+ ---
23
+
24
+ ## Why This Exists
25
+
26
+ Most agent memory is either a flat conversation buffer or an opaque vector database. Neither captures how memory actually works.
27
+
28
+ Agentic Memory is a **graph-based memory system** that separates short-term and long-term memory, with typed nodes, weighted relationships, decay, reinforcement, and hybrid search.
29
+
30
+ - **Short-Term Memory (STM)** — Bounded working context. Recent items with automatic expiry. Fast keyword search.
31
+ - **Long-Term Memory (LTM)** — Persistent graph with typed nodes (episodic, semantic, entity, goal, etc.) and weighted edges (causal, temporal, hierarchical). Supports decay and reinforcement.
32
+ - **Two backends** — Local (zero dependencies, offline) or Voyage AI (high-quality neural embeddings).
33
+ - **Graph traversal** — Find related memories by walking the graph, not just by vector similarity.
34
+ - **Human-readable persistence** — Stored as JSON files you can inspect and version control.
35
+
36
+ ## Quick Start
37
+
38
+ ### Local Backend (no API key needed)
39
+
40
+ ```typescript
41
+ import { AgenticMemory } from "@agents-eco/agentic-memory";
42
+
43
+ const memory = new AgenticMemory({ backend: "local" });
44
+
45
+ // Add memories
46
+ await memory.add("User's name is Alice", "semantic", 0.8);
47
+ await memory.add("Alice prefers dark mode", "semantic", 0.6);
48
+ await memory.addEpisode("User asked about the weather in NYC");
49
+ await memory.addGoal("Help Alice plan her trip to Tokyo");
50
+
51
+ // Search
52
+ const results = await memory.search("What is the user's name?");
53
+ console.log(results[0].node.content); // "User's name is Alice"
54
+
55
+ // Build context for LLM prompt injection
56
+ const context = await memory.buildContext("Tell me about Alice");
57
+ console.log(context);
58
+ ```
59
+
60
+ ### Voyage AI Backend (high-quality embeddings)
61
+
62
+ ```typescript
63
+ import { AgenticMemory } from "@agents-eco/agentic-memory";
64
+
65
+ const memory = new AgenticMemory({
66
+ backend: "voyage",
67
+ voyageApiKey: process.env.VOYAGE_API_KEY!,
68
+ voyageModel: "voyage-3-lite", // 512 dims, fast and cheap
69
+ });
70
+
71
+ await memory.add("The project deadline is March 15th", "semantic", 0.9);
72
+ const results = await memory.search("When is the deadline?");
73
+ ```
74
+
75
+ ## Architecture
76
+
77
+ ```
78
+ ┌─────────────────────────────────────────────────────────┐
79
+ │ AgenticMemory │
80
+ │ │
81
+ │ ┌─────────────────────┐ ┌──────────────────────────┐ │
82
+ │ │ Short-Term Memory │ │ Long-Term Memory │ │
83
+ │ │ │ │ │ │
84
+ │ │ Bounded buffer │ │ ┌─────────────────────┐ │ │
85
+ │ │ TTL-based expiry │ │ │ Memory Graph │ │ │
86
+ │ │ Keyword search │ │ │ │ │ │
87
+ │ │ Importance ranking │ │ │ Nodes (typed): │ │ │
88
+ │ │ │ │ │ - episodic │ │ │
89
+ │ │ ┌───────────────┐ │ │ │ - semantic │ │ │
90
+ │ │ │ Consolidation │──┼──┼─▶│ - entity │ │ │
91
+ │ │ │ (STM → LTM) │ │ │ │ - goal │ │ │
92
+ │ │ └───────────────┘ │ │ │ - observation │ │ │
93
+ │ │ │ │ │ - procedural │ │ │
94
+ │ └─────────────────────┘ │ │ - emotional │ │ │
95
+ │ │ │ │ │ │
96
+ │ │ │ Edges (weighted): │ │ │
97
+ │ │ │ - related_to │ │ │
98
+ │ │ │ - caused_by │ │ │
99
+ │ │ │ - leads_to │ │ │
100
+ │ │ │ - part_of │ │ │
101
+ │ │ │ - similar_to │ │ │
102
+ │ │ │ - mentioned_in │ │ │
103
+ │ │ └─────────────────────┘ │ │
104
+ │ │ │ │
105
+ │ │ Decay + Reinforcement │ │
106
+ │ │ Hybrid Search │ │
107
+ │ │ Graph Traversal │ │
108
+ │ └──────────────────────────┘ │
109
+ │ │
110
+ │ ┌──────────────────┐ ┌──────────────────────────────┐ │
111
+ │ │ Embedding Backend │ │ Storage Backend │ │
112
+ │ │ │ │ │ │
113
+ │ │ Local (hash) │ │ Local (JSON files) │ │
114
+ │ │ Voyage AI │ │ Custom (implement iface) │ │
115
+ │ └──────────────────┘ └──────────────────────────────┘ │
116
+ └─────────────────────────────────────────────────────────┘
117
+ ```
118
+
119
+ ## Memory Types
120
+
121
+ ### Node Types
122
+
123
+ | Type | Description | Example |
124
+ |------|-------------|---------|
125
+ | `episodic` | Specific events and conversations | "User asked about weather in NYC" |
126
+ | `semantic` | Facts, knowledge, extracted info | "User's name is Alice" |
127
+ | `entity` | People, places, things | "Alice", "Tokyo", "Project X" |
128
+ | `goal` | Objectives, tasks, intentions | "Help user plan trip to Tokyo" |
129
+ | `observation` | Agent observations about the world | "User seems frustrated today" |
130
+ | `procedural` | How-to, skills, patterns | "To check weather, use the weather API" |
131
+ | `emotional` | Sentiment, preferences, reactions | "User prefers concise responses" |
132
+
133
+ ### Edge Types (Relations)
134
+
135
+ | Relation | Description |
136
+ |----------|-------------|
137
+ | `related_to` | General association |
138
+ | `caused_by` | Causal relationship |
139
+ | `leads_to` | Sequential / temporal |
140
+ | `part_of` | Hierarchical |
141
+ | `contradicts` | Conflicting information |
142
+ | `reinforces` | Supporting information |
143
+ | `derived_from` | Extracted / inferred from |
144
+ | `similar_to` | Semantic similarity |
145
+ | `mentioned_in` | Entity mentioned in episode |
146
+ | `precedes` / `follows` | Temporal ordering |
147
+
148
+ ## STM (Short-Term Memory)
149
+
150
+ The working context buffer. Bounded, fast, and ephemeral.
151
+
152
+ ```typescript
153
+ import { ShortTermMemory } from "@agents-eco/agentic-memory";
154
+
155
+ const stm = new ShortTermMemory({
156
+ capacity: 20, // max items
157
+ ttlMs: 30 * 60 * 1000, // 30 min expiry
158
+ });
159
+
160
+ stm.add("User just asked about pricing", "episodic", 0.7);
161
+ stm.add("Current topic is billing", "observation", 0.5);
162
+
163
+ // Get recent context
164
+ const recent = stm.getRecent(5);
165
+
166
+ // Search
167
+ const results = stm.search("pricing");
168
+
169
+ // Build context string for prompt injection
170
+ const context = stm.buildContext();
171
+ ```
172
+
173
+ ## LTM (Long-Term Memory)
174
+
175
+ Persistent graph with decay, reinforcement, and hybrid search.
176
+
177
+ ```typescript
178
+ import { LongTermMemory } from "@agents-eco/agentic-memory";
179
+ import { LocalEmbedding } from "@agents-eco/agentic-memory";
180
+
181
+ const ltm = new LongTermMemory(
182
+ { decayRate: 0.01, minImportance: 0.1, maxNodes: 10000 },
183
+ new LocalEmbedding()
184
+ );
185
+
186
+ // Add memories
187
+ const fact = await ltm.add("Alice lives in New York", "semantic", 0.7);
188
+ const entity = await ltm.add("Alice", "entity", 0.6);
189
+ ltm.link(fact.id, entity.id, "mentioned_in");
190
+
191
+ // Add facts with auto entity linking
192
+ const { node, entityNodes } = await ltm.addFact(
193
+ "Alice is a software engineer at Acme Corp",
194
+ ["Alice", "Acme Corp"]
195
+ );
196
+
197
+ // Search with graph traversal
198
+ const results = await ltm.search("Where does Alice work?", {
199
+ limit: 5,
200
+ includeRelated: true,
201
+ traversalDepth: 2,
202
+ });
203
+
204
+ // Decay old memories
205
+ ltm.decay();
206
+ ```
207
+
208
+ ## Consolidation (STM to LTM)
209
+
210
+ Important short-term memories are promoted to long-term storage.
211
+
212
+ ```typescript
213
+ const memory = new AgenticMemory({ backend: "local" });
214
+
215
+ // Add several memories to STM
216
+ await memory.add("User mentioned they like sushi", "semantic", 0.7);
217
+ await memory.add("User asked about Tokyo restaurants", "episodic", 0.5);
218
+ await memory.add("Random small talk", "episodic", 0.2);
219
+
220
+ // Consolidate important items to LTM
221
+ const count = await memory.consolidate(0.4); // min importance threshold
222
+ console.log(`Consolidated ${count} memories to LTM`);
223
+ // "Random small talk" stays in STM (too low importance)
224
+ // The other two are now in the LTM graph with temporal links
225
+ ```
226
+
227
+ ## Hybrid Search
228
+
229
+ Combines vector similarity, keyword matching, recency, and importance scoring.
230
+
231
+ ```typescript
232
+ const results = await memory.search("What does Alice like?", {
233
+ limit: 5,
234
+ types: ["semantic", "episodic"], // filter by type
235
+ minScore: 0.3, // minimum relevance
236
+ includeRelated: true, // include graph neighbors
237
+ traversalDepth: 2, // how far to walk the graph
238
+ method: "hybrid", // vector + keyword + recency
239
+ });
240
+
241
+ for (const r of results) {
242
+ console.log(`[${r.method}] (${r.score.toFixed(2)}) ${r.node.content}`);
243
+ if (r.related) {
244
+ for (const rel of r.related) {
245
+ console.log(` └─ ${rel.content}`);
246
+ }
247
+ }
248
+ }
249
+ ```
250
+
251
+ ## Persistence
252
+
253
+ Memory is saved as JSON files you can inspect and version control.
254
+
255
+ ```typescript
256
+ const memory = new AgenticMemory({
257
+ backend: "local",
258
+ storageDir: "./.agent/memory",
259
+ namespace: "my-agent", // creates graph-my-agent.json
260
+ });
261
+
262
+ // Auto-loads on first operation
263
+ await memory.add("Something important", "semantic", 0.8);
264
+
265
+ // Explicit save
266
+ await memory.save();
267
+
268
+ // Stats
269
+ console.log(memory.stats());
270
+ // { stm: 1, ltm: { nodes: 1, edges: 0, byType: { semantic: 1 } } }
271
+ ```
272
+
273
+ ## Custom Backends
274
+
275
+ ### Custom Embedding Backend
276
+
277
+ ```typescript
278
+ import { EmbeddingBackend } from "@agents-eco/agentic-memory";
279
+
280
+ class OpenAIEmbedding implements EmbeddingBackend {
281
+ name = "openai";
282
+ dimension = 1536;
283
+
284
+ async embed(text: string): Promise<number[]> {
285
+ const res = await fetch("https://api.openai.com/v1/embeddings", {
286
+ method: "POST",
287
+ headers: { Authorization: `Bearer ${apiKey}`, "Content-Type": "application/json" },
288
+ body: JSON.stringify({ model: "text-embedding-3-small", input: text }),
289
+ });
290
+ const data = await res.json();
291
+ return data.data[0].embedding;
292
+ }
293
+
294
+ async embedBatch(texts: string[]): Promise<number[][]> {
295
+ // Similar batch implementation
296
+ }
297
+ }
298
+
299
+ const memory = new AgenticMemory({
300
+ backend: "local",
301
+ embedding: new OpenAIEmbedding(),
302
+ });
303
+ ```
304
+
305
+ ### Custom Storage Backend
306
+
307
+ ```typescript
308
+ import { StorageBackend, SerializedGraph } from "@agents-eco/agentic-memory";
309
+
310
+ class RedisStorage implements StorageBackend {
311
+ name = "redis";
312
+
313
+ async save(graph: SerializedGraph): Promise<void> {
314
+ await redis.set("memory:graph", JSON.stringify(graph));
315
+ }
316
+
317
+ async load(): Promise<SerializedGraph | null> {
318
+ const raw = await redis.get("memory:graph");
319
+ return raw ? JSON.parse(raw) : null;
320
+ }
321
+
322
+ async exists(): Promise<boolean> {
323
+ return (await redis.exists("memory:graph")) === 1;
324
+ }
325
+ }
326
+
327
+ const memory = new AgenticMemory({
328
+ backend: "local",
329
+ storage: new RedisStorage(),
330
+ });
331
+ ```
332
+
333
+ ## Integration with Open Agentic Framework
334
+
335
+ Use as the memory backend for [@agents-eco/open-agentic-framework](https://github.com/agents-eco/open-agentic-framework):
336
+
337
+ ```typescript
338
+ import { Agent } from "@agents-eco/open-agentic-framework";
339
+ import { AgenticMemory } from "@agents-eco/agentic-memory";
340
+
341
+ const memory = new AgenticMemory({ backend: "local" });
342
+
343
+ // Implement the MemoryStore interface
344
+ const memoryStore = {
345
+ async add(entry) {
346
+ const { stmEntry } = await memory.add(entry.content, entry.type as any, 0.5);
347
+ return { id: stmEntry.id, content: entry.content, type: entry.type, timestamp: stmEntry.createdAt };
348
+ },
349
+ async search(query, limit) {
350
+ const results = await memory.search(query, { limit });
351
+ return results.map((r) => ({
352
+ id: r.node.id,
353
+ content: r.node.content,
354
+ type: r.node.type,
355
+ timestamp: r.node.createdAt,
356
+ }));
357
+ },
358
+ async list(limit) {
359
+ const entries = memory.stm.getRecent(limit);
360
+ return entries.map((e) => ({
361
+ id: e.id,
362
+ content: e.content,
363
+ type: e.type,
364
+ timestamp: e.createdAt,
365
+ }));
366
+ },
367
+ async clear() {
368
+ await memory.clear();
369
+ },
370
+ };
371
+
372
+ const agent = new Agent({
373
+ name: "memory-agent",
374
+ systemPrompt: "You remember everything.",
375
+ provider: { name: "venice", apiKey: "...", baseUrl: "https://api.venice.ai/api/v1", defaultModel: "qwen3-4b" },
376
+ memory: memoryStore,
377
+ });
378
+ ```
379
+
380
+ ## API Reference
381
+
382
+ ### `AgenticMemory`
383
+
384
+ | Method | Description |
385
+ |--------|-------------|
386
+ | `add(content, type?, importance?, metadata?)` | Add to STM (and LTM if important) |
387
+ | `addEpisode(content, importance?)` | Add episodic memory with temporal linking |
388
+ | `addFact(content, entities?, importance?)` | Add semantic memory with entity extraction |
389
+ | `addObservation(content, importance?)` | Add an observation |
390
+ | `addGoal(content, importance?)` | Add a goal |
391
+ | `link(sourceId, targetId, relation, weight?)` | Create a relationship in LTM |
392
+ | `search(query, options?)` | Hybrid search across STM + LTM |
393
+ | `buildContext(query?)` | Build context string for prompt injection |
394
+ | `consolidate(minImportance?)` | Promote important STM entries to LTM |
395
+ | `decay()` | Apply decay to LTM nodes |
396
+ | `save()` | Persist to storage |
397
+ | `load()` | Load from storage |
398
+ | `clear()` | Clear all memory |
399
+ | `stats()` | Get memory statistics |
400
+
401
+ ### `SearchOptions`
402
+
403
+ | Field | Type | Default | Description |
404
+ |-------|------|---------|-------------|
405
+ | `limit` | `number` | `5` | Max results |
406
+ | `types` | `MemoryNodeType[]` | all | Filter by node type |
407
+ | `minScore` | `number` | `0.0` | Minimum relevance score |
408
+ | `includeRelated` | `boolean` | `false` | Include graph neighbors |
409
+ | `traversalDepth` | `number` | `1` | Graph walk depth |
410
+ | `method` | `"vector" \| "keyword" \| "hybrid"` | `"hybrid"` | Search method |
411
+
412
+ ## Contributing
413
+
414
+ We welcome contributions. This project is early and there is room to shape its direction.
415
+
416
+ - **Add a storage backend** — SQLite, Redis, PostgreSQL, S3
417
+ - **Add an embedding backend** — OpenAI, Cohere, local transformers
418
+ - **Improve search** — better scoring, re-ranking, query expansion
419
+ - **Visualization** — graph visualization tools for debugging memory
420
+ - **Report issues** — bug reports and feature requests help us prioritize
421
+
422
+ ## License
423
+
424
+ MIT — [agents.eco](https://agents.eco)
425
+
426
+ ---
427
+
428
+ <p align="center">
429
+ Built by <a href="https://agents.eco">agents.eco</a> — the decentralized AI agent economy.
430
+ </p>
@@ -0,0 +1,20 @@
1
import type { EmbeddingBackend } from "../types.js";
/**
 * Local embedding backend using a simple bag-of-words / hash-based approach.
 * No external API calls required. Good for offline use and development.
 *
 * Uses a deterministic hash to project words into a fixed-dimension vector space.
 * Not as accurate as neural embeddings, but fast and zero-dependency.
 */
export declare class LocalEmbedding implements EmbeddingBackend {
    /** Backend identifier reported to the memory system. */
    readonly name = "local";
    /** Output vector length; fixed at construction time (defaults to 256). */
    readonly dimension: number;
    /** @param dimension Length of the vectors to produce; defaults to 256. */
    constructor(dimension?: number);
    /** Embed one text into an L2-normalized vector of `dimension` numbers. */
    embed(text: string): Promise<number[]>;
    /** Embed many texts; purely local, so each is computed synchronously. */
    embedBatch(texts: string[]): Promise<number[][]>;
    private computeEmbedding;
    private tokenize;
    private hashWord;
    private hashInt;
}
//# sourceMappingURL=local.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"local.d.ts","sourceRoot":"","sources":["../../src/embeddings/local.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,aAAa,CAAC;AAEpD;;;;;;GAMG;AACH,qBAAa,cAAe,YAAW,gBAAgB;IACrD,QAAQ,CAAC,IAAI,WAAW;IACxB,QAAQ,CAAC,SAAS,EAAE,MAAM,CAAC;gBAEf,SAAS,GAAE,MAAY;IAI7B,KAAK,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC;IAItC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC;IAItD,OAAO,CAAC,gBAAgB;IA4CxB,OAAO,CAAC,QAAQ;IAQhB,OAAO,CAAC,QAAQ;IAShB,OAAO,CAAC,OAAO;CAMhB"}
@@ -0,0 +1,94 @@
1
/**
 * Local embedding backend using a simple bag-of-words / hash-based approach.
 * No external API calls required. Good for offline use and development.
 *
 * Uses a deterministic hash to project words into a fixed-dimension vector space.
 * Not as accurate as neural embeddings, but fast and zero-dependency.
 */
export class LocalEmbedding {
    name = "local";
    dimension;
    /** @param dimension Length of the vectors to produce (default 256). */
    constructor(dimension = 256) {
        this.dimension = dimension;
    }
    /** Embed a single text into an L2-normalized vector of `dimension` numbers. */
    async embed(text) {
        return this.computeEmbedding(text);
    }
    /** Embed several texts; local backend, so this is just a synchronous map. */
    async embedBatch(texts) {
        return texts.map((item) => this.computeEmbedding(item));
    }
    // Deterministically project `text` into R^dimension, then L2-normalize.
    // Returns the all-zero vector when no usable tokens remain.
    computeEmbedding(text) {
        const acc = new Float64Array(this.dimension);
        const tokens = this.tokenize(text);
        if (tokens.length === 0) {
            return Array.from(acc);
        }
        // Each token contributes to four pseudo-random coordinates with a
        // pseudo-random sign (a signed random projection), plus one extra
        // coordinate keyed on a salted copy of the token.
        for (const token of tokens) {
            const seed = this.hashWord(token);
            for (let round = 0; round < 4; round++) {
                const slot = Math.abs(this.hashInt(seed + round)) % this.dimension;
                const sign = this.hashInt(seed + round + 1000) % 2 === 0 ? 1 : -1;
                acc[slot] += sign * (1.0 / Math.sqrt(tokens.length));
            }
            const salted = this.hashWord(token + "_bg");
            acc[Math.abs(salted) % this.dimension] += 0.5 / Math.sqrt(tokens.length);
        }
        // Adjacent-token pair features capture a little word order.
        for (let pos = 0; pos + 1 < tokens.length; pos++) {
            const pairSeed = this.hashWord(tokens[pos] + ":" + tokens[pos + 1]);
            acc[Math.abs(pairSeed) % this.dimension] += 0.3 / Math.sqrt(tokens.length);
        }
        // L2-normalize so cosine similarity reduces to a dot product.
        const norm = Math.sqrt(acc.reduce((sum, x) => sum + x * x, 0));
        if (norm > 0) {
            for (let i = 0; i < this.dimension; i++) {
                acc[i] /= norm;
            }
        }
        return Array.from(acc);
    }
    // Lowercase, strip punctuation, split on whitespace, then drop
    // single-character tokens and common English stop words.
    tokenize(text) {
        const cleaned = text.toLowerCase().replace(/[^\w\s]/g, " ");
        return cleaned
            .split(/\s+/)
            .filter((tok) => tok.length > 1 && !STOP_WORDS.has(tok));
    }
    // 32-bit string hash: h = h * 31 + charCode, wrapped to int32 each step.
    hashWord(word) {
        let h = 0;
        for (let i = 0; i < word.length; i++) {
            h = (Math.imul(h, 31) + word.charCodeAt(i)) | 0;
        }
        return h;
    }
    // Integer scrambler (xor-shift-multiply mixing). NOTE: the products are
    // deliberately left un-truncated between steps (no |0) — output values
    // depend on the double-precision intermediate, so keep it that way.
    hashInt(value) {
        let v = value;
        v = ((v >> 16) ^ v) * 0x45d9f3b;
        v = ((v >> 16) ^ v) * 0x45d9f3b;
        v = (v >> 16) ^ v;
        return v;
    }
}
// Common English words excluded from tokenization; membership here must stay
// in sync with what the embeddings were built with — changing it changes vectors.
const STOP_WORDS = new Set([
    "the", "a", "an", "is", "are", "was", "were", "be", "been", "being",
    "have", "has", "had", "do", "does", "did", "will", "would", "could",
    "should", "may", "might", "shall", "can", "need", "dare", "ought",
    "used", "to", "of", "in", "for", "on", "with", "at", "by", "from",
    "as", "into", "through", "during", "before", "after", "above", "below",
    "between", "out", "off", "over", "under", "again", "further", "then",
    "once", "here", "there", "when", "where", "why", "how", "all", "both",
    "each", "few", "more", "most", "other", "some", "such", "no", "nor",
    "not", "only", "own", "same", "so", "than", "too", "very", "just",
    "because", "but", "and", "or", "if", "while", "that", "this", "it",
    "its", "my", "your", "his", "her", "our", "their", "what", "which",
    "who", "whom", "these", "those", "am", "about", "up",
]);
//# sourceMappingURL=local.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"local.js","sourceRoot":"","sources":["../../src/embeddings/local.ts"],"names":[],"mappings":"AAEA;;;;;;GAMG;AACH,MAAM,OAAO,cAAc;IAChB,IAAI,GAAG,OAAO,CAAC;IACf,SAAS,CAAS;IAE3B,YAAY,YAAoB,GAAG;QACjC,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;IAC7B,CAAC;IAED,KAAK,CAAC,KAAK,CAAC,IAAY;QACtB,OAAO,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC,CAAC;IACrC,CAAC;IAED,KAAK,CAAC,UAAU,CAAC,KAAe;QAC9B,OAAO,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,gBAAgB,CAAC,CAAC,CAAC,CAAC,CAAC;IACpD,CAAC;IAEO,gBAAgB,CAAC,IAAY;QACnC,MAAM,MAAM,GAAG,IAAI,YAAY,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;QAChD,MAAM,KAAK,GAAG,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAElC,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC;YAAE,OAAO,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAElD,uCAAuC;QACvC,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;YACzB,MAAM,IAAI,GAAG,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;YACjC,wDAAwD;YACxD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;gBAC3B,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,SAAS,CAAC;gBAC9D,MAAM,IAAI,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,GAAG,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;gBAChE,MAAM,CAAC,GAAG,CAAC,IAAI,IAAI,GAAG,CAAC,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC;YACxD,CAAC;YAED,kBAAkB;YAClB,MAAM,UAAU,GAAG,IAAI,CAAC,QAAQ,CAAC,IAAI,GAAG,KAAK,CAAC,CAAC;YAC/C,MAAM,SAAS,GAAG,IAAI,CAAC,GAAG,CAAC,UAAU,CAAC,GAAG,IAAI,CAAC,SAAS,CAAC;YACxD,MAAM,CAAC,SAAS,CAAC,IAAI,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC;QAED,4CAA4C;QAC5C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;YAC1C,MAAM,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,GAAG,GAAG,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YAC9D,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,IAAI,CAAC,SAAS,CAAC;YAChD,MAAM,CAAC,GAAG,CAAC,IAAI,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;QAC/C,CAAC;QAED,eAAe;QACf,IAAI,IAAI,GAAG,CAAC,CAAC;QACb,KAAK,IAAI,CAAC,GA
AG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,SAAS,EAAE,CAAC,EAAE,EAAE,CAAC;YACxC,IAAI,IAAI,MAAM,CAAC,CAAC,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;QAChC,CAAC;QACD,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QACvB,IAAI,IAAI,GAAG,CAAC,EAAE,CAAC;YACb,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,SAAS,EAAE,CAAC,EAAE,EAAE,CAAC;gBACxC,MAAM,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC;YACpB,CAAC;QACH,CAAC;QAED,OAAO,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;IAC5B,CAAC;IAEO,QAAQ,CAAC,IAAY;QAC3B,OAAO,IAAI;aACR,WAAW,EAAE;aACb,OAAO,CAAC,UAAU,EAAE,GAAG,CAAC;aACxB,KAAK,CAAC,KAAK,CAAC;aACZ,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;IACvD,CAAC;IAEO,QAAQ,CAAC,IAAY;QAC3B,IAAI,IAAI,GAAG,CAAC,CAAC;QACb,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;YACrC,MAAM,IAAI,GAAG,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;YAChC,IAAI,GAAG,CAAC,CAAC,IAAI,IAAI,CAAC,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC;QACzC,CAAC;QACD,OAAO,IAAI,CAAC;IACd,CAAC;IAEO,OAAO,CAAC,CAAS;QACvB,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,GAAG,CAAC,CAAC,GAAG,SAAS,CAAC;QAChC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,GAAG,CAAC,CAAC,GAAG,SAAS,CAAC;QAChC,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,GAAG,CAAC,CAAC;QAClB,OAAO,CAAC,CAAC;IACX,CAAC;CACF;AAED,MAAM,UAAU,GAAG,IAAI,GAAG,CAAC;IACzB,KAAK,EAAE,GAAG,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO;IACnE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO;IACnE,QAAQ,EAAE,KAAK,EAAE,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO;IACjE,MAAM,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,IAAI,EAAE,MAAM;IACjE,IAAI,EAAE,MAAM,EAAE,SAAS,EAAE,QAAQ,EAAE,QAAQ,EAAE,OAAO,EAAE,OAAO,EAAE,OAAO;IACtE,SAAS,EAAE,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,EAAE,SAAS,EAAE,MAAM;IACpE,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,MAAM;IACrE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,IAAI,EAAE,K
AAK;IACnE,KAAK,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM;IACjE,SAAS,EAAE,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,IAAI;IAClE,KAAK,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,OAAO,EAAE,MAAM,EAAE,OAAO;IAClE,KAAK,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,OAAO,EAAE,IAAI;CACrD,CAAC,CAAC"}
@@ -0,0 +1,25 @@
1
import type { EmbeddingBackend } from "../types.js";
/**
 * Voyage AI embedding backend.
 * Uses the Voyage API for high-quality neural embeddings.
 *
 * Models:
 * - voyage-3-lite (default): 512 dimensions, fast, cheap
 * - voyage-3: 1024 dimensions, higher quality
 * - voyage-3-large: 1024 dimensions, best quality
 * - voyage-code-3: optimized for code
 *
 * API docs: https://docs.voyageai.com/reference/embeddings-api
 */
export declare class VoyageEmbedding implements EmbeddingBackend {
    /** Backend identifier reported to the memory system. */
    readonly name = "voyage";
    /** Output vector length; presumably derived from the selected model — verify against implementation. */
    readonly dimension: number;
    /** Voyage API key used to authenticate requests. */
    private apiKey;
    /** Model name, e.g. "voyage-3-lite" (the default per the note above). */
    private model;
    /** API base URL; optional in the constructor — assumed to default to the official endpoint, TODO confirm. */
    private baseUrl;
    constructor(apiKey: string, model?: string, baseUrl?: string);
    /** Embed one text via the Voyage embeddings API. */
    embed(text: string): Promise<number[]>;
    /** Embed multiple texts; batching behavior lives in the implementation — NOTE(review): confirm it is one API call. */
    embedBatch(texts: string[]): Promise<number[][]>;
    private callApi;
}
//# sourceMappingURL=voyage.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"voyage.d.ts","sourceRoot":"","sources":["../../src/embeddings/voyage.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,aAAa,CAAC;AAEpD;;;;;;;;;;;GAWG;AACH,qBAAa,eAAgB,YAAW,gBAAgB;IACtD,QAAQ,CAAC,IAAI,YAAY;IACzB,QAAQ,CAAC,SAAS,EAAE,MAAM,CAAC;IAC3B,OAAO,CAAC,MAAM,CAAS;IACvB,OAAO,CAAC,KAAK,CAAS;IACtB,OAAO,CAAC,OAAO,CAAS;gBAEZ,MAAM,EAAE,MAAM,EAAE,KAAK,GAAE,MAAwB,EAAE,OAAO,CAAC,EAAE,MAAM;IAuBvE,KAAK,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC;IAKtC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC;YAcxC,OAAO;CA4BtB"}