@danielzfliu/memory 1.0.1 → 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,196 +1,362 @@
1
- [![npm version](https://img.shields.io/npm/v/@danielzfliu/memory.svg)](https://www.npmjs.com/package/@danielzfliu/memory)
2
-
3
- # Memory
4
- A fully local Node.js library and REST API for storing, searching, and querying tagged text pieces using ChromaDB for vector storage and Ollama for embeddings + generation.
5
-
6
- ## Prerequisites
7
-
8
- - **Node.js** ≥ 18
9
- - **Ollama** running locally ([install](https://ollama.com))
10
- - **ChromaDB** server running locally
11
-
12
- ### Start Ollama & pull models
13
- To pull models, run:
14
- ```bash
15
- ollama pull nomic-embed-text-v2-moe
16
- ollama pull llama3.2
17
- ```
18
-
19
- If used as api:
20
- ```bash
21
- npm run ollama # or
22
- npm run ollama:port 11435
23
- ```
24
-
25
- If used as a npm package:
26
- ```bash
27
- ollama serve
28
- ```
29
-
30
- ### Start ChromaDB
31
- If used as api:
32
- ```bash
33
- npm run db # or
34
- npm run db:port 9000
35
- ```
36
-
37
- If used as a npm package:
38
- ```bash
39
- chroma run --port 8000
40
- ```
41
-
42
- **Windows note:** If `chroma` is not recognized, the `Scripts` directory may not be on your PATH. Either add it (e.g. `%APPDATA%\Python\Python3xx\Scripts`) or run the executable directly:
43
- ```powershell
44
- & "$env:APPDATA\Python\Python313\Scripts\chroma.exe" run --port 8000
45
- ```
46
-
47
- ## Usage
48
-
49
- ### REST API Server
50
- ```bash
51
- npm install
52
- npm run dev
53
- ```
54
-
55
- Server starts on `http://localhost:3000` by default (set `PORT` env var to change).
56
-
57
- ### API Endpoints
58
-
59
- #### Add a piece
60
- ```bash
61
- curl -X POST http://localhost:3000/pieces \
62
- -H "Content-Type: application/json" \
63
- -d '{"content": "TypeScript is a typed superset of JavaScript.", "tags": ["typescript", "programming"]}'
64
- ```
65
-
66
- #### Get a piece by ID
67
- ```bash
68
- curl http://localhost:3000/pieces/<id>
69
- ```
70
-
71
- #### Update a piece
72
- ```bash
73
- curl -X PUT http://localhost:3000/pieces/<id> \
74
- -H "Content-Type: application/json" \
75
- -d '{"content": "Updated content.", "tags": ["new-tag"]}'
76
- ```
77
-
78
- #### Delete a piece
79
- ```bash
80
- curl -X DELETE http://localhost:3000/pieces/<id>
81
- ```
82
-
83
- #### Semantic search
84
- ```bash
85
- curl -X POST http://localhost:3000/query \
86
- -H "Content-Type: application/json" \
87
- -d '{"query": "What is TypeScript?", "topK": 5}'
88
- ```
89
-
90
- With tag filtering:
91
- ```bash
92
- curl -X POST http://localhost:3000/query \
93
- -H "Content-Type: application/json" \
94
- -d '{"query": "What is TypeScript?", "tags": ["programming"], "topK": 5}'
95
- ```
96
-
97
- #### RAG query (retrieve + generate)
98
- ```bash
99
- curl -X POST http://localhost:3000/rag \
100
- -H "Content-Type: application/json" \
101
- -d '{"query": "Explain TypeScript", "tags": ["programming"], "topK": 5}'
102
- ```
103
-
104
- Returns:
105
- ```json
106
- {
107
- "answer": "Generated answer based on retrieved context...",
108
- "sources": [
109
- {
110
- "piece": { "id": "...", "content": "...", "tags": ["..."] },
111
- "score": 0.87
112
- }
113
- ]
114
- }
115
- ```
116
-
117
- ### Programmatic Usage (Library)
118
-
119
- ```typescript
120
- import { PieceStore, RagPipeline, MemoryConfig } from "@danielzfliu/memory";
121
-
122
- async function main() {
123
- const config: MemoryConfig = {
124
- chromaUrl: "http://localhost:8000",
125
- ollamaUrl: "http://localhost:11434",
126
- embeddingModel: "nomic-embed-text-v2-moe",
127
- };
128
-
129
- const store = new PieceStore(config);
130
- await store.init();
131
-
132
- await store.addPiece("TypeScript is a typed superset of JavaScript.", [
133
- "typescript",
134
- "programming",
135
- ]);
136
- await store.addPiece("Python is great for data science.", [
137
- "python",
138
- "data-science",
139
- ]);
140
-
141
- const results = await store.queryPieces("typed languages", { topK: 5 });
142
- console.log("results", results);
143
-
144
- const filtered = await store.queryPieces("typed languages", {
145
- tags: ["typescript"],
146
- topK: 5,
147
- });
148
- console.log("filtered", filtered);
149
-
150
- const rag = new RagPipeline(store, "http://localhost:11434", "llama3.2");
151
- const answer = await rag.query("What is TypeScript?", {
152
- tags: ["programming"],
153
- });
154
- console.log("answer", answer);
155
- }
156
-
157
- main().catch((err) => {
158
- console.error(err);
159
- process.exit(1);
160
- });
161
- ```
162
-
163
- ## Configuration (`MemoryConfig`)
164
-
165
- | Option | Default | Description |
166
- |--------|---------|-------------|
167
- | `chromaUrl` | `http://localhost:8000` | ChromaDB server URL |
168
- | `ollamaUrl` | `http://localhost:11434` | Ollama server URL |
169
- | `embeddingModel` | `nomic-embed-text-v2-moe` | Ollama model for embeddings |
170
- | `generationModel` | `llama3.2` | Ollama model for RAG generation |
171
- | `collectionName` | `pieces` | ChromaDB collection name |
172
-
173
- ## Testing
174
-
175
- ```bash
176
- npm test # run all tests
177
- npm run test:watch # watch mode
178
- npm run test:coverage # with coverage report
179
- ```
180
-
181
- ## Project Structure
182
-
183
- ```
184
- src/
185
- ├── types.ts # Interfaces (MemoryConfig, Piece, QueryResult, etc.)
186
- ├── embeddings.ts # Ollama embedding client
187
- ├── store.ts # PieceStore — CRUD + semantic search + tag filtering
188
- ├── rag.ts # RAG pipeline — retrieve → prompt → generate
189
- ├── server.ts # Express REST API (app factory)
190
- ├── main.ts # Server entry point (starts listening)
191
- └── index.ts # Library entry point (public exports)
192
- tests/
193
- ├── helpers/ # Shared test fixtures (in-memory ChromaDB mock, etc.)
194
- ├── unit/ # Unit tests (embeddings, store, rag)
195
- └── integration/ # API integration tests (supertest)
1
+ [![npm version](https://img.shields.io/npm/v/@danielzfliu/memory.svg)](https://www.npmjs.com/package/@danielzfliu/memory)
2
+
3
+ # Memory
4
+
5
+ A fully local MCP server and Node.js library for storing, semantically searching, and querying tagged/titled text with ChromaDB (vector storage) and Ollama (embeddings and generation).
6
+
7
+ Three ways to use Memory:
8
+
9
+ - MCP Server — Run Memory as a Model Context Protocol server over stdio and expose memory tools to MCP-compatible clients.
10
+ - npm Package — Install `@danielzfliu/memory` in your own project and use the classes directly (store, embeddings, RAG, and MCP server class).
11
+ - REST API Server — Run the standalone HTTP server with CRUD, semantic search, and RAG endpoints.
12
+
13
+ ---
14
+
15
+ ## Prerequisites
16
+
17
+ - **Node.js** ≥ 18
18
+ - **Ollama** running locally
19
+ - **ChromaDB** server running locally
20
+
21
+ ### Setting up Ollama
22
+
23
+ Install Ollama ([install](https://ollama.com)) and pull the default models:
24
+
25
+ ```bash
26
+ ollama pull nomic-embed-text-v2-moe:latest
27
+ ollama pull gemma3:latest
28
+ ```
29
+
30
+ Then run:
31
+ ```bash
32
+ npm run ollama # start Ollama on default port 11434
33
+ npm run ollama:port -- 11435 # start Ollama on a custom port
34
+ ```
35
+
36
+ ### Setting up ChromaDB
37
+
38
+ **Option 1: Docker**
39
+
40
+ The repo includes a Docker Compose file that runs ChromaDB and stores its data in `./chroma/`.
41
+
42
+ ```bash
43
+ npm run docker:up # start ChromaDB on port 8000
44
+ npm run docker:logs # view logs
45
+ npm run docker:down # stop ChromaDB
46
+ ```
47
+
48
+ **Option 2: pip**
49
+
50
+ ```bash
51
+ pip install chromadb
52
+ chroma run --port 8000 # start ChromaDB on port 8000
53
+ ```
54
+
55
+ Note: You may need to add Python's Scripts folder to your PATH after installing.
56
+
57
+ ---
58
+
59
+ ## Option A: MCP Server
60
+
61
+ Use this option to run Memory as a standalone MCP server.
62
+
63
+ ### 1. Setup
64
+
65
+ ```bash
66
+ git clone https://github.com/DanielZFLiu/memory.git
67
+ cd memory
68
+ npm install
69
+ ```
70
+
71
+ ### 2. Build and run the MCP server
72
+
73
+ ```bash
74
+ npm run build
75
+ node ./dist/main.js
76
+ ```
77
+
78
+ Memory MCP communicates over stdio, so it does not bind an HTTP port.
79
+
80
+ ### MCP Client Configuration
81
+
82
+ #### Claude Desktop (example)
83
+
84
+ ```json
85
+ {
86
+ "mcpServers": {
87
+ "memory": {
88
+ "command": "npx",
89
+ "args": ["-y", "@danielzfliu/memory"]
90
+ }
91
+ }
92
+ }
93
+ ```
94
+
95
+ If you are running from a local clone instead of npm:
96
+
97
+ ```json
98
+ {
99
+ "mcpServers": {
100
+ "memory": {
101
+ "command": "node",
102
+ "args": ["c:/path/to/memory/dist/main.js"]
103
+ }
104
+ }
105
+ }
106
+ ```
107
+
108
+ ### MCP Tools
109
+
110
+ | Tool | Description |
111
+ |------|-------------|
112
+ | `add_piece` | Add a new piece with optional title and tags |
113
+ | `get_piece` | Retrieve a piece by id |
114
+ | `update_piece` | Update piece content, title, and/or tags (`title: null` clears title) |
115
+ | `delete_piece` | Delete a piece by id |
116
+ | `query_pieces` | Semantic search over content, plus title when present. Supports hybrid search (vector + keyword via RRF). |
117
+ | `rag_query` | Retrieve + generate answer with citations using content and title context. Supports hybrid search. |
118
+ | `list_collections` | List all collection names in the memory store |
119
+ | `delete_collection` | Delete an entire collection and all its pieces |
120
+
121
+ All piece-level tools accept an optional `collection` parameter to target a specific collection instead of the default. This allows multiple agents to use isolated memory stores.
122
+
123
+ ---
124
+
125
+ ## Option B: npm Package
126
+
127
+ Use this option to integrate Memory into your own Node.js/TypeScript project.
128
+
129
+ ### 1. Install
130
+
131
+ ```bash
132
+ npm install @danielzfliu/memory
133
+ ```
134
+
135
+ ### 2. Programmatic usage
136
+
137
+ #### Using PieceStore and RagPipeline directly
138
+
139
+ ```typescript
140
+ import { PieceStore, RagPipeline, MemoryConfig } from "@danielzfliu/memory";
141
+
142
+ async function main() {
143
+ const config: MemoryConfig = {
144
+ chromaUrl: "http://localhost:8000",
145
+ ollamaUrl: "http://localhost:11434",
146
+ embeddingModel: "nomic-embed-text-v2-moe:latest",
147
+ };
148
+
149
+ // Store: CRUD + semantic search
150
+ const store = new PieceStore(config);
151
+ await store.init();
152
+
153
+ await store.addPiece(
154
+ "TypeScript is a typed superset of JavaScript.",
155
+ ["typescript", "programming"],
156
+ "TypeScript overview",
157
+ );
158
+ await store.addPiece("Python is great for data science.", [
159
+ "python",
160
+ "data-science",
161
+ ]);
162
+
163
+ const results = await store.queryPieces("typed languages", { topK: 5 });
164
+ console.log("results", results);
165
+
166
+ const filtered = await store.queryPieces("typed languages", {
167
+ tags: ["typescript"],
168
+ topK: 5,
169
+ });
170
+ console.log("filtered", filtered);
171
+
172
+ // Hybrid search: combines vector similarity with keyword matching via RRF
173
+ const hybrid = await store.queryPieces("typed languages", {
174
+ topK: 5,
175
+ useHybridSearch: true,
176
+ });
177
+ console.log("hybrid", hybrid);
178
+
179
+ // RAG: retrieve relevant pieces → generate an answer via Ollama
180
+ const rag = new RagPipeline(store, config.ollamaUrl!, "gemma3:latest");
181
+ const answer = await rag.query("What is TypeScript?", {
182
+ tags: ["programming"],
183
+ });
184
+ console.log("answer", answer);
185
+ }
186
+
187
+ main().catch((err) => {
188
+ console.error(err);
189
+ process.exit(1);
190
+ });
191
+ ```
192
+
193
+ #### Embedding the REST API in your own Express app
194
+
195
+ `createServer` returns a configured Express app you can mount or extend:
196
+
197
+ ```typescript
198
+ import { createServer } from "@danielzfliu/memory";
199
+
200
+ const app = createServer({
201
+ chromaUrl: "http://localhost:8000",
202
+ ollamaUrl: "http://localhost:11434",
203
+ });
204
+
205
+ app.listen(4000, () => console.log("Running on :4000"));
206
+ ```
207
+
208
+ ---
209
+
210
+ ## Option C: REST API Server
211
+
212
+ Use this option to run Memory as a standalone HTTP service.
213
+
214
+ ### 1. Setup
215
+
216
+ ```bash
217
+ git clone https://github.com/DanielZFLiu/memory.git
218
+ cd memory
219
+ npm install
220
+ ```
221
+
222
+ ### 2. Start the REST server
223
+
224
+ ```bash
225
+ npm run dev:http
226
+ ```
227
+
228
+ Server starts on `http://localhost:3000` by default (set `PORT` env var to change).
229
+
230
+ ### API Endpoints
231
+
232
+ #### Add a piece
233
+ ```bash
234
+ curl -X POST http://localhost:3000/pieces \
235
+ -H "Content-Type: application/json" \
236
+ -d '{"title": "TypeScript overview", "content": "TypeScript is a typed superset of JavaScript.", "tags": ["typescript", "programming"]}'
237
+ ```
238
+
239
+ With a specific collection:
240
+ ```bash
241
+ curl -X POST http://localhost:3000/pieces \
242
+ -H "Content-Type: application/json" \
243
+ -d '{"content": "Agent-specific memory.", "tags": ["agent"], "collection": "agent-alice"}'
244
+ ```
245
+
246
+ #### Get a piece by ID
247
+ ```bash
248
+ curl http://localhost:3000/pieces/<id>
249
+ curl http://localhost:3000/pieces/<id>?collection=agent-alice
250
+ ```
251
+
252
+ #### Update a piece
253
+ ```bash
254
+ curl -X PUT http://localhost:3000/pieces/<id> \
255
+ -H "Content-Type: application/json" \
256
+ -d '{"title": "Updated title", "content": "Updated content.", "tags": ["new-tag"]}'
257
+ ```
258
+
259
+ Set `title` to `null` to clear it.
260
+
261
+ #### Delete a piece
262
+ ```bash
263
+ curl -X DELETE http://localhost:3000/pieces/<id>
264
+ curl -X DELETE http://localhost:3000/pieces/<id>?collection=agent-alice
265
+ ```
266
+
267
+ #### Semantic search
268
+ ```bash
269
+ curl -X POST http://localhost:3000/query \
270
+ -H "Content-Type: application/json" \
271
+ -d '{"query": "What is TypeScript?", "topK": 5}'
272
+ ```
273
+
274
+ With tag filtering:
275
+ ```bash
276
+ curl -X POST http://localhost:3000/query \
277
+ -H "Content-Type: application/json" \
278
+ -d '{"query": "What is TypeScript?", "tags": ["programming"], "topK": 5}'
279
+ ```
280
+
281
+ With hybrid search (vector + keyword via Reciprocal Rank Fusion):
282
+ ```bash
283
+ curl -X POST http://localhost:3000/query \
284
+ -H "Content-Type: application/json" \
285
+ -d '{"query": "What is TypeScript?", "topK": 5, "useHybridSearch": true}'
286
+ ```
287
+
288
+ #### RAG query (retrieve + generate)
289
+ ```bash
290
+ curl -X POST http://localhost:3000/rag \
291
+ -H "Content-Type: application/json" \
292
+ -d '{"query": "Explain TypeScript", "tags": ["programming"], "topK": 5}'
293
+ ```
294
+
295
+ Returns:
296
+ ```json
297
+ {
298
+ "answer": "Generated answer based on retrieved context...",
299
+ "sources": [
300
+ {
301
+ "piece": { "id": "...", "title": "...", "content": "...", "tags": ["..."] },
302
+ "score": 0.87
303
+ }
304
+ ]
305
+ }
306
+ ```
307
+
308
+ #### List collections
309
+ ```bash
310
+ curl http://localhost:3000/collections
311
+ ```
312
+
313
+ #### Delete a collection
314
+ ```bash
315
+ curl -X DELETE http://localhost:3000/collections/agent-alice
316
+ ```
317
+
318
+ > **Multi-collection:** All piece and query endpoints accept an optional `collection` parameter (in the request body for POST/PUT, as a query string for GET/DELETE) to target a specific collection. Omitting it uses the default collection.
319
+
320
+ ---
321
+
322
+ ## Exports
323
+
324
+ | Export | Description |
325
+ |--------|-------------|
326
+ | `PieceStore` | CRUD + semantic search over tagged text pieces |
327
+ | `RagPipeline` | Retrieve-then-generate pipeline using `PieceStore` + Ollama |
328
+ | `EmbeddingClient` | Low-level Ollama embedding wrapper |
329
+ | `MemoryMcpServer` | MCP server class (stdio transport) exposing memory tools |
330
+ | `createServer` | Express app factory with all REST endpoints pre-configured |
331
+ | `MemoryConfig` | Configuration interface (all fields optional with defaults) |
332
+ | `DEFAULT_MEMORY_CONFIG` | The default values for `MemoryConfig` |
333
+ | `Piece` | `{ id, content, title?, tags }` |
334
+ | `QueryOptions` | `{ tags?, topK?, useHybridSearch? }` |
335
+ | `QueryResult` | `{ piece, score }` |
336
+ | `RagResult` | `{ answer, sources }` |
337
+
338
+ ---
339
+
340
+ ## Configuration (`MemoryConfig`)
341
+
342
+ All fields are optional. Defaults are applied automatically.
343
+
344
+ | Option | Default | Description |
345
+ |--------|---------|-------------|
346
+ | `chromaUrl` | `http://localhost:8000` | ChromaDB server URL |
347
+ | `ollamaUrl` | `http://localhost:11434` | Ollama server URL |
348
+ | `embeddingModel` | `nomic-embed-text-v2-moe:latest` | Ollama model for embeddings |
349
+ | `generationModel` | `gemma3:latest` | Ollama model for RAG generation |
350
+ | `collectionName` | `pieces` | ChromaDB collection name |
351
+
352
+ > **Note:** `generationModel` is used by `createServer` and `MemoryMcpServer`. When constructing `RagPipeline` directly, you pass the model name to its constructor.
353
+
354
+ Environment variables can override these defaults at runtime — each option accepts a `MEMORY_`-prefixed name or a bare name (e.g. `MEMORY_CHROMA_URL` or `CHROMA_URL`, `MEMORY_OLLAMA_URL` or `OLLAMA_URL`, and likewise for `EMBEDDING_MODEL`, `GENERATION_MODEL`, and `COLLECTION_NAME`).
355
+
356
+ ## Testing
357
+
358
+ ```bash
359
+ npm test # run all tests
360
+ npm run test:watch # watch mode
361
+ npm run test:coverage # with coverage report
196
362
  ```
@@ -0,0 +1,4 @@
1
+ import { MemoryConfig } from "./types";
2
+ export declare const DEFAULT_MEMORY_CONFIG: Required<MemoryConfig>;
3
+ export declare function resolveConfig(config?: MemoryConfig): Required<MemoryConfig>;
4
+ //# sourceMappingURL=config.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../src/config.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,SAAS,CAAC;AAEvC,eAAO,MAAM,qBAAqB,EAAE,QAAQ,CAAC,YAAY,CAMxD,CAAC;AAqBF,wBAAgB,aAAa,CAAC,MAAM,GAAE,YAAiB,GAAG,QAAQ,CAAC,YAAY,CAAC,CAuB/E"}
package/dist/config.js ADDED
@@ -0,0 +1,47 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.DEFAULT_MEMORY_CONFIG = void 0;
4
+ exports.resolveConfig = resolveConfig;
5
+ exports.DEFAULT_MEMORY_CONFIG = {
6
+ chromaUrl: "http://localhost:8000",
7
+ ollamaUrl: "http://localhost:11434",
8
+ embeddingModel: "nomic-embed-text-v2-moe:latest",
9
+ generationModel: "gemma3:latest",
10
+ collectionName: "pieces",
11
+ };
12
+ const ENV_CONFIG_KEYS = {
13
+ chromaUrl: ["MEMORY_CHROMA_URL", "CHROMA_URL"],
14
+ ollamaUrl: ["MEMORY_OLLAMA_URL", "OLLAMA_URL"],
15
+ embeddingModel: ["MEMORY_EMBEDDING_MODEL", "EMBEDDING_MODEL"],
16
+ generationModel: ["MEMORY_GENERATION_MODEL", "GENERATION_MODEL"],
17
+ collectionName: ["MEMORY_COLLECTION_NAME", "COLLECTION_NAME"],
18
+ };
19
+ function resolveEnvOverride(keys) {
20
+ for (const key of keys) {
21
+ const value = process.env[key]?.trim();
22
+ if (value) {
23
+ return value;
24
+ }
25
+ }
26
+ return undefined;
27
+ }
28
+ function resolveConfig(config = {}) {
29
+ return {
30
+ chromaUrl: config.chromaUrl ??
31
+ resolveEnvOverride(ENV_CONFIG_KEYS.chromaUrl) ??
32
+ exports.DEFAULT_MEMORY_CONFIG.chromaUrl,
33
+ ollamaUrl: config.ollamaUrl ??
34
+ resolveEnvOverride(ENV_CONFIG_KEYS.ollamaUrl) ??
35
+ exports.DEFAULT_MEMORY_CONFIG.ollamaUrl,
36
+ embeddingModel: config.embeddingModel ??
37
+ resolveEnvOverride(ENV_CONFIG_KEYS.embeddingModel) ??
38
+ exports.DEFAULT_MEMORY_CONFIG.embeddingModel,
39
+ generationModel: config.generationModel ??
40
+ resolveEnvOverride(ENV_CONFIG_KEYS.generationModel) ??
41
+ exports.DEFAULT_MEMORY_CONFIG.generationModel,
42
+ collectionName: config.collectionName ??
43
+ resolveEnvOverride(ENV_CONFIG_KEYS.collectionName) ??
44
+ exports.DEFAULT_MEMORY_CONFIG.collectionName,
45
+ };
46
+ }
47
+ //# sourceMappingURL=config.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"config.js","sourceRoot":"","sources":["../src/config.ts"],"names":[],"mappings":";;;AA6BA,sCAuBC;AAlDY,QAAA,qBAAqB,GAA2B;IACzD,SAAS,EAAE,uBAAuB;IAClC,SAAS,EAAE,wBAAwB;IACnC,cAAc,EAAE,gCAAgC;IAChD,eAAe,EAAE,eAAe;IAChC,cAAc,EAAE,QAAQ;CAC3B,CAAC;AAEF,MAAM,eAAe,GAAmD;IACpE,SAAS,EAAE,CAAC,mBAAmB,EAAE,YAAY,CAAC;IAC9C,SAAS,EAAE,CAAC,mBAAmB,EAAE,YAAY,CAAC;IAC9C,cAAc,EAAE,CAAC,wBAAwB,EAAE,iBAAiB,CAAC;IAC7D,eAAe,EAAE,CAAC,yBAAyB,EAAE,kBAAkB,CAAC;IAChE,cAAc,EAAE,CAAC,wBAAwB,EAAE,iBAAiB,CAAC;CAChE,CAAC;AAEF,SAAS,kBAAkB,CAAC,IAAc;IACtC,KAAK,MAAM,GAAG,IAAI,IAAI,EAAE,CAAC;QACrB,MAAM,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,IAAI,EAAE,CAAC;QACvC,IAAI,KAAK,EAAE,CAAC;YACR,OAAO,KAAK,CAAC;QACjB,CAAC;IACL,CAAC;IAED,OAAO,SAAS,CAAC;AACrB,CAAC;AAED,SAAgB,aAAa,CAAC,SAAuB,EAAE;IACnD,OAAO;QACH,SAAS,EACL,MAAM,CAAC,SAAS;YAChB,kBAAkB,CAAC,eAAe,CAAC,SAAS,CAAC;YAC7C,6BAAqB,CAAC,SAAS;QACnC,SAAS,EACL,MAAM,CAAC,SAAS;YAChB,kBAAkB,CAAC,eAAe,CAAC,SAAS,CAAC;YAC7C,6BAAqB,CAAC,SAAS;QACnC,cAAc,EACV,MAAM,CAAC,cAAc;YACrB,kBAAkB,CAAC,eAAe,CAAC,cAAc,CAAC;YAClD,6BAAqB,CAAC,cAAc;QACxC,eAAe,EACX,MAAM,CAAC,eAAe;YACtB,kBAAkB,CAAC,eAAe,CAAC,eAAe,CAAC;YACnD,6BAAqB,CAAC,eAAe;QACzC,cAAc,EACV,MAAM,CAAC,cAAc;YACrB,kBAAkB,CAAC,eAAe,CAAC,cAAc,CAAC;YAClD,6BAAqB,CAAC,cAAc;KAC3C,CAAC;AACN,CAAC"}
@@ -0,0 +1,2 @@
1
+ export {};
2
+ //# sourceMappingURL=http-main.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"http-main.d.ts","sourceRoot":"","sources":["../src/http-main.ts"],"names":[],"mappings":""}
@@ -0,0 +1,9 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ const server_1 = require("./server");
4
+ const PORT = process.env.PORT ?? 3000;
5
+ const app = (0, server_1.createServer)();
6
+ app.listen(PORT, () => {
7
+ console.log(`Memory RAG server running on http://localhost:${PORT}`);
8
+ });
9
+ //# sourceMappingURL=http-main.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"http-main.js","sourceRoot":"","sources":["../src/http-main.ts"],"names":[],"mappings":";;AAAA,qCAAwC;AAExC,MAAM,IAAI,GAAG,OAAO,CAAC,GAAG,CAAC,IAAI,IAAI,IAAI,CAAC;AACtC,MAAM,GAAG,GAAG,IAAA,qBAAY,GAAE,CAAC;AAC3B,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,EAAE;IAClB,OAAO,CAAC,GAAG,CAAC,iDAAiD,IAAI,EAAE,CAAC,CAAC;AACzE,CAAC,CAAC,CAAC"}
@@ -0,0 +1,8 @@
1
+ export declare function tokenize(text: string): string[];
2
+ export declare function keywordScore(queryTokens: string[], docText: string): number;
3
+ export interface RankedItem<T> {
4
+ item: T;
5
+ score: number;
6
+ }
7
+ export declare function reciprocalRankFusion<T>(rankings: RankedItem<T>[][], idFn: (item: T) => string, k?: number): RankedItem<T>[];
8
+ //# sourceMappingURL=hybrid.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"hybrid.d.ts","sourceRoot":"","sources":["../src/hybrid.ts"],"names":[],"mappings":"AAUA,wBAAgB,QAAQ,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,EAAE,CAK/C;AAED,wBAAgB,YAAY,CAAC,WAAW,EAAE,MAAM,EAAE,EAAE,OAAO,EAAE,MAAM,GAAG,MAAM,CAQ3E;AAED,MAAM,WAAW,UAAU,CAAC,CAAC;IACzB,IAAI,EAAE,CAAC,CAAC;IACR,KAAK,EAAE,MAAM,CAAC;CACjB;AAED,wBAAgB,oBAAoB,CAAC,CAAC,EAClC,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,EAAE,EAAE,EAC3B,IAAI,EAAE,CAAC,IAAI,EAAE,CAAC,KAAK,MAAM,EACzB,CAAC,SAAK,GACP,UAAU,CAAC,CAAC,CAAC,EAAE,CAejB"}