@usewhisper/mcp-server 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +168 -0
- package/dist/server.js +1648 -0
- package/dist/server.js.map +1 -0
- package/package.json +52 -0
package/dist/server.js
ADDED
|
@@ -0,0 +1,1648 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
var __defProp = Object.defineProperty;
// Bundler (esbuild) helper: exposes every key of `all` on `target` as an
// enumerable lazy getter, so module exports resolve on first access.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
|
|
7
|
+
|
|
8
|
+
// src/mcp/server.ts
|
|
9
|
+
import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
|
|
10
|
+
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
|
|
11
|
+
import { z } from "zod";
|
|
12
|
+
|
|
13
|
+
// src/db/index.ts
|
|
14
|
+
import { drizzle } from "drizzle-orm/postgres-js";
|
|
15
|
+
import postgres from "postgres";
|
|
16
|
+
|
|
17
|
+
// src/db/schema.ts
|
|
18
|
+
// Registry of all schema objects (tables + enums), wired up with lazy getters
// via __export; passed to drizzle() below so the ORM knows the full schema.
var schema_exports = {};
__export(schema_exports, {
  apiKeys: () => apiKeys,
  chunkTypeEnum: () => chunkTypeEnum,
  chunks: () => chunks,
  connectorTypeEnum: () => connectorTypeEnum,
  conversations: () => conversations,
  documents: () => documents,
  entities: () => entities,
  memories: () => memories,
  memoryTypeEnum: () => memoryTypeEnum,
  messages: () => messages,
  organizations: () => organizations,
  projects: () => projects,
  queryCache: () => queryCache,
  relationTypeEnum: () => relationTypeEnum,
  relations: () => relations,
  sourceStatusEnum: () => sourceStatusEnum,
  sources: () => sources,
  usageEvents: () => usageEvents,
  webhooks: () => webhooks
});
|
|
40
|
+
import {
|
|
41
|
+
pgTable,
|
|
42
|
+
uuid,
|
|
43
|
+
text,
|
|
44
|
+
timestamp,
|
|
45
|
+
integer,
|
|
46
|
+
boolean,
|
|
47
|
+
jsonb,
|
|
48
|
+
vector,
|
|
49
|
+
index,
|
|
50
|
+
uniqueIndex,
|
|
51
|
+
pgEnum,
|
|
52
|
+
real,
|
|
53
|
+
serial
|
|
54
|
+
} from "drizzle-orm/pg-core";
|
|
55
|
+
// Postgres enum: the kind of upstream system a source pulls content from.
var connectorTypeEnum = pgEnum("connector_type", [
  "github",
  "gitlab",
  "url",
  "sitemap",
  "text",
  "pdf",
  "api_spec",
  "database",
  "confluence",
  "notion",
  "slack",
  "discord",
  "arxiv",
  "huggingface",
  "npm_package",
  "pypi_package",
  "custom"
]);
// Postgres enum: lifecycle state of a source's indexing/sync pipeline.
var sourceStatusEnum = pgEnum("source_status", [
  "pending",
  "indexing",
  "ready",
  "failed",
  "stale",
  "syncing"
]);
// Postgres enum: semantic category of an indexed chunk.
var chunkTypeEnum = pgEnum("chunk_type", [
  "code",
  "function",
  "class",
  "documentation",
  "api_spec",
  "schema",
  "config",
  "text",
  "comment",
  "readme",
  "research",
  "conversation",
  "dataset"
]);
// Postgres enum: category of a stored memory.
var memoryTypeEnum = pgEnum("memory_type", [
  "factual",
  // structured facts: "user prefers TypeScript"
  "episodic",
  // past interaction summaries
  "semantic",
  // general knowledge
  "procedural"
  // how-to knowledge
]);
// Postgres enum: edge type in the entity knowledge graph.
var relationTypeEnum = pgEnum("relation_type", [
  "imports",
  "exports",
  "calls",
  "implements",
  "extends",
  "references",
  "depends_on",
  "related_to",
  "part_of",
  "contradicts",
  "supersedes"
]);
|
|
120
|
+
// Tenant root: one row per customer organization.
var organizations = pgTable(
  "organizations",
  {
    id: uuid("id").primaryKey().defaultRandom(),
    whisperOrgId: text("whisper_org_id").unique(),
    // link to main Whisper org
    name: text("name").notNull(),
    plan: text("plan").default("free").notNull(),
    // free, pro, enterprise
    settings: jsonb("settings").$type().default({}),
    // Per-plan quotas enforced at the application layer.
    usageLimits: jsonb("usage_limits").$type().default({
      queriesPerDay: 1e3,
      documentsTotal: 1e4,
      sourcesTotal: 50,
      storageBytes: 1073741824
      // 1GB
    }),
    createdAt: timestamp("created_at").defaultNow().notNull(),
    updatedAt: timestamp("updated_at").defaultNow().notNull()
  },
  (t) => [
    uniqueIndex("orgs_whisper_org_idx").on(t.whisperOrgId)
  ]
);
// API credentials: only a hash of the key is stored; the prefix allows
// fast lookup/identification without revealing the secret.
var apiKeys = pgTable(
  "api_keys",
  {
    id: uuid("id").primaryKey().defaultRandom(),
    orgId: uuid("org_id").notNull().references(() => organizations.id, { onDelete: "cascade" }),
    name: text("name").notNull(),
    keyHash: text("key_hash").notNull(),
    keyPrefix: text("key_prefix").notNull(),
    // "wctx_xxxx" for identification
    scopes: jsonb("scopes").$type().default(["read", "write"]),
    rateLimit: integer("rate_limit").default(100),
    // requests per minute
    lastUsedAt: timestamp("last_used_at"),
    expiresAt: timestamp("expires_at"),
    isActive: boolean("is_active").default(true).notNull(),
    createdAt: timestamp("created_at").defaultNow().notNull()
  },
  (t) => [
    uniqueIndex("api_keys_hash_idx").on(t.keyHash),
    index("api_keys_org_idx").on(t.orgId),
    index("api_keys_prefix_idx").on(t.keyPrefix)
  ]
);
// Workspace within an org; slug is unique per org (see unique index below).
var projects = pgTable(
  "projects",
  {
    id: uuid("id").primaryKey().defaultRandom(),
    orgId: uuid("org_id").notNull().references(() => organizations.id, { onDelete: "cascade" }),
    name: text("name").notNull(),
    slug: text("slug").notNull(),
    description: text("description"),
    // Retrieval defaults; these values mirror the hard-coded 1536-dim
    // embedding columns declared on the tables below.
    settings: jsonb("settings").$type().default({
      embeddingModel: "text-embedding-3-small",
      embeddingDimensions: 1536,
      chunkSize: 1e3,
      chunkOverlap: 200,
      defaultTopK: 10,
      defaultThreshold: 0.3
    }),
    isPublic: boolean("is_public").default(false).notNull(),
    createdAt: timestamp("created_at").defaultNow().notNull(),
    updatedAt: timestamp("updated_at").defaultNow().notNull()
  },
  (t) => [
    uniqueIndex("projects_org_slug_idx").on(t.orgId, t.slug),
    index("projects_org_idx").on(t.orgId),
    index("projects_public_idx").on(t.isPublic)
  ]
);
|
|
193
|
+
// A configured connector instance (e.g. one GitHub repo) plus its sync state.
var sources = pgTable(
  "sources",
  {
    id: uuid("id").primaryKey().defaultRandom(),
    projectId: uuid("project_id").notNull().references(() => projects.id, { onDelete: "cascade" }),
    connectorType: connectorTypeEnum("connector_type").notNull(),
    name: text("name").notNull(),
    config: jsonb("config").$type().default({}),
    status: sourceStatusEnum("status").default("pending").notNull(),
    syncSchedule: text("sync_schedule"),
    // cron expression for auto-sync
    lastSyncAt: timestamp("last_sync_at"),
    lastSyncDurationMs: integer("last_sync_duration_ms"),
    syncError: text("sync_error"),
    // Denormalized counters maintained by the indexing pipeline.
    documentCount: integer("document_count").default(0),
    chunkCount: integer("chunk_count").default(0),
    contentHash: text("content_hash"),
    // for change detection
    metadata: jsonb("metadata").$type().default({}),
    createdAt: timestamp("created_at").defaultNow().notNull(),
    updatedAt: timestamp("updated_at").defaultNow().notNull()
  },
  (t) => [
    index("sources_project_idx").on(t.projectId),
    index("sources_status_idx").on(t.status),
    index("sources_type_idx").on(t.connectorType)
  ]
);
// One ingested item from a source (file, page, etc.); unique per
// (source, externalId) so re-syncs upsert instead of duplicating.
var documents = pgTable(
  "documents",
  {
    id: uuid("id").primaryKey().defaultRandom(),
    sourceId: uuid("source_id").notNull().references(() => sources.id, { onDelete: "cascade" }),
    projectId: uuid("project_id").notNull().references(() => projects.id, { onDelete: "cascade" }),
    externalId: text("external_id"),
    title: text("title"),
    content: text("content"),
    contentHash: text("content_hash"),
    url: text("url"),
    language: text("language"),
    // programming language or natural language
    metadata: jsonb("metadata").$type().default({}),
    tokenCount: integer("token_count").default(0),
    isActive: boolean("is_active").default(true).notNull(),
    lastIndexedAt: timestamp("last_indexed_at"),
    createdAt: timestamp("created_at").defaultNow().notNull(),
    updatedAt: timestamp("updated_at").defaultNow().notNull()
  },
  (t) => [
    index("documents_source_idx").on(t.sourceId),
    index("documents_project_idx").on(t.projectId),
    uniqueIndex("documents_source_external_idx").on(t.sourceId, t.externalId),
    index("documents_hash_idx").on(t.contentHash),
    index("documents_active_idx").on(t.projectId, t.isActive)
  ]
);
// Retrieval unit: a slice of a document with its pgvector embedding.
var chunks = pgTable(
  "chunks",
  {
    id: uuid("id").primaryKey().defaultRandom(),
    documentId: uuid("document_id").notNull().references(() => documents.id, { onDelete: "cascade" }),
    projectId: uuid("project_id").notNull().references(() => projects.id, { onDelete: "cascade" }),
    content: text("content").notNull(),
    chunkType: chunkTypeEnum("chunk_type").default("text").notNull(),
    chunkIndex: integer("chunk_index").default(0),
    // Search
    embedding: vector("embedding", { dimensions: 1536 }),
    // Full-text search column (populated via trigger or app)
    searchContent: text("search_content"),
    // stripped/normalized for BM25
    // Metadata
    metadata: jsonb("metadata").$type().default({}),
    tokenCount: integer("token_count").default(0),
    createdAt: timestamp("created_at").defaultNow().notNull()
  },
  (t) => [
    index("chunks_document_idx").on(t.documentId),
    index("chunks_project_idx").on(t.projectId),
    index("chunks_type_idx").on(t.chunkType),
    index("chunks_project_type_idx").on(t.projectId, t.chunkType)
  ]
);
|
|
275
|
+
// Knowledge-graph node extracted from chunks; unique per
// (project, name, entityType).
var entities = pgTable(
  "entities",
  {
    id: uuid("id").primaryKey().defaultRandom(),
    projectId: uuid("project_id").notNull().references(() => projects.id, { onDelete: "cascade" }),
    name: text("name").notNull(),
    entityType: text("entity_type").notNull(),
    // function, class, module, concept, api_endpoint, etc.
    description: text("description"),
    metadata: jsonb("metadata").$type().default({}),
    // Chunk this entity was extracted from; survives chunk deletion.
    sourceChunkId: uuid("source_chunk_id").references(() => chunks.id, { onDelete: "set null" }),
    embedding: vector("embedding", { dimensions: 1536 }),
    createdAt: timestamp("created_at").defaultNow().notNull(),
    updatedAt: timestamp("updated_at").defaultNow().notNull()
  },
  (t) => [
    index("entities_project_idx").on(t.projectId),
    index("entities_type_idx").on(t.projectId, t.entityType),
    uniqueIndex("entities_project_name_type_idx").on(t.projectId, t.name, t.entityType)
  ]
);
// Weighted, typed edge between two entities; at most one edge per
// (from, to, type) triple.
var relations = pgTable(
  "relations",
  {
    id: uuid("id").primaryKey().defaultRandom(),
    projectId: uuid("project_id").notNull().references(() => projects.id, { onDelete: "cascade" }),
    fromEntityId: uuid("from_entity_id").notNull().references(() => entities.id, { onDelete: "cascade" }),
    toEntityId: uuid("to_entity_id").notNull().references(() => entities.id, { onDelete: "cascade" }),
    relationType: relationTypeEnum("relation_type").notNull(),
    weight: real("weight").default(1),
    metadata: jsonb("metadata").$type().default({}),
    createdAt: timestamp("created_at").defaultNow().notNull()
  },
  (t) => [
    index("relations_project_idx").on(t.projectId),
    index("relations_from_idx").on(t.fromEntityId),
    index("relations_to_idx").on(t.toEntityId),
    uniqueIndex("relations_unique_idx").on(t.fromEntityId, t.toEntityId, t.relationType)
  ]
);
|
|
315
|
+
// Long-lived agent memory, optionally scoped to a user, session, or agent,
// with importance/recency fields for ranking and an optional TTL.
var memories = pgTable(
  "memories",
  {
    id: uuid("id").primaryKey().defaultRandom(),
    projectId: uuid("project_id").notNull().references(() => projects.id, { onDelete: "cascade" }),
    // Scoping
    userId: text("user_id"),
    // user-scoped memory
    sessionId: text("session_id"),
    // session-scoped memory
    agentId: text("agent_id"),
    // agent-scoped memory
    // Memory content
    memoryType: memoryTypeEnum("memory_type").default("factual").notNull(),
    content: text("content").notNull(),
    summary: text("summary"),
    // LLM-generated summary
    embedding: vector("embedding", { dimensions: 1536 }),
    // Management
    importance: real("importance").default(0.5),
    // 0-1 how important is this
    accessCount: integer("access_count").default(0),
    lastAccessedAt: timestamp("last_accessed_at"),
    expiresAt: timestamp("expires_at"),
    // optional TTL
    metadata: jsonb("metadata").$type().default({}),
    isActive: boolean("is_active").default(true).notNull(),
    createdAt: timestamp("created_at").defaultNow().notNull(),
    updatedAt: timestamp("updated_at").defaultNow().notNull()
  },
  (t) => [
    index("memories_project_idx").on(t.projectId),
    index("memories_user_idx").on(t.projectId, t.userId),
    index("memories_session_idx").on(t.projectId, t.sessionId),
    index("memories_agent_idx").on(t.projectId, t.agentId),
    index("memories_type_idx").on(t.projectId, t.memoryType),
    index("memories_active_idx").on(t.projectId, t.isActive),
    index("memories_expires_idx").on(t.expiresAt)
  ]
);
// A conversation thread; summary + embedding allow semantic recall of
// past conversations.
var conversations = pgTable(
  "conversations",
  {
    id: uuid("id").primaryKey().defaultRandom(),
    projectId: uuid("project_id").notNull().references(() => projects.id, { onDelete: "cascade" }),
    sessionId: text("session_id"),
    userId: text("user_id"),
    agentId: text("agent_id"),
    title: text("title"),
    summary: text("summary"),
    embedding: vector("embedding", { dimensions: 1536 }),
    metadata: jsonb("metadata").$type().default({}),
    messageCount: integer("message_count").default(0),
    createdAt: timestamp("created_at").defaultNow().notNull(),
    updatedAt: timestamp("updated_at").defaultNow().notNull()
  },
  (t) => [
    index("conversations_project_idx").on(t.projectId),
    index("conversations_session_idx").on(t.sessionId),
    index("conversations_user_idx").on(t.userId)
  ]
);
// Individual turn within a conversation.
var messages = pgTable(
  "messages",
  {
    id: uuid("id").primaryKey().defaultRandom(),
    conversationId: uuid("conversation_id").notNull().references(() => conversations.id, { onDelete: "cascade" }),
    role: text("role").notNull(),
    // user, assistant, system, tool
    content: text("content").notNull(),
    metadata: jsonb("metadata").$type().default({}),
    createdAt: timestamp("created_at").defaultNow().notNull()
  },
  (t) => [
    index("messages_conversation_idx").on(t.conversationId),
    index("messages_created_idx").on(t.conversationId, t.createdAt)
  ]
);
|
|
393
|
+
// Outbound webhook subscription per org; `secret` signs deliveries.
var webhooks = pgTable(
  "webhooks",
  {
    id: uuid("id").primaryKey().defaultRandom(),
    orgId: uuid("org_id").notNull().references(() => organizations.id, { onDelete: "cascade" }),
    url: text("url").notNull(),
    secret: text("secret").notNull(),
    events: jsonb("events").$type().default([
      "source.synced",
      "document.indexed",
      "memory.created"
    ]),
    isActive: boolean("is_active").default(true).notNull(),
    lastDeliveredAt: timestamp("last_delivered_at"),
    failureCount: integer("failure_count").default(0),
    createdAt: timestamp("created_at").defaultNow().notNull()
  },
  (t) => [
    index("webhooks_org_idx").on(t.orgId)
  ]
);
// Append-only metering log (serial PK; projectId intentionally not a FK so
// events survive project deletion).
var usageEvents = pgTable(
  "usage_events",
  {
    id: serial("id").primaryKey(),
    orgId: uuid("org_id").notNull().references(() => organizations.id, { onDelete: "cascade" }),
    projectId: uuid("project_id"),
    eventType: text("event_type").notNull(),
    // query, ingest, sync, memory_add, memory_search
    source: text("source").default("api"),
    // api, mcp, sdk, webhook
    tokensUsed: integer("tokens_used").default(0),
    embeddingTokens: integer("embedding_tokens").default(0),
    latencyMs: integer("latency_ms"),
    metadata: jsonb("metadata").$type().default({}),
    createdAt: timestamp("created_at").defaultNow().notNull()
  },
  (t) => [
    index("usage_org_idx").on(t.orgId),
    index("usage_project_idx").on(t.projectId),
    index("usage_type_idx").on(t.eventType),
    index("usage_created_idx").on(t.createdAt),
    index("usage_org_created_idx").on(t.orgId, t.createdAt)
  ]
);
// TTL'd cache of retrieval results keyed by (project, query hash).
var queryCache = pgTable(
  "query_cache",
  {
    id: uuid("id").primaryKey().defaultRandom(),
    projectId: uuid("project_id").notNull().references(() => projects.id, { onDelete: "cascade" }),
    queryHash: text("query_hash").notNull(),
    query: text("query").notNull(),
    results: jsonb("results").$type().default([]),
    hitCount: integer("hit_count").default(0),
    expiresAt: timestamp("expires_at").notNull(),
    createdAt: timestamp("created_at").defaultNow().notNull()
  },
  (t) => [
    uniqueIndex("cache_project_query_idx").on(t.projectId, t.queryHash),
    index("cache_expires_idx").on(t.expiresAt)
  ]
);
|
|
455
|
+
|
|
456
|
+
// src/db/index.ts
|
|
457
|
+
// Shared Postgres connection (pool of up to 10) and drizzle handle.
// NOTE(review): no guard when DATABASE_URL is unset — postgres() presumably
// falls back to PG* env vars / local defaults in that case; confirm, or
// fail fast with a clear error at startup.
var connectionString = process.env.DATABASE_URL;
var client = postgres(connectionString, { max: 10 });
var db = drizzle(client, { schema: schema_exports });
|
|
460
|
+
|
|
461
|
+
// src/mcp/server.ts
|
|
462
|
+
import { eq as eq3, and as and2, sql as sql2 } from "drizzle-orm";
|
|
463
|
+
|
|
464
|
+
// src/engine/retriever.ts
|
|
465
|
+
import { eq, sql, and, inArray, or, isNull, gt } from "drizzle-orm";
|
|
466
|
+
|
|
467
|
+
// src/engine/embeddings.ts
|
|
468
|
+
import OpenAI from "openai";
|
|
469
|
+
// OpenAI client used only for embeddings (see embed/embedSingle below).
var openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
|
|
470
|
+
/**
 * Embed a batch of texts with OpenAI's text-embedding-3-small model.
 * @param {string[]} texts - Inputs to embed.
 * @returns {Promise<number[][]>} One 1536-dim vector per input, same order.
 */
async function embed(texts) {
  const response = await openai.embeddings.create({
    model: "text-embedding-3-small",
    input: texts,
    dimensions: 1536
  });
  return response.data.map(({ embedding }) => embedding);
}
|
|
478
|
+
/**
 * Embed one text; thin convenience wrapper over embed().
 * @param {string} text2 - Input to embed.
 * @returns {Promise<number[]>} A single 1536-dim embedding vector.
 */
async function embedSingle(text2) {
  const vectors = await embed([text2]);
  return vectors[0];
}
|
|
482
|
+
|
|
483
|
+
// src/engine/compressor.ts
|
|
484
|
+
import OpenAI2 from "openai";
|
|
485
|
+
import { createHash } from "crypto";
|
|
486
|
+
// OpenAI client used by the compressor for extract/summarize calls.
var openai2 = new OpenAI2({ apiKey: process.env.OPENAI_API_KEY });
// In-memory cache of recently-seen full contexts keyed by their hash,
// so deltaCompress can diff a new context against the previous one.
var deltaCache = /* @__PURE__ */ new Map();
// Delta-cache entry lifetime: 6e5 ms = 10 minutes.
var DELTA_CACHE_TTL = 6e5;
|
|
489
|
+
/**
 * Short fingerprint of a context string: the first 16 hex chars
 * (64 bits) of its SHA-256 digest.
 * @param {string} text2 - Context to fingerprint.
 * @returns {string} 16-character lowercase hex string.
 */
function hashContext(text2) {
  const digest = createHash("sha256").update(text2).digest("hex");
  return digest.substring(0, 16);
}
|
|
492
|
+
/**
 * Compress a retrieved context string down to a token budget.
 * Dispatches to one of the strategies below; returns the context unchanged
 * (strategy "none") when it already fits.
 * @param {string} rawContext - The packed context to compress.
 * @param {object} [opts] - { maxTokens, strategy, previousContextHash,
 *   previousContext, targetReduction }.
 * @returns {Promise<object>} { context, originalTokens, compressedTokens,
 *   reductionPercent, strategy }.
 */
async function compressContext(rawContext, opts = {}) {
  const {
    maxTokens = 4e3,
    strategy = "adaptive",
    previousContextHash,
    previousContext,
    targetReduction = 0.5
    // eslint-disable-next-line no-unused-vars -- accepted but currently unused
  } = opts;
  const originalTokens = estimateTokens(rawContext);
  // Fast path: nothing to do when the context already fits the budget.
  if (originalTokens <= maxTokens) {
    return {
      context: rawContext,
      originalTokens,
      compressedTokens: originalTokens,
      reductionPercent: 0,
      strategy: "none"
    };
  }
  if (strategy === "delta") {
    return deltaCompress(rawContext, originalTokens, maxTokens, previousContextHash, previousContext);
  }
  if (strategy === "summarize") {
    return summarizeCompress(rawContext, originalTokens, maxTokens);
  }
  if (strategy === "extract") {
    return extractCompress(rawContext, originalTokens, maxTokens);
  }
  // "adaptive" and any unrecognized strategy.
  return adaptiveCompress(rawContext, originalTokens, maxTokens, previousContextHash, previousContext);
}
|
|
522
|
+
/**
 * Pick a compression strategy automatically: try a delta against the prior
 * context first (cheapest), then extract for mild overruns (<2x budget),
 * otherwise per-block summarization.
 */
async function adaptiveCompress(rawContext, originalTokens, maxTokens, previousHash, previousCtx) {
  // A delta is only worth attempting when we know something about the
  // previous context; use it if it lands within budget.
  if (previousHash || previousCtx) {
    const delta = await deltaCompress(rawContext, originalTokens, maxTokens, previousHash, previousCtx);
    if (delta.compressedTokens <= maxTokens) {
      return delta;
    }
  }
  const overBudgetRatio = originalTokens / maxTokens;
  return overBudgetRatio < 2
    ? extractCompress(rawContext, originalTokens, maxTokens)
    : summarizeCompress(rawContext, originalTokens, maxTokens);
}
|
|
533
|
+
/**
 * Delta compression: compare the current context against the previous one
 * (block-by-block, split on "\n---\n") and emit only new/changed blocks.
 * Falls back to extractCompress when no previous context is available.
 * The full raw context is cached by hash so the NEXT call can diff against it.
 */
async function deltaCompress(rawContext, originalTokens, maxTokens, previousHash, previousCtx) {
  const currentHash = hashContext(rawContext);
  // Identical context: emit a tiny sentinel instead of re-sending anything.
  if (previousHash && previousHash === currentHash) {
    return {
      context: "[No changes since last context]",
      originalTokens,
      compressedTokens: 8,
      reductionPercent: 99,
      strategy: "delta-identical"
    };
  }
  // Recover the previous context: prefer the caller-supplied text, else
  // look it up in the TTL-bounded in-memory cache by hash.
  let prevCtx = previousCtx;
  if (!prevCtx && previousHash) {
    const cached = deltaCache.get(previousHash);
    if (cached && Date.now() - cached.timestamp < DELTA_CACHE_TTL) {
      prevCtx = cached.context;
    }
  }
  // No baseline to diff against — delta is impossible; extract instead.
  if (!prevCtx) {
    return extractCompress(rawContext, originalTokens, maxTokens);
  }
  // Diff at block granularity; blocks are trimmed so whitespace-only
  // differences don't count as changes.
  const prevBlocks = new Set(prevCtx.split("\n---\n").map((b) => b.trim()));
  const currentBlocks = rawContext.split("\n---\n").map((b) => b.trim());
  const newBlocks = [];
  const unchangedCount = { count: 0 };
  for (const block of currentBlocks) {
    if (prevBlocks.has(block)) {
      unchangedCount.count++;
    } else {
      newBlocks.push(block);
    }
  }
  let deltaContext;
  if (newBlocks.length === 0) {
    deltaContext = "[No new information since last query]";
  } else {
    const header = `[${unchangedCount.count} unchanged results omitted, ${newBlocks.length} new/updated]

`;
    deltaContext = header + newBlocks.join("\n---\n");
  }
  const deltaTokens = estimateTokens(deltaContext);
  // Even the delta can overflow the budget; truncate it if so. The FULL raw
  // context (not the delta) is what gets cached for the next diff.
  if (deltaTokens > maxTokens) {
    const truncated = truncateToTokens(deltaContext, maxTokens);
    deltaCache.set(currentHash, { context: rawContext, hash: currentHash, timestamp: Date.now() });
    return {
      context: truncated,
      originalTokens,
      compressedTokens: estimateTokens(truncated),
      reductionPercent: Math.round((1 - estimateTokens(truncated) / originalTokens) * 100),
      strategy: "delta-truncated"
    };
  }
  deltaCache.set(currentHash, { context: rawContext, hash: currentHash, timestamp: Date.now() });
  return {
    context: deltaContext,
    originalTokens,
    compressedTokens: estimateTokens(deltaContext),
    reductionPercent: Math.round((1 - estimateTokens(deltaContext) / originalTokens) * 100),
    strategy: "delta"
  };
}
|
|
595
|
+
/**
 * LLM-based extraction: ask a small model to keep only the highest-value
 * content within the token budget. On any API failure, degrades to a plain
 * character-budget truncation ("truncate-fallback").
 */
async function extractCompress(rawContext, originalTokens, maxTokens) {
  // Shared shape for the result object, so both paths stay consistent.
  const asResult = (context, strategy) => {
    const compressedTokens = estimateTokens(context);
    return {
      context,
      originalTokens,
      compressedTokens,
      reductionPercent: Math.round((1 - compressedTokens / originalTokens) * 100),
      strategy
    };
  };
  try {
    const completion = await openai2.chat.completions.create({
      model: "gpt-4.1-nano",
      messages: [
        {
          role: "system",
          content: `You are a context compressor. Extract and preserve ONLY the most important information from the provided context. Remove redundancy, boilerplate, and low-value content. Keep code snippets, key facts, API signatures, and important relationships. Output should be ${maxTokens} tokens or less. Do NOT add commentary \u2014 just output the compressed context.`
        },
        { role: "user", content: rawContext }
      ],
      max_tokens: maxTokens,
      temperature: 0
    });
    // An empty/missing completion falls back to the original text untouched.
    const compressed = completion.choices[0]?.message?.content?.trim() || rawContext;
    return asResult(compressed, "extract");
  } catch {
    return asResult(truncateToTokens(rawContext, maxTokens), "truncate-fallback");
  }
}
|
|
629
|
+
/**
 * Per-block summarization: split the context on "\n---\n", give each block
 * an equal share of the token budget, and summarize (in parallel) only the
 * blocks that exceed their share. Small inputs (<=3 blocks) defer to
 * extractCompress; any API failure degrades to plain truncation.
 * NOTE(review): blocks are split on "\n---\n" but re-joined with
 * "\n\n---\n\n" — confirm this asymmetry is intentional if the output is
 * ever fed back through deltaCompress.
 */
async function summarizeCompress(rawContext, originalTokens, maxTokens) {
  const blocks = rawContext.split("\n---\n").filter((b) => b.trim());
  if (blocks.length <= 3) {
    return extractCompress(rawContext, originalTokens, maxTokens);
  }
  // Equal token share per block (floor), used both as the summarizer's
  // target length and as max_tokens for the completion.
  const budgetPerBlock = Math.floor(maxTokens / blocks.length);
  try {
    const summaries = await Promise.all(
      blocks.map(async (block) => {
        // Blocks already within their share pass through unsummarized.
        if (estimateTokens(block) <= budgetPerBlock) return block;
        const res = await openai2.chat.completions.create({
          model: "gpt-4.1-nano",
          messages: [
            {
              role: "system",
              content: `Summarize this context block in ${budgetPerBlock} tokens or less. Preserve code signatures, key facts, and important details. Output only the summary.`
            },
            { role: "user", content: block }
          ],
          max_tokens: budgetPerBlock,
          temperature: 0
        });
        // Empty completion: hard-truncate the block by the ~4 chars/token rule.
        return res.choices[0]?.message?.content?.trim() || block.slice(0, budgetPerBlock * 4);
      })
    );
    const compressed = summaries.join("\n\n---\n\n");
    let compressedTokens = estimateTokens(compressed);
    let finalContext = compressed;
    // Summaries can still collectively overflow; enforce the budget.
    if (compressedTokens > maxTokens) {
      finalContext = truncateToTokens(compressed, maxTokens);
      compressedTokens = estimateTokens(finalContext);
    }
    return {
      context: finalContext,
      originalTokens,
      compressedTokens,
      reductionPercent: Math.round((1 - compressedTokens / originalTokens) * 100),
      strategy: "summarize"
    };
  } catch {
    const truncated = truncateToTokens(rawContext, maxTokens);
    return {
      context: truncated,
      originalTokens,
      compressedTokens: estimateTokens(truncated),
      reductionPercent: Math.round((1 - estimateTokens(truncated) / originalTokens) * 100),
      strategy: "truncate-fallback"
    };
  }
}
|
|
679
|
+
/**
 * Rough token count via the ~4-characters-per-token heuristic (rounded up).
 * @param {string} text2 - Text to estimate.
 * @returns {number} Approximate token count (0 for the empty string).
 */
function estimateTokens(text2) {
  const APPROX_CHARS_PER_TOKEN = 4;
  return Math.ceil(text2.length / APPROX_CHARS_PER_TOKEN);
}
|
|
682
|
+
/**
 * Hard-truncate text to a token budget using the same ~4 chars/token
 * heuristic as estimateTokens, appending a truncation marker when cut.
 * @param {string} text2 - Text to bound.
 * @param {number} maxTokens - Token budget.
 * @returns {string} The original text, or its prefix plus "[...truncated]".
 */
function truncateToTokens(text2, maxTokens) {
  const charLimit = maxTokens * 4;
  if (text2.length > charLimit) {
    return text2.slice(0, charLimit) + "\n\n[...truncated]";
  }
  return text2;
}
|
|
687
|
+
// Periodic sweep: every 60s, evict delta-cache entries older than the TTL.
const deltaCacheSweep = setInterval(() => {
  const now = Date.now();
  for (const [key, val] of deltaCache) {
    if (now - val.timestamp > DELTA_CACHE_TTL) deltaCache.delete(key);
  }
}, 6e4);
// Fix: unref the timer so it cannot keep the process alive by itself.
// Without this, a stdio MCP server never exits after its transport closes,
// because the event loop always has this pending interval. Optional call
// (`?.`) keeps non-Node runtimes that return a plain id working.
deltaCacheSweep.unref?.();
|
|
693
|
+
|
|
694
|
+
// src/engine/retriever.ts
|
|
695
|
+
import { createHash as createHash2 } from "crypto";
|
|
696
|
+
import OpenAI3 from "openai";
|
|
697
|
+
// OpenAI client used by the retriever (reranking / retrieval-time LLM calls).
var openai3 = new OpenAI3({ apiKey: process.env.OPENAI_API_KEY });
|
|
698
|
+
/**
 * Orchestrates the full retrieval pipeline:
 *   cache check -> vector (+ optional BM25 / memory / graph) search ->
 *   dedupe -> threshold -> rank fusion -> LLM rerank -> enrich ->
 *   context packing -> optional compression -> cache write.
 *
 * Fix: the score threshold is now applied BEFORE reciprocal-rank fusion.
 * Fused RRF scores are bounded by weight / (k + 1) (~0.016 with the
 * defaults vectorWeight=0.7, k=60), so the original post-fusion filter
 * with threshold=0.3 discarded every vector/BM25 result in hybrid mode.
 * The threshold now prunes on the raw per-retriever similarity scores,
 * which is the scale it was expressed in.
 *
 * @param {object} opts - See destructuring below for fields and defaults.
 * @returns {Promise<{results: Array, context: string, meta: object}>}
 */
async function retrieve(opts) {
  const {
    projectId,
    query,
    topK = 10,
    threshold = 0.3,
    chunkTypes,
    hybridSearch = true,
    vectorWeight = 0.7,
    bm25Weight = 0.3,
    rerank = true,
    rerankTopK,
    includeMemories = false,
    userId,
    sessionId,
    agentId,
    includeGraph = false,
    graphDepth = 1,
    maxTokens,
    compress = false,
    compressionStrategy = "adaptive",
    previousContextHash,
    useCache = true,
    cacheTtlSeconds = 300
  } = opts;
  const startTime = Date.now();
  // Fast path: serve a cached result set (repacked, never recompressed).
  if (useCache) {
    const cached = await checkCache(projectId, query);
    if (cached) {
      return {
        results: cached,
        context: packContext(cached, maxTokens),
        meta: {
          totalResults: cached.length,
          latencyMs: Date.now() - startTime,
          cacheHit: true,
          tokensUsed: 0
        }
      };
    }
  }
  const queryEmbedding = await embedSingle(query);
  let allResults = [];
  // Over-fetch (2x topK) so fusion/threshold/rerank have headroom.
  const vectorResults = await vectorSearch(projectId, queryEmbedding, topK * 2, chunkTypes);
  allResults.push(...vectorResults);
  if (hybridSearch) {
    const bm25Results = await fullTextSearch(projectId, query, topK * 2, chunkTypes);
    allResults.push(...bm25Results);
  }
  if (includeMemories) {
    // Memories get a smaller quota (~topK/3) than document chunks.
    const memoryResults = await memorySearch(projectId, queryEmbedding, {
      userId,
      sessionId,
      agentId,
      topK: Math.ceil(topK / 3)
    });
    allResults.push(...memoryResults);
  }
  if (includeGraph) {
    const graphResults = await graphSearch(projectId, queryEmbedding, {
      depth: graphDepth,
      topK: Math.ceil(topK / 3)
    });
    allResults.push(...graphResults);
  }
  allResults = deduplicateResults(allResults);
  // Threshold on raw similarity scores, BEFORE fusion (see doc above).
  allResults = allResults.filter((r) => r.score >= threshold);
  if (hybridSearch) {
    // Reciprocal rank fusion merges the vector and BM25 orderings.
    allResults = reciprocalRankFusion(allResults, vectorWeight, bm25Weight);
  }
  if (rerank && allResults.length > 0) {
    const reranked = await rerankResults(query, allResults, rerankTopK || topK);
    allResults = reranked;
  }
  allResults = allResults.slice(0, topK);
  // Attach document titles / source names for chunk-backed results.
  allResults = await enrichResults(allResults);
  let context = packContext(allResults, maxTokens);
  // Short hash identifying this exact packed context (delta tracking).
  const contextHash = createHash2("sha256").update(context).digest("hex").slice(0, 16);
  let compressionMeta;
  if (compress && context.length > 0) {
    const compressed = await compressContext(context, {
      maxTokens: maxTokens || 4e3,
      strategy: compressionStrategy,
      previousContextHash
    });
    context = compressed.context;
    compressionMeta = {
      originalTokens: compressed.originalTokens,
      compressedTokens: compressed.compressedTokens,
      reductionPercent: compressed.reductionPercent,
      strategy: compressed.strategy
    };
  }
  // Cache the (pre-compression) result set for subsequent queries.
  if (useCache && allResults.length > 0) {
    await setCache(projectId, query, allResults, cacheTtlSeconds);
  }
  const latencyMs = Date.now() - startTime;
  return {
    results: allResults,
    context,
    meta: {
      totalResults: allResults.length,
      latencyMs,
      cacheHit: false,
      tokensUsed: estimateTokens2(context),
      contextHash,
      compression: compressionMeta
    }
  };
}
|
|
808
|
+
// Nearest-neighbor search over chunk embeddings for one project,
// optionally restricted to specific chunk types. Returns unified result
// records with pgvector-derived similarity as the score.
async function vectorSearch(projectId, queryEmbedding, limit, chunkTypes) {
  const filters = [eq(chunks.projectId, projectId)];
  if (chunkTypes && chunkTypes.length > 0) {
    filters.push(inArray(chunks.chunkType, chunkTypes));
  }
  const rows = await db
    .select({
      id: chunks.id,
      content: chunks.content,
      chunkType: chunks.chunkType,
      metadata: chunks.metadata,
      documentId: chunks.documentId,
      similarity: sql`1 - (${chunks.embedding} <=> ${JSON.stringify(queryEmbedding)}::vector)`
    })
    .from(chunks)
    .where(and(...filters))
    .orderBy(sql`${chunks.embedding} <=> ${JSON.stringify(queryEmbedding)}::vector`)
    .limit(limit);
  return rows.map((row) => ({
    id: row.id,
    content: row.content,
    score: row.similarity,
    metadata: row.metadata || {},
    chunkType: row.chunkType,
    source: "vector"
  }));
}
|
|
830
|
+
// Postgres full-text search over chunks. The free-text query is turned
// into a strict AND tsquery; ts_rank scores are normalized to [0, 1]
// relative to the best hit so they are comparable across queries.
async function fullTextSearch(projectId, query, limit, chunkTypes) {
  const tsQuery = query
    .replace(/[^\w\s]/g, " ")
    .trim()
    .split(/\s+/)
    .filter((w) => w.length > 1)
    .join(" & ");
  // Nothing usable left (e.g. punctuation-only input): no results.
  if (!tsQuery) return [];
  const filters = [eq(chunks.projectId, projectId)];
  if (chunkTypes && chunkTypes.length > 0) {
    filters.push(inArray(chunks.chunkType, chunkTypes));
  }
  const rows = await db
    .select({
      id: chunks.id,
      content: chunks.content,
      chunkType: chunks.chunkType,
      metadata: chunks.metadata,
      documentId: chunks.documentId,
      rank: sql`ts_rank(to_tsvector('english', coalesce(${chunks.searchContent}, ${chunks.content})), to_tsquery('english', ${tsQuery}))`
    })
    .from(chunks)
    .where(
      and(
        ...filters,
        sql`to_tsvector('english', coalesce(${chunks.searchContent}, ${chunks.content})) @@ to_tsquery('english', ${tsQuery})`
      )
    )
    .orderBy(sql`ts_rank(to_tsvector('english', coalesce(${chunks.searchContent}, ${chunks.content})), to_tsquery('english', ${tsQuery})) DESC`)
    .limit(limit);
  let maxRank = 1;
  if (rows.length > 0) {
    maxRank = Math.max(...rows.map((row) => row.rank));
  }
  return rows.map((row) => ({
    id: row.id,
    content: row.content,
    score: maxRank > 0 ? row.rank / maxRank : 0,
    metadata: row.metadata || {},
    chunkType: row.chunkType,
    source: "bm25"
  }));
}
|
|
860
|
+
// Vector search over active, unexpired memories, optionally narrowed to
// a user / session / agent scope. Scores blend similarity with stored
// importance; access stats are bumped as a side effect.
async function memorySearch(projectId, queryEmbedding, opts) {
const conditions = [
eq(memories.projectId, projectId),
eq(memories.isActive, true),
// Keep memories that never expire (NULL expiresAt) or whose expiry
// is still in the future.
or(isNull(memories.expiresAt), gt(memories.expiresAt, /* @__PURE__ */ new Date()))
];
if (opts.userId) conditions.push(eq(memories.userId, opts.userId));
if (opts.sessionId) conditions.push(eq(memories.sessionId, opts.sessionId));
if (opts.agentId) conditions.push(eq(memories.agentId, opts.agentId));
const results = await db.select({
id: memories.id,
content: memories.content,
memoryType: memories.memoryType,
metadata: memories.metadata,
importance: memories.importance,
// Similarity derived from pgvector's <=> distance operator.
similarity: sql`1 - (${memories.embedding} <=> ${JSON.stringify(queryEmbedding)}::vector)`
}).from(memories).where(and(...conditions)).orderBy(sql`${memories.embedding} <=> ${JSON.stringify(queryEmbedding)}::vector`).limit(opts.topK);
if (results.length > 0) {
// Record usage so access frequency/recency is available to later
// pruning or ranking logic.
const ids = results.map((r) => r.id);
await db.update(memories).set({
accessCount: sql`${memories.accessCount} + 1`,
lastAccessedAt: /* @__PURE__ */ new Date()
}).where(inArray(memories.id, ids));
}
return results.map((r) => ({
id: r.id,
content: r.content,
// Importance defaults to 0.5 when unset (note: also when stored as 0).
score: r.similarity * (r.importance || 0.5),
metadata: { ...r.metadata, memoryType: r.memoryType },
chunkType: "memory",
source: "memory"
}));
}
|
|
893
|
+
// Graph-augmented retrieval: embed-match up to 5 entities, expand one
// hop along outgoing relations, then surface the chunks those entities
// were extracted from.
async function graphSearch(projectId, queryEmbedding, opts) {
const relevantEntities = await db.select({
id: entities.id,
name: entities.name,
entityType: entities.entityType,
description: entities.description,
metadata: entities.metadata,
sourceChunkId: entities.sourceChunkId,
similarity: sql`1 - (${entities.embedding} <=> ${JSON.stringify(queryEmbedding)}::vector)`
}).from(entities).where(eq(entities.projectId, projectId)).orderBy(sql`${entities.embedding} <=> ${JSON.stringify(queryEmbedding)}::vector`).limit(5);
if (relevantEntities.length === 0) return [];
const entityIds = relevantEntities.map((e) => e.id);
// One-hop neighbors reachable via relations *from* the seed entities.
// NOTE(review): opts.depth is accepted but never read — only a single
// hop is traversed regardless of the requested graphDepth; confirm
// whether deeper traversal was intended.
const relatedEntities = await db.select({
id: entities.id,
name: entities.name,
entityType: entities.entityType,
description: entities.description,
metadata: entities.metadata,
sourceChunkId: entities.sourceChunkId,
relationType: relations.relationType,
weight: relations.weight
}).from(relations).innerJoin(entities, eq(relations.toEntityId, entities.id)).where(
and(
eq(relations.projectId, projectId),
inArray(relations.fromEntityId, entityIds)
)
).limit(opts.topK);
// Chunks backing either the seed or the neighbor entities.
const chunkIds = [
...relevantEntities.map((e) => e.sourceChunkId).filter(Boolean),
...relatedEntities.map((e) => e.sourceChunkId).filter(Boolean)
];
if (chunkIds.length === 0) return [];
const relatedChunks = await db.select().from(chunks).where(inArray(chunks.id, chunkIds));
return relatedChunks.map((c) => {
// Only seed entities are consulted for score/provenance; chunks that
// arrived via a neighbor entity get the flat 0.5 fallback score and
// no entityName/entityType metadata.
const entity = relevantEntities.find((e) => e.sourceChunkId === c.id);
return {
id: c.id,
content: c.content,
score: entity ? entity.similarity * 0.8 : 0.5,
metadata: {
...c.metadata,
entityName: entity?.name,
entityType: entity?.entityType
},
chunkType: c.chunkType,
source: "graph"
};
});
}
|
|
942
|
+
// Reciprocal rank fusion of vector and BM25 result lists. Each list
// contributes weight / (k + rank + 1); items found by both retrievers
// accumulate both contributions and are relabeled "hybrid". Results
// from other sources (memory, graph) keep half their native score
// unless already fused.
function reciprocalRankFusion(results, vectorWeight, bm25Weight, k = 60) {
  const fused = /* @__PURE__ */ new Map();
  const accumulate = (list, weight, markHybrid) => {
    list.forEach((item, rank) => {
      const contribution = weight / (k + rank + 1);
      const entry = fused.get(item.id);
      if (entry) {
        entry.score += contribution;
        if (markHybrid) entry.result.source = "hybrid";
      } else {
        const result = markHybrid ? { ...item, source: "hybrid" } : item;
        fused.set(item.id, { result, score: contribution });
      }
    });
  };
  accumulate(results.filter((r) => r.source === "vector"), vectorWeight, false);
  accumulate(results.filter((r) => r.source === "bm25"), bm25Weight, true);
  for (const r of results) {
    if (r.source !== "vector" && r.source !== "bm25" && !fused.has(r.id)) {
      fused.set(r.id, { result: r, score: r.score * 0.5 });
    }
  }
  return [...fused.values()]
    .sort((a, b) => b.score - a.score)
    .map(({ result, score }) => ({ ...result, score }));
}
|
|
973
|
+
// LLM-based listwise rerank of retrieval candidates. Asks a small model
// to order passage indices, then rebuilds the list with rank-derived
// scores. Falls back to the incoming order on any failure.
async function rerankResults(query, results, topK) {
// Very small result sets are returned untouched.
if (results.length <= 3) return results;
// Bound prompt size: rerank at most topK * 3 candidates, each
// truncated to 300 chars below.
const candidates = results.slice(0, Math.min(results.length, topK * 3));
const prompt = `Given the query: "${query}"

Rank these ${candidates.length} text passages by relevance (most relevant first). Return ONLY a JSON array of indices (0-based), e.g. [2, 0, 4, 1, 3].

${candidates.map((r, i) => `[${i}] ${r.content.slice(0, 300)}`).join("\n\n")}`;
try {
const res = await openai3.chat.completions.create({
model: "gpt-4.1-nano",
messages: [{ role: "user", content: prompt }],
temperature: 0,
max_tokens: 200
});
const text2 = res.choices[0]?.message?.content?.trim() || "";
// Extract the first JSON array of integers from the model's reply;
// anything else means the rerank is unusable.
const match = text2.match(/\[[\d,\s]+\]/);
if (!match) return results;
const indices = JSON.parse(match[0]);
const reranked = [];
for (const idx of indices) {
// Ignore out-of-range indices the model may hallucinate.
if (idx >= 0 && idx < candidates.length) {
reranked.push({
...candidates[idx],
// Scores become evenly spaced rank positions descending from 1.
score: 1 - reranked.length * (1 / indices.length)
// normalize
});
}
}
// Append any results the model omitted, keeping their original
// order and scores, so nothing is silently lost.
for (const r of results) {
if (!reranked.find((rr) => rr.id === r.id)) {
reranked.push(r);
}
}
return reranked.slice(0, topK);
} catch {
// Best-effort: API or parse failures fall back to the fused order.
return results.slice(0, topK);
}
}
|
|
1012
|
+
// Collapse duplicate ids (the same chunk found by multiple retrievers),
// keeping whichever occurrence carries the highest score. First-seen
// order of ids is preserved.
function deduplicateResults(results) {
  const best = /* @__PURE__ */ new Map();
  for (const candidate of results) {
    const current = best.get(candidate.id);
    if (current === void 0 || candidate.score > current.score) {
      best.set(candidate.id, candidate);
    }
  }
  return [...best.values()];
}
|
|
1022
|
+
// Greedily pack "[header]\ncontent\n" blocks in result order until the
// token budget (default ~8k) would be exceeded; blocks are joined with
// "---" dividers. Packing stops at the first block that does not fit.
function packContext(results, maxTokens) {
  if (results.length === 0) return "";
  const budget = maxTokens || 8e3;
  const blocks = [];
  let used = 0;
  for (const r of results) {
    const block = `${buildChunkHeader(r)}\n${r.content}\n`;
    const cost = estimateTokens2(block);
    if (used + cost > budget) break;
    blocks.push(block);
    used += cost;
  }
  return blocks.join("\n---\n\n");
}
|
|
1039
|
+
// Compose a compact provenance header for a packed chunk, e.g.
// "[Source: repo | File: a.ts | Lines: 3-7 | Type: code]".
// Returns "" when no provenance fields are present; the generic "text"
// chunk type is omitted as noise.
function buildChunkHeader(r) {
  const meta = r.metadata;
  const parts = [];
  if (r.sourceName) parts.push(`Source: ${r.sourceName}`);
  if (r.documentTitle) parts.push(`Document: ${r.documentTitle}`);
  if (meta?.filePath) parts.push(`File: ${meta.filePath}`);
  if (meta?.startLine) parts.push(`Lines: ${meta.startLine}-${meta.endLine || "?"}`);
  if (r.chunkType && r.chunkType !== "text") parts.push(`Type: ${r.chunkType}`);
  if (parts.length === 0) return "";
  return `[${parts.join(" | ")}]`;
}
|
|
1048
|
+
// Cheap token estimate used for context budgeting: roughly 4 characters
// per token, rounded up.
function estimateTokens2(text2) {
  const approxCharsPerToken = 4;
  return Math.ceil(text2.length / approxCharsPerToken);
}
|
|
1051
|
+
// Attach document title and source name to chunk-backed results via a
// single chunks->documents->sources join; memory results have no
// document row and pass through untouched.
async function enrichResults(results) {
  const chunkIds = results.filter((r) => r.source !== "memory").map((r) => r.id);
  if (chunkIds.length === 0) return results;
  const rows = await db
    .select({
      chunkId: chunks.id,
      docTitle: documents.title,
      sourceName: sources.name,
      docId: documents.id,
      sourceId: sources.id
    })
    .from(chunks)
    .innerJoin(documents, eq(chunks.documentId, documents.id))
    .innerJoin(sources, eq(documents.sourceId, sources.id))
    .where(inArray(chunks.id, chunkIds));
  const byChunk = new Map(rows.map((row) => [row.chunkId, row]));
  return results.map((r) => {
    const extra = byChunk.get(r.id);
    if (!extra) return r;
    return {
      ...r,
      documentTitle: extra.docTitle || void 0,
      sourceName: extra.sourceName || void 0
    };
  });
}
|
|
1075
|
+
// Cache key for a query: SHA-256 of the lowercased, trimmed text so
// trivially different spellings of the same query share a cache slot.
function hashQuery(query) {
  const normalized = query.toLowerCase().trim();
  return createHash2("sha256").update(normalized).digest("hex");
}
|
|
1078
|
+
// Look up a fresh (unexpired) cached result set for this project+query;
// bumps the hit counter when found. Returns null on a miss.
async function checkCache(projectId, query) {
  const queryHash = hashQuery(query);
  const rows = await db
    .select()
    .from(queryCache)
    .where(
      and(
        eq(queryCache.projectId, projectId),
        eq(queryCache.queryHash, queryHash),
        gt(queryCache.expiresAt, /* @__PURE__ */ new Date())
      )
    )
    .limit(1);
  const hit = rows[0];
  if (!hit) return null;
  await db
    .update(queryCache)
    .set({ hitCount: sql`${queryCache.hitCount} + 1` })
    .where(eq(queryCache.id, hit.id));
  return hit.results;
}
|
|
1093
|
+
// Upsert the cached result set keyed by (project, query hash), resetting
// the TTL and hit count whenever an entry is refreshed.
async function setCache(projectId, query, results, ttlSeconds) {
  const queryHash = hashQuery(query);
  const expiresAt = new Date(Date.now() + ttlSeconds * 1e3);
  await db
    .insert(queryCache)
    .values({
      projectId,
      queryHash,
      query,
      results,
      expiresAt
    })
    .onConflictDoUpdate({
      target: [queryCache.projectId, queryCache.queryHash],
      set: { results, expiresAt, hitCount: 0 }
    });
}
|
|
1107
|
+
|
|
1108
|
+
// src/engine/ingest.ts
|
|
1109
|
+
import { eq as eq2 } from "drizzle-orm";
|
|
1110
|
+
|
|
1111
|
+
// src/engine/chunker.ts
|
|
1112
|
+
// File extensions classified as source code by detectChunkType; these
// files are routed through the boundary-aware code chunker.
var CODE_EXTENSIONS = /* @__PURE__ */ new Set([
".ts",
".tsx",
".js",
".jsx",
".py",
".java",
".go",
".rb",
".php",
".cs",
".rs",
".swift",
".kt",
".scala",
".c",
".cpp",
".h",
".hpp",
".sol",
".vy"
]);
// File extensions classified as configuration files by detectChunkType.
var CONFIG_EXTENSIONS = /* @__PURE__ */ new Set([
".json",
".yaml",
".yml",
".toml",
".ini",
".env",
".xml"
]);
|
|
1143
|
+
// Classify a file into a chunk type by its path alone. The `content`
// parameter is currently unused but retained for interface stability.
// Precedence: extension tables first, then path keywords, else "text".
function detectChunkType(filePath, content) {
  if (!filePath) return "text";
  const extension = "." + filePath.split(".").pop()?.toLowerCase();
  if (CODE_EXTENSIONS.has(extension)) return "code";
  if (CONFIG_EXTENSIONS.has(extension)) return "config";
  const looksLikeSchema = filePath.includes("schema") || filePath.includes("migration");
  if (looksLikeSchema) return "schema";
  const docSuffixes = [".md", ".mdx", ".rst"];
  if (docSuffixes.some((suffix) => filePath.endsWith(suffix))) return "documentation";
  const looksLikeApiSpec = filePath.includes("openapi") || filePath.includes("swagger");
  if (looksLikeApiSpec) return "api_spec";
  return "text";
}
|
|
1153
|
+
// Entry point for chunking: code files get boundary-aware chunking,
// everything else gets paragraph/size-based chunking with word overlap.
function chunkText(content, opts = {}) {
  const { chunkSize = 1e3, chunkOverlap = 200, filePath, metadata = {} } = opts;
  const chunkType = detectChunkType(filePath, content);
  return chunkType === "code"
    ? chunkCode(content, { chunkSize, filePath, metadata })
    : chunkBySize(content, { chunkSize, chunkOverlap, chunkType, metadata });
}
|
|
1161
|
+
/**
 * Split source code into chunks at heuristic definition boundaries
 * (function/class/interface/etc. across common languages), with a hard
 * cap of chunkSize * 1.5 characters per chunk.
 *
 * Refactor: the original repeated the identical chunk-push logic at
 * three sites (boundary split, size cap, trailing flush); it is now a
 * single local `flush` helper. Behavior is unchanged.
 *
 * @param {string} content - Raw file contents.
 * @param {{chunkSize: number, filePath?: string, metadata?: object}} opts
 * @returns {Array<{content: string, chunkType: "code", chunkIndex: number, metadata: object}>}
 */
function chunkCode(content, opts) {
  const { chunkSize, filePath, metadata = {} } = opts;
  const lines = content.split("\n");
  const chunks2 = [];
  // Start-of-definition patterns (JS/TS, Python, Java, Go, Rust).
  const boundaries = [
    /^(export\s+)?(async\s+)?function\s+/,
    /^(export\s+)?(default\s+)?class\s+/,
    /^(export\s+)?const\s+\w+\s*=\s*(async\s+)?\(/,
    /^(export\s+)?const\s+\w+\s*=\s*\{/,
    /^(export\s+)?interface\s+/,
    /^(export\s+)?type\s+/,
    /^(export\s+)?enum\s+/,
    /^def\s+/,
    // Python
    /^class\s+/,
    // Python/Java
    /^func\s+/,
    // Go
    /^pub\s+(fn|struct|enum|impl)/
    // Rust
  ];
  let currentChunk = [];
  let currentStart = 0;
  // Emit the accumulated lines as one chunk ending at 1-indexed endLine;
  // no-op when the buffer is empty or whitespace-only.
  const flush = (endLine) => {
    const chunkContent = currentChunk.join("\n").trim();
    if (chunkContent.length === 0) return;
    chunks2.push({
      content: chunkContent,
      chunkType: "code",
      chunkIndex: chunks2.length,
      metadata: {
        ...metadata,
        filePath,
        startLine: currentStart + 1,
        endLine
      }
    });
  };
  for (let i = 0; i < lines.length; i++) {
    const trimmed = lines[i].trimStart();
    const isBoundary = boundaries.some((b) => b.test(trimmed));
    if (isBoundary && currentChunk.length > 0) {
      // A new definition begins: close out the previous chunk.
      flush(i);
      currentChunk = [lines[i]];
      currentStart = i;
    } else {
      currentChunk.push(lines[i]);
    }
    // Hard size cap: split oversized chunks even without a boundary.
    if (currentChunk.join("\n").length > chunkSize * 1.5) {
      flush(i + 1);
      currentChunk = [];
      currentStart = i + 1;
    }
  }
  // Trailing lines after the final boundary.
  flush(lines.length);
  return chunks2;
}
|
|
1244
|
+
/**
 * Paragraph-oriented chunking for non-code text. Paragraphs are packed
 * until chunkSize characters would be exceeded, then a new chunk starts,
 * seeded with a word-overlap tail (~chunkOverlap/5 words) from the
 * previous chunk for continuity across chunk borders.
 *
 * Fix: with chunkOverlap < 5, `Math.floor(chunkOverlap / 5)` is 0 and
 * the original `words.slice(-0)` === `words.slice(0)` carried the
 * ENTIRE previous chunk forward, duplicating content. Zero overlap now
 * genuinely means no overlap; behavior for chunkOverlap >= 5 is
 * unchanged.
 *
 * @param {string} content - Raw text to chunk.
 * @param {{chunkSize: number, chunkOverlap: number, chunkType: string, metadata?: object}} opts
 * @returns {Array<{content: string, chunkType: string, chunkIndex: number, metadata: object}>}
 */
function chunkBySize(content, opts) {
  const { chunkSize, chunkOverlap, chunkType, metadata = {} } = opts;
  const chunks2 = [];
  const paragraphs = content.split(/\n\n+/);
  let current = "";
  for (const para of paragraphs) {
    if ((current + "\n\n" + para).length > chunkSize && current.length > 0) {
      chunks2.push({
        content: current.trim(),
        chunkType,
        chunkIndex: chunks2.length,
        metadata
      });
      // Seed the next chunk with ~chunkOverlap/5 trailing words
      // (5 ~ average word length including separator). Guard against
      // slice(-0), which would return the whole array.
      const overlapCount = Math.floor(chunkOverlap / 5);
      const overlapWords = overlapCount > 0 ? current.split(/\s+/).slice(-overlapCount) : [];
      current = overlapWords.length > 0 ? overlapWords.join(" ") + "\n\n" + para : para;
    } else {
      current = current ? current + "\n\n" + para : para;
    }
  }
  if (current.trim().length > 0) {
    chunks2.push({
      content: current.trim(),
      chunkType,
      chunkIndex: chunks2.length,
      metadata
    });
  }
  return chunks2;
}
|
|
1274
|
+
|
|
1275
|
+
// src/engine/extractor.ts
|
|
1276
|
+
import OpenAI4 from "openai";
|
|
1277
|
+
// Module-level OpenAI client used by entity/relation extraction;
// reads OPENAI_API_KEY from the environment at import time.
var openai4 = new OpenAI4({ apiKey: process.env.OPENAI_API_KEY });
|
|
1278
|
+
// LLM-driven knowledge-graph population: ask the model for entities and
// relations found in one chunk, embed each entity, and upsert both into
// the database. Best-effort by design — any failure returns zero counts
// (see the catch at the bottom).
async function extractEntities(projectId, content, chunkType, metadata = {}, chunkId) {
// Skip trivially small chunks.
if (content.length < 100) return { entities: 0, relations: 0 };
const isCode = ["code", "function", "class"].includes(chunkType);
// Two prompt variants (code-aware vs. generic text); the chunk is
// truncated to 3000 chars to bound prompt size.
const prompt = isCode ? `Analyze this code and extract entities and relationships.

Entities: functions, classes, interfaces, types, modules, variables, constants, API endpoints, services.
Relations: imports, exports, calls, implements, extends, depends_on, references, part_of.

Code:
\`\`\`
${content.slice(0, 3e3)}
\`\`\`

Respond with JSON only:
{
"entities": [{"name": "...", "type": "function|class|interface|module|constant|api_endpoint|service", "description": "one line"}],
"relations": [{"from": "name", "fromType": "type", "to": "name", "toType": "type", "relation": "imports|calls|extends|implements|depends_on|references|part_of"}]
}` : `Analyze this text and extract key entities (concepts, people, tools, services, APIs, technologies) and their relationships.

Text:
${content.slice(0, 3e3)}

Respond with JSON only:
{
"entities": [{"name": "...", "type": "concept|tool|service|api|technology|person|organization", "description": "one line"}],
"relations": [{"from": "name", "fromType": "type", "to": "name", "toType": "type", "relation": "references|depends_on|related_to|part_of|supersedes"}]
}`;
try {
const res = await openai4.chat.completions.create({
model: "gpt-4.1-nano",
messages: [{ role: "user", content: prompt }],
temperature: 0,
max_tokens: 1e3,
response_format: { type: "json_object" }
});
const text2 = res.choices[0]?.message?.content?.trim() || "{}";
const parsed = JSON.parse(text2);
const extractedEntities = parsed.entities || [];
const extractedRelations = parsed.relations || [];
let entityCount = 0;
let relationCount = 0;
// Maps "name:type" -> entity id for relation wiring below.
const entityMap = /* @__PURE__ */ new Map();
// Upsert at most 20 entities, each embedded individually (sequential
// embedSingle calls — one API round-trip per entity).
for (const ent of extractedEntities.slice(0, 20)) {
if (!ent.name || !ent.type) continue;
const embedding = await embedSingle(`${ent.type}: ${ent.name} - ${ent.description || ""}`);
const [entity] = await db.insert(entities).values({
projectId,
name: ent.name,
entityType: ent.type,
description: ent.description,
metadata: { ...metadata, autoExtracted: true },
sourceChunkId: chunkId,
embedding
}).onConflictDoUpdate({
target: [entities.projectId, entities.name, entities.entityType],
set: {
description: ent.description,
sourceChunkId: chunkId,
embedding,
updatedAt: /* @__PURE__ */ new Date()
}
}).returning();
entityMap.set(`${ent.name}:${ent.type}`, entity.id);
entityCount++;
}
// Upsert at most 30 relations — only between entities extracted in
// THIS pass, and only with whitelisted relation types.
for (const rel of extractedRelations.slice(0, 30)) {
if (!rel.from || !rel.to || !rel.relation) continue;
const fromId = entityMap.get(`${rel.from}:${rel.fromType}`);
const toId = entityMap.get(`${rel.to}:${rel.toType}`);
if (!fromId || !toId) continue;
const validRelations = [
"imports",
"exports",
"calls",
"implements",
"extends",
"references",
"depends_on",
"related_to",
"part_of",
"contradicts",
"supersedes"
];
if (!validRelations.includes(rel.relation)) continue;
await db.insert(relations).values({
projectId,
fromEntityId: fromId,
toEntityId: toId,
relationType: rel.relation,
metadata: { autoExtracted: true }
}).onConflictDoUpdate({
target: [relations.fromEntityId, relations.toEntityId, relations.relationType],
set: { metadata: { autoExtracted: true } }
});
relationCount++;
}
return { entities: entityCount, relations: relationCount };
} catch {
// NOTE(review): all errors (API, JSON parse, DB) are swallowed here.
// Intentional for best-effort enrichment, but consider at least
// logging the failure.
return { entities: 0, relations: 0 };
}
}
|
|
1379
|
+
|
|
1380
|
+
// src/engine/ingest.ts
|
|
1381
|
+
import { createHash as createHash3 } from "crypto";
|
|
1382
|
+
import PQueue from "p-queue";
|
|
1383
|
+
// Shared ingestion work queue, capped at 3 concurrent jobs.
// NOTE(review): `queue` is not referenced in this region — presumably
// used by other ingest entry points; verify before removing.
var queue = new PQueue({ concurrency: 3 });
// Entity/relation extraction runs by default; set
// DISABLE_AUTO_EXTRACTION=true in the environment to opt out.
var ENABLE_AUTO_EXTRACTION = process.env.DISABLE_AUTO_EXTRACTION !== "true";
|
|
1385
|
+
// Full (re-)ingestion of one document: upsert the document row, replace
// ALL of its chunks with freshly embedded ones, fire off best-effort
// entity extraction, and refresh the parent source's counters.
async function ingestDocument(input) {
const { sourceId, projectId, externalId, title, content, metadata = {}, filePath } = input;
const contentHash = createHash3("sha256").update(content).digest("hex");
const [doc] = await db.insert(documents).values({
sourceId,
projectId,
externalId,
title,
content,
metadata,
contentHash
}).onConflictDoUpdate({
target: [documents.sourceId, documents.externalId],
set: { title, content, metadata, contentHash, updatedAt: /* @__PURE__ */ new Date() }
}).returning();
// Re-chunk from scratch: existing chunks are dropped even when the
// content hash is unchanged.
await db.delete(chunks).where(eq2(chunks.documentId, doc.id));
const textChunks = chunkText(content, {
filePath: filePath || externalId,
metadata: { ...metadata, title }
});
if (textChunks.length === 0) return doc;
// Embed and insert in batches of 50 chunks per round-trip.
const batchSize = 50;
const insertedChunkIds = [];
for (let i = 0; i < textChunks.length; i += batchSize) {
const batch = textChunks.slice(i, i + batchSize);
const embeddings = await embed(batch.map((c) => c.content));
const inserted = await db.insert(chunks).values(
batch.map((chunk, j) => ({
documentId: doc.id,
projectId,
content: chunk.content,
chunkType: chunk.chunkType,
chunkIndex: chunk.chunkIndex,
metadata: chunk.metadata,
embedding: embeddings[j],
// ~4 chars/token heuristic, matching the retriever's estimate.
tokenCount: Math.ceil(chunk.content.length / 4)
}))
).returning({ id: chunks.id });
insertedChunkIds.push(...inserted.map((c) => c.id));
}
if (ENABLE_AUTO_EXTRACTION && !input.skipEntityExtraction) {
// Fire-and-forget extraction on up to 5 substantial (>200 char)
// chunks; failures are deliberately discarded.
const chunksToExtract = textChunks.filter((c) => c.content.length > 200).slice(0, 5);
for (let i = 0; i < chunksToExtract.length; i++) {
const chunk = chunksToExtract[i];
// indexOf works because chunk objects are shared by reference.
const chunkId = insertedChunkIds[textChunks.indexOf(chunk)];
extractEntities(projectId, chunk.content, chunk.chunkType, metadata, chunkId).catch(() => {
});
}
}
// Refresh denormalized counters and sync status on the parent source.
const docCount = await db.select({ count: db.$count(documents, eq2(documents.sourceId, sourceId)) }).from(documents);
const chunkCount = await db.select({ count: db.$count(chunks, eq2(chunks.documentId, doc.id)) }).from(chunks);
await db.update(sources).set({
documentCount: Number(docCount[0]?.count || 0),
chunkCount: Number(chunkCount[0]?.count || 0),
lastSyncAt: /* @__PURE__ */ new Date(),
status: "ready",
updatedAt: /* @__PURE__ */ new Date()
}).where(eq2(sources.id, sourceId));
return doc;
}
|
|
1445
|
+
|
|
1446
|
+
// src/mcp/server.ts
|
|
1447
|
+
// Organization scope applied to every project lookup in resolveProject;
// defaults to "" when WHISPER_ORG_ID is unset.
var ORG_ID = process.env.WHISPER_ORG_ID || "";
// MCP server instance; tools are registered on it below.
var server = new McpServer({
name: "whisper-context",
version: "0.1.0"
});
|
|
1452
|
+
// Resolve a project within the configured org by exact name OR slug.
// Returns the project row, or undefined when nothing matches.
async function resolveProject(name) {
  const rows = await db
    .select()
    .from(projects)
    .where(
      and2(
        eq3(projects.orgId, ORG_ID),
        sql2`(${projects.name} = ${name} OR ${projects.slug} = ${name})`
      )
    )
    .limit(1);
  return rows[0];
}
|
|
1461
|
+
// Tool: query_context — the primary retrieval entry point exposed to MCP
// clients. Resolves the project by name/slug, runs the retrieval
// pipeline, and returns the packed context as a single text block.
server.tool(
"query_context",
"Search your knowledge base for relevant context. Returns packed context ready for LLM consumption. Supports hybrid vector+keyword search, memory inclusion, and knowledge graph traversal.",
{
project: z.string().describe("Project name or slug"),
query: z.string().describe("What are you looking for?"),
top_k: z.number().optional().default(10).describe("Number of results"),
chunk_types: z.array(z.string()).optional().describe("Filter: code, function, class, documentation, api_spec, schema, config, text"),
include_memories: z.boolean().optional().default(false).describe("Include relevant memories"),
include_graph: z.boolean().optional().default(false).describe("Include knowledge graph traversal"),
user_id: z.string().optional().describe("User ID for memory scoping"),
session_id: z.string().optional().describe("Session ID for memory scoping"),
max_tokens: z.number().optional().describe("Max tokens for packed context")
},
async ({ project, query, top_k, chunk_types, include_memories, include_graph, user_id, session_id, max_tokens }) => {
const proj = await resolveProject(project);
if (!proj) return { content: [{ type: "text", text: `Project '${project}' not found.` }] };
// Retriever defaults (hybrid search, rerank, caching) apply for any
// option not exposed through this tool's schema.
const response = await retrieve({
projectId: proj.id,
query,
topK: top_k,
chunkTypes: chunk_types,
includeMemories: include_memories,
includeGraph: include_graph,
userId: user_id,
sessionId: session_id,
maxTokens: max_tokens
});
if (response.results.length === 0) {
return { content: [{ type: "text", text: "No relevant context found." }] };
}
// Human-readable summary line, then the packed context itself.
const header = `Found ${response.meta.totalResults} results (${response.meta.latencyMs}ms${response.meta.cacheHit ? ", cached" : ""}):

`;
return { content: [{ type: "text", text: header + response.context }] };
}
);
|
|
1498
|
+
// Tool: add_memory — persists a scoped memory row with an embedding so it is
// immediately retrievable by search_memories.
server.tool(
  "add_memory",
  "Store a memory (fact, preference, decision) that persists across conversations. Memories can be scoped to a user, session, or agent.",
  {
    project: z.string().describe("Project name or slug"),
    content: z.string().describe("The memory content to store"),
    memory_type: z.enum(["factual", "episodic", "semantic", "procedural"]).optional().default("factual"),
    user_id: z.string().optional().describe("User this memory belongs to"),
    session_id: z.string().optional().describe("Session scope"),
    agent_id: z.string().optional().describe("Agent scope"),
    importance: z.number().optional().default(0.5).describe("Importance 0-1")
  },
  async ({ project, content, memory_type, user_id, session_id, agent_id, importance }) => {
    const proj = await resolveProject(project);
    if (!proj) {
      return { content: [{ type: "text", text: `Project '${project}' not found.` }] };
    }
    // Embed before inserting so the stored row is searchable by similarity
    // from the moment it exists.
    const embedding = await embedSingle(content);
    const row = {
      projectId: proj.id,
      content,
      memoryType: memory_type,
      userId: user_id,
      sessionId: session_id,
      agentId: agent_id,
      importance,
      embedding
    };
    const inserted = await db.insert(memories).values(row).returning();
    const memory = inserted[0];
    return { content: [{ type: "text", text: `Memory stored (id: ${memory.id}, type: ${memory_type}).` }] };
  }
);
|
|
1527
|
+
// Tool: search_memories — pgvector cosine-similarity search over active,
// non-expired memories, optionally narrowed to a user and/or session.
server.tool(
  "search_memories",
  "Search stored memories by semantic similarity. Recall facts, preferences, past decisions from previous interactions.",
  {
    project: z.string().describe("Project name or slug"),
    query: z.string().describe("What to search for"),
    user_id: z.string().optional().describe("Filter by user"),
    session_id: z.string().optional().describe("Filter by session"),
    top_k: z.number().optional().default(10).describe("Number of results")
  },
  async ({ project, query, user_id, session_id, top_k }) => {
    const proj = await resolveProject(project);
    if (!proj) return { content: [{ type: "text", text: `Project '${project}' not found.` }] };
    const queryEmbedding = await embedSingle(query);
    // Base filters: project scope, soft-delete flag, and TTL (a NULL
    // expiresAt means the memory never expires).
    const conditions = [
      eq3(memories.projectId, proj.id),
      eq3(memories.isActive, true),
      sql2`(${memories.expiresAt} IS NULL OR ${memories.expiresAt} > NOW())`
    ];
    if (user_id) conditions.push(eq3(memories.userId, user_id));
    if (session_id) conditions.push(eq3(memories.sessionId, session_id));
    // `<=>` is pgvector's cosine-distance operator; similarity = 1 - distance.
    // The embedding is serialized via JSON.stringify and cast to ::vector.
    // Ordering ascending by distance returns the most similar rows first.
    const results = await db.select({
      id: memories.id,
      content: memories.content,
      memoryType: memories.memoryType,
      importance: memories.importance,
      similarity: sql2`1 - (${memories.embedding} <=> ${JSON.stringify(queryEmbedding)}::vector)`
    }).from(memories).where(and2(...conditions)).orderBy(sql2`${memories.embedding} <=> ${JSON.stringify(queryEmbedding)}::vector`).limit(top_k);
    if (results.length === 0) return { content: [{ type: "text", text: "No memories found." }] };
    // NOTE(review): assumes the driver returns `similarity` as a JS number
    // (toFixed would throw on a string) — confirm postgres-js numeric mapping.
    const text2 = results.map((r, i) => `${i + 1}. [${r.memoryType}, importance: ${r.importance}, score: ${r.similarity.toFixed(3)}]
${r.content}`).join("\n\n");
    return { content: [{ type: "text", text: text2 }] };
  }
);
|
|
1561
|
+
// Tool: list_projects — enumerates every project in the configured org as a
// bulleted list of "name (slug): description" lines.
server.tool(
  "list_projects",
  "List all available context projects.",
  {},
  async () => {
    const rows = await db.select().from(projects).where(eq3(projects.orgId, ORG_ID));
    if (rows.length === 0) {
      return { content: [{ type: "text", text: "No projects found." }] };
    }
    const lines = rows.map((p) => {
      const suffix = p.description ? `: ${p.description}` : "";
      return `- ${p.name} (${p.slug})${suffix}`;
    });
    return { content: [{ type: "text", text: lines.join("\n") }] };
  }
);
|
|
1571
|
+
// Tool: list_sources — shows each connected source with its connector type,
// sync status, and document/chunk counts.
server.tool(
  "list_sources",
  "List all data sources connected to a project.",
  { project: z.string().describe("Project name or slug") },
  async ({ project }) => {
    const proj = await resolveProject(project);
    if (!proj) {
      return { content: [{ type: "text", text: `Project '${project}' not found.` }] };
    }
    const rows = await db.select().from(sources).where(eq3(sources.projectId, proj.id));
    if (rows.length === 0) {
      return { content: [{ type: "text", text: "No sources connected." }] };
    }
    // \u2014 is an em dash separating the source identity from its status.
    const describe = (s) => `- ${s.name} (${s.connectorType}) \u2014 ${s.status} | ${s.documentCount} docs, ${s.chunkCount} chunks`;
    return { content: [{ type: "text", text: rows.map(describe).join("\n") }] };
  }
);
|
|
1583
|
+
// Tool: add_context — indexes ad-hoc text into the project's knowledge base.
// All MCP-submitted text flows through one synthetic "mcp-ingest" source per
// project, created lazily on first use.
server.tool(
  "add_context",
  "Add text content to a project's knowledge base.",
  {
    project: z.string().describe("Project name or slug"),
    title: z.string().describe("Title for this content"),
    content: z.string().describe("The text content to index")
  },
  async ({ project, title, content }) => {
    const proj = await resolveProject(project);
    if (!proj) {
      return { content: [{ type: "text", text: `Project '${project}' not found.` }] };
    }
    const sourceFilter = and2(
      eq3(sources.projectId, proj.id),
      eq3(sources.connectorType, "custom"),
      eq3(sources.name, "mcp-ingest")
    );
    let [directSource] = await db.select().from(sources).where(sourceFilter).limit(1);
    if (!directSource) {
      const created = await db.insert(sources).values({
        projectId: proj.id,
        name: "mcp-ingest",
        connectorType: "custom",
        config: {},
        status: "ready"
      }).returning();
      directSource = created[0];
    }
    // externalId is derived from the title, so re-adding the same title
    // targets the same external document.
    await ingestDocument({
      sourceId: directSource.id,
      projectId: proj.id,
      externalId: `mcp-${title}`,
      title,
      content
    });
    return { content: [{ type: "text", text: `Indexed "${title}" (${content.length} chars) into '${project}'.` }] };
  }
);
|
|
1602
|
+
// Tool: track_conversation — appends a message to a per-session conversation,
// creating the conversation row lazily on the first message.
server.tool(
  "track_conversation",
  "Add a message to a conversation. Creates the conversation if it doesn't exist.",
  {
    project: z.string().describe("Project name or slug"),
    session_id: z.string().describe("Unique session identifier"),
    role: z.enum(["user", "assistant", "system", "tool"]),
    content: z.string().describe("Message content"),
    user_id: z.string().optional().describe("User identifier")
  },
  async ({ project, session_id, role, content, user_id }) => {
    const proj = await resolveProject(project);
    if (!proj) {
      return { content: [{ type: "text", text: `Project '${project}' not found.` }] };
    }
    const convFilter = and2(
      eq3(conversations.projectId, proj.id),
      eq3(conversations.sessionId, session_id)
    );
    let [conv] = await db.select().from(conversations).where(convFilter).limit(1);
    if (!conv) {
      const created = await db.insert(conversations).values({
        projectId: proj.id,
        sessionId: session_id,
        userId: user_id
      }).returning();
      conv = created[0];
    }
    await db.insert(messages).values({ conversationId: conv.id, role, content });
    // The denormalized counter is incremented in SQL so concurrent writers
    // don't lose updates to a read-modify-write race.
    await db.update(conversations).set({
      messageCount: sql2`${conversations.messageCount} + 1`,
      updatedAt: new Date()
    }).where(eq3(conversations.id, conv.id));
    return { content: [{ type: "text", text: `Message added (session: ${session_id}).` }] };
  }
);
|
|
1624
|
+
// Tool: get_conversation — returns the oldest-first transcript for a session,
// capped at `limit` messages.
server.tool(
  "get_conversation",
  "Retrieve conversation history for a session.",
  {
    project: z.string().describe("Project name or slug"),
    session_id: z.string().describe("Session identifier"),
    limit: z.number().optional().default(50)
  },
  async ({ project, session_id, limit }) => {
    const proj = await resolveProject(project);
    if (!proj) {
      return { content: [{ type: "text", text: `Project '${project}' not found.` }] };
    }
    const [conv] = await db.select().from(conversations).where(
      and2(eq3(conversations.projectId, proj.id), eq3(conversations.sessionId, session_id))
    ).limit(1);
    if (!conv) {
      return { content: [{ type: "text", text: "No conversation found for this session." }] };
    }
    const msgs = await db.select().from(messages).where(eq3(messages.conversationId, conv.id)).orderBy(messages.createdAt).limit(limit);
    const transcript = msgs.map((m) => `[${m.role}]: ${m.content}`).join("\n\n");
    return { content: [{ type: "text", text: transcript || "No messages yet." }] };
  }
);
|
|
1642
|
+
// Entry point: wire the MCP server to stdio and run until the client
// disconnects.
async function main() {
  // Without an org ID every resolveProject() call matches nothing, so every
  // tool reports "project not found" — surface that misconfiguration early.
  if (!ORG_ID) {
    console.error("Warning: WHISPER_ORG_ID is not set; no projects will resolve.");
  }
  const transport = new StdioServerTransport();
  await server.connect(transport);
  // Log to stderr: stdout carries the MCP stdio protocol and must stay clean.
  console.error("Whisper Context MCP server running on stdio");
}
main().catch((err) => {
  // Previously fatal startup errors were only logged and the process exited 0,
  // hiding the failure from supervisors; report it via the exit code instead.
  console.error(err);
  process.exit(1);
});
|
|
1648
|
+
//# sourceMappingURL=server.js.map
|