@copilotkit/pathfinder 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (114) hide show
  1. package/.env.example +20 -0
  2. package/.superpowers/brainstorm/47098-1775507869/content/homepage-mockup.html +324 -0
  3. package/.superpowers/brainstorm/47098-1775507869/state/server-stopped +1 -0
  4. package/.superpowers/brainstorm/47098-1775507869/state/server.log +13 -0
  5. package/.superpowers/brainstorm/47098-1775507869/state/server.pid +1 -0
  6. package/.superpowers/brainstorm/82141-1775511032/content/migration-v2.html +340 -0
  7. package/.superpowers/brainstorm/82141-1775511032/content/migration.html +340 -0
  8. package/.superpowers/brainstorm/82141-1775511032/state/server-stopped +1 -0
  9. package/.superpowers/brainstorm/82141-1775511032/state/server.log +4 -0
  10. package/.superpowers/brainstorm/82141-1775511032/state/server.pid +1 -0
  11. package/CHANGELOG.md +26 -0
  12. package/LICENSE +21 -0
  13. package/README.md +284 -0
  14. package/dist/config.d.ts +32 -0
  15. package/dist/config.d.ts.map +1 -0
  16. package/dist/config.js +180 -0
  17. package/dist/config.js.map +1 -0
  18. package/dist/db/client.d.ts +22 -0
  19. package/dist/db/client.d.ts.map +1 -0
  20. package/dist/db/client.js +134 -0
  21. package/dist/db/client.js.map +1 -0
  22. package/dist/db/queries.d.ts +51 -0
  23. package/dist/db/queries.d.ts.map +1 -0
  24. package/dist/db/queries.js +271 -0
  25. package/dist/db/queries.js.map +1 -0
  26. package/dist/db/schema.d.ts +11 -0
  27. package/dist/db/schema.d.ts.map +1 -0
  28. package/dist/db/schema.js +63 -0
  29. package/dist/db/schema.js.map +1 -0
  30. package/dist/index.d.ts +2 -0
  31. package/dist/index.d.ts.map +1 -0
  32. package/dist/index.js +366 -0
  33. package/dist/index.js.map +1 -0
  34. package/dist/indexing/chunking/code.d.ts +17 -0
  35. package/dist/indexing/chunking/code.d.ts.map +1 -0
  36. package/dist/indexing/chunking/code.js +277 -0
  37. package/dist/indexing/chunking/code.js.map +1 -0
  38. package/dist/indexing/chunking/index.d.ts +6 -0
  39. package/dist/indexing/chunking/index.d.ts.map +1 -0
  40. package/dist/indexing/chunking/index.js +19 -0
  41. package/dist/indexing/chunking/index.js.map +1 -0
  42. package/dist/indexing/chunking/markdown.d.ts +16 -0
  43. package/dist/indexing/chunking/markdown.d.ts.map +1 -0
  44. package/dist/indexing/chunking/markdown.js +283 -0
  45. package/dist/indexing/chunking/markdown.js.map +1 -0
  46. package/dist/indexing/chunking/raw-text.d.ts +11 -0
  47. package/dist/indexing/chunking/raw-text.d.ts.map +1 -0
  48. package/dist/indexing/chunking/raw-text.js +59 -0
  49. package/dist/indexing/chunking/raw-text.js.map +1 -0
  50. package/dist/indexing/embeddings.d.ts +10 -0
  51. package/dist/indexing/embeddings.d.ts.map +1 -0
  52. package/dist/indexing/embeddings.js +78 -0
  53. package/dist/indexing/embeddings.js.map +1 -0
  54. package/dist/indexing/orchestrator.d.ts +69 -0
  55. package/dist/indexing/orchestrator.d.ts.map +1 -0
  56. package/dist/indexing/orchestrator.js +387 -0
  57. package/dist/indexing/orchestrator.js.map +1 -0
  58. package/dist/indexing/source-indexer.d.ts +68 -0
  59. package/dist/indexing/source-indexer.d.ts.map +1 -0
  60. package/dist/indexing/source-indexer.js +379 -0
  61. package/dist/indexing/source-indexer.js.map +1 -0
  62. package/dist/indexing/url-derivation.d.ts +7 -0
  63. package/dist/indexing/url-derivation.d.ts.map +1 -0
  64. package/dist/indexing/url-derivation.js +31 -0
  65. package/dist/indexing/url-derivation.js.map +1 -0
  66. package/dist/mcp/server.d.ts +10 -0
  67. package/dist/mcp/server.d.ts.map +1 -0
  68. package/dist/mcp/server.js +67 -0
  69. package/dist/mcp/server.js.map +1 -0
  70. package/dist/mcp/tools/bash-fs.d.ts +19 -0
  71. package/dist/mcp/tools/bash-fs.d.ts.map +1 -0
  72. package/dist/mcp/tools/bash-fs.js +134 -0
  73. package/dist/mcp/tools/bash-fs.js.map +1 -0
  74. package/dist/mcp/tools/bash-grep.d.ts +29 -0
  75. package/dist/mcp/tools/bash-grep.d.ts.map +1 -0
  76. package/dist/mcp/tools/bash-grep.js +153 -0
  77. package/dist/mcp/tools/bash-grep.js.map +1 -0
  78. package/dist/mcp/tools/bash-related.d.ts +14 -0
  79. package/dist/mcp/tools/bash-related.d.ts.map +1 -0
  80. package/dist/mcp/tools/bash-related.js +54 -0
  81. package/dist/mcp/tools/bash-related.js.map +1 -0
  82. package/dist/mcp/tools/bash-session.d.ts +23 -0
  83. package/dist/mcp/tools/bash-session.d.ts.map +1 -0
  84. package/dist/mcp/tools/bash-session.js +60 -0
  85. package/dist/mcp/tools/bash-session.js.map +1 -0
  86. package/dist/mcp/tools/bash-telemetry.d.ts +26 -0
  87. package/dist/mcp/tools/bash-telemetry.d.ts.map +1 -0
  88. package/dist/mcp/tools/bash-telemetry.js +53 -0
  89. package/dist/mcp/tools/bash-telemetry.js.map +1 -0
  90. package/dist/mcp/tools/bash-virtual-files.d.ts +3 -0
  91. package/dist/mcp/tools/bash-virtual-files.d.ts.map +1 -0
  92. package/dist/mcp/tools/bash-virtual-files.js +65 -0
  93. package/dist/mcp/tools/bash-virtual-files.js.map +1 -0
  94. package/dist/mcp/tools/bash.d.ts +25 -0
  95. package/dist/mcp/tools/bash.d.ts.map +1 -0
  96. package/dist/mcp/tools/bash.js +140 -0
  97. package/dist/mcp/tools/bash.js.map +1 -0
  98. package/dist/mcp/tools/collect.d.ts +13 -0
  99. package/dist/mcp/tools/collect.d.ts.map +1 -0
  100. package/dist/mcp/tools/collect.js +56 -0
  101. package/dist/mcp/tools/collect.js.map +1 -0
  102. package/dist/mcp/tools/search.d.ts +5 -0
  103. package/dist/mcp/tools/search.d.ts.map +1 -0
  104. package/dist/mcp/tools/search.js +68 -0
  105. package/dist/mcp/tools/search.js.map +1 -0
  106. package/dist/types.d.ts +1237 -0
  107. package/dist/types.d.ts.map +1 -0
  108. package/dist/types.js +163 -0
  109. package/dist/types.js.map +1 -0
  110. package/dist/webhooks/github.d.ts +12 -0
  111. package/dist/webhooks/github.d.ts.map +1 -0
  112. package/dist/webhooks/github.js +117 -0
  113. package/dist/webhooks/github.js.map +1 -0
  114. package/package.json +48 -0
@@ -0,0 +1,51 @@
1
import type { Chunk, ChunkResult, IndexState } from "../types.js";
/**
 * Cosine similarity search on the unified chunks table.
 * Optionally filtered by source_name. Returns results ordered by similarity
 * (highest first).
 */
export declare function searchChunks(embedding: number[], limit: number, sourceName?: string): Promise<ChunkResult[]>;
/**
 * Text search (ILIKE) on the unified chunks table.
 * Optionally filtered by source_name. Returns results ordered by id.
 */
export declare function textSearchChunks(pattern: string, limit: number, sourceName?: string): Promise<ChunkResult[]>;
/**
 * Batch upsert chunks. Uses ON CONFLICT to update existing rows matched by
 * (source_name, file_path, chunk_index).
 */
export declare function upsertChunks(chunks: Chunk[]): Promise<void>;
/**
 * Delete all chunks for a given source + file path.
 */
export declare function deleteChunksByFile(sourceName: string, filePath: string): Promise<void>;
/**
 * Delete all chunks for a source (useful for full reindex).
 */
export declare function deleteChunksBySource(sourceName: string): Promise<void>;
/**
 * Get the indexing state for a given source.
 * Resolves to null when no state row exists for (sourceType, sourceKey).
 */
export declare function getIndexState(sourceType: string, sourceKey: string): Promise<IndexState | null>;
/**
 * Upsert the indexing state for a given source.
 */
export declare function upsertIndexState(state: IndexState): Promise<void>;
/**
 * Insert a row into the collected_data table.
 */
export declare function insertCollectedData(toolName: string, data: Record<string, unknown>): Promise<void>;
/**
 * Aggregate statistics returned by getIndexStats().
 */
export interface IndexStats {
    /** Total row count of the chunks table. */
    totalChunks: number;
    /** Per-source chunk counts, ordered by source_name. */
    bySource: Array<{
        source_name: string;
        count: number;
    }>;
    /** Count of distinct non-null repo_url values in chunks. */
    indexedRepos: number;
    /** Every index_state row, ordered by (source_type, source_key). */
    indexStates: IndexState[];
}
/**
 * Get aggregate statistics for the health endpoint.
 */
export declare function getIndexStats(): Promise<IndexStats>;
//# sourceMappingURL=queries.d.ts.map
51
+ //# sourceMappingURL=queries.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"queries.d.ts","sourceRoot":"","sources":["../../src/db/queries.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,KAAK,EAAE,WAAW,EAAE,UAAU,EAAe,MAAM,aAAa,CAAC;AAM/E;;;;GAIG;AACH,wBAAsB,YAAY,CAC9B,SAAS,EAAE,MAAM,EAAE,EACnB,KAAK,EAAE,MAAM,EACb,UAAU,CAAC,EAAE,MAAM,GACpB,OAAO,CAAC,WAAW,EAAE,CAAC,CA6DxB;AAED;;;GAGG;AACH,wBAAsB,gBAAgB,CAClC,OAAO,EAAE,MAAM,EACf,KAAK,EAAE,MAAM,EACb,UAAU,CAAC,EAAE,MAAM,GACpB,OAAO,CAAC,WAAW,EAAE,CAAC,CA2BxB;AAMD;;;GAGG;AACH,wBAAsB,YAAY,CAAC,MAAM,EAAE,KAAK,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAuDjE;AAMD;;GAEG;AACH,wBAAsB,kBAAkB,CACpC,UAAU,EAAE,MAAM,EAClB,QAAQ,EAAE,MAAM,GACjB,OAAO,CAAC,IAAI,CAAC,CAMf;AAED;;GAEG;AACH,wBAAsB,oBAAoB,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAG5E;AAMD;;GAEG;AACH,wBAAsB,aAAa,CAC/B,UAAU,EAAE,MAAM,EAClB,SAAS,EAAE,MAAM,GAClB,OAAO,CAAC,UAAU,GAAG,IAAI,CAAC,CAmB5B;AAED;;GAEG;AACH,wBAAsB,gBAAgB,CAAC,KAAK,EAAE,UAAU,GAAG,OAAO,CAAC,IAAI,CAAC,CAqBvE;AAMD;;GAEG;AACH,wBAAsB,mBAAmB,CACrC,QAAQ,EAAE,MAAM,EAChB,IAAI,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAC9B,OAAO,CAAC,IAAI,CAAC,CAMf;AAMD,MAAM,WAAW,UAAU;IACvB,WAAW,EAAE,MAAM,CAAC;IACpB,QAAQ,EAAE,KAAK,CAAC;QAAE,WAAW,EAAE,MAAM,CAAC;QAAC,KAAK,EAAE,MAAM,CAAA;KAAE,CAAC,CAAC;IACxD,YAAY,EAAE,MAAM,CAAC;IACrB,WAAW,EAAE,UAAU,EAAE,CAAC;CAC7B;AAED;;GAEG;AACH,wBAAsB,aAAa,IAAI,OAAO,CAAC,UAAU,CAAC,CA8BzD"}
@@ -0,0 +1,271 @@
1
+ import pgvector from "pgvector";
2
+ import { getPool } from "./client.js";
3
+ // ---------------------------------------------------------------------------
4
+ // Search
5
+ // ---------------------------------------------------------------------------
6
/**
 * Cosine similarity search on the unified chunks table.
 * Optionally filtered by source_name. Returns results ordered by similarity
 * (highest first).
 *
 * @param {number[]} embedding - Query embedding vector.
 * @param {number} limit - Maximum number of rows to return.
 * @param {string} [sourceName] - Optional filter on chunks.source_name.
 * @returns {Promise<ChunkResult[]>} Rows ordered by descending similarity.
 */
export async function searchChunks(embedding, limit, sourceName) {
    const pool = getPool();
    // Build the statement once; only the WHERE clause differs when a source
    // filter is requested. $1 is always the query vector, the limit is the
    // last parameter. (Previously the whole SELECT was duplicated per branch.)
    const params = [pgvector.toSql(embedding)];
    let whereClause = "";
    if (sourceName) {
        params.push(sourceName);
        whereClause = "WHERE source_name = $2";
    }
    params.push(limit);
    const sql = `
        SELECT
            id,
            source_name,
            source_url,
            title,
            content,
            repo_url,
            file_path,
            start_line,
            end_line,
            language,
            1 - (embedding <=> $1) AS similarity
        FROM chunks
        ${whereClause}
        ORDER BY embedding <=> $1
        LIMIT $${params.length}
    `;
    const { rows } = await pool.query(sql, params);
    // Normalize optional columns to null; parseFloat tolerates the driver
    // returning similarity either as a string or as a number.
    return rows.map((r) => ({
        id: r.id,
        source_name: r.source_name,
        source_url: r.source_url ?? null,
        title: r.title ?? null,
        content: r.content,
        repo_url: r.repo_url ?? null,
        file_path: r.file_path,
        start_line: r.start_line ?? null,
        end_line: r.end_line ?? null,
        language: r.language ?? null,
        similarity: parseFloat(r.similarity),
    }));
}
71
/**
 * Text search (ILIKE) on the unified chunks table.
 * Optionally filtered by source_name. Returns results ordered by id.
 *
 * @param {string} pattern - Literal substring to search for; LIKE wildcards
 *   in the input are escaped so they match literally.
 * @param {number} limit - Maximum number of rows to return.
 * @param {string} [sourceName] - Optional filter on chunks.source_name.
 * @returns {Promise<ChunkResult[]>} Matching rows; similarity is fixed at 0.0
 *   so results share the ChunkResult shape with searchChunks.
 */
export async function textSearchChunks(pattern, limit, sourceName) {
    const pool = getPool();
    // Escape %, _ and the escape character itself so user input is matched
    // literally inside the surrounding %...% wildcards.
    const escaped = pattern.replace(/[%_\\]/g, '\\$&');
    const likePattern = `%${escaped}%`;
    // Shared projection (previously duplicated across both branches).
    const columns = "id, source_name, source_url, title, content, repo_url, file_path, start_line, end_line, language, 0.0 AS similarity";
    let sql;
    let params;
    if (sourceName) {
        sql = `SELECT ${columns} FROM chunks WHERE source_name = $1 AND content ILIKE $2 ORDER BY id LIMIT $3`;
        params = [sourceName, likePattern, limit];
    }
    else {
        sql = `SELECT ${columns} FROM chunks WHERE content ILIKE $1 ORDER BY id LIMIT $2`;
        params = [likePattern, limit];
    }
    const { rows } = await pool.query(sql, params);
    return rows.map((r) => ({
        id: r.id,
        source_name: r.source_name,
        source_url: r.source_url ?? null,
        title: r.title ?? null,
        content: r.content,
        repo_url: r.repo_url ?? null,
        file_path: r.file_path,
        start_line: r.start_line ?? null,
        end_line: r.end_line ?? null,
        language: r.language ?? null,
        similarity: parseFloat(r.similarity),
    }));
}
104
+ // ---------------------------------------------------------------------------
105
+ // Upsert
106
+ // ---------------------------------------------------------------------------
107
/**
 * Batch upsert chunks inside a single transaction. Rows are matched by the
 * (source_name, file_path, chunk_index) unique constraint; existing rows are
 * updated in place and indexed_at is refreshed to NOW().
 *
 * @param {Chunk[]} chunks - Chunks to insert or update. No-op when empty.
 * @returns {Promise<void>}
 * @throws Rethrows any database error after rolling the transaction back.
 */
export async function upsertChunks(chunks) {
    if (chunks.length === 0)
        return;
    const pool = getPool();
    const client = await pool.connect();
    try {
        await client.query("BEGIN");
        const sql = `
            INSERT INTO chunks
                (source_name, source_url, title, content, embedding, repo_url,
                 file_path, start_line, end_line, language, chunk_index,
                 metadata, commit_sha, indexed_at)
            VALUES
                ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, NOW())
            ON CONFLICT (source_name, file_path, chunk_index) DO UPDATE SET
                source_url = EXCLUDED.source_url,
                title = EXCLUDED.title,
                content = EXCLUDED.content,
                embedding = EXCLUDED.embedding,
                repo_url = EXCLUDED.repo_url,
                start_line = EXCLUDED.start_line,
                end_line = EXCLUDED.end_line,
                language = EXCLUDED.language,
                metadata = EXCLUDED.metadata,
                commit_sha = EXCLUDED.commit_sha,
                indexed_at = NOW()
        `;
        // Rows are written sequentially on the one checked-out connection so
        // the whole batch commits or rolls back atomically.
        for (const chunk of chunks) {
            await client.query(sql, [
                chunk.source_name,
                chunk.source_url ?? null,
                chunk.title ?? null,
                chunk.content,
                pgvector.toSql(chunk.embedding),
                // Normalized with ?? null for consistency with the other
                // optional columns (was previously passed through as-is).
                chunk.repo_url ?? null,
                chunk.file_path,
                chunk.start_line ?? null,
                chunk.end_line ?? null,
                chunk.language ?? null,
                chunk.chunk_index,
                JSON.stringify(chunk.metadata ?? {}),
                chunk.commit_sha ?? null,
            ]);
        }
        await client.query("COMMIT");
    }
    catch (err) {
        await client.query("ROLLBACK");
        throw err;
    }
    finally {
        client.release();
    }
}
165
+ // ---------------------------------------------------------------------------
166
+ // Delete
167
+ // ---------------------------------------------------------------------------
168
/**
 * Remove every chunk row belonging to one file within one source.
 *
 * @param {string} sourceName - Value of chunks.source_name to match.
 * @param {string} filePath - Value of chunks.file_path to match.
 * @returns {Promise<void>}
 */
export async function deleteChunksByFile(sourceName, filePath) {
    const sql = "DELETE FROM chunks WHERE source_name = $1 AND file_path = $2";
    await getPool().query(sql, [sourceName, filePath]);
}
175
/**
 * Remove every chunk row belonging to a source (useful for full reindex).
 *
 * @param {string} sourceName - Value of chunks.source_name to match.
 * @returns {Promise<void>}
 */
export async function deleteChunksBySource(sourceName) {
    const sql = "DELETE FROM chunks WHERE source_name = $1";
    await getPool().query(sql, [sourceName]);
}
182
+ // ---------------------------------------------------------------------------
183
+ // Index state
184
+ // ---------------------------------------------------------------------------
185
/**
 * Get the indexing state for a given source.
 *
 * @param {string} sourceType - Source category key (exact values are defined
 *   by callers; not constrained here).
 * @param {string} sourceKey - Identifier of the source within its type.
 * @returns {Promise<IndexState | null>} The stored state row, or null when
 *   no row exists for (sourceType, sourceKey).
 */
export async function getIndexState(sourceType, sourceKey) {
    const pool = getPool();
    const sql = `
        SELECT source_type, source_key, last_commit_sha, last_indexed_at, status, error_message
        FROM index_state
        WHERE source_type = $1 AND source_key = $2
    `;
    const { rows } = await pool.query(sql, [sourceType, sourceKey]);
    if (rows.length === 0)
        return null;
    const row = rows[0];
    // Copy into a plain object so callers receive exactly the IndexState
    // shape rather than the raw driver row.
    return {
        source_type: row.source_type,
        source_key: row.source_key,
        last_commit_sha: row.last_commit_sha,
        last_indexed_at: row.last_indexed_at,
        status: row.status,
        error_message: row.error_message,
    };
}
208
/**
 * Upsert the indexing state for a given source.
 * Rows are matched on the (source_type, source_key) unique constraint; all
 * other columns are overwritten on conflict.
 *
 * @param {IndexState} state - State to persist. Missing optional fields are
 *   stored as NULL; a missing status defaults to "idle".
 * @returns {Promise<void>}
 */
export async function upsertIndexState(state) {
    const pool = getPool();
    const sql = `
        INSERT INTO index_state
            (source_type, source_key, last_commit_sha, last_indexed_at, status, error_message)
        VALUES
            ($1, $2, $3, $4, $5, $6)
        ON CONFLICT (source_type, source_key) DO UPDATE SET
            last_commit_sha = EXCLUDED.last_commit_sha,
            last_indexed_at = EXCLUDED.last_indexed_at,
            status = EXCLUDED.status,
            error_message = EXCLUDED.error_message
    `;
    await pool.query(sql, [
        state.source_type,
        state.source_key,
        state.last_commit_sha ?? null,
        state.last_indexed_at ?? null,
        state.status ?? "idle", // "idle" mirrors the column's schema default
        state.error_message ?? null,
    ]);
}
233
+ // ---------------------------------------------------------------------------
234
+ // Collected data
235
+ // ---------------------------------------------------------------------------
236
/**
 * Record one tool payload as a new row in the collected_data table.
 *
 * @param {string} toolName - Name of the tool that produced the payload.
 * @param {Record<string, unknown>} data - Payload, serialized to JSON.
 * @returns {Promise<void>}
 */
export async function insertCollectedData(toolName, data) {
    const sql = "INSERT INTO collected_data (tool_name, data) VALUES ($1, $2)";
    const pool = getPool();
    await pool.query(sql, [toolName, JSON.stringify(data)]);
}
243
/**
 * Get aggregate statistics for the health endpoint.
 * The four statements are independent, so they run in parallel.
 *
 * @returns {Promise<IndexStats>} Totals, per-source counts, distinct repo
 *   count, and every index_state row.
 */
export async function getIndexStats() {
    const pool = getPool();
    const statements = [
        "SELECT count(*)::int AS count FROM chunks",
        "SELECT source_name, count(*)::int AS count FROM chunks GROUP BY source_name ORDER BY source_name",
        "SELECT count(DISTINCT repo_url)::int AS count FROM chunks WHERE repo_url IS NOT NULL",
        "SELECT source_type, source_key, last_commit_sha, last_indexed_at, status, error_message FROM index_state ORDER BY source_type, source_key",
    ];
    const [totalRes, bySourceRes, repoRes, stateRes] = await Promise.all(
        statements.map((stmt) => pool.query(stmt)),
    );
    const toSourceCount = (r) => ({
        source_name: r.source_name,
        count: r.count,
    });
    const toIndexState = (r) => ({
        source_type: r.source_type,
        source_key: r.source_key,
        last_commit_sha: r.last_commit_sha,
        last_indexed_at: r.last_indexed_at,
        status: r.status,
        error_message: r.error_message,
    });
    return {
        totalChunks: totalRes.rows[0]?.count ?? 0,
        bySource: bySourceRes.rows.map(toSourceCount),
        indexedRepos: repoRes.rows[0]?.count ?? 0,
        indexStates: stateRes.rows.map(toIndexState),
    };
}
271
+ //# sourceMappingURL=queries.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"queries.js","sourceRoot":"","sources":["../../src/db/queries.ts"],"names":[],"mappings":"AAAA,OAAO,QAAQ,MAAM,UAAU,CAAC;AAChC,OAAO,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AAGtC,8EAA8E;AAC9E,SAAS;AACT,8EAA8E;AAE9E;;;;GAIG;AACH,MAAM,CAAC,KAAK,UAAU,YAAY,CAC9B,SAAmB,EACnB,KAAa,EACb,UAAmB;IAEnB,MAAM,IAAI,GAAG,OAAO,EAAE,CAAC;IAEvB,IAAI,GAAW,CAAC;IAChB,IAAI,MAAiB,CAAC;IAEtB,IAAI,UAAU,EAAE,CAAC;QACb,GAAG,GAAG;;;;;;;;;;;;;;;;;SAiBL,CAAC;QACF,MAAM,GAAG,CAAC,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,EAAE,UAAU,EAAE,KAAK,CAAC,CAAC;IAC5D,CAAC;SAAM,CAAC;QACJ,GAAG,GAAG;;;;;;;;;;;;;;;;SAgBL,CAAC;QACF,MAAM,GAAG,CAAC,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,EAAE,KAAK,CAAC,CAAC;IAChD,CAAC;IAED,MAAM,EAAE,IAAI,EAAE,GAAG,MAAM,IAAI,CAAC,KAAK,CAAC,GAAG,EAAE,MAAM,CAAC,CAAC;IAC/C,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC,CAA0B,EAAE,EAAE,CAAC,CAAC;QAC7C,EAAE,EAAE,CAAC,CAAC,EAAY;QAClB,WAAW,EAAE,CAAC,CAAC,WAAqB;QACpC,UAAU,EAAG,CAAC,CAAC,UAAqB,IAAI,IAAI;QAC5C,KAAK,EAAG,CAAC,CAAC,KAAgB,IAAI,IAAI;QAClC,OAAO,EAAE,CAAC,CAAC,OAAiB;QAC5B,QAAQ,EAAG,CAAC,CAAC,QAAmB,IAAI,IAAI;QACxC,SAAS,EAAE,CAAC,CAAC,SAAmB;QAChC,UAAU,EAAG,CAAC,CAAC,UAAqB,IAAI,IAAI;QAC5C,QAAQ,EAAG,CAAC,CAAC,QAAmB,IAAI,IAAI;QACxC,QAAQ,EAAG,CAAC,CAAC,QAAmB,IAAI,IAAI;QACxC,UAAU,EAAE,UAAU,CAAC,CAAC,CAAC,UAAoB,CAAC;KACjD,CAAC,CAAC,CAAC;AACR,CAAC;AAED;;;GAGG;AACH,MAAM,CAAC,KAAK,UAAU,gBAAgB,CAClC,OAAe,EACf,KAAa,EACb,UAAmB;IAEnB,MAAM,IAAI,GAAG,OAAO,EAAE,CAAC;IACvB,MAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC,SAAS,EAAE,MAAM,CAAC,CAAC;IACnD,MAAM,WAAW,GAAG,IAAI,OAAO,GAAG,CAAC;IACnC,IAAI,GAAW,CAAC;IAChB,IAAI,MAAiB,CAAC;IACtB,IAAI,UAAU,EAAE,CAAC;QACb,GAAG,GAAG,yMAAyM,CAAC;QAChN,MAAM,GAAG,CAAC,UAAU,EAAE,WAAW,EAAE,KAAK,CAAC,CAAC;IAC9C,CAAC;SAAM,CAAC;QACJ,GAAG,GAAG,oLAAoL,CAAC;QAC3L,MAAM,GAAG,CAAC,WAAW,EAAE,KAAK,CAAC,CAAC;IAClC,CAAC;IACD,MAAM,EAAE,IAAI,EAAE,GAAG,MAAM,IAAI,CAAC,KAAK,CAAC,GAAG,EAAE,MAAM,CAAC,CAAC;IAC/C,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC,CAA0B,EAAE,EAAE,CAAC,CAAC;QAC7C,EAAE,EAAE,CAAC,CAAC,EAAY;QAClB,WAAW,EAAE,CAAC,CAAC,WAAqB;QACpC,UAAU,EAAG,CAAC,CAAC,UAAqB,IAAI,IAAI;QAC5C,KAAK,
EAAG,CAAC,CAAC,KAAgB,IAAI,IAAI;QAClC,OAAO,EAAE,CAAC,CAAC,OAAiB;QAC5B,QAAQ,EAAG,CAAC,CAAC,QAAmB,IAAI,IAAI;QACxC,SAAS,EAAE,CAAC,CAAC,SAAmB;QAChC,UAAU,EAAG,CAAC,CAAC,UAAqB,IAAI,IAAI;QAC5C,QAAQ,EAAG,CAAC,CAAC,QAAmB,IAAI,IAAI;QACxC,QAAQ,EAAG,CAAC,CAAC,QAAmB,IAAI,IAAI;QACxC,UAAU,EAAE,UAAU,CAAC,CAAC,CAAC,UAAoB,CAAC;KACjD,CAAC,CAAC,CAAC;AACR,CAAC;AAED,8EAA8E;AAC9E,SAAS;AACT,8EAA8E;AAE9E;;;GAGG;AACH,MAAM,CAAC,KAAK,UAAU,YAAY,CAAC,MAAe;IAC9C,IAAI,MAAM,CAAC,MAAM,KAAK,CAAC;QAAE,OAAO;IAEhC,MAAM,IAAI,GAAG,OAAO,EAAE,CAAC;IACvB,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,OAAO,EAAE,CAAC;IAEpC,IAAI,CAAC;QACD,MAAM,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAE5B,MAAM,GAAG,GAAG;;;;;;;;;;;;;;;;;;;SAmBX,CAAC;QAEF,KAAK,MAAM,KAAK,IAAI,MAAM,EAAE,CAAC;YACzB,MAAM,MAAM,CAAC,KAAK,CAAC,GAAG,EAAE;gBACpB,KAAK,CAAC,WAAW;gBACjB,KAAK,CAAC,UAAU,IAAI,IAAI;gBACxB,KAAK,CAAC,KAAK,IAAI,IAAI;gBACnB,KAAK,CAAC,OAAO;gBACb,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,SAAS,CAAC;gBAC/B,KAAK,CAAC,QAAQ;gBACd,KAAK,CAAC,SAAS;gBACf,KAAK,CAAC,UAAU,IAAI,IAAI;gBACxB,KAAK,CAAC,QAAQ,IAAI,IAAI;gBACtB,KAAK,CAAC,QAAQ,IAAI,IAAI;gBACtB,KAAK,CAAC,WAAW;gBACjB,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,QAAQ,IAAI,EAAE,CAAC;gBACpC,KAAK,CAAC,UAAU,IAAI,IAAI;aAC3B,CAAC,CAAC;QACP,CAAC;QAED,MAAM,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;IACjC,CAAC;IAAC,OAAO,GAAG,EAAE,CAAC;QACX,MAAM,MAAM,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC;QAC/B,MAAM,GAAG,CAAC;IACd,CAAC;YAAS,CAAC;QACP,MAAM,CAAC,OAAO,EAAE,CAAC;IACrB,CAAC;AACL,CAAC;AAED,8EAA8E;AAC9E,SAAS;AACT,8EAA8E;AAE9E;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,kBAAkB,CACpC,UAAkB,EAClB,QAAgB;IAEhB,MAAM,IAAI,GAAG,OAAO,EAAE,CAAC;IACvB,MAAM,IAAI,CAAC,KAAK,CACZ,8DAA8D,EAC9D,CAAC,UAAU,EAAE,QAAQ,CAAC,CACzB,CAAC;AACN,CAAC;AAED;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,oBAAoB,CAAC,UAAkB;IACzD,MAAM,IAAI,GAAG,OAAO,EAAE,CAAC;IACvB,MAAM,IAAI,CAAC,KAAK,CAAC,2CAA2C,EAAE,CAAC,UAAU,CAAC,CAAC,CAAC;AAChF,CAAC;AAED,8EAA8E;AAC9E,cAAc;AACd,8EAA8E;AAE9E;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,aAAa,CAC/B,UAAkB,EAClB,SAAiB;IAEjB,MAAM,IAAI,GAAG,OAAO,EAAE,CAAC;IACvB,MAAM,GAAG,GAAG;;;;KAIX,CAAC;IACF,MAAM,EAAE,IAAI,EAAE,GAAG,
MAAM,IAAI,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,UAAU,EAAE,SAAS,CAAC,CAAC,CAAC;IAChE,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC;QAAE,OAAO,IAAI,CAAC;IAEnC,MAAM,GAAG,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC;IACpB,OAAO;QACH,WAAW,EAAE,GAAG,CAAC,WAAW;QAC5B,UAAU,EAAE,GAAG,CAAC,UAAU;QAC1B,eAAe,EAAE,GAAG,CAAC,eAAe;QACpC,eAAe,EAAE,GAAG,CAAC,eAAe;QACpC,MAAM,EAAE,GAAG,CAAC,MAAqB;QACjC,aAAa,EAAE,GAAG,CAAC,aAAa;KACnC,CAAC;AACN,CAAC;AAED;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,gBAAgB,CAAC,KAAiB;IACpD,MAAM,IAAI,GAAG,OAAO,EAAE,CAAC;IACvB,MAAM,GAAG,GAAG;;;;;;;;;;KAUX,CAAC;IACF,MAAM,IAAI,CAAC,KAAK,CAAC,GAAG,EAAE;QAClB,KAAK,CAAC,WAAW;QACjB,KAAK,CAAC,UAAU;QAChB,KAAK,CAAC,eAAe,IAAI,IAAI;QAC7B,KAAK,CAAC,eAAe,IAAI,IAAI;QAC7B,KAAK,CAAC,MAAM,IAAI,MAAM;QACtB,KAAK,CAAC,aAAa,IAAI,IAAI;KAC9B,CAAC,CAAC;AACP,CAAC;AAED,8EAA8E;AAC9E,iBAAiB;AACjB,8EAA8E;AAE9E;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,mBAAmB,CACrC,QAAgB,EAChB,IAA6B;IAE7B,MAAM,IAAI,GAAG,OAAO,EAAE,CAAC;IACvB,MAAM,IAAI,CAAC,KAAK,CACZ,8DAA8D,EAC9D,CAAC,QAAQ,EAAE,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CACnC,CAAC;AACN,CAAC;AAaD;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,aAAa;IAC/B,MAAM,IAAI,GAAG,OAAO,EAAE,CAAC;IAEvB,MAAM,CAAC,UAAU,EAAE,QAAQ,EAAE,SAAS,EAAE,MAAM,CAAC,GAAG,MAAM,OAAO,CAAC,GAAG,CAAC;QAChE,IAAI,CAAC,KAAK,CAAC,2CAA2C,CAAC;QACvD,IAAI,CAAC,KAAK,CACN,kGAAkG,CACrG;QACD,IAAI,CAAC,KAAK,CAAC,sFAAsF,CAAC;QAClG,IAAI,CAAC,KAAK,CACN,2IAA2I,CAC9I;KACJ,CAAC,CAAC;IAEH,OAAO;QACH,WAAW,EAAE,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,KAAK,IAAI,CAAC;QAC3C,QAAQ,EAAE,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAA0B,EAAE,EAAE,CAAC,CAAC;YACzD,WAAW,EAAE,CAAC,CAAC,WAAqB;YACpC,KAAK,EAAE,CAAC,CAAC,KAAe;SAC3B,CAAC,CAAC;QACH,YAAY,EAAE,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,KAAK,IAAI,CAAC;QAC3C,WAAW,EAAE,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAA0B,EAAE,EAAE,CAAC,CAAC;YAC1D,WAAW,EAAE,CAAC,CAAC,WAAqB;YACpC,UAAU,EAAE,CAAC,CAAC,UAAoB;YAClC,eAAe,EAAE,CAAC,CAAC,eAAgC;YACnD,eAAe,EAAE,CAAC,CAAC,eAA8B;YACjD,MAAM,EAAE,CAAC,CAAC,MAAqB;YAC/B,aAAa,EAAE,CAAC,CAAC,aAA8B;SAClD,CAAC,CAAC;KACN,CAAC;AACN,CAAC"}
@@ -0,0 +1,11 @@
1
/**
 * Generate the full DDL for creating the unified chunks schema.
 * The vector dimension is parameterized from config.
 *
 * @param dimensions - Embedding vector dimension used for the chunks table.
 * @returns SQL DDL string (idempotent: IF NOT EXISTS throughout).
 */
export declare function generateSchema(dimensions: number): string;
/**
 * Generate migration SQL that drops the old split tables.
 * Safe to run even if they don't exist (IF EXISTS).
 *
 * @returns SQL DROP statements for the legacy doc/code chunk tables.
 */
export declare function generateMigration(): string;
11
+ //# sourceMappingURL=schema.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"schema.d.ts","sourceRoot":"","sources":["../../src/db/schema.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,wBAAgB,cAAc,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM,CA6CzD;AAED;;;GAGG;AACH,wBAAgB,iBAAiB,IAAI,MAAM,CAK1C"}
@@ -0,0 +1,63 @@
1
+ // Programmatic DDL generation for the unified chunks schema.
2
+ // Replaces the old static schema.sql file.
3
/**
 * Generate the full DDL for creating the unified chunks schema.
 * The vector dimension is parameterized from config.
 *
 * @param {number} dimensions - Embedding vector dimension for chunks.embedding.
 *   Must be a positive integer: the value is interpolated directly into DDL,
 *   so it is validated here to keep the generated SQL well-formed and to rule
 *   out injection via a malformed config value.
 * @returns {string} Idempotent DDL (IF NOT EXISTS throughout).
 * @throws {Error} When dimensions is not a positive integer.
 */
export function generateSchema(dimensions) {
    if (!Number.isInteger(dimensions) || dimensions <= 0) {
        throw new Error(`generateSchema: dimensions must be a positive integer, got ${dimensions}`);
    }
    return `
CREATE EXTENSION IF NOT EXISTS vector;

CREATE TABLE IF NOT EXISTS chunks (
    id SERIAL PRIMARY KEY,
    source_name TEXT NOT NULL,
    source_url TEXT,
    title TEXT,
    content TEXT NOT NULL,
    embedding vector(${dimensions}) NOT NULL,
    repo_url TEXT,
    file_path TEXT NOT NULL,
    start_line INTEGER,
    end_line INTEGER,
    language TEXT,
    chunk_index INTEGER NOT NULL,
    metadata JSONB NOT NULL DEFAULT '{}',
    indexed_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    commit_sha TEXT,
    CONSTRAINT chunks_source_file_chunk_uniq UNIQUE (source_name, file_path, chunk_index)
);

CREATE TABLE IF NOT EXISTS index_state (
    id SERIAL PRIMARY KEY,
    source_type TEXT NOT NULL,
    source_key TEXT NOT NULL,
    last_commit_sha TEXT,
    last_indexed_at TIMESTAMPTZ,
    status TEXT NOT NULL DEFAULT 'idle',
    error_message TEXT,
    CONSTRAINT index_state_source_uniq UNIQUE (source_type, source_key)
);

CREATE INDEX IF NOT EXISTS idx_chunks_embedding ON chunks USING hnsw (embedding vector_cosine_ops);
CREATE INDEX IF NOT EXISTS idx_chunks_source_name ON chunks (source_name);
CREATE INDEX IF NOT EXISTS idx_chunks_repo_url ON chunks (repo_url);

CREATE TABLE IF NOT EXISTS collected_data (
    id SERIAL PRIMARY KEY,
    tool_name TEXT NOT NULL,
    data JSONB NOT NULL,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
`;
}
53
/**
 * Generate migration SQL that drops the old split tables
 * (doc_chunks, code_chunks). Safe to run even if they don't exist
 * (IF EXISTS), and CASCADE removes any dependent objects.
 *
 * @returns {string} Idempotent DROP statements.
 */
export function generateMigration() {
    const statements = `
DROP TABLE IF EXISTS doc_chunks CASCADE;
DROP TABLE IF EXISTS code_chunks CASCADE;
`;
    return statements;
}
63
+ //# sourceMappingURL=schema.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"schema.js","sourceRoot":"","sources":["../../src/db/schema.ts"],"names":[],"mappings":"AAAA,6DAA6D;AAC7D,2CAA2C;AAE3C;;;GAGG;AACH,MAAM,UAAU,cAAc,CAAC,UAAkB;IAC7C,OAAO;;;;;;;;;6BASkB,UAAU;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAkCtC,CAAC;AACF,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,iBAAiB;IAC7B,OAAO;;;CAGV,CAAC;AACF,CAAC"}
@@ -0,0 +1,2 @@
1
+ export {};
2
+ //# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":""}