@tekmidian/pai 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/ARCHITECTURE.md +567 -0
- package/FEATURE.md +108 -0
- package/LICENSE +21 -0
- package/README.md +101 -0
- package/dist/auto-route-D7W6RE06.mjs +86 -0
- package/dist/auto-route-D7W6RE06.mjs.map +1 -0
- package/dist/cli/index.d.mts +1 -0
- package/dist/cli/index.mjs +5927 -0
- package/dist/cli/index.mjs.map +1 -0
- package/dist/config-DBh1bYM2.mjs +151 -0
- package/dist/config-DBh1bYM2.mjs.map +1 -0
- package/dist/daemon/index.d.mts +1 -0
- package/dist/daemon/index.mjs +56 -0
- package/dist/daemon/index.mjs.map +1 -0
- package/dist/daemon-mcp/index.d.mts +1 -0
- package/dist/daemon-mcp/index.mjs +185 -0
- package/dist/daemon-mcp/index.mjs.map +1 -0
- package/dist/daemon-v5O897D4.mjs +773 -0
- package/dist/daemon-v5O897D4.mjs.map +1 -0
- package/dist/db-4lSqLFb8.mjs +199 -0
- package/dist/db-4lSqLFb8.mjs.map +1 -0
- package/dist/db-BcDxXVBu.mjs +110 -0
- package/dist/db-BcDxXVBu.mjs.map +1 -0
- package/dist/detect-BHqYcjJ1.mjs +86 -0
- package/dist/detect-BHqYcjJ1.mjs.map +1 -0
- package/dist/detector-DKA83aTZ.mjs +74 -0
- package/dist/detector-DKA83aTZ.mjs.map +1 -0
- package/dist/embeddings-mfqv-jFu.mjs +91 -0
- package/dist/embeddings-mfqv-jFu.mjs.map +1 -0
- package/dist/factory-BDAiKtYR.mjs +42 -0
- package/dist/factory-BDAiKtYR.mjs.map +1 -0
- package/dist/index.d.mts +307 -0
- package/dist/index.d.mts.map +1 -0
- package/dist/index.mjs +11 -0
- package/dist/indexer-B20bPHL-.mjs +677 -0
- package/dist/indexer-B20bPHL-.mjs.map +1 -0
- package/dist/indexer-backend-BXaocO5r.mjs +360 -0
- package/dist/indexer-backend-BXaocO5r.mjs.map +1 -0
- package/dist/ipc-client-DPy7s3iu.mjs +156 -0
- package/dist/ipc-client-DPy7s3iu.mjs.map +1 -0
- package/dist/mcp/index.d.mts +1 -0
- package/dist/mcp/index.mjs +373 -0
- package/dist/mcp/index.mjs.map +1 -0
- package/dist/migrate-Bwj7qPaE.mjs +241 -0
- package/dist/migrate-Bwj7qPaE.mjs.map +1 -0
- package/dist/pai-marker-DX_mFLum.mjs +186 -0
- package/dist/pai-marker-DX_mFLum.mjs.map +1 -0
- package/dist/postgres-Ccvpc6fC.mjs +335 -0
- package/dist/postgres-Ccvpc6fC.mjs.map +1 -0
- package/dist/rolldown-runtime-95iHPtFO.mjs +18 -0
- package/dist/schemas-DjdwzIQ8.mjs +3405 -0
- package/dist/schemas-DjdwzIQ8.mjs.map +1 -0
- package/dist/search-PjftDxxs.mjs +282 -0
- package/dist/search-PjftDxxs.mjs.map +1 -0
- package/dist/sqlite-CHUrNtbI.mjs +90 -0
- package/dist/sqlite-CHUrNtbI.mjs.map +1 -0
- package/dist/tools-CLK4080-.mjs +805 -0
- package/dist/tools-CLK4080-.mjs.map +1 -0
- package/dist/utils-DEWdIFQ0.mjs +160 -0
- package/dist/utils-DEWdIFQ0.mjs.map +1 -0
- package/package.json +72 -0
- package/templates/README.md +181 -0
- package/templates/agent-prefs.example.md +362 -0
- package/templates/claude-md.template.md +733 -0
- package/templates/pai-project.template.md +13 -0
- package/templates/voices.example.json +251 -0
|
@@ -0,0 +1,335 @@
|
|
|
1
|
+
import pg from "pg";
|
|
2
|
+
|
|
3
|
+
//#region src/storage/postgres.ts
|
|
4
|
+
/**
|
|
5
|
+
* PostgresBackend — implements StorageBackend using PostgreSQL + pgvector.
|
|
6
|
+
*
|
|
7
|
+
* Vector similarity: pgvector's <=> cosine distance operator
|
|
8
|
+
* Full-text search: PostgreSQL tsvector/tsquery (replaces SQLite FTS5)
|
|
9
|
+
* Connection pooling: node-postgres Pool
|
|
10
|
+
*
|
|
11
|
+
* Schema is initialized via docker/init.sql.
|
|
12
|
+
* This module only handles runtime queries — schema creation is external.
|
|
13
|
+
*/
|
|
14
|
+
const { Pool: PgPool } = pg;
|
|
15
|
+
var PostgresBackend = class {
  // StorageBackend discriminator used by callers to tell backends apart.
  backendType = "postgres";
  // node-postgres connection pool (pg.Pool).
  pool;
  /**
   * Build the connection pool. When `config.connectionString` is absent, a
   * URL is assembled from the discrete parts; every part falls back to the
   * local dev defaults (user/password/database "pai", localhost:5432).
   */
  constructor(config) {
    this.pool = new PgPool({
      connectionString: config.connectionString ?? `postgresql://${config.user ?? "pai"}:${config.password ?? "pai"}@${config.host ?? "localhost"}:${config.port ?? 5432}/${config.database ?? "pai"}`,
      max: config.maxConnections ?? 5,
      connectionTimeoutMillis: config.connectionTimeoutMs ?? 5e3,
      idleTimeoutMillis: 3e4
    });
    // Log pool errors so they don't crash the process silently.
    this.pool.on("error", (err) => {
      process.stderr.write(`[pai-postgres] Pool error: ${err.message}\n`);
    });
  }
  /** Drain and close every pooled connection. */
  async close() {
    await this.pool.end();
  }
  /** Count indexed files and chunks. Resolves to `{ files, chunks }`. */
  async getStats() {
    const client = await this.pool.connect();
    try {
      // COUNT(*) is cast to text and parsed here; pg returns bigint columns
      // as strings.
      const filesResult = await client.query("SELECT COUNT(*)::text AS n FROM pai_files");
      const chunksResult = await client.query("SELECT COUNT(*)::text AS n FROM pai_chunks");
      return {
        files: parseInt(filesResult.rows[0]?.n ?? "0", 10),
        chunks: parseInt(chunksResult.rows[0]?.n ?? "0", 10)
      };
    } finally {
      client.release();
    }
  }
  /**
   * Test the connection by running a trivial query.
   * Returns null on success, error message on failure.
   */
  async testConnection() {
    let client = null;
    try {
      client = await this.pool.connect();
      await client.query("SELECT 1");
      return null;
    } catch (e) {
      return e instanceof Error ? e.message : String(e);
    } finally {
      client?.release();
    }
  }
  /** Stored content hash for a tracked file, or undefined if untracked. */
  async getFileHash(projectId, path) {
    return (await this.pool.query("SELECT hash FROM pai_files WHERE project_id = $1 AND path = $2", [projectId, path])).rows[0]?.hash;
  }
  /** Insert or refresh one file-tracking row, keyed on (project_id, path). */
  async upsertFile(file) {
    await this.pool.query(`INSERT INTO pai_files (project_id, path, source, tier, hash, mtime, size)
      VALUES ($1, $2, $3, $4, $5, $6, $7)
      ON CONFLICT (project_id, path) DO UPDATE SET
        source = EXCLUDED.source,
        tier = EXCLUDED.tier,
        hash = EXCLUDED.hash,
        mtime = EXCLUDED.mtime,
        size = EXCLUDED.size`, [
      file.projectId,
      file.path,
      file.source,
      file.tier,
      file.hash,
      file.mtime,
      file.size
    ]);
  }
  /** Ids of all chunks currently stored for one file. */
  async getChunkIds(projectId, path) {
    return (await this.pool.query("SELECT id FROM pai_chunks WHERE project_id = $1 AND path = $2", [projectId, path])).rows.map((r) => r.id);
  }
  /** Delete every chunk belonging to one file (no FK cascade covers this). */
  async deleteChunksForFile(projectId, path) {
    await this.pool.query("DELETE FROM pai_chunks WHERE project_id = $1 AND path = $2", [projectId, path]);
  }
  /**
   * Upsert a batch of chunks inside one transaction; ROLLBACK + rethrow on
   * the first failure. fts_vector is derived from the chunk text with
   * to_tsvector('simple', $9). The embedding column is NOT written here —
   * it is filled in later via updateEmbedding().
   */
  async insertChunks(chunks) {
    if (chunks.length === 0) return;
    const client = await this.pool.connect();
    try {
      await client.query("BEGIN");
      for (const c of chunks) await client.query(`INSERT INTO pai_chunks
        (id, project_id, source, tier, path, start_line, end_line, hash, text, updated_at, fts_vector)
        VALUES
        ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10,
        to_tsvector('simple', $9))
        ON CONFLICT (id) DO UPDATE SET
        project_id = EXCLUDED.project_id,
        source = EXCLUDED.source,
        tier = EXCLUDED.tier,
        path = EXCLUDED.path,
        start_line = EXCLUDED.start_line,
        end_line = EXCLUDED.end_line,
        hash = EXCLUDED.hash,
        text = EXCLUDED.text,
        updated_at = EXCLUDED.updated_at,
        fts_vector = EXCLUDED.fts_vector`, [
        c.id,
        c.projectId,
        c.source,
        c.tier,
        c.path,
        c.startLine,
        c.endLine,
        c.hash,
        c.text,
        c.updatedAt
      ]);
      await client.query("COMMIT");
    } catch (e) {
      await client.query("ROLLBACK");
      throw e;
    } finally {
      client.release();
    }
  }
  /**
   * Chunks still lacking an embedding, as `{ id, text }` rows, optionally
   * scoped to one project. (Despite the name, each row carries the text too
   * so the caller can embed it directly.)
   */
  async getUnembeddedChunkIds(projectId) {
    if (projectId !== void 0) return (await this.pool.query("SELECT id, text FROM pai_chunks WHERE embedding IS NULL AND project_id = $1 ORDER BY id", [projectId])).rows;
    return (await this.pool.query("SELECT id, text FROM pai_chunks WHERE embedding IS NULL ORDER BY id")).rows;
  }
  /**
   * Store one chunk's embedding. The Buffer of Float32 LE bytes (the SQLite
   * storage format) is decoded to numbers and passed as a pgvector literal
   * ("[v1,v2,...]") cast to ::vector.
   */
  async updateEmbedding(chunkId, embedding) {
    const vecStr = "[" + bufferToVector(embedding).join(",") + "]";
    await this.pool.query("UPDATE pai_chunks SET embedding = $1::vector WHERE id = $2", [vecStr, chunkId]);
  }
  /**
   * Keyword search over the tsvector column. The 'simple' dictionary keeps
   * tokens as-is (no language-specific stemming), so it behaves the same
   * for any language. Optional filters (projectIds/sources/tiers) are added
   * as numbered-placeholder IN clauses; paramIdx tracks the next $n slot.
   * Query failures are logged to stderr and reported as zero results.
   */
  async searchKeyword(query, opts) {
    const maxResults = opts?.maxResults ?? 10;
    const tsQuery = buildPgTsQuery(query);
    if (!tsQuery) return [];
    const conditions = ["fts_vector @@ to_tsquery('simple', $1)"];
    const params = [tsQuery];
    let paramIdx = 2;
    if (opts?.projectIds && opts.projectIds.length > 0) {
      const placeholders = opts.projectIds.map(() => `$${paramIdx++}`).join(", ");
      conditions.push(`project_id IN (${placeholders})`);
      params.push(...opts.projectIds);
    }
    if (opts?.sources && opts.sources.length > 0) {
      const placeholders = opts.sources.map(() => `$${paramIdx++}`).join(", ");
      conditions.push(`source IN (${placeholders})`);
      params.push(...opts.sources);
    }
    if (opts?.tiers && opts.tiers.length > 0) {
      const placeholders = opts.tiers.map(() => `$${paramIdx++}`).join(", ");
      conditions.push(`tier IN (${placeholders})`);
      params.push(...opts.tiers);
    }
    // After the filters, paramIdx points at the LIMIT slot.
    params.push(maxResults);
    const limitParam = `$${paramIdx}`;
    const sql = `
      SELECT
        project_id,
        path,
        start_line,
        end_line,
        text AS snippet,
        tier,
        source,
        ts_rank(fts_vector, to_tsquery('simple', $1)) AS rank_score
      FROM pai_chunks
      WHERE ${conditions.join(" AND ")}
      ORDER BY rank_score DESC
      LIMIT ${limitParam}
    `;
    try {
      return (await this.pool.query(sql, params)).rows.map((row) => ({
        projectId: row.project_id,
        path: row.path,
        startLine: row.start_line,
        endLine: row.end_line,
        snippet: row.snippet,
        score: row.rank_score,
        tier: row.tier,
        source: row.source
      }));
    } catch (e) {
      process.stderr.write(`[pai-postgres] searchKeyword error: ${e}\n`);
      return [];
    }
  }
  /**
   * Semantic search via pgvector. `<=>` is cosine DISTANCE, so
   * 1 - distance is the cosine similarity reported as `score`; ORDER BY the
   * raw distance returns nearest neighbors first. Rows below opts.minScore
   * (default -Infinity, i.e. no floor) are filtered out client-side.
   * Query failures are logged to stderr and reported as zero results.
   */
  async searchSemantic(queryEmbedding, opts) {
    const maxResults = opts?.maxResults ?? 10;
    const conditions = ["embedding IS NOT NULL"];
    const params = [];
    let paramIdx = 1;
    // pgvector literal: "[v1,v2,...]", bound once and referenced twice.
    const vecStr = "[" + Array.from(queryEmbedding).join(",") + "]";
    params.push(vecStr);
    const vecParam = `$${paramIdx++}`;
    if (opts?.projectIds && opts.projectIds.length > 0) {
      const placeholders = opts.projectIds.map(() => `$${paramIdx++}`).join(", ");
      conditions.push(`project_id IN (${placeholders})`);
      params.push(...opts.projectIds);
    }
    if (opts?.sources && opts.sources.length > 0) {
      const placeholders = opts.sources.map(() => `$${paramIdx++}`).join(", ");
      conditions.push(`source IN (${placeholders})`);
      params.push(...opts.sources);
    }
    if (opts?.tiers && opts.tiers.length > 0) {
      const placeholders = opts.tiers.map(() => `$${paramIdx++}`).join(", ");
      conditions.push(`tier IN (${placeholders})`);
      params.push(...opts.tiers);
    }
    params.push(maxResults);
    const limitParam = `$${paramIdx}`;
    const sql = `
      SELECT
        project_id,
        path,
        start_line,
        end_line,
        text AS snippet,
        tier,
        source,
        1 - (embedding <=> ${vecParam}::vector) AS cosine_similarity
      FROM pai_chunks
      WHERE ${conditions.join(" AND ")}
      ORDER BY embedding <=> ${vecParam}::vector
      LIMIT ${limitParam}
    `;
    try {
      const result = await this.pool.query(sql, params);
      const minScore = opts?.minScore ?? -Infinity;
      return result.rows.map((row) => ({
        projectId: row.project_id,
        path: row.path,
        startLine: row.start_line,
        endLine: row.end_line,
        snippet: row.snippet,
        score: row.cosine_similarity,
        tier: row.tier,
        source: row.source
      })).filter((r) => r.score >= minScore);
    } catch (e) {
      process.stderr.write(`[pai-postgres] searchSemantic error: ${e}\n`);
      return [];
    }
  }
};
|
|
250
|
+
/**
 * Decode a Buffer of little-endian Float32 values — the byte layout the
 * SQLite backend stores embeddings in — into a plain number[].
 * Throws (RangeError from readFloatLE) if the buffer length is not a
 * multiple of 4, matching Buffer's own bounds checking.
 */
function bufferToVector(buf) {
  const values = [];
  let offset = 0;
  while (offset < buf.length) {
    values.push(buf.readFloatLE(offset));
    offset += 4;
  }
  return values;
}
|
|
258
|
+
/**
 * Convert a free-text query to a Postgres tsquery string.
 *
 * Uses OR (|) semantics so that a chunk matching ANY query term is returned,
 * ranked by ts_rank (which scores higher when more terms match). AND (&)
 * semantics are too strict for multi-word queries because all terms rarely
 * co-occur in a single chunk.
 *
 * Example: "Synchrotech interview follow-up Gilles"
 *   → "synchrotech | interview | follow | gilles"
 *   → returns chunks containing any of these words, highest-matching first
 */
function buildPgTsQuery(query) {
  const STOP_WORDS = new Set([
    "a", "an", "and", "are", "as", "at", "be", "been", "but", "by",
    "do", "for", "from", "has", "have", "he", "her", "him", "his",
    "how", "i", "if", "in", "is", "it", "its", "me", "my", "not",
    "of", "on", "or", "our", "out", "she", "so", "that", "the",
    "their", "them", "they", "this", "to", "up", "us", "was", "we",
    "were", "what", "when", "who", "will", "with", "you", "your"
  ]);
  // Split on whitespace and Unicode punctuation, then keep only terms that
  // are at least two characters, not stop words, and still non-empty after
  // stripping tsquery special characters (prevents tsquery syntax errors).
  const terms = [];
  for (const token of query.toLowerCase().split(/[\s\p{P}]+/u)) {
    if (token.length < 2) continue;
    if (STOP_WORDS.has(token)) continue;
    const sanitized = token.replace(/'/g, "''").replace(/[&|!():]/g, "");
    if (sanitized) terms.push(sanitized);
  }
  if (terms.length > 0) return terms.join(" | ");
  // Fallback when filtering removed everything: sanitize the raw query to
  // alphanumerics and OR the remaining words together.
  const fallback = query.replace(/[^a-z0-9]/gi, " ").trim().split(/\s+/).filter(Boolean);
  return fallback.join(" | ") || "";
}
|
|
332
|
+
|
|
333
|
+
//#endregion
|
|
334
|
+
export { PostgresBackend };
|
|
335
|
+
//# sourceMappingURL=postgres-Ccvpc6fC.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"postgres-Ccvpc6fC.mjs","names":[],"sources":["../src/storage/postgres.ts"],"sourcesContent":["/**\n * PostgresBackend — implements StorageBackend using PostgreSQL + pgvector.\n *\n * Vector similarity: pgvector's <=> cosine distance operator\n * Full-text search: PostgreSQL tsvector/tsquery (replaces SQLite FTS5)\n * Connection pooling: node-postgres Pool\n *\n * Schema is initialized via docker/init.sql.\n * This module only handles runtime queries — schema creation is external.\n */\n\nimport pg from \"pg\";\nimport type { Pool, PoolClient } from \"pg\";\nimport type { StorageBackend, ChunkRow, FileRow, FederationStats } from \"./interface.js\";\nimport type { SearchResult, SearchOptions } from \"../memory/search.js\";\nimport { buildFtsQuery } from \"../memory/search.js\";\n\nconst { Pool: PgPool } = pg;\n\n// ---------------------------------------------------------------------------\n// Postgres config\n// ---------------------------------------------------------------------------\n\nexport interface PostgresConfig {\n connectionString?: string;\n host?: string;\n port?: number;\n database?: string;\n user?: string;\n password?: string;\n /** Maximum pool connections. Default 5 */\n maxConnections?: number;\n /** Connection timeout in ms. Default 5000 */\n connectionTimeoutMs?: number;\n}\n\n// ---------------------------------------------------------------------------\n// Implementation\n// ---------------------------------------------------------------------------\n\nexport class PostgresBackend implements StorageBackend {\n readonly backendType = \"postgres\" as const;\n\n private pool: Pool;\n\n constructor(config: PostgresConfig) {\n const connStr =\n config.connectionString ??\n `postgresql://${config.user ?? \"pai\"}:${config.password ?? \"pai\"}@${config.host ?? \"localhost\"}:${config.port ?? 5432}/${config.database ?? \"pai\"}`;\n\n this.pool = new PgPool({\n connectionString: connStr,\n max: config.maxConnections ?? 
5,\n connectionTimeoutMillis: config.connectionTimeoutMs ?? 5000,\n idleTimeoutMillis: 30_000,\n });\n\n // Log pool errors so they don't crash the process silently\n this.pool.on(\"error\", (err) => {\n process.stderr.write(`[pai-postgres] Pool error: ${err.message}\\n`);\n });\n }\n\n // -------------------------------------------------------------------------\n // Lifecycle\n // -------------------------------------------------------------------------\n\n async close(): Promise<void> {\n await this.pool.end();\n }\n\n async getStats(): Promise<FederationStats> {\n const client = await this.pool.connect();\n try {\n const filesResult = await client.query<{ n: string }>(\n \"SELECT COUNT(*)::text AS n FROM pai_files\"\n );\n const chunksResult = await client.query<{ n: string }>(\n \"SELECT COUNT(*)::text AS n FROM pai_chunks\"\n );\n return {\n files: parseInt(filesResult.rows[0]?.n ?? \"0\", 10),\n chunks: parseInt(chunksResult.rows[0]?.n ?? \"0\", 10),\n };\n } finally {\n client.release();\n }\n }\n\n /**\n * Test the connection by running a trivial query.\n * Returns null on success, error message on failure.\n */\n async testConnection(): Promise<string | null> {\n let client: PoolClient | null = null;\n try {\n client = await this.pool.connect();\n await client.query(\"SELECT 1\");\n return null;\n } catch (e) {\n return e instanceof Error ? 
e.message : String(e);\n } finally {\n client?.release();\n }\n }\n\n // -------------------------------------------------------------------------\n // File tracking\n // -------------------------------------------------------------------------\n\n async getFileHash(projectId: number, path: string): Promise<string | undefined> {\n const result = await this.pool.query<{ hash: string }>(\n \"SELECT hash FROM pai_files WHERE project_id = $1 AND path = $2\",\n [projectId, path]\n );\n return result.rows[0]?.hash;\n }\n\n async upsertFile(file: FileRow): Promise<void> {\n await this.pool.query(\n `INSERT INTO pai_files (project_id, path, source, tier, hash, mtime, size)\n VALUES ($1, $2, $3, $4, $5, $6, $7)\n ON CONFLICT (project_id, path) DO UPDATE SET\n source = EXCLUDED.source,\n tier = EXCLUDED.tier,\n hash = EXCLUDED.hash,\n mtime = EXCLUDED.mtime,\n size = EXCLUDED.size`,\n [file.projectId, file.path, file.source, file.tier, file.hash, file.mtime, file.size]\n );\n }\n\n // -------------------------------------------------------------------------\n // Chunk management\n // -------------------------------------------------------------------------\n\n async getChunkIds(projectId: number, path: string): Promise<string[]> {\n const result = await this.pool.query<{ id: string }>(\n \"SELECT id FROM pai_chunks WHERE project_id = $1 AND path = $2\",\n [projectId, path]\n );\n return result.rows.map((r) => r.id);\n }\n\n async deleteChunksForFile(projectId: number, path: string): Promise<void> {\n // Foreign key CASCADE handles pai_chunks deletion automatically\n // but we don't have FK to pai_chunks from pai_files, so delete explicitly\n await this.pool.query(\n \"DELETE FROM pai_chunks WHERE project_id = $1 AND path = $2\",\n [projectId, path]\n );\n }\n\n async insertChunks(chunks: ChunkRow[]): Promise<void> {\n if (chunks.length === 0) return;\n\n const client = await this.pool.connect();\n try {\n await client.query(\"BEGIN\");\n\n for (const c of chunks) {\n // 
embedding is null at insert time; updated separately via updateEmbedding()\n await client.query(\n `INSERT INTO pai_chunks\n (id, project_id, source, tier, path, start_line, end_line, hash, text, updated_at, fts_vector)\n VALUES\n ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10,\n to_tsvector('simple', $9))\n ON CONFLICT (id) DO UPDATE SET\n project_id = EXCLUDED.project_id,\n source = EXCLUDED.source,\n tier = EXCLUDED.tier,\n path = EXCLUDED.path,\n start_line = EXCLUDED.start_line,\n end_line = EXCLUDED.end_line,\n hash = EXCLUDED.hash,\n text = EXCLUDED.text,\n updated_at = EXCLUDED.updated_at,\n fts_vector = EXCLUDED.fts_vector`,\n [\n c.id,\n c.projectId,\n c.source,\n c.tier,\n c.path,\n c.startLine,\n c.endLine,\n c.hash,\n c.text,\n c.updatedAt,\n ]\n );\n }\n\n await client.query(\"COMMIT\");\n } catch (e) {\n await client.query(\"ROLLBACK\");\n throw e;\n } finally {\n client.release();\n }\n }\n\n async getUnembeddedChunkIds(projectId?: number): Promise<Array<{ id: string; text: string }>> {\n if (projectId !== undefined) {\n const result = await this.pool.query<{ id: string; text: string }>(\n \"SELECT id, text FROM pai_chunks WHERE embedding IS NULL AND project_id = $1 ORDER BY id\",\n [projectId]\n );\n return result.rows;\n }\n const result = await this.pool.query<{ id: string; text: string }>(\n \"SELECT id, text FROM pai_chunks WHERE embedding IS NULL ORDER BY id\"\n );\n return result.rows;\n }\n\n async updateEmbedding(chunkId: string, embedding: Buffer): Promise<void> {\n // Deserialize the Buffer (Float32Array LE bytes) to a number[] for pgvector\n const vec = bufferToVector(embedding);\n const vecStr = \"[\" + vec.join(\",\") + \"]\";\n await this.pool.query(\n \"UPDATE pai_chunks SET embedding = $1::vector WHERE id = $2\",\n [vecStr, chunkId]\n );\n }\n\n // -------------------------------------------------------------------------\n // Search — keyword (tsvector/tsquery)\n // 
-------------------------------------------------------------------------\n\n async searchKeyword(query: string, opts?: SearchOptions): Promise<SearchResult[]> {\n const maxResults = opts?.maxResults ?? 10;\n\n // Build tsquery from the same token logic as buildFtsQuery, but for Postgres\n const tsQuery = buildPgTsQuery(query);\n if (!tsQuery) return [];\n\n // Use 'simple' dictionary: preserves tokens as-is, no language-specific\n // stemming. Works reliably with any language (German, French, etc.).\n const conditions: string[] = [\"fts_vector @@ to_tsquery('simple', $1)\"];\n const params: (string | number)[] = [tsQuery];\n let paramIdx = 2;\n\n if (opts?.projectIds && opts.projectIds.length > 0) {\n const placeholders = opts.projectIds.map(() => `$${paramIdx++}`).join(\", \");\n conditions.push(`project_id IN (${placeholders})`);\n params.push(...opts.projectIds);\n }\n\n if (opts?.sources && opts.sources.length > 0) {\n const placeholders = opts.sources.map(() => `$${paramIdx++}`).join(\", \");\n conditions.push(`source IN (${placeholders})`);\n params.push(...opts.sources);\n }\n\n if (opts?.tiers && opts.tiers.length > 0) {\n const placeholders = opts.tiers.map(() => `$${paramIdx++}`).join(\", \");\n conditions.push(`tier IN (${placeholders})`);\n params.push(...opts.tiers);\n }\n\n params.push(maxResults);\n const limitParam = `$${paramIdx}`;\n\n const sql = `\n SELECT\n project_id,\n path,\n start_line,\n end_line,\n text AS snippet,\n tier,\n source,\n ts_rank(fts_vector, to_tsquery('simple', $1)) AS rank_score\n FROM pai_chunks\n WHERE ${conditions.join(\" AND \")}\n ORDER BY rank_score DESC\n LIMIT ${limitParam}\n `;\n\n try {\n const result = await this.pool.query<{\n project_id: number;\n path: string;\n start_line: number;\n end_line: number;\n snippet: string;\n tier: string;\n source: string;\n rank_score: number;\n }>(sql, params);\n\n return result.rows.map((row) => ({\n projectId: row.project_id,\n path: row.path,\n startLine: row.start_line,\n 
endLine: row.end_line,\n snippet: row.snippet,\n score: row.rank_score,\n tier: row.tier,\n source: row.source,\n }));\n } catch (e) {\n process.stderr.write(`[pai-postgres] searchKeyword error: ${e}\\n`);\n return [];\n }\n }\n\n // -------------------------------------------------------------------------\n // Search — semantic (pgvector cosine distance)\n // -------------------------------------------------------------------------\n\n async searchSemantic(queryEmbedding: Float32Array, opts?: SearchOptions): Promise<SearchResult[]> {\n const maxResults = opts?.maxResults ?? 10;\n\n const conditions: string[] = [\"embedding IS NOT NULL\"];\n const params: (string | number | string)[] = [];\n let paramIdx = 1;\n\n // pgvector vector literal\n const vecStr = \"[\" + Array.from(queryEmbedding).join(\",\") + \"]\";\n params.push(vecStr);\n const vecParam = `$${paramIdx++}`;\n\n if (opts?.projectIds && opts.projectIds.length > 0) {\n const placeholders = opts.projectIds.map(() => `$${paramIdx++}`).join(\", \");\n conditions.push(`project_id IN (${placeholders})`);\n params.push(...opts.projectIds);\n }\n\n if (opts?.sources && opts.sources.length > 0) {\n const placeholders = opts.sources.map(() => `$${paramIdx++}`).join(\", \");\n conditions.push(`source IN (${placeholders})`);\n params.push(...opts.sources);\n }\n\n if (opts?.tiers && opts.tiers.length > 0) {\n const placeholders = opts.tiers.map(() => `$${paramIdx++}`).join(\", \");\n conditions.push(`tier IN (${placeholders})`);\n params.push(...opts.tiers);\n }\n\n params.push(maxResults);\n const limitParam = `$${paramIdx}`;\n\n // <=> is cosine distance; 1 - distance = cosine similarity\n const sql = `\n SELECT\n project_id,\n path,\n start_line,\n end_line,\n text AS snippet,\n tier,\n source,\n 1 - (embedding <=> ${vecParam}::vector) AS cosine_similarity\n FROM pai_chunks\n WHERE ${conditions.join(\" AND \")}\n ORDER BY embedding <=> ${vecParam}::vector\n LIMIT ${limitParam}\n `;\n\n try {\n const result = 
await this.pool.query<{\n project_id: number;\n path: string;\n start_line: number;\n end_line: number;\n snippet: string;\n tier: string;\n source: string;\n cosine_similarity: number;\n }>(sql, params);\n\n const minScore = opts?.minScore ?? -Infinity;\n\n return result.rows\n .map((row) => ({\n projectId: row.project_id,\n path: row.path,\n startLine: row.start_line,\n endLine: row.end_line,\n snippet: row.snippet,\n score: row.cosine_similarity,\n tier: row.tier,\n source: row.source,\n }))\n .filter((r) => r.score >= minScore);\n } catch (e) {\n process.stderr.write(`[pai-postgres] searchSemantic error: ${e}\\n`);\n return [];\n }\n }\n}\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\n/**\n * Convert a Buffer of Float32 LE bytes (as stored in SQLite) to number[].\n */\nfunction bufferToVector(buf: Buffer): number[] {\n const floats: number[] = [];\n for (let i = 0; i < buf.length; i += 4) {\n floats.push(buf.readFloatLE(i));\n }\n return floats;\n}\n\n/**\n * Convert a free-text query to a Postgres tsquery string.\n *\n * Uses OR (|) semantics so that a chunk matching ANY query term is returned,\n * ranked by ts_rank (which scores higher when more terms match). 
AND (&)\n * semantics are too strict for multi-word queries because all terms rarely\n * co-occur in a single chunk.\n *\n * Example: \"Synchrotech interview follow-up Gilles\"\n * → \"synchrotech | interview | follow | gilles\"\n * → returns chunks containing any of these words, highest-matching first\n */\nfunction buildPgTsQuery(query: string): string {\n const STOP_WORDS = new Set([\n \"a\", \"an\", \"and\", \"are\", \"as\", \"at\", \"be\", \"been\", \"but\", \"by\",\n \"do\", \"for\", \"from\", \"has\", \"have\", \"he\", \"her\", \"him\", \"his\",\n \"how\", \"i\", \"if\", \"in\", \"is\", \"it\", \"its\", \"me\", \"my\", \"not\",\n \"of\", \"on\", \"or\", \"our\", \"out\", \"she\", \"so\", \"that\", \"the\",\n \"their\", \"them\", \"they\", \"this\", \"to\", \"up\", \"us\", \"was\", \"we\",\n \"were\", \"what\", \"when\", \"who\", \"will\", \"with\", \"you\", \"your\",\n ]);\n\n const tokens = query\n .toLowerCase()\n .split(/[\\s\\p{P}]+/u)\n .filter(Boolean)\n .filter((t) => t.length >= 2)\n .filter((t) => !STOP_WORDS.has(t))\n // Sanitize: strip tsquery special characters to prevent syntax errors\n .map((t) => t.replace(/'/g, \"''\").replace(/[&|!():]/g, \"\"))\n .filter(Boolean);\n\n if (tokens.length === 0) {\n // Fallback: sanitize the raw query and use it as a single term\n const raw = query.replace(/[^a-z0-9]/gi, \" \").trim().split(/\\s+/).filter(Boolean).join(\" | \");\n return raw || \"\";\n }\n\n // Use OR (|) so that chunks matching ANY term are returned.\n // ts_rank naturally scores chunks higher when more terms match, so the\n // most relevant results still bubble to the top.\n return tokens.join(\" | \");\n}\n\n// Re-export buildFtsQuery so it is accessible without importing search.ts\nexport { buildPgTsQuery 
};\n"],"mappings":";;;;;;;;;;;;;AAiBA,MAAM,EAAE,MAAM,WAAW;AAuBzB,IAAa,kBAAb,MAAuD;CACrD,AAAS,cAAc;CAEvB,AAAQ;CAER,YAAY,QAAwB;AAKlC,OAAK,OAAO,IAAI,OAAO;GACrB,kBAJA,OAAO,oBACP,gBAAgB,OAAO,QAAQ,MAAM,GAAG,OAAO,YAAY,MAAM,GAAG,OAAO,QAAQ,YAAY,GAAG,OAAO,QAAQ,KAAK,GAAG,OAAO,YAAY;GAI5I,KAAK,OAAO,kBAAkB;GAC9B,yBAAyB,OAAO,uBAAuB;GACvD,mBAAmB;GACpB,CAAC;AAGF,OAAK,KAAK,GAAG,UAAU,QAAQ;AAC7B,WAAQ,OAAO,MAAM,8BAA8B,IAAI,QAAQ,IAAI;IACnE;;CAOJ,MAAM,QAAuB;AAC3B,QAAM,KAAK,KAAK,KAAK;;CAGvB,MAAM,WAAqC;EACzC,MAAM,SAAS,MAAM,KAAK,KAAK,SAAS;AACxC,MAAI;GACF,MAAM,cAAc,MAAM,OAAO,MAC/B,4CACD;GACD,MAAM,eAAe,MAAM,OAAO,MAChC,6CACD;AACD,UAAO;IACL,OAAO,SAAS,YAAY,KAAK,IAAI,KAAK,KAAK,GAAG;IAClD,QAAQ,SAAS,aAAa,KAAK,IAAI,KAAK,KAAK,GAAG;IACrD;YACO;AACR,UAAO,SAAS;;;;;;;CAQpB,MAAM,iBAAyC;EAC7C,IAAI,SAA4B;AAChC,MAAI;AACF,YAAS,MAAM,KAAK,KAAK,SAAS;AAClC,SAAM,OAAO,MAAM,WAAW;AAC9B,UAAO;WACA,GAAG;AACV,UAAO,aAAa,QAAQ,EAAE,UAAU,OAAO,EAAE;YACzC;AACR,WAAQ,SAAS;;;CAQrB,MAAM,YAAY,WAAmB,MAA2C;AAK9E,UAJe,MAAM,KAAK,KAAK,MAC7B,kEACA,CAAC,WAAW,KAAK,CAClB,EACa,KAAK,IAAI;;CAGzB,MAAM,WAAW,MAA8B;AAC7C,QAAM,KAAK,KAAK,MACd;;;;;;;kCAQA;GAAC,KAAK;GAAW,KAAK;GAAM,KAAK;GAAQ,KAAK;GAAM,KAAK;GAAM,KAAK;GAAO,KAAK;GAAK,CACtF;;CAOH,MAAM,YAAY,WAAmB,MAAiC;AAKpE,UAJe,MAAM,KAAK,KAAK,MAC7B,iEACA,CAAC,WAAW,KAAK,CAClB,EACa,KAAK,KAAK,MAAM,EAAE,GAAG;;CAGrC,MAAM,oBAAoB,WAAmB,MAA6B;AAGxE,QAAM,KAAK,KAAK,MACd,8DACA,CAAC,WAAW,KAAK,CAClB;;CAGH,MAAM,aAAa,QAAmC;AACpD,MAAI,OAAO,WAAW,EAAG;EAEzB,MAAM,SAAS,MAAM,KAAK,KAAK,SAAS;AACxC,MAAI;AACF,SAAM,OAAO,MAAM,QAAQ;AAE3B,QAAK,MAAM,KAAK,OAEd,OAAM,OAAO,MACX;;;;;;;;;;;;;;;gDAgBA;IACE,EAAE;IACF,EAAE;IACF,EAAE;IACF,EAAE;IACF,EAAE;IACF,EAAE;IACF,EAAE;IACF,EAAE;IACF,EAAE;IACF,EAAE;IACH,CACF;AAGH,SAAM,OAAO,MAAM,SAAS;WACrB,GAAG;AACV,SAAM,OAAO,MAAM,WAAW;AAC9B,SAAM;YACE;AACR,UAAO,SAAS;;;CAIpB,MAAM,sBAAsB,WAAkE;AAC5F,MAAI,cAAc,OAKhB,SAJe,MAAM,KAAK,KAAK,MAC7B,2FACA,CAAC,UAAU,CACZ,EACa;AAKhB,UAHe,MAAM,KAAK,KAAK,MAC7B,sEACD,EACa;;CAGhB,MAAM,gBAAgB,SAAiB,WAAkC;EAGvE,MAAM,SAAS,MADH,eAAe,UAAU,CACZ,KAAK,IAAI,GAAG;AACrC,QAAM,KAAK,KAAK,MA
Cd,8DACA,CAAC,QAAQ,QAAQ,CAClB;;CAOH,MAAM,cAAc,OAAe,MAA+C;EAChF,MAAM,aAAa,MAAM,cAAc;EAGvC,MAAM,UAAU,eAAe,MAAM;AACrC,MAAI,CAAC,QAAS,QAAO,EAAE;EAIvB,MAAM,aAAuB,CAAC,yCAAyC;EACvE,MAAM,SAA8B,CAAC,QAAQ;EAC7C,IAAI,WAAW;AAEf,MAAI,MAAM,cAAc,KAAK,WAAW,SAAS,GAAG;GAClD,MAAM,eAAe,KAAK,WAAW,UAAU,IAAI,aAAa,CAAC,KAAK,KAAK;AAC3E,cAAW,KAAK,kBAAkB,aAAa,GAAG;AAClD,UAAO,KAAK,GAAG,KAAK,WAAW;;AAGjC,MAAI,MAAM,WAAW,KAAK,QAAQ,SAAS,GAAG;GAC5C,MAAM,eAAe,KAAK,QAAQ,UAAU,IAAI,aAAa,CAAC,KAAK,KAAK;AACxE,cAAW,KAAK,cAAc,aAAa,GAAG;AAC9C,UAAO,KAAK,GAAG,KAAK,QAAQ;;AAG9B,MAAI,MAAM,SAAS,KAAK,MAAM,SAAS,GAAG;GACxC,MAAM,eAAe,KAAK,MAAM,UAAU,IAAI,aAAa,CAAC,KAAK,KAAK;AACtE,cAAW,KAAK,YAAY,aAAa,GAAG;AAC5C,UAAO,KAAK,GAAG,KAAK,MAAM;;AAG5B,SAAO,KAAK,WAAW;EACvB,MAAM,aAAa,IAAI;EAEvB,MAAM,MAAM;;;;;;;;;;;cAWF,WAAW,KAAK,QAAQ,CAAC;;cAEzB,WAAW;;AAGrB,MAAI;AAYF,WAXe,MAAM,KAAK,KAAK,MAS5B,KAAK,OAAO,EAED,KAAK,KAAK,SAAS;IAC/B,WAAW,IAAI;IACf,MAAM,IAAI;IACV,WAAW,IAAI;IACf,SAAS,IAAI;IACb,SAAS,IAAI;IACb,OAAO,IAAI;IACX,MAAM,IAAI;IACV,QAAQ,IAAI;IACb,EAAE;WACI,GAAG;AACV,WAAQ,OAAO,MAAM,uCAAuC,EAAE,IAAI;AAClE,UAAO,EAAE;;;CAQb,MAAM,eAAe,gBAA8B,MAA+C;EAChG,MAAM,aAAa,MAAM,cAAc;EAEvC,MAAM,aAAuB,CAAC,wBAAwB;EACtD,MAAM,SAAuC,EAAE;EAC/C,IAAI,WAAW;EAGf,MAAM,SAAS,MAAM,MAAM,KAAK,eAAe,CAAC,KAAK,IAAI,GAAG;AAC5D,SAAO,KAAK,OAAO;EACnB,MAAM,WAAW,IAAI;AAErB,MAAI,MAAM,cAAc,KAAK,WAAW,SAAS,GAAG;GAClD,MAAM,eAAe,KAAK,WAAW,UAAU,IAAI,aAAa,CAAC,KAAK,KAAK;AAC3E,cAAW,KAAK,kBAAkB,aAAa,GAAG;AAClD,UAAO,KAAK,GAAG,KAAK,WAAW;;AAGjC,MAAI,MAAM,WAAW,KAAK,QAAQ,SAAS,GAAG;GAC5C,MAAM,eAAe,KAAK,QAAQ,UAAU,IAAI,aAAa,CAAC,KAAK,KAAK;AACxE,cAAW,KAAK,cAAc,aAAa,GAAG;AAC9C,UAAO,KAAK,GAAG,KAAK,QAAQ;;AAG9B,MAAI,MAAM,SAAS,KAAK,MAAM,SAAS,GAAG;GACxC,MAAM,eAAe,KAAK,MAAM,UAAU,IAAI,aAAa,CAAC,KAAK,KAAK;AACtE,cAAW,KAAK,YAAY,aAAa,GAAG;AAC5C,UAAO,KAAK,GAAG,KAAK,MAAM;;AAG5B,SAAO,KAAK,WAAW;EACvB,MAAM,aAAa,IAAI;EAGvB,MAAM,MAAM;;;;;;;;;6BASa,SAAS;;cAExB,WAAW,KAAK,QAAQ,CAAC;+BACR,SAAS;cAC1B,WAAW;;AAGrB,MAAI;GACF,MAAM,SAAS,MAAM,KAAK,KAAK,MAS5B,KAAK,OAAO;GAEf,MAAM,WAAW,MAAM,YAAY;AAEnC,UAAO,OAAO,KACX,KAA
K,SAAS;IACb,WAAW,IAAI;IACf,MAAM,IAAI;IACV,WAAW,IAAI;IACf,SAAS,IAAI;IACb,SAAS,IAAI;IACb,OAAO,IAAI;IACX,MAAM,IAAI;IACV,QAAQ,IAAI;IACb,EAAE,CACF,QAAQ,MAAM,EAAE,SAAS,SAAS;WAC9B,GAAG;AACV,WAAQ,OAAO,MAAM,wCAAwC,EAAE,IAAI;AACnE,UAAO,EAAE;;;;;;;AAYf,SAAS,eAAe,KAAuB;CAC7C,MAAM,SAAmB,EAAE;AAC3B,MAAK,IAAI,IAAI,GAAG,IAAI,IAAI,QAAQ,KAAK,EACnC,QAAO,KAAK,IAAI,YAAY,EAAE,CAAC;AAEjC,QAAO;;;;;;;;;;;;;;AAeT,SAAS,eAAe,OAAuB;CAC7C,MAAM,aAAa,IAAI,IAAI;EACzB;EAAK;EAAM;EAAO;EAAO;EAAM;EAAM;EAAM;EAAQ;EAAO;EAC1D;EAAM;EAAO;EAAQ;EAAO;EAAQ;EAAM;EAAO;EAAO;EACxD;EAAO;EAAK;EAAM;EAAM;EAAM;EAAM;EAAO;EAAM;EAAM;EACvD;EAAM;EAAM;EAAM;EAAO;EAAO;EAAO;EAAM;EAAQ;EACrD;EAAS;EAAQ;EAAQ;EAAQ;EAAM;EAAM;EAAM;EAAO;EAC1D;EAAQ;EAAQ;EAAQ;EAAO;EAAQ;EAAQ;EAAO;EACvD,CAAC;CAEF,MAAM,SAAS,MACZ,aAAa,CACb,MAAM,cAAc,CACpB,OAAO,QAAQ,CACf,QAAQ,MAAM,EAAE,UAAU,EAAE,CAC5B,QAAQ,MAAM,CAAC,WAAW,IAAI,EAAE,CAAC,CAEjC,KAAK,MAAM,EAAE,QAAQ,MAAM,KAAK,CAAC,QAAQ,aAAa,GAAG,CAAC,CAC1D,OAAO,QAAQ;AAElB,KAAI,OAAO,WAAW,EAGpB,QADY,MAAM,QAAQ,eAAe,IAAI,CAAC,MAAM,CAAC,MAAM,MAAM,CAAC,OAAO,QAAQ,CAAC,KAAK,MAAM,IAC/E;AAMhB,QAAO,OAAO,KAAK,MAAM"}
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
//#region \0rolldown/runtime.js
/**
 * Rolldown runtime helper: materialize a namespace-like object from a map
 * of getter thunks. Each own/inherited enumerable key of `all` becomes an
 * enumerable accessor on the result; unless `no_symbols` is truthy, the
 * object is also tagged with Symbol.toStringTag = "Module".
 */
var __defProp = Object.defineProperty;
var __exportAll = (all, no_symbols) => {
  const target = {};
  // for...in deliberately walks inherited enumerable keys as well.
  for (const name in all) {
    __defProp(target, name, { get: all[name], enumerable: true });
  }
  if (!no_symbols) {
    __defProp(target, Symbol.toStringTag, { value: "Module" });
  }
  return target;
};

//#endregion
export { __exportAll as t };
|