@kernel.chat/kbot 2.23.1 → 2.24.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,751 @@
1
+ // K:BOT Database Tools — SQL queries, schema inspection, migrations, Prisma, ER diagrams, seeding
2
+ // Bridges kbot to Postgres, MySQL, and SQLite databases via CLI tools.
3
import { execFileSync, execSync } from 'node:child_process';
import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'node:fs';
import { join, resolve } from 'node:path';
import { registerTool } from './index.js';
7
/**
 * Detect the database type from a connection string.
 * @param {string} url - connection string or SQLite file path
 * @returns {'postgres'|'mysql'|'sqlite'} detected database type
 * @throws {Error} when no known scheme/extension matches; the message redacts
 *   any embedded `user:password@` credentials so secrets never leak into logs.
 */
function detectDbType(url) {
    if (url.startsWith('postgres://') || url.startsWith('postgresql://'))
        return 'postgres';
    if (url.startsWith('mysql://'))
        return 'mysql';
    if (url.startsWith('sqlite:') || url.endsWith('.db') || url.endsWith('.sqlite') || url.endsWith('.sqlite3'))
        return 'sqlite';
    // Redact credentials before echoing the URL back in the error message.
    const redacted = url.replace(/\/\/[^@/]+@/, '//***@');
    throw new Error(`Cannot detect database type from URL: ${redacted.slice(0, 30)}... — expected postgres://, mysql://, or sqlite: prefix`);
}
17
/**
 * Resolve a database connection from an explicit argument or, failing that,
 * from environment variables (DATABASE_URL, POSTGRES_URL, MYSQL_URL,
 * SQLITE_PATH — checked in that priority order).
 * @param {string} [connectionString] - explicit connection string
 * @returns {{type: 'postgres'|'mysql'|'sqlite', url: string}}
 * @throws {Error} when nothing is configured
 */
function resolveConnection(connectionString) {
    if (connectionString) {
        return { type: detectDbType(connectionString), url: connectionString };
    }
    // SQLITE_PATH holds a bare file path, so its type is fixed rather than detected.
    for (const envName of ['DATABASE_URL', 'POSTGRES_URL', 'MYSQL_URL', 'SQLITE_PATH']) {
        const candidate = process.env[envName];
        if (!candidate)
            continue;
        const type = envName === 'SQLITE_PATH' ? 'sqlite' : detectDbType(candidate);
        return { type, url: candidate };
    }
    throw new Error('No database connection found. Pass a connection_string or set DATABASE_URL, POSTGRES_URL, MYSQL_URL, or SQLITE_PATH.');
}
35
/**
 * Extract the filesystem path from a SQLite connection string.
 * Accepts "sqlite:", "sqlite:/", "sqlite://" prefixes or a bare file path.
 */
function sqlitePath(url) {
    let path = url;
    // Strip the scheme (with up to two slashes), then any leftover bare scheme.
    for (const prefix of [/^sqlite:\/?\/?/, /^sqlite:/]) {
        path = path.replace(prefix, '');
    }
    return path;
}
39
/**
 * Translate a Postgres connection URL into psql-compatible PG* environment
 * variables layered over the current process environment.
 * Falls back to process.env unchanged when the URL cannot be parsed.
 */
function pgEnv(url) {
    let parsed;
    try {
        parsed = new URL(url);
    }
    catch {
        return process.env;
    }
    const env = { ...process.env };
    env.PGHOST = parsed.hostname;
    env.PGPORT = parsed.port || '5432';
    env.PGUSER = parsed.username;
    // Passwords are percent-encoded inside URLs; psql wants the raw value.
    env.PGPASSWORD = decodeURIComponent(parsed.password);
    env.PGDATABASE = parsed.pathname.slice(1);
    env.PGSSLMODE = parsed.searchParams.get('sslmode') || 'prefer';
    return env;
}
57
/**
 * Translate a MySQL connection URL into mysql-CLI argument tokens
 * (-h host, -P port, -u user, -pPASSWORD, then the database name).
 * Returns an empty array when the URL cannot be parsed.
 */
function mysqlArgs(url) {
    try {
        const parsed = new URL(url);
        const flags = [
            ...(parsed.hostname ? ['-h', parsed.hostname] : []),
            ...(parsed.port ? ['-P', parsed.port] : []),
            ...(parsed.username ? ['-u', parsed.username] : []),
            // mysql requires the password glued to -p with no space.
            ...(parsed.password ? [`-p${decodeURIComponent(parsed.password)}`] : []),
        ];
        const database = parsed.pathname.slice(1);
        if (database)
            flags.push(database);
        return flags;
    }
    catch {
        return [];
    }
}
79
/**
 * Run a shell command synchronously and return trimmed stdout.
 * @param {string} cmd - full shell command line
 * @param {{timeout?: number, env?: object, cwd?: string}} [opts]
 * @throws {Error} whose message is stderr, stdout, or the raw error message —
 *   whichever is first non-empty — so callers see the most useful diagnostics.
 */
function shell(cmd, opts) {
    const options = {
        encoding: 'utf-8',
        timeout: opts?.timeout ?? 30_000,
        maxBuffer: 10 * 1024 * 1024,
        env: opts?.env ?? process.env,
        cwd: opts?.cwd ?? process.cwd(),
        stdio: ['pipe', 'pipe', 'pipe'],
    };
    try {
        return execSync(cmd, options).trim();
    }
    catch (err) {
        const reason = err.stderr?.trim() || err.stdout?.trim() || err.message || 'Command failed';
        throw new Error(reason);
    }
}
97
/**
 * Execute a SQL statement against the resolved database using the matching
 * CLI client (psql / mysql / sqlite3).
 *
 * Uses execFileSync with an argument vector instead of a shell command line:
 * the previous shell-based approach double-quoted the SQL, which left `$(...)`
 * and backtick expansion live — a command-injection vector for attacker-
 * influenced SQL text.
 *
 * @param {{type: 'postgres'|'mysql'|'sqlite', url: string}} conn
 * @param {string} sql
 * @param {number} [timeout] - per-query timeout in ms
 * @returns {string} trimmed CLI stdout
 * @throws {Error} with stderr/stdout/message from the failed CLI invocation
 */
function execQuery(conn, sql, timeout = 30_000) {
    const run = (cmd, argv, env) => {
        try {
            return execFileSync(cmd, argv, {
                encoding: 'utf-8',
                timeout,
                maxBuffer: 10 * 1024 * 1024,
                env: env ?? process.env,
                stdio: ['pipe', 'pipe', 'pipe'],
            }).trim();
        }
        catch (err) {
            const msg = err.stderr?.trim() || err.stdout?.trim() || err.message || 'Command failed';
            throw new Error(msg);
        }
    };
    switch (conn.type) {
        case 'postgres':
            // -A: unaligned output, -t: tuples only (no headers/footers).
            return run('psql', ['-A', '-t', '-c', sql], pgEnv(conn.url));
        case 'mysql':
            return run('mysql', [...mysqlArgs(conn.url), '-e', sql]);
        case 'sqlite':
            return run('sqlite3', [sqlitePath(conn.url), sql]);
    }
}
114
// ── Fake data generators for seeding ─────────────────────────────────
const FIRST_NAMES = ['Alice', 'Bob', 'Charlie', 'Diana', 'Eve', 'Frank', 'Grace', 'Hank', 'Ivy', 'Jack', 'Karen', 'Leo', 'Mia', 'Noah', 'Olivia', 'Paul', 'Quinn', 'Ruby', 'Sam', 'Tina'];
const LAST_NAMES = ['Smith', 'Johnson', 'Williams', 'Brown', 'Jones', 'Garcia', 'Miller', 'Davis', 'Rodriguez', 'Martinez', 'Hernandez', 'Lopez', 'Gonzalez', 'Wilson', 'Anderson', 'Thomas', 'Taylor', 'Moore', 'Jackson', 'Martin'];
const DOMAINS = ['example.com', 'mail.com', 'test.io', 'demo.org', 'corp.net'];
const LOREM = ['Lorem ipsum dolor sit amet', 'Consectetur adipiscing elit', 'Sed do eiusmod tempor incididunt', 'Ut labore et dolore magna aliqua', 'Ut enim ad minim veniam', 'Quis nostrud exercitation ullamco', 'Duis aute irure dolor in reprehenderit', 'Excepteur sint occaecat cupidatat', 'Sunt in culpa qui officia', 'Mollit anim id est laborum'];
const CITIES = ['New York', 'London', 'Tokyo', 'Paris', 'Berlin', 'Sydney', 'Toronto', 'Mumbai', 'Seoul', 'Mexico City'];
const COUNTRIES = ['US', 'UK', 'JP', 'FR', 'DE', 'AU', 'CA', 'IN', 'KR', 'MX'];
/** Pick a uniformly random element from an array. */
function pick(arr) {
    return arr[Math.floor(Math.random() * arr.length)];
}
/** Random integer in [min, max] inclusive. */
function randInt(min, max) {
    return Math.floor(Math.random() * (max - min + 1)) + min;
}
/** Random YYYY-MM-DD date; day capped at 28 so every month is valid. */
function randDate(startYear = 2020, endYear = 2026) {
    const y = randInt(startYear, endYear);
    const m = String(randInt(1, 12)).padStart(2, '0');
    const d = String(randInt(1, 28)).padStart(2, '0');
    return `${y}-${m}-${d}`;
}
/** Random "YYYY-MM-DD HH:MM:SS" timestamp. */
function randTimestamp() {
    return `${randDate()} ${String(randInt(0, 23)).padStart(2, '0')}:${String(randInt(0, 59)).padStart(2, '0')}:${String(randInt(0, 59)).padStart(2, '0')}`;
}
/** Random SQL boolean literal ('true' | 'false'). */
function randBool() {
    return Math.random() > 0.5 ? 'true' : 'false';
}
/** Random RFC 4122 v4 UUID (not cryptographically secure — seed data only). */
function randUuid() {
    return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, (c) => {
        const r = Math.random() * 16 | 0;
        return (c === 'x' ? r : (r & 0x3 | 0x8)).toString(16);
    });
}
/**
 * Generate a realistic fake SQL value literal based on column name and type.
 * String values come back single-quoted; numerics/booleans come back bare.
 * @param {string} colName - column name (drives heuristics)
 * @param {string} colType - declared column type
 * @param {number} rowIndex - 0-based row number, used for sequential int ids
 * @returns {string} SQL value literal ready to splice into an INSERT
 */
function fakeValue(colName, colType, rowIndex) {
    const name = colName.toLowerCase();
    const type = colType.toLowerCase();
    // Integer id columns get sequential values. This check MUST precede the
    // uuid check below: previously `name === 'id'` matched the uuid branch
    // first, so integer PKs were seeded with quoted UUID strings.
    if (name === 'id' && (type.includes('int') || type.includes('serial')))
        return String(rowIndex + 1);
    // UUID / ID columns
    if (name === 'id' || name === 'uuid' || type.includes('uuid'))
        return `'${randUuid()}'`;
    // Name patterns
    if (name.includes('first_name') || name === 'fname')
        return `'${pick(FIRST_NAMES)}'`;
    if (name.includes('last_name') || name === 'lname' || name === 'surname')
        return `'${pick(LAST_NAMES)}'`;
    if (name === 'name' || name === 'full_name' || name === 'display_name' || name === 'username')
        return `'${pick(FIRST_NAMES)} ${pick(LAST_NAMES)}'`;
    // Email
    if (name.includes('email'))
        return `'${pick(FIRST_NAMES).toLowerCase()}${randInt(1, 999)}@${pick(DOMAINS)}'`;
    // Phone
    if (name.includes('phone') || name.includes('tel'))
        return `'+1${randInt(200, 999)}${randInt(100, 999)}${randInt(1000, 9999)}'`;
    // URLs
    if (name.includes('url') || name.includes('website') || name.includes('link'))
        return `'https://${pick(DOMAINS)}/page/${randInt(1, 1000)}'`;
    if (name.includes('avatar') || name.includes('image') || name.includes('photo'))
        return `'https://${pick(DOMAINS)}/img/${randInt(1, 500)}.jpg'`;
    // Location
    if (name.includes('city'))
        return `'${pick(CITIES)}'`;
    if (name.includes('country') || name.includes('country_code'))
        return `'${pick(COUNTRIES)}'`;
    if (name.includes('address') || name.includes('street'))
        return `'${randInt(1, 9999)} ${pick(LAST_NAMES)} St'`;
    if (name.includes('zip') || name.includes('postal'))
        return `'${String(randInt(10000, 99999))}'`;
    if (name.includes('lat'))
        return String((Math.random() * 180 - 90).toFixed(6));
    if (name.includes('lng') || name.includes('lon'))
        return String((Math.random() * 360 - 180).toFixed(6));
    // Text / description
    if (name.includes('title') || name === 'subject')
        return `'${pick(LOREM)}'`;
    if (name.includes('description') || name.includes('body') || name.includes('content') || name.includes('text') || name.includes('bio') || name.includes('note'))
        return `'${pick(LOREM)}. ${pick(LOREM)}.'`;
    // Status / enums
    if (name.includes('status'))
        return `'${pick(['active', 'inactive', 'pending', 'archived'])}'`;
    if (name.includes('role'))
        return `'${pick(['admin', 'user', 'editor', 'viewer'])}'`;
    if (name.includes('type') || name.includes('category') || name.includes('kind'))
        return `'${pick(['standard', 'premium', 'basic', 'enterprise'])}'`;
    // Boolean
    if (type.includes('bool') || name.startsWith('is_') || name.startsWith('has_') || name.startsWith('can_'))
        return randBool();
    // Dates / timestamps
    if (name.includes('created') || name.includes('updated') || name.includes('deleted') || name.includes('_at') || type.includes('timestamp'))
        return `'${randTimestamp()}'`;
    if (type.includes('date'))
        return `'${randDate()}'`;
    // Numeric
    if (name.includes('price') || name.includes('amount') || name.includes('cost') || name.includes('total') || name.includes('balance'))
        return (Math.random() * 1000).toFixed(2);
    if (name.includes('count') || name.includes('quantity') || name.includes('qty'))
        return String(randInt(1, 100));
    if (name.includes('age'))
        return String(randInt(18, 80));
    if (name.includes('score') || name.includes('rating'))
        return (Math.random() * 5).toFixed(1);
    if (name.includes('percent') || name.includes('rate'))
        return (Math.random() * 100).toFixed(1);
    if (type.includes('int') || type.includes('serial'))
        return String(randInt(1, 10000));
    if (type.includes('float') || type.includes('double') || type.includes('decimal') || type.includes('numeric') || type.includes('real'))
        return (Math.random() * 1000).toFixed(2);
    // JSON columns
    if (type.includes('json'))
        return `'${JSON.stringify({ key: pick(FIRST_NAMES).toLowerCase(), value: randInt(1, 100) })}'`;
    // Default: short text
    return `'${pick(LOREM).slice(0, 50)}'`;
}
226
/**
 * Inspect a Postgres schema via information_schema (public schema only).
 * @param {{type: string, url: string}} conn - resolved connection
 * @param {string} [tableFilter] - restrict results to one table
 * @returns {Array<{table, column, type, nullable, default_val, is_pk, fk_ref}>}
 */
function getPostgresSchema(conn, tableFilter) {
    // tableFilter is spliced into a SQL string literal — double embedded
    // single quotes so a crafted table name cannot break out (SQL injection).
    const tableClause = tableFilter ? `AND c.table_name = '${tableFilter.replace(/'/g, "''")}'` : '';
    const sql = `
    SELECT
      c.table_name,
      c.column_name,
      c.data_type || COALESCE('(' || c.character_maximum_length || ')', ''),
      c.is_nullable,
      COALESCE(c.column_default, ''),
      CASE WHEN pk.column_name IS NOT NULL THEN 'YES' ELSE 'NO' END,
      COALESCE(fk.ref, '')
    FROM information_schema.columns c
    LEFT JOIN (
      SELECT kcu.table_name, kcu.column_name
      FROM information_schema.table_constraints tc
      JOIN information_schema.key_column_usage kcu ON tc.constraint_name = kcu.constraint_name
      WHERE tc.constraint_type = 'PRIMARY KEY'
    ) pk ON c.table_name = pk.table_name AND c.column_name = pk.column_name
    LEFT JOIN (
      SELECT
        kcu.table_name,
        kcu.column_name,
        ccu.table_name || '.' || ccu.column_name AS ref
      FROM information_schema.referential_constraints rc
      JOIN information_schema.key_column_usage kcu ON rc.constraint_name = kcu.constraint_name
      JOIN information_schema.constraint_column_usage ccu ON rc.unique_constraint_name = ccu.constraint_name
    ) fk ON c.table_name = fk.table_name AND c.column_name = fk.column_name
    WHERE c.table_schema = 'public' ${tableClause}
    ORDER BY c.table_name, c.ordinal_position;
  `;
    const raw = execQuery(conn, sql);
    if (!raw.trim())
        return [];
    // psql -A -t output: one row per line, columns separated by '|'.
    return raw.split('\n').filter(l => l.trim()).map(line => {
        const parts = line.split('|');
        return {
            table: parts[0]?.trim() || '',
            column: parts[1]?.trim() || '',
            type: parts[2]?.trim() || '',
            nullable: parts[3]?.trim() || '',
            default_val: parts[4]?.trim() || '',
            is_pk: parts[5]?.trim() === 'YES',
            fk_ref: parts[6]?.trim() || '',
        };
    });
}
272
/**
 * Inspect a MySQL schema via information_schema for the database named in
 * the connection URL.
 * @param {{type: string, url: string}} conn - resolved connection
 * @param {string} [tableFilter] - restrict results to one table
 * @returns {Array<{table, column, type, nullable, default_val, is_pk, fk_ref}>}
 */
function getMysqlSchema(conn, tableFilter) {
    // Extract database name from URL
    let dbName = '';
    try {
        dbName = new URL(conn.url).pathname.slice(1);
    }
    catch { }
    // Both values are spliced into SQL string literals — double embedded
    // single quotes so a crafted name cannot break out (SQL injection).
    const safeDb = dbName.replace(/'/g, "''");
    const tableClause = tableFilter ? `AND c.TABLE_NAME = '${tableFilter.replace(/'/g, "''")}'` : '';
    const sql = `
    SELECT
      c.TABLE_NAME,
      c.COLUMN_NAME,
      c.COLUMN_TYPE,
      c.IS_NULLABLE,
      IFNULL(c.COLUMN_DEFAULT, ''),
      CASE WHEN c.COLUMN_KEY = 'PRI' THEN 'YES' ELSE 'NO' END,
      IFNULL(
        (SELECT CONCAT(kcu.REFERENCED_TABLE_NAME, '.', kcu.REFERENCED_COLUMN_NAME)
         FROM information_schema.KEY_COLUMN_USAGE kcu
         WHERE kcu.TABLE_SCHEMA = c.TABLE_SCHEMA
           AND kcu.TABLE_NAME = c.TABLE_NAME
           AND kcu.COLUMN_NAME = c.COLUMN_NAME
           AND kcu.REFERENCED_TABLE_NAME IS NOT NULL
         LIMIT 1), ''
      )
    FROM information_schema.COLUMNS c
    WHERE c.TABLE_SCHEMA = '${safeDb}' ${tableClause}
    ORDER BY c.TABLE_NAME, c.ORDINAL_POSITION;
  `;
    const raw = execQuery(conn, sql);
    if (!raw.trim())
        return [];
    // mysql -e output: tab-separated columns; skip any ASCII-art border lines.
    return raw.split('\n').filter(l => l.trim() && !l.startsWith('+')).map(line => {
        const parts = line.split('\t');
        return {
            table: parts[0]?.trim() || '',
            column: parts[1]?.trim() || '',
            type: parts[2]?.trim() || '',
            nullable: parts[3]?.trim() || '',
            default_val: parts[4]?.trim() || '',
            is_pk: parts[5]?.trim() === 'YES',
            fk_ref: parts[6]?.trim() || '',
        };
    });
}
317
/**
 * Inspect a SQLite schema via PRAGMA table_info / foreign_key_list.
 * @param {{type: string, url: string}} conn - resolved connection
 * @param {string} [tableFilter] - restrict results to one table
 * @returns {Array<{table, column, type, nullable, default_val, is_pk, fk_ref}>}
 * @throws {Error} when tableFilter is not a plain identifier (injection guard)
 */
function getSqliteSchema(conn, tableFilter) {
    const dbPath = sqlitePath(conn.url);
    // Table names are spliced into shell-quoted PRAGMA commands below, so only
    // plain identifiers are safe — anything else could escape the quoting and
    // inject shell or SQL.
    const SAFE_IDENT = /^[A-Za-z_][A-Za-z0-9_$]*$/;
    // Get table list
    let tables;
    if (tableFilter) {
        if (!SAFE_IDENT.test(tableFilter)) {
            throw new Error(`Invalid table name: ${tableFilter}`);
        }
        tables = [tableFilter];
    }
    else {
        const raw = shell(`sqlite3 ${JSON.stringify(dbPath)} ".tables"`);
        tables = raw.split(/\s+/).filter(t => t.trim() && !t.startsWith('sqlite_'));
    }
    const columns = [];
    for (const table of tables) {
        if (!SAFE_IDENT.test(table))
            continue; // skip names that would need shell-unsafe quoting
        const info = shell(`sqlite3 ${JSON.stringify(dbPath)} "PRAGMA table_info('${table}')"`);
        if (!info.trim())
            continue;
        // Get foreign keys for this table
        const fkRaw = shell(`sqlite3 ${JSON.stringify(dbPath)} "PRAGMA foreign_key_list('${table}')"`);
        const fkMap = new Map();
        if (fkRaw.trim()) {
            for (const fkLine of fkRaw.split('\n')) {
                const fkParts = fkLine.split('|');
                // Format: id|seq|table|from|to|on_update|on_delete|match
                if (fkParts[3] && fkParts[2]) {
                    fkMap.set(fkParts[3], `${fkParts[2]}.${fkParts[4] || 'id'}`);
                }
            }
        }
        for (const line of info.split('\n')) {
            if (!line.trim())
                continue;
            const parts = line.split('|');
            // Format: cid|name|type|notnull|dflt_value|pk
            columns.push({
                table,
                column: parts[1] || '',
                type: parts[2] || '',
                nullable: parts[3] === '1' ? 'NO' : 'YES',
                default_val: parts[4] || '',
                is_pk: parts[5] === '1',
                fk_ref: fkMap.get(parts[1] || '') || '',
            });
        }
    }
    return columns;
}
363
/** Dispatch schema inspection to the driver matching the connection type. */
function getSchema(conn, tableFilter) {
    const inspectors = {
        postgres: getPostgresSchema,
        mysql: getMysqlSchema,
        sqlite: getSqliteSchema,
    };
    return inspectors[conn.type]?.(conn, tableFilter);
}
370
/**
 * Render schema columns as one Markdown table per database table,
 * preserving first-seen table order.
 */
function formatSchema(columns) {
    if (!columns.length)
        return 'No tables found or no columns returned.';
    // Group columns by owning table.
    const byTable = new Map();
    for (const col of columns) {
        const group = byTable.get(col.table) ?? [];
        group.push(col);
        byTable.set(col.table, group);
    }
    const out = [];
    for (const [tableName, cols] of byTable) {
        out.push(`\n### ${tableName}`, '', '| Column | Type | Nullable | PK | Default | FK |', '|--------|------|----------|----|---------|----|');
        for (const c of cols) {
            const cells = [c.column, c.type, c.nullable, c.is_pk ? 'YES' : '', c.default_val || '', c.fk_ref || ''];
            out.push(`| ${cells.join(' | ')} |`);
        }
    }
    return out.join('\n');
}
392
/**
 * Build a Mermaid `erDiagram` from schema columns: deduplicated FK
 * relationships first, then one entity block per table with
 * "type column [PK] [FK]" attribute lines.
 */
function generateMermaidDiagram(columns) {
    if (!columns.length)
        return 'No schema data to diagram.';
    // Group columns by owning table.
    const byTable = new Map();
    for (const col of columns) {
        const group = byTable.get(col.table) ?? [];
        group.push(col);
        byTable.set(col.table, group);
    }
    const out = ['erDiagram'];
    // Relationship lines (deduplicated): referenced ||--o{ referencing.
    const relLines = new Set();
    for (const [tableName, cols] of byTable) {
        for (const { column, fk_ref } of cols) {
            if (!fk_ref)
                continue;
            const refTable = fk_ref.split('.')[0];
            relLines.add(`  ${refTable} ||--o{ ${tableName} : "${column}"`);
        }
    }
    out.push(...relLines);
    // Entity blocks.
    for (const [tableName, cols] of byTable) {
        out.push(`  ${tableName} {`);
        for (const col of cols) {
            // Mermaid attribute types cannot contain spaces or parentheses.
            const attrType = col.type.replace(/\s+/g, '_').replace(/[()]/g, '');
            const flags = `${col.is_pk ? ' PK' : ''}${col.fk_ref ? ' FK' : ''}`;
            out.push(`    ${attrType} ${col.column}${flags}`);
        }
        out.push('  }');
    }
    return out.join('\n');
}
431
// ── Tool Registration ────────────────────────────────────────────────
/**
 * Register all K:BOT database tools with the tool registry:
 * db_query, db_schema, db_migrate, db_seed, prisma_introspect,
 * prisma_generate, prisma_migrate, db_diagram.
 * Every tool returns a Markdown string and reports errors in-band
 * (never throws) so the chat layer can always render a response.
 */
export function registerDatabaseTools() {
    // Shared by every Prisma tool that bails out when Prisma is missing.
    const PRISMA_INSTALL_HINT = 'Prisma not found in this project. Install it with:\n\n```\nnpm install prisma @prisma/client --save-dev\nnpx prisma init\n```';
    // Table names must be plain identifiers before they are spliced into SQL.
    const SAFE_IDENT = /^[A-Za-z_][A-Za-z0-9_$]*$/;
    // ── db_query ─────────────────────────────────────────────────────
    registerTool({
        name: 'db_query',
        description: 'Execute a SQL query against a database (Postgres, MySQL, SQLite). Returns formatted results. Use for SELECT, INSERT, UPDATE, DELETE.',
        parameters: {
            sql: { type: 'string', description: 'SQL query to execute', required: true },
            connection_string: { type: 'string', description: 'Database connection string (e.g., postgres://user:pass@host/db). If omitted, reads from DATABASE_URL env.' },
            timeout: { type: 'number', description: 'Query timeout in ms (default: 30000)' },
            confirm: { type: 'boolean', description: 'Set true to allow destructive statements (DROP, TRUNCATE). Default: false' },
        },
        tier: 'pro',
        timeout: 60_000,
        async execute(args) {
            try {
                const conn = resolveConnection(args.connection_string ? String(args.connection_string) : undefined);
                const sql = String(args.sql);
                const timeout = typeof args.timeout === 'number' ? args.timeout : 30_000;
                // Basic safety: destructive operations require an explicit confirm
                // flag (previously the warning asked for "confirmation" but no
                // mechanism existed to give it).
                const trimmedSql = sql.trim().toUpperCase();
                if ((trimmedSql.startsWith('DROP') || trimmedSql.startsWith('TRUNCATE')) && args.confirm !== true) {
                    return `Warning: Destructive operation detected (${trimmedSql.split(/\s+/)[0]}). Re-run with \`confirm: true\` if intended.\n\nQuery: ${sql}`;
                }
                const result = execQuery(conn, sql, timeout);
                if (!result.trim())
                    return `Query executed successfully. (no output)`;
                return `**${conn.type}** query result:\n\n${result}`;
            }
            catch (err) {
                return `Database query error: ${err instanceof Error ? err.message : String(err)}`;
            }
        },
    });
    // ── db_schema ────────────────────────────────────────────────────
    registerTool({
        name: 'db_schema',
        description: 'Inspect database schema — tables, columns, types, nullable, primary keys, foreign keys. Works with Postgres, MySQL, SQLite.',
        parameters: {
            connection_string: { type: 'string', description: 'Database connection string. If omitted, reads from DATABASE_URL env.' },
            table: { type: 'string', description: 'Specific table name to inspect (default: all tables)' },
        },
        tier: 'free',
        timeout: 30_000,
        async execute(args) {
            try {
                const conn = resolveConnection(args.connection_string ? String(args.connection_string) : undefined);
                const tableFilter = args.table ? String(args.table) : undefined;
                const columns = getSchema(conn, tableFilter);
                const formatted = formatSchema(columns);
                const tableCount = new Set(columns.map(c => c.table)).size;
                const header = tableFilter
                    ? `**${conn.type}** — table \`${tableFilter}\` (${columns.length} columns)`
                    : `**${conn.type}** — ${tableCount} tables, ${columns.length} columns`;
                return `${header}\n${formatted}`;
            }
            catch (err) {
                return `Schema inspection error: ${err instanceof Error ? err.message : String(err)}`;
            }
        },
    });
    // ── db_migrate ───────────────────────────────────────────────────
    registerTool({
        name: 'db_migrate',
        description: 'Generate and optionally run a SQL migration. Writes timestamped .sql file to ./migrations/ or ./supabase/migrations/. Supports --dry-run.',
        parameters: {
            sql: { type: 'string', description: 'Migration SQL statements (ALTER TABLE, CREATE TABLE, etc.)', required: true },
            name: { type: 'string', description: 'Migration name (e.g., "add_users_table")', required: true },
            connection_string: { type: 'string', description: 'Database connection string. If omitted, reads from DATABASE_URL env.' },
            dry_run: { type: 'boolean', description: 'If true, only show the SQL without executing or writing (default: false)' },
            migrations_dir: { type: 'string', description: 'Custom migrations directory path (default: auto-detect ./supabase/migrations/ or ./migrations/)' },
        },
        tier: 'pro',
        timeout: 60_000,
        async execute(args) {
            try {
                const sql = String(args.sql);
                // Sanitize the name so it is always a safe filename component.
                const name = String(args.name).replace(/[^a-zA-Z0-9_-]/g, '_');
                const dryRun = args.dry_run === true;
                if (dryRun) {
                    return `**Dry run** — migration \`${name}\`:\n\n\`\`\`sql\n${sql}\n\`\`\`\n\nNo changes made.`;
                }
                // Determine migrations directory: explicit arg > supabase layout > ./migrations.
                let migrationsDir;
                if (args.migrations_dir) {
                    migrationsDir = resolve(String(args.migrations_dir));
                }
                else if (existsSync(resolve(process.cwd(), 'supabase/migrations'))) {
                    migrationsDir = resolve(process.cwd(), 'supabase/migrations');
                }
                else {
                    migrationsDir = resolve(process.cwd(), 'migrations');
                }
                // Ensure directory exists
                mkdirSync(migrationsDir, { recursive: true });
                // Timestamped filename: YYYYMMDDHHMMSS_<name>.sql (sorts chronologically).
                const timestamp = new Date().toISOString().replace(/[-:T]/g, '').slice(0, 14);
                const filename = `${timestamp}_${name}.sql`;
                const filepath = join(migrationsDir, filename);
                // Write migration file
                const header = `-- Migration: ${name}\n-- Generated by K:BOT at ${new Date().toISOString()}\n-- Run: apply with db_query or your migration tool\n\n`;
                writeFileSync(filepath, header + sql + '\n', 'utf-8');
                // Execute only when some connection is configured; a failure here
                // still leaves the migration file on disk.
                let execResult = '';
                if (args.connection_string || process.env.DATABASE_URL || process.env.POSTGRES_URL || process.env.MYSQL_URL || process.env.SQLITE_PATH) {
                    try {
                        const conn = resolveConnection(args.connection_string ? String(args.connection_string) : undefined);
                        execQuery(conn, sql, 60_000);
                        execResult = '\n\nMigration **executed successfully** against the database.';
                    }
                    catch (execErr) {
                        execResult = `\n\nMigration file saved but **execution failed**: ${execErr instanceof Error ? execErr.message : String(execErr)}`;
                    }
                }
                return `Migration file created: \`${filepath}\`\n\n\`\`\`sql\n${sql}\n\`\`\`${execResult}`;
            }
            catch (err) {
                return `Migration error: ${err instanceof Error ? err.message : String(err)}`;
            }
        },
    });
    // ── db_seed ──────────────────────────────────────────────────────
    registerTool({
        name: 'db_seed',
        description: 'Generate realistic seed data for a database table. Queries the schema, then generates INSERT statements with fake but plausible data.',
        parameters: {
            table: { type: 'string', description: 'Table name to seed', required: true },
            count: { type: 'number', description: 'Number of rows to generate (default: 10)' },
            connection_string: { type: 'string', description: 'Database connection string. If omitted, reads from DATABASE_URL env.' },
            execute: { type: 'boolean', description: 'If true, execute the INSERT statements against the database (default: false — just returns SQL)' },
        },
        tier: 'pro',
        async execute(args) {
            try {
                const tableName = String(args.table);
                // tableName is interpolated directly into INSERT statements —
                // restrict it to a plain identifier to prevent SQL injection.
                if (!SAFE_IDENT.test(tableName)) {
                    return `Invalid table name: \`${tableName}\` — only letters, digits, underscores, and $ are allowed.`;
                }
                // Clamp row count to a sane 1..1000 range (0 or negative made no sense).
                const count = typeof args.count === 'number' ? Math.min(Math.max(Math.floor(args.count), 1), 1000) : 10;
                const conn = resolveConnection(args.connection_string ? String(args.connection_string) : undefined);
                // Get schema for this table
                const columns = getSchema(conn, tableName);
                if (columns.length === 0) {
                    return `Table \`${tableName}\` not found or has no columns.`;
                }
                // Filter out auto-generated columns (serial/autoincrement PKs with defaults)
                const seedColumns = columns.filter(c => {
                    // Skip serial / auto-increment columns
                    if (c.is_pk && (c.default_val.includes('nextval') || c.type.toLowerCase().includes('serial') || c.type.toLowerCase().includes('autoincrement'))) {
                        return false;
                    }
                    // Skip columns with generated defaults that should be left alone
                    if (c.default_val.includes('gen_random_uuid') || c.default_val.includes('now()') || c.default_val.includes('CURRENT_TIMESTAMP')) {
                        return false;
                    }
                    return true;
                });
                if (seedColumns.length === 0) {
                    return `All columns in \`${tableName}\` appear to be auto-generated. No seed data needed.`;
                }
                const colNames = seedColumns.map(c => c.column);
                const inserts = [];
                for (let i = 0; i < count; i++) {
                    const values = seedColumns.map(c => fakeValue(c.column, c.type, i));
                    inserts.push(`INSERT INTO ${tableName} (${colNames.join(', ')}) VALUES (${values.join(', ')});`);
                }
                const fullSql = inserts.join('\n');
                // Optionally execute
                if (args.execute === true) {
                    try {
                        execQuery(conn, fullSql, 60_000);
                        return `**Seeded ${count} rows** into \`${tableName}\`.\n\n\`\`\`sql\n${fullSql}\n\`\`\``;
                    }
                    catch (execErr) {
                        return `Seed SQL generated but **execution failed**: ${execErr instanceof Error ? execErr.message : String(execErr)}\n\n\`\`\`sql\n${fullSql}\n\`\`\``;
                    }
                }
                return `**${count} INSERT statements** for \`${tableName}\` (${colNames.length} columns):\n\n\`\`\`sql\n${fullSql}\n\`\`\`\n\nSet \`execute: true\` to run these against the database.`;
            }
            catch (err) {
                return `Seed generation error: ${err instanceof Error ? err.message : String(err)}`;
            }
        },
    });
    // ── prisma_introspect ────────────────────────────────────────────
    registerTool({
        name: 'prisma_introspect',
        description: 'Run Prisma introspect (db pull) on an existing database to generate schema.prisma from the live schema.',
        parameters: {
            path: { type: 'string', description: 'Project directory containing package.json (default: cwd)', },
            connection_string: { type: 'string', description: 'Database URL to introspect. If omitted, uses DATABASE_URL from .env or env.' },
        },
        tier: 'pro',
        timeout: 60_000,
        async execute(args) {
            try {
                const cwd = args.path ? resolve(String(args.path)) : process.cwd();
                // Check if Prisma is available
                if (!checkPrisma(cwd)) {
                    return PRISMA_INSTALL_HINT;
                }
                const env = { ...process.env };
                if (args.connection_string) {
                    env.DATABASE_URL = String(args.connection_string);
                }
                const result = shell('npx prisma db pull', { cwd, env, timeout: 60_000 });
                return `**Prisma introspect** completed:\n\n${result}\n\nSchema written to \`prisma/schema.prisma\`.`;
            }
            catch (err) {
                return `Prisma introspect error: ${err instanceof Error ? err.message : String(err)}`;
            }
        },
    });
    // ── prisma_generate ──────────────────────────────────────────────
    registerTool({
        name: 'prisma_generate',
        description: 'Generate Prisma client from schema.prisma. Creates typed database client in node_modules/@prisma/client.',
        parameters: {
            path: { type: 'string', description: 'Project directory containing package.json (default: cwd)' },
        },
        tier: 'pro',
        timeout: 60_000,
        async execute(args) {
            try {
                const cwd = args.path ? resolve(String(args.path)) : process.cwd();
                if (!checkPrisma(cwd)) {
                    return PRISMA_INSTALL_HINT;
                }
                // Check if schema.prisma exists
                const schemaPath = join(cwd, 'prisma', 'schema.prisma');
                if (!existsSync(schemaPath)) {
                    return `No \`prisma/schema.prisma\` found in ${cwd}. Run \`prisma_introspect\` first or create a schema with \`npx prisma init\`.`;
                }
                const result = shell('npx prisma generate', { cwd, timeout: 60_000 });
                return `**Prisma generate** completed:\n\n${result}\n\nClient available at \`@prisma/client\`.`;
            }
            catch (err) {
                return `Prisma generate error: ${err instanceof Error ? err.message : String(err)}`;
            }
        },
    });
    // ── prisma_migrate ───────────────────────────────────────────────
    registerTool({
        name: 'prisma_migrate',
        description: 'Run Prisma migration (prisma migrate dev). Creates a migration from schema changes and applies it to the database.',
        parameters: {
            name: { type: 'string', description: 'Migration name (e.g., "add_posts_table")', required: true },
            path: { type: 'string', description: 'Project directory containing package.json (default: cwd)' },
            connection_string: { type: 'string', description: 'Database URL. If omitted, uses DATABASE_URL from .env or env.' },
            create_only: { type: 'boolean', description: 'If true, create migration files without applying (default: false)' },
        },
        tier: 'pro',
        timeout: 120_000,
        async execute(args) {
            try {
                const cwd = args.path ? resolve(String(args.path)) : process.cwd();
                // Sanitize the name so it is shell- and filename-safe.
                const migrationName = String(args.name).replace(/[^a-zA-Z0-9_-]/g, '_');
                if (!checkPrisma(cwd)) {
                    return PRISMA_INSTALL_HINT;
                }
                const schemaPath = join(cwd, 'prisma', 'schema.prisma');
                if (!existsSync(schemaPath)) {
                    return `No \`prisma/schema.prisma\` found in ${cwd}. Create a schema first.`;
                }
                const env = { ...process.env };
                if (args.connection_string) {
                    env.DATABASE_URL = String(args.connection_string);
                }
                const createOnly = args.create_only === true ? ' --create-only' : '';
                const result = shell(`npx prisma migrate dev --name ${migrationName}${createOnly}`, { cwd, env, timeout: 120_000 });
                return `**Prisma migrate** completed:\n\n${result}`;
            }
            catch (err) {
                return `Prisma migrate error: ${err instanceof Error ? err.message : String(err)}`;
            }
        },
    });
    // ── db_diagram ───────────────────────────────────────────────────
    registerTool({
        name: 'db_diagram',
        description: 'Generate an ER diagram in Mermaid format from database schema. Outputs entities, attributes, types, and relationships.',
        parameters: {
            connection_string: { type: 'string', description: 'Database connection string. If omitted, reads from DATABASE_URL env.' },
            table: { type: 'string', description: 'Specific table to include (default: all tables)' },
        },
        tier: 'free',
        timeout: 30_000,
        async execute(args) {
            try {
                const conn = resolveConnection(args.connection_string ? String(args.connection_string) : undefined);
                const tableFilter = args.table ? String(args.table) : undefined;
                const columns = getSchema(conn, tableFilter);
                const mermaid = generateMermaidDiagram(columns);
                const tableCount = new Set(columns.map(c => c.table)).size;
                const relCount = columns.filter(c => c.fk_ref).length;
                return `**ER Diagram** — ${tableCount} tables, ${columns.length} columns, ${relCount} relationships\n\n\`\`\`mermaid\n${mermaid}\n\`\`\``;
            }
            catch (err) {
                return `ER diagram error: ${err instanceof Error ? err.message : String(err)}`;
            }
        },
    });
}
733
// ── Prisma helper ────────────────────────────────────────────────────
/**
 * Check whether Prisma is declared in the project's package.json
 * (either `prisma` or `@prisma/client`, in dependencies or devDependencies).
 * Returns false for a missing or unparseable package.json.
 */
function checkPrisma(cwd) {
    try {
        const pkgPath = join(cwd, 'package.json');
        if (!existsSync(pkgPath))
            return false;
        const manifest = JSON.parse(readFileSync(pkgPath, 'utf-8'));
        const declared = Object.keys({
            ...manifest.dependencies,
            ...manifest.devDependencies,
        });
        return declared.includes('prisma') || declared.includes('@prisma/client');
    }
    catch {
        return false;
    }
}
751
+ //# sourceMappingURL=database.js.map