relq 1.0.31 → 1.0.33

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -113,7 +113,7 @@ function parseSchemaFileForComparison(schemaPath) {
113
113
  continue;
114
114
  const tsName = colMatch[1];
115
115
  const type = colMatch[2];
116
- const explicitNameMatch = colDef.match(new RegExp(`${type}\\s*\\(['\"]([^'"]+)['\"]`));
116
+ const explicitNameMatch = colDef.match(new RegExp(`${type}(?:<[^>]+>)?\\s*\\(['\"]([^'"]+)['\"]`));
117
117
  const dbColName = explicitNameMatch ? explicitNameMatch[1] : tsName;
118
118
  tsToDbNameMap.set(tsName, dbColName);
119
119
  let defaultValue = null;
@@ -493,6 +493,12 @@ function parseSchemaFileForComparison(schemaPath) {
493
493
  }
494
494
  }
495
495
  }
496
+ const schemaDir = path.dirname(schemaPath);
497
+ const schemaBaseName = path.basename(schemaPath, '.ts');
498
+ const functionsPath = path.join(schemaDir, `${schemaBaseName}.functions.ts`);
499
+ const triggersPath = path.join(schemaDir, `${schemaBaseName}.triggers.ts`);
500
+ const functions = parseFunctionsFile(functionsPath);
501
+ const triggers = parseTriggersFile(triggersPath);
496
502
  return {
497
503
  tables,
498
504
  enums,
@@ -500,8 +506,8 @@ function parseSchemaFileForComparison(schemaPath) {
500
506
  compositeTypes: [],
501
507
  sequences: [],
502
508
  collations: [],
503
- functions: [],
504
- triggers: [],
509
+ functions,
510
+ triggers,
505
511
  policies: [],
506
512
  partitions: [],
507
513
  foreignServers: [],
@@ -509,6 +515,132 @@ function parseSchemaFileForComparison(schemaPath) {
509
515
  extensions,
510
516
  };
511
517
  }
518
+ function parseFunctionsFile(filePath) {
519
+ if (!fs.existsSync(filePath)) {
520
+ return [];
521
+ }
522
+ const rawContent = fs.readFileSync(filePath, 'utf-8');
523
+ const content = (0, strip_comments_1.default)(rawContent);
524
+ const functions = [];
525
+ const funcPattern = /pgFunction\s*\(\s*['"]([^'"]+)['"]\s*,\s*\{/g;
526
+ let funcMatch;
527
+ while ((funcMatch = funcPattern.exec(content)) !== null) {
528
+ const funcName = funcMatch[1];
529
+ const startIdx = funcMatch.index + funcMatch[0].length;
530
+ let braceCount = 1;
531
+ let endIdx = startIdx;
532
+ while (braceCount > 0 && endIdx < content.length) {
533
+ const char = content[endIdx];
534
+ if (char === '{')
535
+ braceCount++;
536
+ else if (char === '}')
537
+ braceCount--;
538
+ endIdx++;
539
+ }
540
+ const optionsBlock = content.substring(startIdx, endIdx - 1);
541
+ const returnsMatch = optionsBlock.match(/returns:\s*['"]([^'"]+)['"]/);
542
+ const returnType = returnsMatch ? returnsMatch[1] : 'void';
543
+ const languageMatch = optionsBlock.match(/language:\s*['"]([^'"]+)['"]/);
544
+ const language = languageMatch ? languageMatch[1] : 'plpgsql';
545
+ const volatilityMatch = optionsBlock.match(/volatility:\s*['"]([^'"]+)['"]/);
546
+ const volatility = volatilityMatch ? volatilityMatch[1] : 'VOLATILE';
547
+ const argsMatch = optionsBlock.match(/args:\s*\[([^\]]*)\]/s);
548
+ const argTypes = [];
549
+ if (argsMatch) {
550
+ const argsBlock = argsMatch[1];
551
+ const typeMatches = argsBlock.matchAll(/type:\s*['"]([^'"]+)['"]/g);
552
+ for (const m of typeMatches) {
553
+ argTypes.push(m[1]);
554
+ }
555
+ }
556
+ const bodyMatch = optionsBlock.match(/body:\s*`([^`]*)`/s) || optionsBlock.match(/raw:\s*`([^`]*)`/s);
557
+ const definition = bodyMatch ? bodyMatch[1].trim() : '';
558
+ functions.push({
559
+ name: funcName,
560
+ schema: 'public',
561
+ returnType,
562
+ argTypes,
563
+ language,
564
+ definition,
565
+ isAggregate: false,
566
+ volatility,
567
+ });
568
+ }
569
+ return functions;
570
+ }
571
+ function parseTriggersFile(filePath) {
572
+ if (!fs.existsSync(filePath)) {
573
+ return [];
574
+ }
575
+ const rawContent = fs.readFileSync(filePath, 'utf-8');
576
+ const content = (0, strip_comments_1.default)(rawContent);
577
+ const triggers = [];
578
+ const trigPattern = /pgTrigger\s*\(\s*['"]([^'"]+)['"]\s*,\s*\{/g;
579
+ let trigMatch;
580
+ while ((trigMatch = trigPattern.exec(content)) !== null) {
581
+ const trigName = trigMatch[1];
582
+ const startIdx = trigMatch.index + trigMatch[0].length;
583
+ let braceCount = 1;
584
+ let endIdx = startIdx;
585
+ while (braceCount > 0 && endIdx < content.length) {
586
+ const char = content[endIdx];
587
+ if (char === '{')
588
+ braceCount++;
589
+ else if (char === '}')
590
+ braceCount--;
591
+ endIdx++;
592
+ }
593
+ const optionsBlock = content.substring(startIdx, endIdx - 1);
594
+ let tableName = '';
595
+ const onSchemaMatch = optionsBlock.match(/on:\s*schema\.(\w+)/);
596
+ const onStringMatch = optionsBlock.match(/on:\s*['"]([^'"]+)['"]/);
597
+ if (onSchemaMatch) {
598
+ tableName = onSchemaMatch[1].replace(/[A-Z]/g, letter => `_${letter.toLowerCase()}`);
599
+ }
600
+ else if (onStringMatch) {
601
+ tableName = onStringMatch[1];
602
+ }
603
+ let timing = 'BEFORE';
604
+ let event = 'UPDATE';
605
+ const beforeMatch = optionsBlock.match(/before:\s*['"]?(\w+)['"]?/);
606
+ const afterMatch = optionsBlock.match(/after:\s*['"]?(\w+)['"]?/);
607
+ const insteadOfMatch = optionsBlock.match(/insteadOf:\s*['"]?(\w+)['"]?/);
608
+ if (beforeMatch) {
609
+ timing = 'BEFORE';
610
+ event = beforeMatch[1].toUpperCase();
611
+ }
612
+ else if (afterMatch) {
613
+ timing = 'AFTER';
614
+ event = afterMatch[1].toUpperCase();
615
+ }
616
+ else if (insteadOfMatch) {
617
+ timing = 'INSTEAD OF';
618
+ event = insteadOfMatch[1].toUpperCase();
619
+ }
620
+ const forEachMatch = optionsBlock.match(/forEach:\s*['"]?(\w+)['"]?/);
621
+ const forEach = forEachMatch && forEachMatch[1].toUpperCase() === 'STATEMENT' ? 'STATEMENT' : 'ROW';
622
+ let functionName = '';
623
+ const execFuncMatch = optionsBlock.match(/execute:\s*functions\.(\w+)/);
624
+ const execStringMatch = optionsBlock.match(/execute:\s*['"]([^'"]+)['"]/);
625
+ if (execFuncMatch) {
626
+ functionName = execFuncMatch[1].replace(/[A-Z]/g, letter => `_${letter.toLowerCase()}`);
627
+ }
628
+ else if (execStringMatch) {
629
+ functionName = execStringMatch[1];
630
+ }
631
+ triggers.push({
632
+ name: trigName,
633
+ tableName,
634
+ event,
635
+ timing,
636
+ forEach,
637
+ functionName,
638
+ definition: '',
639
+ isEnabled: true,
640
+ });
641
+ }
642
+ return triggers;
643
+ }
512
644
  function snapshotToDatabaseSchema(snapshot) {
513
645
  const tables = (snapshot.tables || []).map(t => ({
514
646
  name: t.name,
@@ -549,6 +681,26 @@ function snapshotToDatabaseSchema(snapshot) {
549
681
  partitionType: t.partitionType,
550
682
  partitionKey: t.partitionKey ? (Array.isArray(t.partitionKey) ? t.partitionKey : [t.partitionKey]) : undefined,
551
683
  }));
684
+ const functions = (snapshot.functions || []).map(f => ({
685
+ name: f.name,
686
+ schema: f.schema || 'public',
687
+ returnType: f.returnType,
688
+ argTypes: f.argTypes || [],
689
+ language: f.language,
690
+ definition: f.body || '',
691
+ isAggregate: false,
692
+ volatility: f.volatility || 'VOLATILE',
693
+ }));
694
+ const triggers = (snapshot.triggers || []).map(t => ({
695
+ name: t.name,
696
+ tableName: t.table,
697
+ event: t.events?.[0] || 'UPDATE',
698
+ timing: t.timing,
699
+ forEach: t.forEach || 'ROW',
700
+ functionName: t.functionName,
701
+ definition: '',
702
+ isEnabled: true,
703
+ }));
552
704
  return {
553
705
  tables,
554
706
  enums: (snapshot.enums || []).map(e => ({ name: e.name, values: e.values })),
@@ -556,8 +708,8 @@ function snapshotToDatabaseSchema(snapshot) {
556
708
  compositeTypes: [],
557
709
  sequences: [],
558
710
  collations: [],
559
- functions: [],
560
- triggers: [],
711
+ functions,
712
+ triggers,
561
713
  policies: [],
562
714
  partitions: [],
563
715
  foreignServers: [],
@@ -40,6 +40,7 @@ const config_loader_1 = require("../utils/config-loader.cjs");
40
40
  const fast_introspect_1 = require("../utils/fast-introspect.cjs");
41
41
  const ast_transformer_1 = require("../utils/ast-transformer.cjs");
42
42
  const ast_codegen_1 = require("../utils/ast-codegen.cjs");
43
+ const types_manager_1 = require("../utils/types-manager.cjs");
43
44
  const env_loader_1 = require("../utils/env-loader.cjs");
44
45
  const cli_utils_1 = require("../utils/cli-utils.cjs");
45
46
  const relqignore_1 = require("../utils/relqignore.cjs");
@@ -545,7 +546,18 @@ async function pullCommand(context) {
545
546
  (0, cli_utils_1.fatal)('Automatic merge failed; fix conflicts and then commit', `${cli_utils_1.colors.cyan('relq resolve --theirs <name>')} Take remote version\n${cli_utils_1.colors.cyan('relq resolve --all-theirs')} Take all remote\n${cli_utils_1.colors.cyan('relq pull --force')} Force overwrite local`);
546
547
  }
547
548
  if (allChanges.length === 0) {
548
- console.log('Already up to date with remote');
549
+ const localCommits = (0, repo_manager_1.getAllCommits)(projectRoot);
550
+ const localHashes = new Set(localCommits.map(c => c.hash));
551
+ const missingCommits = remoteCommits.filter(c => !localHashes.has(c.hash));
552
+ if (missingCommits.length > 0) {
553
+ for (const commit of missingCommits.reverse()) {
554
+ (0, repo_manager_1.saveCommit)(commit, projectRoot);
555
+ }
556
+ console.log(`Synced ${missingCommits.length} commit(s) from remote`);
557
+ }
558
+ else {
559
+ console.log('Already up to date with remote');
560
+ }
549
561
  console.log('');
550
562
  return;
551
563
  }
@@ -708,6 +720,17 @@ async function pullCommand(context) {
708
720
  spinner.succeed(`Written ${cli_utils_1.colors.cyan(triggersPath)} ${cli_utils_1.colors.muted(`(${(0, cli_utils_1.formatBytes)(trigFileSize)})`)}`);
709
721
  }
710
722
  }
723
+ const typesFilePath = (0, types_manager_1.getTypesFilePath)(schemaPath);
724
+ const typesResult = await (0, types_manager_1.syncTypesFromDb)(connection, typesFilePath);
725
+ if (typesResult.generated) {
726
+ const typesFileSize = fs.existsSync(typesFilePath) ? fs.statSync(typesFilePath).size : 0;
727
+ if (typesResult.typesCount > 0) {
728
+ spinner.succeed(`Written ${cli_utils_1.colors.cyan(typesFilePath)} ${cli_utils_1.colors.muted(`(${typesResult.typesCount} types, ${(0, cli_utils_1.formatBytes)(typesFileSize)})`)}`);
729
+ }
730
+ else {
731
+ spinner.succeed(`Created ${cli_utils_1.colors.cyan(typesFilePath)} ${cli_utils_1.colors.muted('(empty template)')}`);
732
+ }
733
+ }
711
734
  const oldSnapshot = (0, repo_manager_1.loadSnapshot)(projectRoot);
712
735
  const beforeSchema = oldSnapshot ? {
713
736
  extensions: oldSnapshot.extensions?.map(e => e.name) || [],
@@ -42,6 +42,8 @@ const cli_utils_1 = require("../utils/cli-utils.cjs");
42
42
  const fast_introspect_1 = require("../utils/fast-introspect.cjs");
43
43
  const relqignore_1 = require("../utils/relqignore.cjs");
44
44
  const repo_manager_1 = require("../utils/repo-manager.cjs");
45
+ const types_manager_1 = require("../utils/types-manager.cjs");
46
+ const config_loader_2 = require("../utils/config-loader.cjs");
45
47
  async function pushCommand(context) {
46
48
  const { config, flags } = context;
47
49
  if (!config) {
@@ -146,8 +148,32 @@ async function pushCommand(context) {
146
148
  }
147
149
  }
148
150
  }
151
+ const schemaPath = (0, config_loader_2.getSchemaPath)(config);
152
+ const typesFilePath = (0, types_manager_1.getTypesFilePath)(schemaPath);
153
+ let typesSynced = false;
154
+ if (fs.existsSync(typesFilePath) && !dryRun) {
155
+ spinner.start('Syncing TypeScript types...');
156
+ const typesResult = await (0, types_manager_1.syncTypesToDb)(connection, typesFilePath, schemaPath);
157
+ const totalChanges = typesResult.added.length + typesResult.updated.length + typesResult.removed.length;
158
+ if (totalChanges > 0) {
159
+ const parts = [];
160
+ if (typesResult.added.length > 0)
161
+ parts.push(`${typesResult.added.length} added`);
162
+ if (typesResult.updated.length > 0)
163
+ parts.push(`${typesResult.updated.length} updated`);
164
+ if (typesResult.removed.length > 0)
165
+ parts.push(`${typesResult.removed.length} removed`);
166
+ spinner.succeed(`Synced types: ${parts.join(', ')}`);
167
+ typesSynced = true;
168
+ }
169
+ else {
170
+ spinner.succeed('Types in sync');
171
+ }
172
+ }
149
173
  if (toPush.length === 0 && !hasObjectsToDrop) {
150
- console.log('Everything up-to-date');
174
+ if (!typesSynced) {
175
+ console.log('Everything up-to-date');
176
+ }
151
177
  console.log('');
152
178
  return;
153
179
  }
@@ -0,0 +1,413 @@
1
+ "use strict";
2
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
3
+ if (k2 === undefined) k2 = k;
4
+ var desc = Object.getOwnPropertyDescriptor(m, k);
5
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
6
+ desc = { enumerable: true, get: function() { return m[k]; } };
7
+ }
8
+ Object.defineProperty(o, k2, desc);
9
+ }) : (function(o, m, k, k2) {
10
+ if (k2 === undefined) k2 = k;
11
+ o[k2] = m[k];
12
+ }));
13
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
14
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
15
+ }) : function(o, v) {
16
+ o["default"] = v;
17
+ });
18
+ var __importStar = (this && this.__importStar) || (function () {
19
+ var ownKeys = function(o) {
20
+ ownKeys = Object.getOwnPropertyNames || function (o) {
21
+ var ar = [];
22
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
23
+ return ar;
24
+ };
25
+ return ownKeys(o);
26
+ };
27
+ return function (mod) {
28
+ if (mod && mod.__esModule) return mod;
29
+ var result = {};
30
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
31
+ __setModuleDefault(result, mod);
32
+ return result;
33
+ };
34
+ })();
35
+ Object.defineProperty(exports, "__esModule", { value: true });
36
+ exports.ensureTypesTable = ensureTypesTable;
37
+ exports.typesTableExists = typesTableExists;
38
+ exports.parseTypesFile = parseTypesFile;
39
+ exports.extractUsedTypes = extractUsedTypes;
40
+ exports.extractTypeUsages = extractTypeUsages;
41
+ exports.getTypesFromDb = getTypesFromDb;
42
+ exports.storeTypesInDb = storeTypesInDb;
43
+ exports.removeStaleTypes = removeStaleTypes;
44
+ exports.generateTypesFile = generateTypesFile;
45
+ exports.validateTypesUsage = validateTypesUsage;
46
+ exports.isValidTypesFilePath = isValidTypesFilePath;
47
+ exports.getTypesFilePath = getTypesFilePath;
48
+ exports.syncTypesToDb = syncTypesToDb;
49
+ exports.syncTypesFromDb = syncTypesFromDb;
50
+ const fs = __importStar(require("fs"));
51
+ const path = __importStar(require("path"));
52
+ const CREATE_TABLE_SQL = `
53
+ CREATE TABLE IF NOT EXISTS _relq_types (
54
+ name TEXT PRIMARY KEY,
55
+ kind TEXT NOT NULL CHECK (kind IN ('interface', 'type')),
56
+ source TEXT NOT NULL,
57
+ usages JSONB DEFAULT '[]',
58
+ created_at TIMESTAMPTZ DEFAULT NOW(),
59
+ updated_at TIMESTAMPTZ DEFAULT NOW()
60
+ );
61
+
62
+ CREATE INDEX IF NOT EXISTS idx_relq_types_kind ON _relq_types(kind);
63
+ `;
64
+ const ADD_USAGES_COLUMN_SQL = `
65
+ ALTER TABLE _relq_types ADD COLUMN IF NOT EXISTS usages JSONB DEFAULT '[]';
66
+ `;
67
+ async function createPool(connection) {
68
+ const { Pool } = await Promise.resolve().then(() => __importStar(require("../../addon/pg/index.cjs")));
69
+ return new Pool({
70
+ host: connection.host,
71
+ port: connection.port || 5432,
72
+ database: connection.database,
73
+ user: connection.user,
74
+ password: connection.password,
75
+ connectionString: connection.url,
76
+ ssl: connection.ssl,
77
+ });
78
+ }
79
+ async function ensureTypesTable(connection) {
80
+ const pool = await createPool(connection);
81
+ try {
82
+ await pool.query(CREATE_TABLE_SQL);
83
+ await pool.query(ADD_USAGES_COLUMN_SQL);
84
+ }
85
+ finally {
86
+ await pool.end();
87
+ }
88
+ }
89
+ async function typesTableExists(connection) {
90
+ const pool = await createPool(connection);
91
+ try {
92
+ const result = await pool.query(`
93
+ SELECT EXISTS (
94
+ SELECT FROM information_schema.tables
95
+ WHERE table_name = '_relq_types'
96
+ ) as exists
97
+ `);
98
+ return result.rows[0]?.exists ?? false;
99
+ }
100
+ finally {
101
+ await pool.end();
102
+ }
103
+ }
104
+ function parseTypesFile(content) {
105
+ const definitions = [];
106
+ const lines = content.split('\n');
107
+ let i = 0;
108
+ while (i < lines.length) {
109
+ const line = lines[i];
110
+ const interfaceMatch = line.match(/^(?:export\s+)?interface\s+(\w+)(?:<[^>]+>)?\s*\{?\s*$/);
111
+ if (interfaceMatch) {
112
+ const name = interfaceMatch[1];
113
+ const startLine = i;
114
+ const { endLine, source } = extractBlock(lines, i);
115
+ definitions.push({ name, kind: 'interface', source, startLine, endLine });
116
+ i = endLine + 1;
117
+ continue;
118
+ }
119
+ const interfaceExtendsMatch = line.match(/^(?:export\s+)?interface\s+(\w+)(?:<[^>]+>)?\s+extends\s+/);
120
+ if (interfaceExtendsMatch) {
121
+ const name = interfaceExtendsMatch[1];
122
+ const startLine = i;
123
+ const { endLine, source } = extractBlock(lines, i);
124
+ definitions.push({ name, kind: 'interface', source, startLine, endLine });
125
+ i = endLine + 1;
126
+ continue;
127
+ }
128
+ const typeMatch = line.match(/^(?:export\s+)?type\s+(\w+)(?:<[^>]+>)?\s*=\s*/);
129
+ if (typeMatch) {
130
+ const name = typeMatch[1];
131
+ const startLine = i;
132
+ const { endLine, source } = extractTypeAlias(lines, i);
133
+ definitions.push({ name, kind: 'type', source, startLine, endLine });
134
+ i = endLine + 1;
135
+ continue;
136
+ }
137
+ i++;
138
+ }
139
+ return definitions;
140
+ }
141
+ function extractBlock(lines, startIdx) {
142
+ let depth = 0;
143
+ let started = false;
144
+ let endIdx = startIdx;
145
+ for (let i = startIdx; i < lines.length; i++) {
146
+ const line = lines[i];
147
+ for (const char of line) {
148
+ if (char === '{') {
149
+ depth++;
150
+ started = true;
151
+ }
152
+ else if (char === '}') {
153
+ depth--;
154
+ }
155
+ }
156
+ if (started && depth === 0) {
157
+ endIdx = i;
158
+ break;
159
+ }
160
+ }
161
+ const source = lines.slice(startIdx, endIdx + 1).join('\n');
162
+ return { endLine: endIdx, source };
163
+ }
164
+ function extractTypeAlias(lines, startIdx) {
165
+ const firstLine = lines[startIdx];
166
+ if (firstLine.includes(';') && !firstLine.includes('{')) {
167
+ return { endLine: startIdx, source: firstLine };
168
+ }
169
+ let depth = 0;
170
+ let endIdx = startIdx;
171
+ for (let i = startIdx; i < lines.length; i++) {
172
+ const line = lines[i];
173
+ for (const char of line) {
174
+ if (char === '{' || char === '(' || char === '<') {
175
+ depth++;
176
+ }
177
+ else if (char === '}' || char === ')' || char === '>') {
178
+ depth--;
179
+ }
180
+ }
181
+ if (line.includes(';') && depth === 0) {
182
+ endIdx = i;
183
+ break;
184
+ }
185
+ if (i > startIdx && depth === 0) {
186
+ const trimmed = line.trim();
187
+ if (trimmed === '' || trimmed.startsWith('export ')) {
188
+ endIdx = i - 1;
189
+ break;
190
+ }
191
+ }
192
+ }
193
+ const source = lines.slice(startIdx, endIdx + 1).join('\n');
194
+ return { endLine: endIdx, source };
195
+ }
196
+ function extractUsedTypes(schemaContent) {
197
+ const usedTypes = new Set();
198
+ const genericPattern = /(?:jsonb|json|varchar|text|char)\s*<\s*([A-Z][a-zA-Z0-9_]*)/g;
199
+ let match;
200
+ while ((match = genericPattern.exec(schemaContent)) !== null) {
201
+ usedTypes.add(match[1]);
202
+ }
203
+ return Array.from(usedTypes);
204
+ }
205
+ function extractTypeUsages(schemaContent) {
206
+ const usages = {};
207
+ const tablePattern = /(?:(?:export\s+)?(?:const|let|var)\s+(\w+)\s*=\s*)?defineTable\s*\(\s*['"]([^'"]+)['"]\s*,\s*\{/g;
208
+ let tableMatch;
209
+ while ((tableMatch = tablePattern.exec(schemaContent)) !== null) {
210
+ const varName = tableMatch[1];
211
+ const tableName = tableMatch[2];
212
+ const tableStartIdx = tableMatch.index + tableMatch[0].length;
213
+ let depth = 1;
214
+ let columnsEndIdx = tableStartIdx;
215
+ for (let i = tableStartIdx; i < schemaContent.length && depth > 0; i++) {
216
+ if (schemaContent[i] === '{')
217
+ depth++;
218
+ else if (schemaContent[i] === '}')
219
+ depth--;
220
+ columnsEndIdx = i;
221
+ }
222
+ const columnsBlock = schemaContent.substring(tableStartIdx, columnsEndIdx);
223
+ const columnPattern = /(\w+)\s*:\s*(?:jsonb|json|varchar|text|char)\s*<\s*([A-Z][a-zA-Z0-9_]*)/g;
224
+ let columnMatch;
225
+ while ((columnMatch = columnPattern.exec(columnsBlock)) !== null) {
226
+ const columnName = columnMatch[1];
227
+ const typeName = columnMatch[2];
228
+ const tableRef = varName || tableName;
229
+ const usage = `${tableRef}.${columnName}`;
230
+ if (!usages[typeName]) {
231
+ usages[typeName] = [];
232
+ }
233
+ if (!usages[typeName].includes(usage)) {
234
+ usages[typeName].push(usage);
235
+ }
236
+ }
237
+ }
238
+ return usages;
239
+ }
240
+ async function getTypesFromDb(connection) {
241
+ const exists = await typesTableExists(connection);
242
+ if (!exists)
243
+ return [];
244
+ const pool = await createPool(connection);
245
+ try {
246
+ const result = await pool.query(`
247
+ SELECT name, kind, source, usages, created_at, updated_at
248
+ FROM _relq_types
249
+ ORDER BY name
250
+ `);
251
+ return result.rows.map(row => ({
252
+ name: row.name,
253
+ kind: row.kind,
254
+ source: row.source,
255
+ usages: row.usages || [],
256
+ createdAt: row.created_at,
257
+ updatedAt: row.updated_at,
258
+ }));
259
+ }
260
+ finally {
261
+ await pool.end();
262
+ }
263
+ }
264
+ async function storeTypesInDb(connection, types, usages) {
265
+ await ensureTypesTable(connection);
266
+ const pool = await createPool(connection);
267
+ try {
268
+ for (const type of types) {
269
+ const typeUsages = usages?.[type.name] || [];
270
+ await pool.query(`
271
+ INSERT INTO _relq_types (name, kind, source, usages, updated_at)
272
+ VALUES ($1, $2, $3, $4, NOW())
273
+ ON CONFLICT (name) DO UPDATE SET
274
+ kind = EXCLUDED.kind,
275
+ source = EXCLUDED.source,
276
+ usages = EXCLUDED.usages,
277
+ updated_at = NOW()
278
+ `, [type.name, type.kind, type.source, JSON.stringify(typeUsages)]);
279
+ }
280
+ }
281
+ finally {
282
+ await pool.end();
283
+ }
284
+ }
285
+ async function removeStaleTypes(connection, currentTypeNames) {
286
+ const exists = await typesTableExists(connection);
287
+ if (!exists)
288
+ return [];
289
+ const pool = await createPool(connection);
290
+ try {
291
+ const result = await pool.query(`
292
+ DELETE FROM _relq_types
293
+ WHERE name != ALL($1::text[])
294
+ RETURNING name
295
+ `, [currentTypeNames]);
296
+ return result.rows.map(r => r.name);
297
+ }
298
+ finally {
299
+ await pool.end();
300
+ }
301
+ }
302
+ function generateTypesFile(types) {
303
+ if (types.length === 0) {
304
+ return `/**
305
+ * Schema Types
306
+ *
307
+ * TypeScript type definitions for JSON/JSONB columns.
308
+ * These types are tracked in the database and synced across workspaces.
309
+ *
310
+ * Usage in schema.ts:
311
+ * import type { UserMetadata } from './schema.types';
312
+ * const users = defineTable('users', {
313
+ * metadata: jsonb<UserMetadata>(),
314
+ * });
315
+ */
316
+
317
+ // Add your type definitions here:
318
+ // export interface UserMetadata {
319
+ // name: string;
320
+ // age: number;
321
+ // }
322
+ `;
323
+ }
324
+ const interfaceTypes = types.filter(t => t.kind === 'interface');
325
+ const typeAliases = types.filter(t => t.kind === 'type');
326
+ let content = `/**
327
+ * Schema Types
328
+ *
329
+ * TypeScript type definitions for JSON/JSONB columns.
330
+ * These types are tracked in the database and synced across workspaces.
331
+ *
332
+ * Generated by: relq pull
333
+ * Types: ${types.length}
334
+ */
335
+
336
+ `;
337
+ if (interfaceTypes.length > 0) {
338
+ content += '// =============================================================================\n';
339
+ content += '// INTERFACES\n';
340
+ content += '// =============================================================================\n\n';
341
+ for (const type of interfaceTypes) {
342
+ content += type.source + '\n\n';
343
+ }
344
+ }
345
+ if (typeAliases.length > 0) {
346
+ content += '// =============================================================================\n';
347
+ content += '// TYPE ALIASES\n';
348
+ content += '// =============================================================================\n\n';
349
+ for (const type of typeAliases) {
350
+ content += type.source + '\n\n';
351
+ }
352
+ }
353
+ return content;
354
+ }
355
+ function validateTypesUsage(usedTypes, definedTypes) {
356
+ const definedSet = new Set(definedTypes);
357
+ const missingTypes = usedTypes.filter(t => !definedSet.has(t));
358
+ return {
359
+ valid: missingTypes.length === 0,
360
+ missingTypes,
361
+ };
362
+ }
363
+ function isValidTypesFilePath(filePath) {
364
+ const basename = path.basename(filePath);
365
+ return basename.endsWith('.types.ts');
366
+ }
367
+ function getTypesFilePath(schemaPath) {
368
+ const dir = path.dirname(schemaPath);
369
+ const basename = path.basename(schemaPath, '.ts');
370
+ return path.join(dir, `${basename}.types.ts`);
371
+ }
372
+ async function syncTypesToDb(connection, typesFilePath, schemaFilePath) {
373
+ const result = { added: [], updated: [], removed: [] };
374
+ if (!fs.existsSync(typesFilePath)) {
375
+ return result;
376
+ }
377
+ const content = fs.readFileSync(typesFilePath, 'utf-8');
378
+ const localTypes = parseTypesFile(content);
379
+ const localTypeNames = localTypes.map(t => t.name);
380
+ let usages;
381
+ if (schemaFilePath && fs.existsSync(schemaFilePath)) {
382
+ const schemaContent = fs.readFileSync(schemaFilePath, 'utf-8');
383
+ usages = extractTypeUsages(schemaContent);
384
+ }
385
+ const dbTypes = await getTypesFromDb(connection);
386
+ const dbTypeMap = new Map(dbTypes.map(t => [t.name, t]));
387
+ for (const localType of localTypes) {
388
+ const dbType = dbTypeMap.get(localType.name);
389
+ if (!dbType) {
390
+ result.added.push(localType.name);
391
+ }
392
+ else if (dbType.source !== localType.source) {
393
+ result.updated.push(localType.name);
394
+ }
395
+ }
396
+ await storeTypesInDb(connection, localTypes, usages);
397
+ result.removed = await removeStaleTypes(connection, localTypeNames);
398
+ return result;
399
+ }
400
+ async function syncTypesFromDb(connection, typesFilePath) {
401
+ const types = await getTypesFromDb(connection);
402
+ if (types.length === 0) {
403
+ if (!fs.existsSync(typesFilePath)) {
404
+ const content = generateTypesFile([]);
405
+ fs.writeFileSync(typesFilePath, content);
406
+ return { typesCount: 0, generated: true };
407
+ }
408
+ return { typesCount: 0, generated: false };
409
+ }
410
+ const content = generateTypesFile(types);
411
+ fs.writeFileSync(typesFilePath, content);
412
+ return { typesCount: types.length, generated: true };
413
+ }
@@ -73,7 +73,7 @@ function parseSchemaFileForComparison(schemaPath) {
73
73
  continue;
74
74
  const tsName = colMatch[1];
75
75
  const type = colMatch[2];
76
- const explicitNameMatch = colDef.match(new RegExp(`${type}\\s*\\(['\"]([^'"]+)['\"]`));
76
+ const explicitNameMatch = colDef.match(new RegExp(`${type}(?:<[^>]+>)?\\s*\\(['\"]([^'"]+)['\"]`));
77
77
  const dbColName = explicitNameMatch ? explicitNameMatch[1] : tsName;
78
78
  tsToDbNameMap.set(tsName, dbColName);
79
79
  let defaultValue = null;
@@ -453,6 +453,12 @@ function parseSchemaFileForComparison(schemaPath) {
453
453
  }
454
454
  }
455
455
  }
456
+ const schemaDir = path.dirname(schemaPath);
457
+ const schemaBaseName = path.basename(schemaPath, '.ts');
458
+ const functionsPath = path.join(schemaDir, `${schemaBaseName}.functions.ts`);
459
+ const triggersPath = path.join(schemaDir, `${schemaBaseName}.triggers.ts`);
460
+ const functions = parseFunctionsFile(functionsPath);
461
+ const triggers = parseTriggersFile(triggersPath);
456
462
  return {
457
463
  tables,
458
464
  enums,
@@ -460,8 +466,8 @@ function parseSchemaFileForComparison(schemaPath) {
460
466
  compositeTypes: [],
461
467
  sequences: [],
462
468
  collations: [],
463
- functions: [],
464
- triggers: [],
469
+ functions,
470
+ triggers,
465
471
  policies: [],
466
472
  partitions: [],
467
473
  foreignServers: [],
@@ -469,6 +475,132 @@ function parseSchemaFileForComparison(schemaPath) {
469
475
  extensions,
470
476
  };
471
477
  }
478
+ function parseFunctionsFile(filePath) {
479
+ if (!fs.existsSync(filePath)) {
480
+ return [];
481
+ }
482
+ const rawContent = fs.readFileSync(filePath, 'utf-8');
483
+ const content = stripComments(rawContent);
484
+ const functions = [];
485
+ const funcPattern = /pgFunction\s*\(\s*['"]([^'"]+)['"]\s*,\s*\{/g;
486
+ let funcMatch;
487
+ while ((funcMatch = funcPattern.exec(content)) !== null) {
488
+ const funcName = funcMatch[1];
489
+ const startIdx = funcMatch.index + funcMatch[0].length;
490
+ let braceCount = 1;
491
+ let endIdx = startIdx;
492
+ while (braceCount > 0 && endIdx < content.length) {
493
+ const char = content[endIdx];
494
+ if (char === '{')
495
+ braceCount++;
496
+ else if (char === '}')
497
+ braceCount--;
498
+ endIdx++;
499
+ }
500
+ const optionsBlock = content.substring(startIdx, endIdx - 1);
501
+ const returnsMatch = optionsBlock.match(/returns:\s*['"]([^'"]+)['"]/);
502
+ const returnType = returnsMatch ? returnsMatch[1] : 'void';
503
+ const languageMatch = optionsBlock.match(/language:\s*['"]([^'"]+)['"]/);
504
+ const language = languageMatch ? languageMatch[1] : 'plpgsql';
505
+ const volatilityMatch = optionsBlock.match(/volatility:\s*['"]([^'"]+)['"]/);
506
+ const volatility = volatilityMatch ? volatilityMatch[1] : 'VOLATILE';
507
+ const argsMatch = optionsBlock.match(/args:\s*\[([^\]]*)\]/s);
508
+ const argTypes = [];
509
+ if (argsMatch) {
510
+ const argsBlock = argsMatch[1];
511
+ const typeMatches = argsBlock.matchAll(/type:\s*['"]([^'"]+)['"]/g);
512
+ for (const m of typeMatches) {
513
+ argTypes.push(m[1]);
514
+ }
515
+ }
516
+ const bodyMatch = optionsBlock.match(/body:\s*`([^`]*)`/s) || optionsBlock.match(/raw:\s*`([^`]*)`/s);
517
+ const definition = bodyMatch ? bodyMatch[1].trim() : '';
518
+ functions.push({
519
+ name: funcName,
520
+ schema: 'public',
521
+ returnType,
522
+ argTypes,
523
+ language,
524
+ definition,
525
+ isAggregate: false,
526
+ volatility,
527
+ });
528
+ }
529
+ return functions;
530
+ }
531
+ function parseTriggersFile(filePath) {
532
+ if (!fs.existsSync(filePath)) {
533
+ return [];
534
+ }
535
+ const rawContent = fs.readFileSync(filePath, 'utf-8');
536
+ const content = stripComments(rawContent);
537
+ const triggers = [];
538
+ const trigPattern = /pgTrigger\s*\(\s*['"]([^'"]+)['"]\s*,\s*\{/g;
539
+ let trigMatch;
540
+ while ((trigMatch = trigPattern.exec(content)) !== null) {
541
+ const trigName = trigMatch[1];
542
+ const startIdx = trigMatch.index + trigMatch[0].length;
543
+ let braceCount = 1;
544
+ let endIdx = startIdx;
545
+ while (braceCount > 0 && endIdx < content.length) {
546
+ const char = content[endIdx];
547
+ if (char === '{')
548
+ braceCount++;
549
+ else if (char === '}')
550
+ braceCount--;
551
+ endIdx++;
552
+ }
553
+ const optionsBlock = content.substring(startIdx, endIdx - 1);
554
+ let tableName = '';
555
+ const onSchemaMatch = optionsBlock.match(/on:\s*schema\.(\w+)/);
556
+ const onStringMatch = optionsBlock.match(/on:\s*['"]([^'"]+)['"]/);
557
+ if (onSchemaMatch) {
558
+ tableName = onSchemaMatch[1].replace(/[A-Z]/g, letter => `_${letter.toLowerCase()}`);
559
+ }
560
+ else if (onStringMatch) {
561
+ tableName = onStringMatch[1];
562
+ }
563
+ let timing = 'BEFORE';
564
+ let event = 'UPDATE';
565
+ const beforeMatch = optionsBlock.match(/before:\s*['"]?(\w+)['"]?/);
566
+ const afterMatch = optionsBlock.match(/after:\s*['"]?(\w+)['"]?/);
567
+ const insteadOfMatch = optionsBlock.match(/insteadOf:\s*['"]?(\w+)['"]?/);
568
+ if (beforeMatch) {
569
+ timing = 'BEFORE';
570
+ event = beforeMatch[1].toUpperCase();
571
+ }
572
+ else if (afterMatch) {
573
+ timing = 'AFTER';
574
+ event = afterMatch[1].toUpperCase();
575
+ }
576
+ else if (insteadOfMatch) {
577
+ timing = 'INSTEAD OF';
578
+ event = insteadOfMatch[1].toUpperCase();
579
+ }
580
+ const forEachMatch = optionsBlock.match(/forEach:\s*['"]?(\w+)['"]?/);
581
+ const forEach = forEachMatch && forEachMatch[1].toUpperCase() === 'STATEMENT' ? 'STATEMENT' : 'ROW';
582
+ let functionName = '';
583
+ const execFuncMatch = optionsBlock.match(/execute:\s*functions\.(\w+)/);
584
+ const execStringMatch = optionsBlock.match(/execute:\s*['"]([^'"]+)['"]/);
585
+ if (execFuncMatch) {
586
+ functionName = execFuncMatch[1].replace(/[A-Z]/g, letter => `_${letter.toLowerCase()}`);
587
+ }
588
+ else if (execStringMatch) {
589
+ functionName = execStringMatch[1];
590
+ }
591
+ triggers.push({
592
+ name: trigName,
593
+ tableName,
594
+ event,
595
+ timing,
596
+ forEach,
597
+ functionName,
598
+ definition: '',
599
+ isEnabled: true,
600
+ });
601
+ }
602
+ return triggers;
603
+ }
472
604
  function snapshotToDatabaseSchema(snapshot) {
473
605
  const tables = (snapshot.tables || []).map(t => ({
474
606
  name: t.name,
@@ -509,6 +641,26 @@ function snapshotToDatabaseSchema(snapshot) {
509
641
  partitionType: t.partitionType,
510
642
  partitionKey: t.partitionKey ? (Array.isArray(t.partitionKey) ? t.partitionKey : [t.partitionKey]) : undefined,
511
643
  }));
644
+ const functions = (snapshot.functions || []).map(f => ({
645
+ name: f.name,
646
+ schema: f.schema || 'public',
647
+ returnType: f.returnType,
648
+ argTypes: f.argTypes || [],
649
+ language: f.language,
650
+ definition: f.body || '',
651
+ isAggregate: false,
652
+ volatility: f.volatility || 'VOLATILE',
653
+ }));
654
+ const triggers = (snapshot.triggers || []).map(t => ({
655
+ name: t.name,
656
+ tableName: t.table,
657
+ event: t.events?.[0] || 'UPDATE',
658
+ timing: t.timing,
659
+ forEach: t.forEach || 'ROW',
660
+ functionName: t.functionName,
661
+ definition: '',
662
+ isEnabled: true,
663
+ }));
512
664
  return {
513
665
  tables,
514
666
  enums: (snapshot.enums || []).map(e => ({ name: e.name, values: e.values })),
@@ -516,8 +668,8 @@ function snapshotToDatabaseSchema(snapshot) {
516
668
  compositeTypes: [],
517
669
  sequences: [],
518
670
  collations: [],
519
- functions: [],
520
- triggers: [],
671
+ functions,
672
+ triggers,
521
673
  policies: [],
522
674
  partitions: [],
523
675
  foreignServers: [],
@@ -4,10 +4,11 @@ import { requireValidConfig, getSchemaPath } from "../utils/config-loader.js";
4
4
  import { fastIntrospectDatabase } from "../utils/fast-introspect.js";
5
5
  import { introspectedToParsedSchema } from "../utils/ast-transformer.js";
6
6
  import { generateTypeScriptFromAST, assignTrackingIds, copyTrackingIdsToNormalized, generateFunctionsFile, generateTriggersFile } from "../utils/ast-codegen.js";
7
+ import { syncTypesFromDb, getTypesFilePath } from "../utils/types-manager.js";
7
8
  import { getConnectionDescription } from "../utils/env-loader.js";
8
9
  import { createSpinner, colors, formatBytes, formatDuration, fatal, confirm, warning, createMultiProgress } from "../utils/cli-utils.js";
9
10
  import { loadRelqignore, isTableIgnored, isColumnIgnored, isIndexIgnored, isConstraintIgnored, isEnumIgnored, isDomainIgnored, isCompositeTypeIgnored, isFunctionIgnored, } from "../utils/relqignore.js";
10
- import { isInitialized, initRepository, getHead, saveSnapshot, loadSnapshot, createCommit, shortHash, fetchRemoteCommits, ensureRemoteTable, setFetchHead, addUnstagedChanges, getStagedChanges, getUnstagedChanges, clearWorkingState, hashFileContent, saveFileHash, markCommitAsPulled, } from "../utils/repo-manager.js";
11
+ import { isInitialized, initRepository, getHead, saveCommit, saveSnapshot, loadSnapshot, createCommit, shortHash, fetchRemoteCommits, ensureRemoteTable, setFetchHead, addUnstagedChanges, getStagedChanges, getUnstagedChanges, clearWorkingState, hashFileContent, saveFileHash, markCommitAsPulled, getAllCommits, } from "../utils/repo-manager.js";
11
12
  import { compareSchemas } from "../utils/schema-comparator.js";
12
13
  function toCamelCase(str) {
13
14
  return str.replace(/_([a-z])/g, (_, letter) => letter.toUpperCase());
@@ -509,7 +510,18 @@ export async function pullCommand(context) {
509
510
  fatal('Automatic merge failed; fix conflicts and then commit', `${colors.cyan('relq resolve --theirs <name>')} Take remote version\n${colors.cyan('relq resolve --all-theirs')} Take all remote\n${colors.cyan('relq pull --force')} Force overwrite local`);
510
511
  }
511
512
  if (allChanges.length === 0) {
512
- console.log('Already up to date with remote');
513
+ const localCommits = getAllCommits(projectRoot);
514
+ const localHashes = new Set(localCommits.map(c => c.hash));
515
+ const missingCommits = remoteCommits.filter(c => !localHashes.has(c.hash));
516
+ if (missingCommits.length > 0) {
517
+ for (const commit of missingCommits.reverse()) {
518
+ saveCommit(commit, projectRoot);
519
+ }
520
+ console.log(`Synced ${missingCommits.length} commit(s) from remote`);
521
+ }
522
+ else {
523
+ console.log('Already up to date with remote');
524
+ }
513
525
  console.log('');
514
526
  return;
515
527
  }
@@ -672,6 +684,17 @@ export async function pullCommand(context) {
672
684
  spinner.succeed(`Written ${colors.cyan(triggersPath)} ${colors.muted(`(${formatBytes(trigFileSize)})`)}`);
673
685
  }
674
686
  }
687
+ const typesFilePath = getTypesFilePath(schemaPath);
688
+ const typesResult = await syncTypesFromDb(connection, typesFilePath);
689
+ if (typesResult.generated) {
690
+ const typesFileSize = fs.existsSync(typesFilePath) ? fs.statSync(typesFilePath).size : 0;
691
+ if (typesResult.typesCount > 0) {
692
+ spinner.succeed(`Written ${colors.cyan(typesFilePath)} ${colors.muted(`(${typesResult.typesCount} types, ${formatBytes(typesFileSize)})`)}`);
693
+ }
694
+ else {
695
+ spinner.succeed(`Created ${colors.cyan(typesFilePath)} ${colors.muted('(empty template)')}`);
696
+ }
697
+ }
675
698
  const oldSnapshot = loadSnapshot(projectRoot);
676
699
  const beforeSchema = oldSnapshot ? {
677
700
  extensions: oldSnapshot.extensions?.map(e => e.name) || [],
@@ -6,6 +6,8 @@ import { colors, createSpinner, fatal, confirm, warning } from "../utils/cli-uti
6
6
  import { fastIntrospectDatabase } from "../utils/fast-introspect.js";
7
7
  import { loadRelqignore, isTableIgnored, isColumnIgnored, isEnumIgnored, isDomainIgnored, isFunctionIgnored, } from "../utils/relqignore.js";
8
8
  import { isInitialized, getHead, shortHash, fetchRemoteCommits, pushCommit, ensureRemoteTable, getAllCommits, loadSnapshot, isCommitSyncedWith, markCommitAsPushed, markCommitAsApplied, getConnectionLabel, } from "../utils/repo-manager.js";
9
+ import { syncTypesToDb, getTypesFilePath } from "../utils/types-manager.js";
10
+ import { getSchemaPath } from "../utils/config-loader.js";
9
11
  export async function pushCommand(context) {
10
12
  const { config, flags } = context;
11
13
  if (!config) {
@@ -110,8 +112,32 @@ export async function pushCommand(context) {
110
112
  }
111
113
  }
112
114
  }
115
+ const schemaPath = getSchemaPath(config);
116
+ const typesFilePath = getTypesFilePath(schemaPath);
117
+ let typesSynced = false;
118
+ if (fs.existsSync(typesFilePath) && !dryRun) {
119
+ spinner.start('Syncing TypeScript types...');
120
+ const typesResult = await syncTypesToDb(connection, typesFilePath, schemaPath);
121
+ const totalChanges = typesResult.added.length + typesResult.updated.length + typesResult.removed.length;
122
+ if (totalChanges > 0) {
123
+ const parts = [];
124
+ if (typesResult.added.length > 0)
125
+ parts.push(`${typesResult.added.length} added`);
126
+ if (typesResult.updated.length > 0)
127
+ parts.push(`${typesResult.updated.length} updated`);
128
+ if (typesResult.removed.length > 0)
129
+ parts.push(`${typesResult.removed.length} removed`);
130
+ spinner.succeed(`Synced types: ${parts.join(', ')}`);
131
+ typesSynced = true;
132
+ }
133
+ else {
134
+ spinner.succeed('Types in sync');
135
+ }
136
+ }
113
137
  if (toPush.length === 0 && !hasObjectsToDrop) {
114
- console.log('Everything up-to-date');
138
+ if (!typesSynced) {
139
+ console.log('Everything up-to-date');
140
+ }
115
141
  console.log('');
116
142
  return;
117
143
  }
@@ -0,0 +1,364 @@
1
+ import * as fs from 'fs';
2
+ import * as path from 'path';
// DDL for the `_relq_types` bookkeeping table: one row per shared TypeScript
// type definition (interfaces / type aliases used by JSON(B) columns).
//   name   - type identifier (primary key)
//   kind   - 'interface' | 'type'
//   source - full TypeScript source text of the definition
//   usages - JSONB array of "table.column" references using the type
const CREATE_TABLE_SQL = `
CREATE TABLE IF NOT EXISTS _relq_types (
  name TEXT PRIMARY KEY,
  kind TEXT NOT NULL CHECK (kind IN ('interface', 'type')),
  source TEXT NOT NULL,
  usages JSONB DEFAULT '[]',
  created_at TIMESTAMPTZ DEFAULT NOW(),
  updated_at TIMESTAMPTZ DEFAULT NOW()
);

CREATE INDEX IF NOT EXISTS idx_relq_types_kind ON _relq_types(kind);
`;
// Idempotent migration for tables created before the `usages` column existed.
const ADD_USAGES_COLUMN_SQL = `
ALTER TABLE _relq_types ADD COLUMN IF NOT EXISTS usages JSONB DEFAULT '[]';
`;
/**
 * Opens a pg connection pool for the given connection settings using the
 * bundled pg addon. The caller owns the pool and must call pool.end().
 * Both discrete fields (host/port/...) and a connection URL are forwarded;
 * which one wins is decided by the pg driver.
 */
async function createPool(connection) {
    const { Pool } = await import("../../addon/pg/index.js");
    const poolConfig = {
        host: connection.host,
        port: connection.port || 5432,
        database: connection.database,
        user: connection.user,
        password: connection.password,
        connectionString: connection.url,
        ssl: connection.ssl,
    };
    return new Pool(poolConfig);
}
/**
 * Creates the `_relq_types` table (and backfills its `usages` column) if
 * missing. Both statements are idempotent; the pool is always closed.
 */
export async function ensureTypesTable(connection) {
    const pool = await createPool(connection);
    try {
        // Run sequentially: the ALTER assumes the table exists.
        for (const statement of [CREATE_TABLE_SQL, ADD_USAGES_COLUMN_SQL]) {
            await pool.query(statement);
        }
    }
    finally {
        await pool.end();
    }
}
/**
 * Returns true when the `_relq_types` bookkeeping table already exists.
 * NOTE(review): the information_schema lookup is not filtered by
 * table_schema, so a `_relq_types` table in ANY schema counts — confirm
 * this is intended for multi-schema databases.
 */
export async function typesTableExists(connection) {
    const pool = await createPool(connection);
    try {
        const result = await pool.query(`
      SELECT EXISTS (
        SELECT FROM information_schema.tables
        WHERE table_name = '_relq_types'
      ) as exists
    `);
        // Defensive: a missing row/column falls back to false.
        return result.rows[0]?.exists ?? false;
    }
    finally {
        await pool.end();
    }
}
/**
 * Parses a schema.types.ts source string into individual type definitions.
 *
 * Recognizes top-level `interface Name { ... }` (with optional `export`,
 * generic parameters, and `extends` clauses) and `type Name = ...;` aliases.
 * Nested/indented declarations are not scanned: after a match the cursor
 * jumps past the extracted block.
 *
 * @param {string} content - full text of the types file
 * @returns {Array<{name: string, kind: 'interface'|'type', source: string,
 *                  startLine: number, endLine: number}>}
 */
export function parseTypesFile(content) {
    const definitions = [];
    const lines = content.split('\n');
    let i = 0;
    while (i < lines.length) {
        const line = lines[i];
        // One pattern covers `interface Foo {`, `interface Foo<T> extends Bar {`
        // AND one-line bodies like `interface Foo { a: string }` — the previous
        // end-of-line anchored pattern missed the one-line form entirely.
        const interfaceMatch = line.match(/^(?:export\s+)?interface\s+(\w+)/);
        if (interfaceMatch) {
            const name = interfaceMatch[1];
            const startLine = i;
            const { endLine, source } = extractBlock(lines, i);
            definitions.push({ name, kind: 'interface', source, startLine, endLine });
            i = endLine + 1;
            continue;
        }
        const typeMatch = line.match(/^(?:export\s+)?type\s+(\w+)(?:<[^>]+>)?\s*=\s*/);
        if (typeMatch) {
            const name = typeMatch[1];
            const startLine = i;
            const { endLine, source } = extractTypeAlias(lines, i);
            definitions.push({ name, kind: 'type', source, startLine, endLine });
            i = endLine + 1;
            continue;
        }
        i++;
    }
    return definitions;
}
/**
 * Extracts a brace-delimited block beginning at lines[startIdx].
 * Counts `{`/`}` per character; the block ends on the line where the depth
 * returns to zero after the first opening brace. If no balanced block is
 * found, only the start line is returned.
 *
 * @returns {{endLine: number, source: string}}
 */
function extractBlock(lines, startIdx) {
    let braceDepth = 0;
    let sawOpen = false;
    let lastIdx = startIdx;
    for (let idx = startIdx; idx < lines.length; idx++) {
        for (const ch of lines[idx]) {
            if (ch === '{') {
                braceDepth += 1;
                sawOpen = true;
            }
            else if (ch === '}') {
                braceDepth -= 1;
            }
        }
        if (sawOpen && braceDepth === 0) {
            lastIdx = idx;
            break;
        }
    }
    return {
        endLine: lastIdx,
        source: lines.slice(startIdx, lastIdx + 1).join('\n'),
    };
}
/**
 * Extracts a `type Name = ...` alias starting at lines[startIdx].
 *
 * Tracks {} / () / <> nesting and ends at the first `;` seen at depth 0,
 * or just before a blank/`export` line once nesting has closed. Returns
 * { endLine, source } where source is the joined alias text.
 *
 * Fix: the `>` of an arrow `=>` no longer decrements the depth counter.
 * Previously multi-line function types (`type F = (\n ...\n) => void;`)
 * drove the depth negative and the terminating `;` was never accepted.
 */
function extractTypeAlias(lines, startIdx) {
    const firstLine = lines[startIdx];
    // Fast path: one-line alias with no object body.
    if (firstLine.includes(';') && !firstLine.includes('{')) {
        return { endLine: startIdx, source: firstLine };
    }
    let depth = 0;
    let endIdx = startIdx;
    for (let i = startIdx; i < lines.length; i++) {
        const line = lines[i];
        for (let j = 0; j < line.length; j++) {
            const char = line[j];
            if (char === '{' || char === '(' || char === '<') {
                depth++;
            }
            else if (char === '}' || char === ')') {
                depth--;
            }
            else if (char === '>' && line[j - 1] !== '=') {
                // `>` closes a generic, but the `>` of `=>` does not.
                depth--;
            }
        }
        if (line.includes(';') && depth === 0) {
            endIdx = i;
            break;
        }
        // Bail out before an unrelated blank line or next export once balanced.
        if (i > startIdx && depth === 0) {
            const trimmed = line.trim();
            if (trimmed === '' || trimmed.startsWith('export ')) {
                endIdx = i - 1;
                break;
            }
        }
    }
    const source = lines.slice(startIdx, endIdx + 1).join('\n');
    return { endLine: endIdx, source };
}
/**
 * Scans schema source text for custom TypeScript types used as generic
 * arguments to column builders, e.g. `jsonb<UserMetadata>()`. Only names
 * starting with an uppercase letter are considered. Returns unique names.
 *
 * @param {string} schemaContent
 * @returns {string[]}
 */
export function extractUsedTypes(schemaContent) {
    const usedTypes = new Set();
    // \b prevents false hits on identifiers that merely END with a builder
    // name (e.g. `context<Foo>` previously matched the `text<` substring).
    const genericPattern = /\b(?:jsonb|json|varchar|text|char)\s*<\s*([A-Z][a-zA-Z0-9_]*)/g;
    let match;
    while ((match = genericPattern.exec(schemaContent)) !== null) {
        usedTypes.add(match[1]);
    }
    return Array.from(usedTypes);
}
/**
 * Maps each custom type used in a schema file to the "table.column" places
 * that use it. A usage is a column whose builder takes a generic argument,
 * e.g. `metadata: jsonb<UserMetadata>()` inside `defineTable('users', {...})`.
 * The table is referred to by its variable name when assigned, otherwise by
 * its string name.
 *
 * @param {string} schemaContent
 * @returns {Record<string, string[]>} typeName -> unique usage strings
 */
export function extractTypeUsages(schemaContent) {
    const usages = {};
    const tablePattern = /(?:(?:export\s+)?(?:const|let|var)\s+(\w+)\s*=\s*)?defineTable\s*\(\s*['"]([^'"]+)['"]\s*,\s*\{/g;
    for (let tableMatch = tablePattern.exec(schemaContent); tableMatch !== null; tableMatch = tablePattern.exec(schemaContent)) {
        const tableRef = tableMatch[1] || tableMatch[2];
        const bodyStart = tableMatch.index + tableMatch[0].length;
        // Walk forward to the matching close brace of the columns object.
        let level = 1;
        let bodyEnd = bodyStart;
        for (let pos = bodyStart; pos < schemaContent.length && level > 0; pos++) {
            const ch = schemaContent[pos];
            if (ch === '{')
                level++;
            else if (ch === '}')
                level--;
            bodyEnd = pos;
        }
        const columnsBlock = schemaContent.substring(bodyStart, bodyEnd);
        const columnPattern = /(\w+)\s*:\s*(?:jsonb|json|varchar|text|char)\s*<\s*([A-Z][a-zA-Z0-9_]*)/g;
        for (let colMatch = columnPattern.exec(columnsBlock); colMatch !== null; colMatch = columnPattern.exec(columnsBlock)) {
            const typeName = colMatch[2];
            const usage = `${tableRef}.${colMatch[1]}`;
            const list = usages[typeName] || (usages[typeName] = []);
            if (!list.includes(usage)) {
                list.push(usage);
            }
        }
    }
    return usages;
}
/**
 * Loads all stored type definitions from `_relq_types`, ordered by name.
 * Returns [] when the table does not exist yet (fresh database).
 *
 * @returns {Promise<Array<{name, kind, source, usages, createdAt, updatedAt}>>}
 */
export async function getTypesFromDb(connection) {
    const exists = await typesTableExists(connection);
    if (!exists)
        return [];
    const pool = await createPool(connection);
    try {
        const result = await pool.query(`
      SELECT name, kind, source, usages, created_at, updated_at
      FROM _relq_types
      ORDER BY name
    `);
        return result.rows.map(row => ({
            name: row.name,
            kind: row.kind,
            source: row.source,
            usages: row.usages || [], // rows may predate the usages column
            createdAt: row.created_at,
            updatedAt: row.updated_at,
        }));
    }
    finally {
        await pool.end();
    }
}
/**
 * Upserts the given type definitions into `_relq_types`, creating the table
 * first if needed.
 *
 * @param types  parsed definitions [{ name, kind, source }]
 * @param usages optional map of typeName -> ["table.column", ...]; missing
 *               entries are stored as an empty array
 * Rows are written one at a time (sequentially) so the ON CONFLICT upsert
 * stays simple; updated_at is refreshed on every write.
 */
export async function storeTypesInDb(connection, types, usages) {
    await ensureTypesTable(connection);
    const pool = await createPool(connection);
    try {
        for (const type of types) {
            const typeUsages = usages?.[type.name] || [];
            await pool.query(`
        INSERT INTO _relq_types (name, kind, source, usages, updated_at)
        VALUES ($1, $2, $3, $4, NOW())
        ON CONFLICT (name) DO UPDATE SET
          kind = EXCLUDED.kind,
          source = EXCLUDED.source,
          usages = EXCLUDED.usages,
          updated_at = NOW()
      `, [type.name, type.kind, type.source, JSON.stringify(typeUsages)]);
        }
    }
    finally {
        await pool.end();
    }
}
/**
 * Deletes rows from `_relq_types` whose name is not in currentTypeNames and
 * returns the deleted names. No-op ([]) when the table doesn't exist.
 *
 * NOTE(review): when currentTypeNames is empty, `name != ALL('{}')` is true
 * for every row, so ALL stored types are deleted — confirm this "local file
 * defines nothing, wipe remote" semantics is intended.
 */
export async function removeStaleTypes(connection, currentTypeNames) {
    const exists = await typesTableExists(connection);
    if (!exists)
        return [];
    const pool = await createPool(connection);
    try {
        const result = await pool.query(`
      DELETE FROM _relq_types
      WHERE name != ALL($1::text[])
      RETURNING name
    `, [currentTypeNames]);
        return result.rows.map(r => r.name);
    }
    finally {
        await pool.end();
    }
}
/**
 * Renders the schema.types.ts file content from stored type definitions.
 *
 * With no types, emits a commented starter template explaining usage.
 * Otherwise groups interfaces and type aliases under banner headings,
 * preserving each definition's original source text verbatim.
 *
 * @param {Array<{name, kind: 'interface'|'type', source}>} types
 * @returns {string} complete file content
 */
export function generateTypesFile(types) {
    if (types.length === 0) {
        // Starter template: header comment plus a commented example definition.
        return `/**
 * Schema Types
 *
 * TypeScript type definitions for JSON/JSONB columns.
 * These types are tracked in the database and synced across workspaces.
 *
 * Usage in schema.ts:
 *   import type { UserMetadata } from "./schema.types.js";
 *   const users = defineTable('users', {
 *     metadata: jsonb<UserMetadata>(),
 *   });
 */

// Add your type definitions here:
// export interface UserMetadata {
//   name: string;
//   age: number;
// }
`;
    }
    const interfaceTypes = types.filter(t => t.kind === 'interface');
    const typeAliases = types.filter(t => t.kind === 'type');
    let content = `/**
 * Schema Types
 *
 * TypeScript type definitions for JSON/JSONB columns.
 * These types are tracked in the database and synced across workspaces.
 *
 * Generated by: relq pull
 * Types: ${types.length}
 */

`;
    if (interfaceTypes.length > 0) {
        content += '// =============================================================================\n';
        content += '// INTERFACES\n';
        content += '// =============================================================================\n\n';
        for (const type of interfaceTypes) {
            content += type.source + '\n\n';
        }
    }
    if (typeAliases.length > 0) {
        content += '// =============================================================================\n';
        content += '// TYPE ALIASES\n';
        content += '// =============================================================================\n\n';
        for (const type of typeAliases) {
            content += type.source + '\n\n';
        }
    }
    return content;
}
/**
 * Checks that every type referenced by the schema is actually defined.
 *
 * @param {string[]} usedTypes    type names referenced in the schema
 * @param {string[]} definedTypes type names defined in the types file
 * @returns {{valid: boolean, missingTypes: string[]}} missingTypes preserves
 *          the order of usedTypes
 */
export function validateTypesUsage(usedTypes, definedTypes) {
    const known = new Set(definedTypes);
    const missingTypes = [];
    for (const name of usedTypes) {
        if (!known.has(name)) {
            missingTypes.push(name);
        }
    }
    return { valid: missingTypes.length === 0, missingTypes };
}
/** True when filePath names a relq types module, i.e. its basename ends with `.types.ts`. */
export function isValidTypesFilePath(filePath) {
    return path.basename(filePath).endsWith('.types.ts');
}
/** Derives the sibling `<name>.types.ts` path for a given `<name>.ts` schema file. */
export function getTypesFilePath(schemaPath) {
    const stem = path.basename(schemaPath, '.ts');
    return path.join(path.dirname(schemaPath), `${stem}.types.ts`);
}
/**
 * Pushes local type definitions (schema.types.ts) into `_relq_types`.
 *
 * Parses the local types file, diffs it against the stored rows, upserts
 * everything, and deletes stale rows. Returns { added, updated, removed }
 * name lists describing the delta; all lists are empty when the local types
 * file does not exist.
 */
export async function syncTypesToDb(connection, typesFilePath, schemaFilePath) {
    const result = { added: [], updated: [], removed: [] };
    if (!fs.existsSync(typesFilePath)) {
        return result;
    }
    const localTypes = parseTypesFile(fs.readFileSync(typesFilePath, 'utf-8'));
    // Usage info ("table.column" refs) is only available when the schema file exists.
    let usages;
    if (schemaFilePath && fs.existsSync(schemaFilePath)) {
        usages = extractTypeUsages(fs.readFileSync(schemaFilePath, 'utf-8'));
    }
    const dbTypeMap = new Map((await getTypesFromDb(connection)).map(t => [t.name, t]));
    for (const localType of localTypes) {
        const existing = dbTypeMap.get(localType.name);
        if (!existing) {
            result.added.push(localType.name);
        }
        else if (existing.source !== localType.source) {
            result.updated.push(localType.name);
        }
    }
    await storeTypesInDb(connection, localTypes, usages);
    result.removed = await removeStaleTypes(connection, localTypes.map(t => t.name));
    return result;
}
/**
 * Regenerates the local types file from the `_relq_types` table.
 *
 * Returns { typesCount, generated }. When the database holds no types, an
 * empty starter template is written only if no local file exists yet
 * (an existing local file is left untouched, generated: false).
 */
export async function syncTypesFromDb(connection, typesFilePath) {
    const types = await getTypesFromDb(connection);
    if (types.length > 0) {
        fs.writeFileSync(typesFilePath, generateTypesFile(types));
        return { typesCount: types.length, generated: true };
    }
    if (fs.existsSync(typesFilePath)) {
        return { typesCount: 0, generated: false };
    }
    fs.writeFileSync(typesFilePath, generateTypesFile([]));
    return { typesCount: 0, generated: true };
}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "relq",
3
- "version": "1.0.31",
3
+ "version": "1.0.33",
4
4
  "description": "The Fully-Typed PostgreSQL ORM for TypeScript",
5
5
  "author": "Olajide Mathew O. <olajide.mathew@yuniq.solutions>",
6
6
  "license": "MIT",