supatool 0.4.3 → 0.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +86 -22
- package/dist/bin/helptext.js +40 -31
- package/dist/bin/supatool.js +74 -149
- package/dist/sync/config.js +72 -13
- package/dist/sync/definitionExtractor.js +14 -1
- package/dist/sync/fetchRemoteSchemas.js +4 -2
- package/dist/sync/generateMigration.js +129 -28
- package/dist/sync/migrateRemote.js +114 -0
- package/dist/sync/seedGenerator.js +15 -4
- package/dist/sync/sync.js +272 -2
- package/package.json +7 -6
- package/dist/integrations/supabase/crud-autogen/tasks.js +0 -220
- package/dist/integrations/supabase/crud-autogen/workflows.js +0 -220
package/dist/sync/sync.js
CHANGED
|
@@ -1,14 +1,63 @@
|
|
|
1
1
|
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
+
var ownKeys = function(o) {
|
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
+
var ar = [];
|
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
+
return ar;
|
|
24
|
+
};
|
|
25
|
+
return ownKeys(o);
|
|
26
|
+
};
|
|
27
|
+
return function (mod) {
|
|
28
|
+
if (mod && mod.__esModule) return mod;
|
|
29
|
+
var result = {};
|
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
+
__setModuleDefault(result, mod);
|
|
32
|
+
return result;
|
|
33
|
+
};
|
|
34
|
+
})();
|
|
2
35
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
36
|
exports.syncAllTables = syncAllTables;
|
|
37
|
+
const path = __importStar(require("path"));
|
|
4
38
|
const parseLocalSchemas_1 = require("./parseLocalSchemas");
|
|
5
39
|
const fetchRemoteSchemas_1 = require("./fetchRemoteSchemas");
|
|
6
40
|
const writeSchema_1 = require("./writeSchema");
|
|
41
|
+
const fs = __importStar(require("fs"));
|
|
7
42
|
const generateMigration_1 = require("./generateMigration");
|
|
8
43
|
const diff_1 = require("diff");
|
|
9
44
|
const utils_1 = require("./utils");
|
|
10
45
|
// Global approval state (shared with writeSchema.ts)
|
|
11
46
|
let globalApproveAll = false;
|
|
47
|
+
/**
 * Extract column names from a CREATE TABLE DDL string.
 *
 * The column list is split on top-level commas only (parenthesis-depth
 * aware), so commas nested inside type modifiers such as numeric(10,2) or
 * inside CHECK (...) expressions do not produce spurious column fragments.
 * Table-level constraint clauses (PRIMARY/CONSTRAINT/UNIQUE/FOREIGN/CHECK)
 * are skipped. Both bare and double-quoted identifiers are recognized.
 *
 * @param {string} ddl - A CREATE TABLE statement.
 * @returns {string[]} Column identifiers in declaration order; [] when the
 *   input does not look like a CREATE TABLE statement.
 */
function extractColumnNames(ddl) {
    const match = ddl.match(/CREATE TABLE[^(]*\(([\s\S]*)\)/i);
    if (!match)
        return [];
    // Split the table body on commas that sit at parenthesis depth 0 only.
    const parts = [];
    let depth = 0;
    let current = '';
    for (const ch of match[1]) {
        if (ch === '(') {
            depth++;
        }
        else if (ch === ')') {
            depth--;
        }
        if (ch === ',' && depth === 0) {
            parts.push(current);
            current = '';
        }
        else {
            current += ch;
        }
    }
    parts.push(current);
    return parts
        .map(line => line.trim())
        .filter(line => !line.match(/^(PRIMARY|CONSTRAINT|UNIQUE|FOREIGN|CHECK)/i))
        .map(line => {
            // Leading token is the column name: "quoted name" or bare identifier.
            const m = line.match(/^"([^"]+)"|^([a-zA-Z_]\w*)/);
            return m ? (m[1] ?? m[2]) : '';
        })
        .filter(Boolean);
}
|
|
12
61
|
/**
|
|
13
62
|
* Normalize DDL string (unify spaces, newlines, tabs)
|
|
14
63
|
*/
|
|
@@ -37,7 +86,7 @@ function formatSQL(sql) {
|
|
|
37
86
|
/**
|
|
38
87
|
* Synchronize all table schemas
|
|
39
88
|
*/
|
|
40
|
-
async function syncAllTables({ connectionString, schemaDir, tablePattern = '*', force = false, dryRun = false, generateOnly = false, requireConfirmation = false }) {
|
|
89
|
+
async function syncAllTables({ connectionString, schemaDir, tablePattern = '*', force = false, dryRun = false, generateOnly = false, requireConfirmation = false, migrationConfig, rlsMode = 'skip' }) {
|
|
41
90
|
// Reset approval state
|
|
42
91
|
(0, writeSchema_1.resetApprovalState)();
|
|
43
92
|
const localSchemas = await (0, parseLocalSchemas_1.parseLocalSchemas)(schemaDir);
|
|
@@ -53,6 +102,34 @@ async function syncAllTables({ connectionString, schemaDir, tablePattern = '*',
|
|
|
53
102
|
}
|
|
54
103
|
// Fetch only the remote schemas for target tables
|
|
55
104
|
const remoteSchemas = await (0, fetchRemoteSchemas_1.fetchRemoteSchemas)(connectionString, targetLocalTables);
|
|
105
|
+
// --- Table rename detection ---
|
|
106
|
+
// Tables that exist locally but not remotely (potential new name)
|
|
107
|
+
const localOnly = targetLocalTables.filter(t => !remoteSchemas[t]);
|
|
108
|
+
// Tables that exist remotely but not locally (potential old name)
|
|
109
|
+
const remoteOnly = Object.keys(remoteSchemas).filter(t => !localSchemas[t]);
|
|
110
|
+
for (const newName of localOnly) {
|
|
111
|
+
const localDdl = localSchemas[newName]?.ddl ?? '';
|
|
112
|
+
const localCols = extractColumnNames(localDdl);
|
|
113
|
+
if (localCols.length === 0)
|
|
114
|
+
continue;
|
|
115
|
+
for (const oldName of remoteOnly) {
|
|
116
|
+
const remoteDdl = remoteSchemas[oldName]?.ddl ?? '';
|
|
117
|
+
const remoteCols = extractColumnNames(remoteDdl);
|
|
118
|
+
if (remoteCols.length === 0)
|
|
119
|
+
continue;
|
|
120
|
+
const shared = localCols.filter(c => remoteCols.includes(c)).length;
|
|
121
|
+
const similarity = shared / Math.max(localCols.length, remoteCols.length);
|
|
122
|
+
if (similarity >= 0.7) {
|
|
123
|
+
console.warn(`⚠️ Possible table rename detected: "${oldName}" → "${newName}" ` +
|
|
124
|
+
`(${Math.round(similarity * 100)}% column match). ` +
|
|
125
|
+
`Generating rename migration — review before applying.`);
|
|
126
|
+
// Infer schema from CREATE TABLE <schema>.<table> in DDL (default public)
|
|
127
|
+
const schemaMatch = localDdl.match(/CREATE TABLE\s+(\w+)\.\w+/i);
|
|
128
|
+
const schema = schemaMatch ? schemaMatch[1] : 'public';
|
|
129
|
+
await (0, generateMigration_1.generateRenameTableMigrationFile)(schema, oldName, newName, process.cwd(), migrationConfig);
|
|
130
|
+
}
|
|
131
|
+
}
|
|
132
|
+
}
|
|
56
133
|
for (const tableName of targetLocalTables) {
|
|
57
134
|
const local = localSchemas[tableName];
|
|
58
135
|
const remote = remoteSchemas[tableName];
|
|
@@ -129,7 +206,7 @@ async function syncAllTables({ connectionString, schemaDir, tablePattern = '*',
|
|
|
129
206
|
// Generate migration file (local → remote diff)
|
|
130
207
|
const migrationPath = await (0, generateMigration_1.generateMigrationFile)(tableName, normalizedRemote, // from (current remote state)
|
|
131
208
|
normalizedLocal, // to (local target state)
|
|
132
|
-
process.cwd());
|
|
209
|
+
process.cwd(), migrationConfig);
|
|
133
210
|
if (migrationPath) {
|
|
134
211
|
console.log(`[${tableName}] 📝 UPDATE migration generated: ${migrationPath}`);
|
|
135
212
|
}
|
|
@@ -140,4 +217,197 @@ async function syncAllTables({ connectionString, schemaDir, tablePattern = '*',
|
|
|
140
217
|
}
|
|
141
218
|
}
|
|
142
219
|
}
|
|
220
|
+
// --- Function diff: scan local rpc/*.sql vs remote ---
|
|
221
|
+
await syncFunctions(connectionString, schemaDir, migrationConfig);
|
|
222
|
+
// --- RLS diff ---
|
|
223
|
+
if (rlsMode === 'rewrite') {
|
|
224
|
+
await syncRls(connectionString, schemaDir, migrationConfig);
|
|
225
|
+
}
|
|
226
|
+
}
|
|
227
|
+
/**
 * Compare local <schemaDir>/<schema>/rpc/*.sql function definitions against
 * the remote database (via pg_get_functiondef) and generate migration files
 * for any function that is new or differs from the remote definition.
 *
 * @param {string} connectionString - Postgres connection string.
 * @param {string} schemaDir - Root directory holding per-schema folders.
 * @param {object} [migrationConfig] - Options forwarded to the migration generator.
 */
async function syncFunctions(connectionString, schemaDir, migrationConfig) {
    const { Client } = await Promise.resolve().then(() => __importStar(require('pg')));
    if (!fs.existsSync(schemaDir))
        return;
    // Gather every local rpc definition as { schema, funcName, localDdl }.
    const localFunctions = [];
    for (const dirEntry of fs.readdirSync(schemaDir, { withFileTypes: true })) {
        if (!dirEntry.isDirectory())
            continue;
        const rpcDir = path.join(schemaDir, dirEntry.name, 'rpc');
        if (!fs.existsSync(rpcDir))
            continue;
        const sqlFiles = fs.readdirSync(rpcDir).filter(name => name.endsWith('.sql'));
        for (const sqlFile of sqlFiles) {
            localFunctions.push({
                schema: dirEntry.name,
                funcName: sqlFile.replace(/\.sql$/, ''),
                localDdl: fs.readFileSync(path.join(rpcDir, sqlFile), 'utf-8'),
            });
        }
    }
    if (localFunctions.length === 0)
        return;
    const client = new Client({ connectionString });
    await client.connect();
    let changed = 0;
    try {
        for (const { schema, funcName, localDdl } of localFunctions) {
            const result = await client.query(`SELECT pg_get_functiondef(p.oid) as definition
       FROM pg_proc p
       JOIN pg_namespace n ON p.pronamespace = n.oid
       WHERE n.nspname = $1 AND p.proname = $2
       LIMIT 1`, [schema, funcName]);
            const row = result.rows[0];
            if (!row) {
                // Function not in remote yet — generate migration to create it
                const migrationPath = await (0, generateMigration_1.generateFunctionMigrationFile)(schema, funcName, localDdl, '', process.cwd(), migrationConfig);
                if (migrationPath) {
                    console.log(`[${schema}.${funcName}] 📝 NEW function migration generated: ${migrationPath}`);
                    changed++;
                }
                continue;
            }
            // Remote definition exists — diff against local and emit an update if needed.
            const migrationPath = await (0, generateMigration_1.generateFunctionMigrationFile)(schema, funcName, localDdl, row.definition, process.cwd(), migrationConfig);
            if (migrationPath) {
                console.log(`[${schema}.${funcName}] 📝 UPDATE function migration generated: ${migrationPath}`);
                changed++;
            }
        }
    }
    finally {
        await client.end();
    }
    if (changed === 0) {
        console.log('Functions: no differences found');
    }
    else {
        console.log(`Functions: ${changed} migration(s) generated`);
    }
}
|
|
290
|
+
/**
 * Fetch remote RLS policies, compare with local <schemaDir>/<schema>/rls/*.sql,
 * and generate DROP + CREATE migrations for changed/new/deleted policies.
 *
 * Local files are treated as the source of truth: policies that exist only on
 * the remote are scheduled for DROP, and policies that are new or differ from
 * the rebuilt remote DDL are scheduled for re-creation.
 *
 * @param {string} connectionString - Postgres connection string.
 * @param {string} schemaDir - Root directory holding per-schema folders.
 * @param {object} [migrationConfig] - Options forwarded to the migration generator.
 */
async function syncRls(connectionString, schemaDir, migrationConfig) {
    const { Client } = await Promise.resolve().then(() => __importStar(require('pg')));
    const client = new Client({ connectionString });
    await client.connect();
    try {
        // Fetch all policies from remote
        const result = await client.query(`
      SELECT schemaname, tablename, policyname, cmd,
        array_to_string(roles, ',') as roles,
        qual, with_check,
        permissive
      FROM pg_policies
      ORDER BY schemaname, tablename, policyname
    `);
        // Build remote policy map: key = "schema.table.policyname"
        const remoteMap = new Map();
        for (const row of result.rows) {
            // public-schema tables are referenced unqualified, matching local files.
            const tableQualified = row.schemaname === 'public'
                ? row.tablename
                : `${row.schemaname}.${row.tablename}`;
            const key = `${row.schemaname}.${row.tablename}.${row.policyname}`;
            remoteMap.set(key, {
                policyName: row.policyname,
                tableName: tableQualified,
                cmd: row.cmd ?? 'ALL',
                roles: row.roles ?? 'public',
                qual: row.qual ?? null,
                withCheck: row.with_check ?? null,
                // pg_policies.permissive is 'PERMISSIVE'/'RESTRICTIVE' text
                permissive: row.permissive !== 'RESTRICTIVE'
            });
        }
        // Build local policy map from rls/*.sql files
        const localMap = new Map(); // key → raw SQL
        if (fs.existsSync(schemaDir)) {
            for (const schemaEntry of fs.readdirSync(schemaDir, { withFileTypes: true })) {
                if (!schemaEntry.isDirectory())
                    continue;
                const rlsDir = path.join(schemaDir, schemaEntry.name, 'rls');
                if (!fs.existsSync(rlsDir))
                    continue;
                for (const file of fs.readdirSync(rlsDir)) {
                    if (!file.endsWith('.sql'))
                        continue;
                    // filename: tablename__policyname.sql
                    const baseName = file.replace(/\.sql$/, '');
                    // NOTE(review): replace('__', '.') only replaces the FIRST '__';
                    // table names containing '__' would mis-key — confirm naming rules.
                    const key = `${schemaEntry.name}.${baseName.replace('__', '.')}`;
                    localMap.set(key, fs.readFileSync(path.join(rlsDir, file), 'utf-8'));
                }
            }
        }
        // Detect changes: remote policies not matching local
        const changed = [];
        const dropped = [];
        // Policies in remote that differ from local (or absent in local → drop)
        for (const [key, remotePolicy] of remoteMap) {
            if (!localMap.has(key)) {
                // Remote has policy but local doesn't — local wins: drop it
                dropped.push({ policyName: remotePolicy.policyName, tableName: remotePolicy.tableName });
            }
            // If local has it, we trust local as SSoT — will re-create from local below
        }
        // Policies in local but not in remote, or that differ → re-create
        for (const [key, localSql] of localMap) {
            const remotePolicy = remoteMap.get(key);
            // Whitespace-insensitive comparison of DDL text.
            const normalizedLocal = localSql.replace(/\s+/g, ' ').trim();
            if (!remotePolicy) {
                // New policy in local
                changed.push(parsePolicySql(localSql, key));
            }
            else {
                // Compare: rebuild remote SQL and compare normalized
                const remoteSql = buildPolicySql(remotePolicy);
                if (normalizedLocal !== remoteSql.replace(/\s+/g, ' ').trim()) {
                    changed.push(parsePolicySql(localSql, key));
                }
            }
        }
        // Generator returns null/undefined when there is nothing to write.
        const migrationPath = await (0, generateMigration_1.generateRlsMigrationFile)(changed, dropped, process.cwd(), migrationConfig);
        if (!migrationPath) {
            console.log('RLS: no differences found');
        }
        else {
            console.log(`RLS: ${changed.length} changed, ${dropped.length} dropped`);
        }
    }
    finally {
        // Always release the connection, even if the query or generator throws.
        await client.end();
    }
}
|
|
383
|
+
/**
 * Reconstruct a CREATE POLICY statement from a policy record.
 * Used to normalize remote pg_policies rows into DDL text for comparison
 * against local rls/*.sql files.
 *
 * @param {object} p - Policy fields: policyName, tableName, permissive,
 *   cmd, roles, qual (nullable), withCheck (nullable).
 * @returns {string} The CREATE POLICY DDL, terminated with ';'.
 */
function buildPolicySql(p) {
    const mode = p.permissive ? 'PERMISSIVE' : 'RESTRICTIVE';
    const clauses = [
        `CREATE POLICY "${p.policyName}" ON ${p.tableName} AS ${mode} FOR ${p.cmd} TO ${p.roles}`,
    ];
    if (p.qual) {
        clauses.push(`USING (${p.qual})`);
    }
    if (p.withCheck) {
        clauses.push(`WITH CHECK (${p.withCheck})`);
    }
    return `${clauses.join(' ')};`;
}
|
|
392
|
+
/**
 * Best-effort parse of a CREATE POLICY DDL string into an RlsPolicy struct.
 *
 * USING / WITH CHECK expressions are extracted with balanced-parenthesis
 * scanning so nested calls such as auth.uid() are captured in full — a plain
 * `[^)]+` regex would stop at the first ')' and truncate the expression.
 *
 * @param {string} sql - CREATE POLICY statement text.
 * @param {string} key - "schema.table.policyname" used as a fallback source
 *   for the policy and table names when the DDL cannot be matched.
 * @returns {object} Policy fields compatible with buildPolicySql().
 */
function parsePolicySql(sql, key) {
    // Return the balanced-paren expression following `keywordRe`, or null.
    const extractParenExpr = (text, keywordRe) => {
        const m = text.match(keywordRe);
        if (!m)
            return null;
        const start = m.index + m[0].length;
        let depth = 1;
        for (let i = start; i < text.length; i++) {
            if (text[i] === '(') {
                depth++;
            }
            else if (text[i] === ')') {
                depth--;
                if (depth === 0)
                    return text.slice(start, i);
            }
        }
        return null; // unbalanced parentheses — treat the clause as absent
    };
    const tableMatch = sql.match(/ON\s+(\S+)/i);
    const policyMatch = sql.match(/CREATE POLICY\s+"?([^"\s]+)"?/i);
    const cmdMatch = sql.match(/FOR\s+(SELECT|INSERT|UPDATE|DELETE|ALL)/i);
    const rolesMatch = sql.match(/TO\s+([^\n]+?)(?:\s+USING|\s+WITH CHECK|;|$)/i);
    const qual = extractParenExpr(sql, /USING\s*\(/i);
    const withCheck = extractParenExpr(sql, /WITH CHECK\s*\(/i);
    const permissive = !/RESTRICTIVE/i.test(sql);
    const parts = key.split('.');
    // Fall back to the map key when the DDL is unparseable.
    const policyName = policyMatch?.[1] ?? parts[parts.length - 1];
    const tableName = tableMatch?.[1] ?? `${parts[0]}.${parts[1]}`;
    return {
        policyName,
        tableName,
        cmd: cmdMatch?.[1] ?? 'ALL',
        roles: rolesMatch?.[1]?.trim() ?? 'public',
        qual,
        withCheck,
        permissive
    };
}
|
package/package.json
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "supatool",
|
|
3
|
-
"version": "0.4.3",
|
|
4
|
-
"description": "CLI for Supabase: extract schema
|
|
3
|
+
"version": "0.6.0",
|
|
4
|
+
"description": "CLI for PostgreSQL (Cloud SQL / Supabase): extract schema to files, deploy schema diffs, apply migrations, seed export.",
|
|
5
5
|
"main": "dist/index.js",
|
|
6
6
|
"types": "dist/index.d.ts",
|
|
7
7
|
"bin": {
|
|
@@ -9,6 +9,7 @@
|
|
|
9
9
|
},
|
|
10
10
|
"scripts": {
|
|
11
11
|
"build": "tsc",
|
|
12
|
+
"test": "npm run build && node test/smoke.js",
|
|
12
13
|
"start": "tsx src/bin/supatool.ts",
|
|
13
14
|
"local": "tsx src/bin/supatool.ts"
|
|
14
15
|
},
|
|
@@ -17,18 +18,18 @@
|
|
|
17
18
|
"bin"
|
|
18
19
|
],
|
|
19
20
|
"keywords": [
|
|
21
|
+
"postgresql",
|
|
22
|
+
"cloud-sql",
|
|
20
23
|
"supabase",
|
|
21
|
-
"crud",
|
|
22
24
|
"cli",
|
|
23
25
|
"typescript",
|
|
24
|
-
"React",
|
|
25
26
|
"postgres",
|
|
26
|
-
"database"
|
|
27
|
+
"database",
|
|
28
|
+
"migration"
|
|
27
29
|
],
|
|
28
30
|
"author": "IdeaGarage",
|
|
29
31
|
"license": "MIT",
|
|
30
32
|
"dependencies": {
|
|
31
|
-
"@supabase/supabase-js": "^2.49.4",
|
|
32
33
|
"commander": "^13.1.0",
|
|
33
34
|
"diff": "^5.2.0",
|
|
34
35
|
"dotenv": "^16.5.0",
|
|
@@ -1,220 +0,0 @@
|
|
|
1
|
-
"use strict";
|
|
2
|
-
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.getTasksByFilters = getTasksByFilters;
|
|
4
|
-
exports.getTasksSingleByFilters = getTasksSingleByFilters;
|
|
5
|
-
exports.getTasksById = getTasksById;
|
|
6
|
-
exports.createTasks = createTasks;
|
|
7
|
-
exports.updateTasks = updateTasks;
|
|
8
|
-
exports.deleteTasks = deleteTasks;
|
|
9
|
-
exports.queryTasks = queryTasks;
|
|
10
|
-
// Supabase CRUD operations for tasks
|
|
11
|
-
// This file is automatically generated. Do not edit it directly.
|
|
12
|
-
const client_1 = require("../client");
|
|
13
|
-
// Function to apply filters to a query
|
|
14
|
-
function applyFilters(query, filters) {
|
|
15
|
-
for (const [key, value] of Object.entries(filters)) {
|
|
16
|
-
if (Array.isArray(value)) {
|
|
17
|
-
query = query.in(key, value); // Use 'in' for array values
|
|
18
|
-
}
|
|
19
|
-
else if (typeof value === 'object' && value !== null) {
|
|
20
|
-
for (const [operator, val] of Object.entries(value)) {
|
|
21
|
-
switch (operator) {
|
|
22
|
-
case 'eq':
|
|
23
|
-
query = query.eq(key, val);
|
|
24
|
-
break;
|
|
25
|
-
case 'neq':
|
|
26
|
-
query = query.neq(key, val);
|
|
27
|
-
break;
|
|
28
|
-
case 'like':
|
|
29
|
-
query = query.like(key, val);
|
|
30
|
-
break;
|
|
31
|
-
case 'ilike':
|
|
32
|
-
query = query.ilike(key, val);
|
|
33
|
-
break;
|
|
34
|
-
case 'lt':
|
|
35
|
-
query = query.lt(key, val);
|
|
36
|
-
break;
|
|
37
|
-
case 'lte':
|
|
38
|
-
query = query.lte(key, val);
|
|
39
|
-
break;
|
|
40
|
-
case 'gte':
|
|
41
|
-
query = query.gte(key, val);
|
|
42
|
-
break;
|
|
43
|
-
case 'gt':
|
|
44
|
-
query = query.gt(key, val);
|
|
45
|
-
break;
|
|
46
|
-
case 'contains':
|
|
47
|
-
query = query.contains(key, val);
|
|
48
|
-
break;
|
|
49
|
-
case 'contains_any':
|
|
50
|
-
query = query.contains_any(key, val);
|
|
51
|
-
break;
|
|
52
|
-
case 'contains_all':
|
|
53
|
-
query = query.contains_all(key, val);
|
|
54
|
-
break;
|
|
55
|
-
// Add more operators as needed
|
|
56
|
-
default:
|
|
57
|
-
throw new Error('Unsupported operator: ' + operator);
|
|
58
|
-
}
|
|
59
|
-
}
|
|
60
|
-
}
|
|
61
|
-
else {
|
|
62
|
-
query = query.eq(key, value); // Default to 'eq' for simple values
|
|
63
|
-
}
|
|
64
|
-
}
|
|
65
|
-
return query;
|
|
66
|
-
}
|
|
67
|
-
// Read multiple rows with dynamic filters
|
|
68
|
-
async function getTasksByFilters({ filters }) {
|
|
69
|
-
try {
|
|
70
|
-
let query = client_1.supabase.from('tasks').select('*');
|
|
71
|
-
query = applyFilters(query, filters);
|
|
72
|
-
const result = await query;
|
|
73
|
-
if (result.error) {
|
|
74
|
-
throw new Error(`Failed to fetch tasks: ${result.error.message}`);
|
|
75
|
-
}
|
|
76
|
-
return result.data || [];
|
|
77
|
-
}
|
|
78
|
-
catch (error) {
|
|
79
|
-
console.error('Error in getTasksByFilters:', error);
|
|
80
|
-
throw error;
|
|
81
|
-
}
|
|
82
|
-
}
|
|
83
|
-
// Read a single row with dynamic filters
|
|
84
|
-
async function getTasksSingleByFilters({ filters }) {
|
|
85
|
-
try {
|
|
86
|
-
let query = client_1.supabase.from('tasks').select('*');
|
|
87
|
-
query = applyFilters(query, filters).single();
|
|
88
|
-
const result = await query;
|
|
89
|
-
if (result.error) {
|
|
90
|
-
if (result.error.code === 'PGRST116') {
|
|
91
|
-
return null;
|
|
92
|
-
}
|
|
93
|
-
throw new Error(`Failed to fetch tasks: ${result.error.message}`);
|
|
94
|
-
}
|
|
95
|
-
return result.data;
|
|
96
|
-
}
|
|
97
|
-
catch (error) {
|
|
98
|
-
console.error('Error in getTasksSingleByFilters:', error);
|
|
99
|
-
throw error;
|
|
100
|
-
}
|
|
101
|
-
}
|
|
102
|
-
// Read single row using id
|
|
103
|
-
async function getTasksById({ id }) {
|
|
104
|
-
if (!id) {
|
|
105
|
-
throw new Error('ID is required');
|
|
106
|
-
}
|
|
107
|
-
try {
|
|
108
|
-
const result = await client_1.supabase
|
|
109
|
-
.from('tasks')
|
|
110
|
-
.select('*')
|
|
111
|
-
.eq('id', id)
|
|
112
|
-
.single();
|
|
113
|
-
if (result.error) {
|
|
114
|
-
if (result.error.code === 'PGRST116') {
|
|
115
|
-
return null;
|
|
116
|
-
}
|
|
117
|
-
throw new Error(`Failed to fetch tasks: ${result.error.message}`);
|
|
118
|
-
}
|
|
119
|
-
return result.data;
|
|
120
|
-
}
|
|
121
|
-
catch (error) {
|
|
122
|
-
console.error('Error in getTasksById:', error);
|
|
123
|
-
throw error;
|
|
124
|
-
}
|
|
125
|
-
}
|
|
126
|
-
// Create Function
|
|
127
|
-
async function createTasks({ data }) {
|
|
128
|
-
if (!data) {
|
|
129
|
-
throw new Error('Data is required for creation');
|
|
130
|
-
}
|
|
131
|
-
try {
|
|
132
|
-
const result = await client_1.supabase
|
|
133
|
-
.from('tasks')
|
|
134
|
-
.insert([data])
|
|
135
|
-
.select()
|
|
136
|
-
.single();
|
|
137
|
-
if (result.error) {
|
|
138
|
-
throw new Error(`Failed to create tasks: ${result.error.message}`);
|
|
139
|
-
}
|
|
140
|
-
if (!result.data) {
|
|
141
|
-
throw new Error('No data returned after creation');
|
|
142
|
-
}
|
|
143
|
-
return result.data;
|
|
144
|
-
}
|
|
145
|
-
catch (error) {
|
|
146
|
-
console.error('Error in createTasks:', error);
|
|
147
|
-
throw error;
|
|
148
|
-
}
|
|
149
|
-
}
|
|
150
|
-
// Update Function
|
|
151
|
-
async function updateTasks({ id, data }) {
|
|
152
|
-
if (!id) {
|
|
153
|
-
throw new Error('ID is required for update');
|
|
154
|
-
}
|
|
155
|
-
if (!data || Object.keys(data).length === 0) {
|
|
156
|
-
throw new Error('Update data is required');
|
|
157
|
-
}
|
|
158
|
-
try {
|
|
159
|
-
const result = await client_1.supabase
|
|
160
|
-
.from('tasks')
|
|
161
|
-
.update(data)
|
|
162
|
-
.eq('id', id)
|
|
163
|
-
.select()
|
|
164
|
-
.single();
|
|
165
|
-
if (result.error) {
|
|
166
|
-
if (result.error.code === 'PGRST116') {
|
|
167
|
-
throw new Error(`tasks with ID ${id} not found`);
|
|
168
|
-
}
|
|
169
|
-
throw new Error(`Failed to update tasks: ${result.error.message}`);
|
|
170
|
-
}
|
|
171
|
-
if (!result.data) {
|
|
172
|
-
throw new Error(`tasks with ID ${id} not found`);
|
|
173
|
-
}
|
|
174
|
-
return result.data;
|
|
175
|
-
}
|
|
176
|
-
catch (error) {
|
|
177
|
-
console.error('Error in updateTasks:', error);
|
|
178
|
-
throw error;
|
|
179
|
-
}
|
|
180
|
-
}
|
|
181
|
-
// Delete Function
|
|
182
|
-
async function deleteTasks({ id }) {
|
|
183
|
-
if (!id) {
|
|
184
|
-
throw new Error('ID is required for deletion');
|
|
185
|
-
}
|
|
186
|
-
try {
|
|
187
|
-
const result = await client_1.supabase
|
|
188
|
-
.from('tasks')
|
|
189
|
-
.delete()
|
|
190
|
-
.eq('id', id);
|
|
191
|
-
if (result.error) {
|
|
192
|
-
throw new Error(`Failed to delete tasks: ${result.error.message}`);
|
|
193
|
-
}
|
|
194
|
-
return true;
|
|
195
|
-
}
|
|
196
|
-
catch (error) {
|
|
197
|
-
console.error('Error in deleteTasks:', error);
|
|
198
|
-
throw error;
|
|
199
|
-
}
|
|
200
|
-
}
|
|
201
|
-
// Custom query function
|
|
202
|
-
async function queryTasks({ query }) {
|
|
203
|
-
if (!query) {
|
|
204
|
-
throw new Error('Query is required');
|
|
205
|
-
}
|
|
206
|
-
try {
|
|
207
|
-
const result = await client_1.supabase
|
|
208
|
-
.from('tasks')
|
|
209
|
-
.select(query);
|
|
210
|
-
if (result.error) {
|
|
211
|
-
throw new Error(`Failed to execute query: ${result.error.message}`);
|
|
212
|
-
}
|
|
213
|
-
return result.data || [];
|
|
214
|
-
}
|
|
215
|
-
catch (error) {
|
|
216
|
-
console.error('Error in queryTasks:', error);
|
|
217
|
-
throw error;
|
|
218
|
-
}
|
|
219
|
-
}
|
|
220
|
-
// All functions are exported individually above
|