@creatorem/cli 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/cli.mjs +40 -0
- package/package.json +17 -0
- package/src/commands/generate-migration.mjs +157 -0
- package/src/commands/generate-schemas.mjs +159 -0
- package/src/commands/help.mjs +20 -0
- package/src/utils/sql-generator.mjs +183 -0
- package/src/utils/validation.mjs +81 -0
package/bin/cli.mjs
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* Creatorem CLI
|
|
5
|
+
*/
|
|
6
|
+
|
|
7
|
+
import { runGenerateSchemas } from '../src/commands/generate-schemas.mjs';
|
|
8
|
+
import { runGenerateMigration } from '../src/commands/generate-migration.mjs';
|
|
9
|
+
import { showHelp } from '../src/commands/help.mjs';
|
|
10
|
+
|
|
11
|
+
/**
 * CLI entry point: reads process.argv, picks the command from the first
 * positional argument and dispatches the remaining args to its handler.
 * Exits with code 1 when no command or an unknown command is given.
 */
function main() {
    const [command, ...rest] = process.argv.slice(2);

    // No command at all: print usage and signal failure.
    if (command === undefined) {
        showHelp();
        process.exit(1);
    }

    if (command === 'generate-schemas') {
        runGenerateSchemas(rest);
    } else if (command === 'generate-migration') {
        runGenerateMigration(rest);
    } else if (command === 'help' || command === '-h' || command === '--help') {
        showHelp();
    } else {
        console.error(`Unknown command: ${command}`);
        showHelp();
        process.exit(1);
    }
}
|
|
39
|
+
|
|
40
|
+
// Entry point: dispatch the CLI command supplied on the command line.
main();
|
package/package.json
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@creatorem/cli",
|
|
3
|
+
"version": "0.0.1",
|
|
4
|
+
"description": "CLI tool for Creatorem SaaS Kit",
|
|
5
|
+
"author": "Creatorem",
|
|
6
|
+
"license": "MIT",
|
|
7
|
+
"type": "module",
|
|
8
|
+
"bin": {
|
|
9
|
+
"creatorem": "./bin/cli.mjs"
|
|
10
|
+
},
|
|
11
|
+
"publishConfig": {
|
|
12
|
+
"access": "public"
|
|
13
|
+
},
|
|
14
|
+
"scripts": {
|
|
15
|
+
"build": "echo 'no build needed'"
|
|
16
|
+
}
|
|
17
|
+
}
|
|
@@ -0,0 +1,157 @@
|
|
|
1
|
+
import { existsSync, mkdirSync, readdirSync, readFileSync, unlinkSync, writeFileSync } from 'fs';
|
|
2
|
+
import { basename, join } from 'path';
|
|
3
|
+
import { showHelp } from './help.mjs';
|
|
4
|
+
|
|
5
|
+
/**
 * Generate a single timestamped SQL migration file by concatenating every
 * schema file found under ./supabase/schemas (sorted by filename so numeric
 * prefixes define dependency order), optionally followed by selected files
 * from ./supabase/app-schemas.
 *
 * Options:
 *   --app-schemas <a,b>    include app-schema files whose names contain the
 *                          given comma-separated schema names
 *   --list-app-schemas     print available app schemas and exit
 *
 * Side effects: creates supabase/migrations if missing, deletes any previous
 * *_generated_from_schemas.sql file, writes the new migration, and exits the
 * process on invalid options or when no schema files are found.
 *
 * @param {string[]} args - CLI arguments after the command name.
 */
export function runGenerateMigration(args) {
    let includeEnvSchemas = '';
    let listEnvSchemas = false;

    for (let i = 0; i < args.length; i++) {
        const arg = args[i];
        switch (arg) {
            case '--app-schemas': {
                const nextArg = args[i + 1];
                // `!nextArg` also covers running off the end of the args array,
                // so no separate length check is needed.
                if (!nextArg || nextArg.startsWith('--')) {
                    console.error('Error: --app-schemas requires a comma-separated list of schema names');
                    console.error('Example: --app-schemas booking,other-schema');
                    process.exit(1);
                }
                includeEnvSchemas = nextArg;
                i++; // Skip the consumed value.
                break;
            }
            case '--list-app-schemas':
                listEnvSchemas = true;
                break;
            default:
                console.error(`Unknown option: ${arg}`);
                showHelp();
                process.exit(1);
        }
    }

    // All paths are resolved relative to the current working directory.
    const baseDir = process.cwd();
    const supabaseDir = join(baseDir, 'supabase');
    const envSchemasDir = join(supabaseDir, 'app-schemas');
    const schemasDir = join(supabaseDir, 'schemas');
    const migrationsDir = join(supabaseDir, 'migrations');

    if (listEnvSchemas) {
        console.log('Available environment schemas:');
        if (existsSync(envSchemasDir)) {
            const files = readdirSync(envSchemasDir).filter((file) => file.endsWith('.sql'));
            files.forEach((file) => {
                console.log(`  ${basename(file, '.sql')}`);
            });
        } else {
            console.log('No app-schemas directory found at ' + envSchemasDir);
        }
        process.exit(0);
    }

    // e.g. 2024-01-02T03:04:05.678Z -> 20240102030405
    const timestamp = new Date().toISOString().replace(/[-:]/g, '').replace(/\..+/, '').replace('T', '');
    const migrationFile = join(migrationsDir, `${timestamp}_generated_from_schemas.sql`);

    console.log('Generating migration file from schema files...');
    if (includeEnvSchemas) {
        console.log(`Environment schemas will be included: ${includeEnvSchemas}`);
    }

    // Create migrations directory if it doesn't exist.
    if (!existsSync(migrationsDir)) {
        mkdirSync(migrationsDir, { recursive: true });
    }

    // Remove any previously generated migration so only one exists at a time.
    // (The directory is guaranteed to exist after the mkdirSync above, so the
    // former existsSync re-check was redundant.)
    const existingMigrations = readdirSync(migrationsDir).filter((file) => file.includes('_generated_from_schemas.sql'));
    existingMigrations.forEach((file) => {
        unlinkSync(join(migrationsDir, file));
    });

    // Automatically discover all SQL files in the schemas directory; sorting
    // by filename lets numeric prefixes define dependency order.
    console.log(`Checking schemas directory: ${schemasDir}`);
    let schemaFiles = [];

    if (existsSync(schemasDir)) {
        // endsWith('.sql') already excludes README.md, so no extra check needed.
        schemaFiles = readdirSync(schemasDir)
            .filter((file) => file.endsWith('.sql') && !file.startsWith('_'))
            .sort()
            .map((file) => join(schemasDir, file));
    } else {
        console.warn(`Warning: schemas directory not found at ${schemasDir}`);
    }

    // Append the requested app-schema files (matched by filename substring).
    if (includeEnvSchemas) {
        const envSchemas = includeEnvSchemas.split(',');

        envSchemas.forEach((schema) => {
            if (!existsSync(envSchemasDir)) {
                console.warn(`Warning: app-schemas directory not found at ${envSchemasDir}`);
                return;
            }

            const envFiles = readdirSync(envSchemasDir)
                .filter((file) => file.includes(schema) && file.endsWith('.sql'))
                .sort()
                .map((file) => join(envSchemasDir, file));

            if (envFiles.length === 0) {
                console.warn(`Warning: No environment schema files found for '${schema}'`);
            } else {
                schemaFiles.push(...envFiles);
            }
        });
    }

    // Nothing to concatenate — bail out rather than writing an empty migration.
    if (schemaFiles.length === 0) {
        console.error('Error: No SQL schema files found');
        process.exit(1);
    }

    console.log(`Found ${schemaFiles.length} schema files:`);
    schemaFiles.forEach((file) => {
        console.log(`  - ${basename(file)}`);
    });
    console.log('');

    // Header comment documenting how the migration was produced.
    let header = `/*
 * ---------------------------------------------------------------------------------
 * Generated Migration from Schema Files
 * ---------------------------------------------------------------------------------
 *
 * This file is auto-generated from individual schema files.
 * Do not edit this file directly - edit the source files in supabase/schemas/
 *
 * Generated from schema files in dependency order:
`;

    // Add the list of files to the header.
    schemaFiles.forEach((file) => {
        header += ` * - ${basename(file)}\n`;
    });

    header += ' */\n\n';

    // Concatenate every schema file in order.
    let content = header;
    schemaFiles.forEach((file) => {
        if (existsSync(file)) {
            console.log(`Adding ${basename(file)}...`);
            content += `${readFileSync(file, 'utf8')}\n\n`;
        } else {
            console.warn(`Warning: ${file} not found, skipping...`);
        }
    });

    // Write the migration file.
    writeFileSync(migrationFile, content);

    console.log(`Migration file generated: ${migrationFile}`);
}
|
|
@@ -0,0 +1,159 @@
|
|
|
1
|
+
import { existsSync, mkdirSync, readFileSync, writeFileSync, copyFileSync, readdirSync, unlinkSync, statSync } from 'fs';
|
|
2
|
+
import { dirname, join, resolve } from 'path';
|
|
3
|
+
import { validateSchema } from '../utils/validation.mjs';
|
|
4
|
+
import {
|
|
5
|
+
generateHeader,
|
|
6
|
+
generateEnums,
|
|
7
|
+
generateTable,
|
|
8
|
+
generateComments,
|
|
9
|
+
generatePermissions,
|
|
10
|
+
generateRLS,
|
|
11
|
+
generateTriggers
|
|
12
|
+
} from '../utils/sql-generator.mjs';
|
|
13
|
+
|
|
14
|
+
/**
 * Generate per-table Supabase SQL schema files from a JSON definition.
 *
 * @param {string[]} args - [input] [output]. `input` may be an inline JSON
 *   string, a path to a setup.json file, or a directory containing one;
 *   defaults to ./cm-supabase. `output` defaults to ./supabase/app-schemas.
 *
 * Side effects: cleans/creates the output directory, copies any *.sql files
 * from a `schemas/` folder next to the input, writes one numbered SQL file
 * per table, and exits the process with code 1 on any error.
 */
export function runGenerateSchemas(args) {
    let data;
    let inputDir;

    // Default to looking for a cm-supabase folder in the current directory.
    let arg0 = 'cm-supabase';
    if (args.length > 0) {
        arg0 = args[0].trim();
    }

    try {
        if (arg0.startsWith('{') || arg0.startsWith('[')) {
            // Inline JSON passed directly on the command line.
            console.log("Parsing input as JSON string...");
            data = JSON.parse(arg0);
            inputDir = process.cwd(); // Default input dir for relative paths if needed
        } else {
            // A file or directory path.
            let inputPath = resolve(process.cwd(), arg0);

            if (!existsSync(inputPath)) {
                console.error(`Error: Path not found at ${inputPath}`);
                process.exit(1);
            }

            const stats = statSync(inputPath);
            if (stats.isDirectory()) {
                // Directories are expected to contain a setup.json.
                inputPath = join(inputPath, 'setup.json');
                if (!existsSync(inputPath)) {
                    // Bug fix: use arg0 (the effective input), not args[0], which
                    // is undefined when the default cm-supabase directory is used.
                    console.error(`Error: setup.json not found in directory ${arg0} (defaults to setup.json if directory provided)`);
                    process.exit(1);
                }
            }

            console.log(`Reading configuration from ${inputPath}...`);
            const fileContent = readFileSync(inputPath, 'utf8');
            data = JSON.parse(fileContent);
            inputDir = dirname(inputPath);
        }

        // Output dir: default to <cwd>/supabase/app-schemas.
        const defaultOutputDir = resolve(process.cwd(), 'supabase/app-schemas');
        const outputDir = args[1] ? resolve(process.cwd(), args[1]) : defaultOutputDir;

        // Throws on malformed input; returns the normalized item list.
        const schemaItems = validateSchema(data);

        if (!existsSync(outputDir)) {
            mkdirSync(outputDir, { recursive: true });
        } else {
            // Clean stale files so removed tables don't leave .sql files behind.
            // (The directory is known to exist in this branch, so the former
            // nested existsSync check was redundant.)
            console.log(`Cleaning output directory: ${outputDir}`);
            const files = readdirSync(outputDir);
            for (const file of files) {
                const filePath = join(outputDir, file);
                if (statSync(filePath).isFile()) {
                    unlinkSync(filePath);
                }
            }
        }

        // Copy optional hand-written schemas shipped next to the input file.
        const extraSchemasDir = join(inputDir, 'schemas');

        if (existsSync(extraSchemasDir)) {
            console.log(`Copying extra schemas from: ${extraSchemasDir}`);
            const extraFiles = readdirSync(extraSchemasDir).filter(f => f.endsWith('.sql'));

            for (const file of extraFiles) {
                const srcPath = join(extraSchemasDir, file);
                const destPath = join(outputDir, file);
                copyFileSync(srcPath, destPath);
                console.log(`  Copied ${file}`);
            }
        }

        // Files are numbered 010, 020, ... so later inserts fit in between.
        let fileCounter = 10;

        // Enum definitions are buffered and emitted at the top of the next
        // table file that follows them in the item list.
        let pendingEnums = [];

        schemaItems.forEach((item) => {
            if (item.enums) {
                pendingEnums.push(...item.enums);
            }

            if (item.table) {
                const table = item.table;
                const prefix = fileCounter.toString().padStart(3, '0');
                const fileName = `${prefix}-${table.name}.sql`;
                const filePath = join(outputDir, fileName);

                console.log(`Generating ${fileName}...`);

                // Determine dependencies listed in the generated file header.
                const dependencies = [];

                // organization_id implies a FK onto the organizations table.
                if (table.commonColumns && table.commonColumns.includes('organization_id')) {
                    dependencies.push('031-kit-org.sql (organizations table)');
                }

                // The user_has_org_permission shortcut expands to membership
                // checks, so the members table becomes a dependency.
                let hasOrgPermissionShortcut = false;
                if (table.rls && table.rls.policies) {
                    for (const policy of table.rls.policies) {
                        if (policy.conditions.some(c => c.includes('user_has_org_permission'))) {
                            hasOrgPermissionShortcut = true;
                            break;
                        }
                    }
                }

                if (hasOrgPermissionShortcut) {
                    dependencies.push('034-organization-members.sql (members table for organization access)');
                }

                let fileContent = generateHeader(table.name, dependencies);

                if (pendingEnums.length > 0) {
                    fileContent += generateEnums(pendingEnums);
                    pendingEnums = []; // Clear after using
                }

                fileContent += generateTable(table);
                fileContent += generateComments(table);
                fileContent += generatePermissions(table);
                fileContent += generateRLS(table);
                fileContent += generateTriggers(table);

                writeFileSync(filePath, fileContent);
                fileCounter += 10;
            }
        });

        console.log("Success! Schemas generated.");

    } catch (e) {
        console.error("Error processing schema:", e);
        process.exit(1);
    }
}
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
/**
 * Print CLI usage — available commands, generate-migration options, and
 * example invocations — to stdout.
 * (NOTE(review): column alignment inside the template literal reconstructed
 * from the package diff; verify spacing against the published output.)
 */
export function showHelp() {
    console.log(`
Usage: creatorem <command> [options]

Commands:
  generate-schemas [input] [output]   Generate Supabase SQL schema files from JSON definition
  generate-migration [options]        Generate a single migration file from schema files
  help                                Show this help message

Options (generate-migration):
  --app-schemas <list>      Comma-separated list of schema names to include from app-schemas
  --list-app-schemas        List available environment schemas

Examples:
  creatorem generate-schemas
  creatorem generate-schemas apps/my-app/setup.json
  creatorem generate-migration
  creatorem generate-migration --app-schemas booking,orders
`);
}
|
|
@@ -0,0 +1,183 @@
|
|
|
1
|
+
/**
 * Build the banner comment placed at the top of each generated schema file.
 *
 * @param {string} name - Table name (lowercase); capitalized for display.
 * @param {string[]} [dependencies] - Human-readable dependency descriptions.
 * @returns {string} SQL block comment ending with a blank line.
 */
export function generateHeader(name, dependencies = []) {
    // Capitalized table name, computed once instead of six times inline.
    const title = name.charAt(0).toUpperCase() + name.slice(1);

    const depString = dependencies.length > 0
        ? ` * Dependencies:\n${dependencies.map(d => ` * - ${d}`).join('\n')}`
        : ` * Dependencies: None`;

    return `/*
 * ---------------------------------------------------------------------------------
 * ${title} Table Schema
 * ---------------------------------------------------------------------------------
 *
 * Purpose: Store ${name} data for organizations
 *
 * Table of contents:
 * - ${title} table
 * - ${title} Comments
 * - ${title} Permissions
 * - ${title} RLS Policies
 * - ${title} Triggers
 *
${depString}
 */

`;
}
|
|
28
|
+
|
|
29
|
+
/**
 * Render idempotent CREATE TYPE statements for a list of enum definitions.
 *
 * @param {Array<{name: string, values: string[]}>} enums
 * @returns {string} SQL text, or '' when there are no enums.
 */
export function generateEnums(enums) {
    // Nothing to emit without enum definitions.
    if (!enums || enums.length === 0) {
        return '';
    }

    const banner = `------------------------------------- ENUMS -------------------------------------\n\n`;

    const blocks = enums.map((enumDef) => {
        const valueList = enumDef.values.map((value) => `'${value}'`).join(', ');
        const comment = `/*\n * ${enumDef.name.toUpperCase()} ENUM\n */\n`;
        // DO-block swallows duplicate_object so reruns are harmless.
        const statement = `DO $$ BEGIN\n  CREATE TYPE ${enumDef.name} AS ENUM (${valueList});\nEXCEPTION\n  WHEN duplicate_object THEN null;\nEND $$;\n\n`;
        return comment + statement;
    });

    return banner + blocks.join('');
}
|
|
42
|
+
|
|
43
|
+
/**
 * Render the CREATE TABLE statement (plus basic indexes) for one table.
 *
 * Column order in the emitted DDL: common id / organization_id, then the
 * table's custom columns, then common updated_at / created_at, then the
 * primary key clause.
 *
 * @param {Table} table - Validated table definition.
 * @returns {string} SQL text for the table section.
 */
export function generateTable(table) {
    let sql = `------------------------------------- ${table.name.toUpperCase()} TABLE -------------------------------------\n\n`;
    sql += `/*\n * ${table.name.toUpperCase()} TABLE:\n * This table contains the ${table.name} data for organizations.\n */\n`;
    sql += `CREATE TABLE IF NOT EXISTS "${table.schema}"."${table.name}" (\n`;

    // Common columns
    if (table.commonColumns && table.commonColumns.includes('id')) {
        sql += `  id uuid unique not null default extensions.uuid_generate_v4(),\n`;
    }
    if (table.commonColumns && table.commonColumns.includes('organization_id')) {
        sql += `  organization_id uuid references public.organization (id) on delete cascade not null,\n`;
    }

    // Custom columns
    Object.entries(table.columns).forEach(([colName, colDef]) => {
        let line = `  ${colName} ${colDef.type}`;
        if (colDef.notNull) line += ` not null`;
        // NOTE(review): every default is single-quoted, which breaks numeric or
        // function defaults (e.g. now()) — confirm whether only string defaults
        // are expected here.
        if (colDef.default !== undefined) line += ` default '${colDef.default}'`; // Assuming string defaults for now, optimize if needed
        if (colDef.check) line += ` check (${colDef.check})`;

        if (colDef.fk_ref) {
            line += ` references ${colDef.fk_ref.table} (${colDef.fk_ref.column})`;
            if (colDef.fk_ref.on_delete) {
                line += ` on delete ${colDef.fk_ref.on_delete}`;
            }
        }

        sql += `${line},\n`;
    });

    // Common timestamps
    if (table.commonColumns && table.commonColumns.includes('updated_at')) {
        sql += `  updated_at timestamptz DEFAULT "now"() NOT NULL,\n`;
    }
    if (table.commonColumns && table.commonColumns.includes('created_at')) {
        sql += `  created_at timestamptz DEFAULT "now"() NOT NULL,\n`;
    }

    // NOTE(review): "primary key (id)" is emitted unconditionally — this
    // assumes every table has an id column (common or custom); confirm.
    sql += `  primary key (id)\n);\n\n`;

    // Indexes (Basic: FKs usually need indexes)
    // For every column that has fk_ref, add an index; *_id and status columns
    // also get one since they are typical filter columns.
    Object.entries(table.columns).forEach(([colName, colDef]) => {
        if (colDef.fk_ref || colName.endsWith('_id') || colName === 'status') {
            sql += `CREATE INDEX IF NOT EXISTS idx_${table.name}_${colName} ON ${table.schema}.${table.name} (${colName});\n`;
        }
    });
    sql += `\n`;

    return sql;
}
|
|
94
|
+
|
|
95
|
+
/**
 * Render COMMENT ON statements for a table and each column that declares a
 * `comment` in its definition.
 *
 * @param {Table} table - Validated table definition.
 * @returns {string} SQL text for the comments section.
 */
export function generateComments(table) {
    // Fully qualified, quoted table identifier reused in every statement.
    const qualified = `"${table.schema}"."${table.name}"`;

    const parts = [
        `------------------------------------- TABLE COMMENTS -------------------------------------\n\n`,
        `COMMENT ON TABLE ${qualified} IS 'Allow to store ${table.name} data for organizations';\n`,
    ];

    for (const [colName, colDef] of Object.entries(table.columns)) {
        if (colDef.comment) {
            parts.push(`COMMENT ON COLUMN ${qualified}."${colName}" IS '${colDef.comment}';\n`);
        }
    }

    parts.push(`\n`);
    return parts.join('');
}
|
|
107
|
+
|
|
108
|
+
/**
 * Render the permissions section: reset grants for the authenticated and
 * service_role roles, re-grant CRUD, and enable row-level security.
 *
 * @param {Table} table - Validated table definition (uses schema and name).
 * @returns {string} SQL text for the permissions section.
 */
export function generatePermissions(table) {
    return `------------------------------------- PERMISSIONS -------------------------------------

-- Reset the RLS access to authenticated and service_role roles
REVOKE all on ${table.schema}.${table.name} from authenticated, service_role;
GRANT select, insert, update, delete on table ${table.schema}.${table.name} to authenticated, service_role;

-- Enable RLS on the ${table.name} table
ALTER TABLE ${table.schema}.${table.name} ENABLE ROW LEVEL SECURITY;

`;
}
|
|
120
|
+
|
|
121
|
+
/**
 * Expand RLS condition shortcuts into full SQL.
 *
 * "user_has_org_permission('organization.manage')" becomes
 * kit.user_is_member_of_org(organization_id) AND
 * kit.has_org_permission(organization_id, 'organization.manage'::org_permission).
 * Any other condition is returned unchanged.
 *
 * @param {string} condition - Raw policy condition.
 * @returns {string} Expanded (or original) condition.
 */
export function expandCondition(condition) {
    // Only the user_has_org_permission('<perm>') shortcut gets rewritten.
    const match = condition.includes('user_has_org_permission')
        ? condition.match(/user_has_org_permission\('([^']+)'\)/)
        : null;

    if (!match) {
        return condition;
    }

    const [, permission] = match;
    return `kit.user_is_member_of_org(organization_id) AND\n  kit.has_org_permission(organization_id, '${permission}'::org_permission)`;
}
|
|
136
|
+
|
|
137
|
+
/**
 * Render CREATE POLICY statements for every RLS policy on the table.
 *
 * Policy naming: "<table>_all" for ALL, "<table>_<command>" otherwise, with
 * "_<idx>" appended for every policy after the first. INSERT policies get
 * only WITH CHECK; ALL gets both USING and WITH CHECK; other commands get
 * USING only. Conditions are expanded via expandCondition and joined with AND.
 *
 * @param {Table} table - Validated table definition.
 * @returns {string} SQL text, or '' when the table declares no policies.
 */
export function generateRLS(table) {
    if (!table.rls || !table.rls.policies) return '';

    let sql = `------------------------------------- RLS POLICIES -------------------------------------\n\n`;

    table.rls.policies.forEach((policy, idx) => {
        // NOTE(review): policyName is computed but never used — `name` below is
        // what gets emitted; candidate for removal.
        const policyName = `${table.name}_${policy.command.toLowerCase()}_${idx}`; // Simple naming, or just use command if unique
        // Actually, user example uses "product_all"
        const name = `${table.name}_${policy.command === 'ALL' ? 'all' : policy.command.toLowerCase()}${idx > 0 ? '_' + idx : ''}`;

        sql += `CREATE POLICY ${name} ON ${table.schema}.${table.name}\n`;
        sql += `FOR ${policy.command} TO ${policy.roles.join(', ')}\n`;

        // Expanding conditions (shortcuts -> full kit.* calls).
        const conditionsExpanded = policy.conditions.map(expandCondition).join(' AND\n  ');

        // Every command except INSERT filters visible rows via USING.
        if (policy.command !== 'INSERT') {
            sql += `USING (\n  ${conditionsExpanded}\n)`;
        }

        // INSERT and ALL also constrain written rows via WITH CHECK; ALL needs
        // a leading newline because USING was already appended above.
        if (policy.command === 'INSERT' || policy.command === 'ALL') {
            if (policy.command === 'ALL') sql += `\nWITH CHECK (\n  ${conditionsExpanded}\n)`;
            else sql += `WITH CHECK (\n  ${conditionsExpanded}\n)`;
        }

        sql += `;\n\n`;
    });

    return sql;
}
|
|
167
|
+
|
|
168
|
+
/**
 * Render the triggers section for a table.
 *
 * Only tables carrying the shared updated_at column need the
 * timestamp-refresh trigger; all other tables get an empty section.
 *
 * @param {Table} table - Validated table definition.
 * @returns {string} SQL text, or '' when no trigger is needed.
 */
export function generateTriggers(table) {
    const wantsUpdatedAt = Boolean(table.commonColumns && table.commonColumns.includes('updated_at'));

    if (!wantsUpdatedAt) {
        return '';
    }

    return `------------------------------------- TRIGGERS -------------------------------------

/*
 * Trigger: Update timestamp on ${table.name} changes
 * Automatically updates updated_at field when ${table.name} is modified
 */
create trigger reset_updated_at_on_${table.name}_on_update
after update on ${table.schema}.${table.name} for each row
execute procedure kit.reset_updated_at ();
`;
}
|
|
@@ -0,0 +1,81 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @typedef {Object} FKRef
|
|
3
|
+
* @property {string} table
|
|
4
|
+
* @property {string} column
|
|
5
|
+
* @property {'cascade'|'restrict'|'set null'|'no action'} [on_delete]
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
/**
|
|
9
|
+
* @typedef {Object} Column
|
|
10
|
+
* @property {string} type
|
|
11
|
+
* @property {boolean} [notNull]
|
|
12
|
+
* @property {string} [default]
|
|
13
|
+
* @property {string} [check]
|
|
14
|
+
* @property {string} [comment]
|
|
15
|
+
* @property {FKRef} [fk_ref]
|
|
16
|
+
*/
|
|
17
|
+
|
|
18
|
+
/**
|
|
19
|
+
* @typedef {Object} Policy
|
|
20
|
+
* @property {string} command - ALL, SELECT, INSERT, UPDATE, DELETE
|
|
21
|
+
* @property {string[]} roles
|
|
22
|
+
* @property {string[]} conditions
|
|
23
|
+
*/
|
|
24
|
+
|
|
25
|
+
/**
|
|
26
|
+
* @typedef {Object} Table
|
|
27
|
+
* @property {string} name
|
|
28
|
+
* @property {string} schema
|
|
29
|
+
* @property {string[]} commonColumns
|
|
30
|
+
* @property {Object.<string, Column>} columns
|
|
31
|
+
* @property {Object} rls
|
|
32
|
+
* @property {Policy[]} rls.policies
|
|
33
|
+
*/
|
|
34
|
+
|
|
35
|
+
/**
|
|
36
|
+
* @typedef {Object} Enum
|
|
37
|
+
* @property {string} name
|
|
38
|
+
* @property {string[]} values
|
|
39
|
+
*/
|
|
40
|
+
|
|
41
|
+
/**
|
|
42
|
+
* @typedef {Object} SchemaItem
|
|
43
|
+
* @property {Table} [table]
|
|
44
|
+
* @property {Enum[]} [enums]
|
|
45
|
+
*/
|
|
46
|
+
|
|
47
|
+
/**
|
|
48
|
+
* Validates the input JSON object against the expected schema.
|
|
49
|
+
* @param {any} p_data
|
|
50
|
+
* @returns {SchemaItem[]}
|
|
51
|
+
*/
|
|
52
|
+
/**
 * Validates the input JSON object against the expected schema and normalizes
 * it to a flat list of schema items.
 *
 * Accepts either a bare array of items or the newer object shape
 * `{ name: string, tables: SchemaItem[] }`.
 *
 * @param {any} p_data - Parsed JSON input.
 * @returns {SchemaItem[]} The validated item list (same array, not a copy).
 * @throws {Error} When the root shape or any item is malformed.
 */
export function validateSchema(p_data) {
    let items = p_data;

    // Handle new structure: { name: string, tables: [...] }
    if (!Array.isArray(p_data) && p_data.tables && Array.isArray(p_data.tables)) {
        items = p_data.tables;
    } else if (!Array.isArray(p_data)) {
        throw new Error("Root element must be an array or an object with a 'tables' array");
    }

    items.forEach((item, index) => {
        if (!item.table && !item.enums) {
            throw new Error(`Item ${index}: Must contain 'table' or 'enums'`);
        }

        if (item.table) {
            const t = item.table;
            if (!t.name) throw new Error(`Item ${index}: Table missing 'name'`);
            if (!t.schema) throw new Error(`Item ${index}: Table missing 'schema'`);
            if (!t.columns) throw new Error(`Item ${index}: Table missing 'columns'`);
        }

        // Bug fix: enums are now validated even when the item also carries a
        // table — the generator consumes both properties from the same item,
        // so the former else-if silently skipped this check.
        if (item.enums) {
            if (!Array.isArray(item.enums)) throw new Error(`Item ${index}: 'enums' must be an array`);
            item.enums.forEach((e, eIdx) => {
                if (!e.name) throw new Error(`Item ${index} Enum ${eIdx}: missing 'name'`);
                if (!Array.isArray(e.values)) throw new Error(`Item ${index} Enum ${eIdx}: missing 'values' array`);
            });
        }
    });

    return items;
}
|