supatypes 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +108 -0
- package/bin/cli.js +70 -0
- package/lib/generate.js +170 -0
- package/lib/init.js +70 -0
- package/lib/remote-generator.js +397 -0
- package/package.json +27 -0
package/README.md
ADDED
|
@@ -0,0 +1,108 @@
|
|
|
1
|
+
# supatypes
|
|
2
|
+
|
|
3
|
+
Generate TypeScript types from a remote Supabase PostgreSQL database via SSH.
|
|
4
|
+
|
|
5
|
+
Connects to your server, dumps the database schema, parses tables/views/functions, and generates a fully typed `database.types.ts` file — all in one command.
|
|
6
|
+
|
|
7
|
+
## Features
|
|
8
|
+
|
|
9
|
+
- Generates types for **tables**, **views**, and **RPC functions**
|
|
10
|
+
- Handles PostgreSQL **array types** (`TEXT[]` → `string[]`)
|
|
11
|
+
- Multi-word types (`character varying`, `double precision`, `timestamp with time zone`)
|
|
12
|
+
- Supports SSH key and password authentication
|
|
13
|
+
- Uploads the generator to the server, runs it there, downloads the result, and cleans up
|
|
14
|
+
- Config file so you only set up once per project
|
|
15
|
+
|
|
16
|
+
## Install
|
|
17
|
+
|
|
18
|
+
```bash
|
|
19
|
+
# Global
|
|
20
|
+
npm install -g supatypes
|
|
21
|
+
|
|
22
|
+
# Per project
|
|
23
|
+
npm install --save-dev supatypes
|
|
24
|
+
```
|
|
25
|
+
|
|
26
|
+
## Quick start
|
|
27
|
+
|
|
28
|
+
```bash
|
|
29
|
+
# 1. Create config file
|
|
30
|
+
npx supatypes init
|
|
31
|
+
|
|
32
|
+
# 2. Generate types
|
|
33
|
+
npx supatypes generate
|
|
34
|
+
```
|
|
35
|
+
|
|
36
|
+
## Configuration
|
|
37
|
+
|
|
38
|
+
Running `init` creates a `.supatypes.json` file:
|
|
39
|
+
|
|
40
|
+
```json
|
|
41
|
+
{
|
|
42
|
+
"server": "root@1.2.3.4",
|
|
43
|
+
"sshKey": "~/.ssh/id_rsa",
|
|
44
|
+
"dbContainer": "supabase-db-abc123",
|
|
45
|
+
"output": "./database.types.ts"
|
|
46
|
+
}
|
|
47
|
+
```
|
|
48
|
+
|
|
49
|
+
### Options
|
|
50
|
+
|
|
51
|
+
| Field | Required | Description |
|
|
52
|
+
|-------|----------|-------------|
|
|
53
|
+
| `server` | Yes | SSH connection string (e.g. `root@1.2.3.4`) |
|
|
54
|
+
| `sshKey` | One of these | Path to SSH private key |
|
|
55
|
+
| `sshPassword` | One of these | SSH password (uses `sshpass`) |
|
|
56
|
+
| `dbContainer` | Yes | Docker container name for the Supabase PostgreSQL instance |
|
|
57
|
+
| `output` | No | Output file path (default: `./database.types.ts`) |
|
|
58
|
+
|
|
59
|
+
If your project has a `.gitignore`, the config file is automatically added to it, since the config may contain server credentials.
|
|
60
|
+
|
|
61
|
+
## CLI
|
|
62
|
+
|
|
63
|
+
```bash
|
|
64
|
+
# Generate with default config
|
|
65
|
+
npx supatypes generate
|
|
66
|
+
|
|
67
|
+
# Custom config file
|
|
68
|
+
npx supatypes generate -c ./config/typegen.json
|
|
69
|
+
|
|
70
|
+
# Override output path
|
|
71
|
+
npx supatypes generate -o ./src/types/database.ts
|
|
72
|
+
|
|
73
|
+
# Preview what would happen
|
|
74
|
+
npx supatypes generate --dry-run
|
|
75
|
+
```
|
|
76
|
+
|
|
77
|
+
## How it works
|
|
78
|
+
|
|
79
|
+
1. Reads your `.supatypes.json` config
|
|
80
|
+
2. Uploads a self-contained generator script to the server via SCP
|
|
81
|
+
3. SSHs in and runs it — the script calls `pg_dump` on the Docker container
|
|
82
|
+
4. Parses the SQL schema into TypeScript types (tables, views, functions)
|
|
83
|
+
5. Downloads the generated file to your chosen output path
|
|
84
|
+
6. Cleans up all remote files
|
|
85
|
+
|
|
86
|
+
The generator runs on the server because it needs direct access to the Docker container running PostgreSQL. No ports need to be exposed beyond SSH.
|
|
87
|
+
|
|
88
|
+
## Type mapping
|
|
89
|
+
|
|
90
|
+
| PostgreSQL | TypeScript |
|
|
91
|
+
|-----------|-----------|
|
|
92
|
+
| `text`, `varchar`, `char`, `uuid` | `string` |
|
|
93
|
+
| `integer`, `bigint`, `serial`, `real`, `numeric` | `number` |
|
|
94
|
+
| `boolean` | `boolean` |
|
|
95
|
+
| `jsonb`, `json` | `Json` |
|
|
96
|
+
| `timestamptz`, `date`, `time` | `string` |
|
|
97
|
+
| `text[]`, `integer[]`, etc. | `string[]`, `number[]`, etc. |
|
|
98
|
+
|
|
99
|
+
## Requirements
|
|
100
|
+
|
|
101
|
+
- Node.js >= 18
|
|
102
|
+
- SSH access to the server running Supabase
|
|
103
|
+
- `sshpass` installed locally (only if using password auth)
|
|
104
|
+
- Docker must be accessible on the server (the SSH user needs docker permissions)
|
|
105
|
+
|
|
106
|
+
## Licence
|
|
107
|
+
|
|
108
|
+
MIT
|
package/bin/cli.js
ADDED
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
#!/usr/bin/env node

import { parseArgs } from "node:util";
import { init } from "../lib/init.js";
import { generate } from "../lib/generate.js";

// Usage text printed for `help`, `-h`, `--help`, or when no command is given.
const HELP = `
supatypes — Generate TypeScript types from a remote Supabase database

Usage:
  supatypes init                Create a config file in the current directory
  supatypes generate [options]  Generate types from the remote database
  supatypes help                Show this help message

Options (generate):
  -c, --config <path>   Path to config file (default: ./.supatypes.json)
  -o, --output <path>   Override output file path from config
  --dry-run             Show what would be done without executing

Config file (.supatypes.json):
  {
    "server": "root@1.2.3.4",
    "sshKey": "~/.ssh/id_rsa",
    "dbContainer": "supabase-db-abc123",
    "output": "./database.types.ts"
  }

You can also use a password instead of an SSH key:
  {
    "server": "root@1.2.3.4",
    "sshPassword": "your-password",
    "dbContainer": "supabase-db-abc123",
    "output": "./database.types.ts"
  }
`;

const [, , command] = process.argv;

// Missing command or any help alias: print usage and exit successfully.
if (!command || ["help", "--help", "-h"].includes(command)) {
  console.log(HELP);
  process.exit(0);
}

switch (command) {
  case "init": {
    await init();
    process.exit(0);
  }
  case "generate": {
    // strict:false keeps unknown flags from throwing; known options still parse.
    const { values } = parseArgs({
      args: process.argv.slice(3),
      options: {
        config: { type: "string", short: "c", default: "./.supatypes.json" },
        output: { type: "string", short: "o" },
        "dry-run": { type: "boolean", default: false },
      },
      strict: false,
    });

    await generate({
      configPath: values.config,
      outputOverride: values.output,
      dryRun: values["dry-run"],
    });
    process.exit(0);
  }
  default: {
    console.error(`Unknown command: ${command}`);
    console.log('Run "supatypes help" for usage.');
    process.exit(1);
  }
}
|
package/lib/generate.js
ADDED
|
@@ -0,0 +1,170 @@
|
|
|
1
|
+
import fs from "node:fs";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import { execSync } from "node:child_process";
|
|
4
|
+
import { fileURLToPath } from "node:url";
|
|
5
|
+
|
|
6
|
+
// ESM has no built-in __dirname; derive it from import.meta.url.
const __dirname = path.dirname(fileURLToPath(import.meta.url));
// Local path of the self-contained script that is uploaded to the server.
const REMOTE_SCRIPT = path.join(__dirname, "remote-generator.js");
// Fixed scratch locations on the remote host; removed again after each run.
const REMOTE_DIR = "/tmp/supatypes";
const REMOTE_SCRIPT_PATH = `${REMOTE_DIR}/generator.js`;
const REMOTE_OUTPUT_PATH = `${REMOTE_DIR}/output.ts`;
|
|
11
|
+
|
|
12
|
+
/**
 * Generate TypeScript types from a remote Supabase database.
 *
 * Workflow: read the JSON config, build ssh/scp command prefixes, upload the
 * self-contained generator script, run it against the Docker container on the
 * server, download the resulting .ts file, and remove the remote scratch
 * directory. Exits the process with code 1 on any validation or transport
 * failure (this is a CLI entry point).
 *
 * @param {object} opts
 * @param {string} opts.configPath - Path to the .supatypes.json config file.
 * @param {string} [opts.outputOverride] - CLI -o value; wins over config.
 * @param {boolean} [opts.dryRun] - Print the plan without executing anything.
 */
export async function generate({ configPath, outputOverride, dryRun }) {
  // Load config
  const resolvedConfig = path.resolve(configPath);
  if (!fs.existsSync(resolvedConfig)) {
    console.error(`Config file not found: ${resolvedConfig}`);
    console.error('Run "supatypes init" to create one.');
    process.exit(1);
  }

  // Surface malformed JSON as a readable message instead of a raw stack trace
  // (previously JSON.parse threw uncaught).
  let config;
  try {
    config = JSON.parse(fs.readFileSync(resolvedConfig, "utf8"));
  } catch (err) {
    console.error(`Invalid JSON in config file ${resolvedConfig}: ${err.message}`);
    process.exit(1);
  }

  const { server, dbContainer } = config;
  const sshKey = config.sshKey ? expandHome(config.sshKey) : null;
  const sshPassword = config.sshPassword || null;
  // Precedence: CLI override > config value > default.
  const output = path.resolve(outputOverride || config.output || "./database.types.ts");

  // Validate
  if (!server) { console.error("Missing 'server' in config"); process.exit(1); }
  if (!dbContainer) { console.error("Missing 'dbContainer' in config"); process.exit(1); }

  if (sshKey && !fs.existsSync(sshKey)) {
    console.error(`SSH key not found: ${sshKey}`);
    process.exit(1);
  }

  const sshBase = buildSshBase(server, sshKey, sshPassword);
  const scpBase = buildScpBase(server, sshKey, sshPassword);

  // scp targets need an explicit user; default to root for a bare host.
  // (This expression was previously duplicated inline at both scp call sites.)
  const remoteHost = server.includes("@") ? server : `root@${server}`;

  if (dryRun) {
    console.log("Dry run — would execute:");
    console.log(`  1. Create ${REMOTE_DIR} on ${server}`);
    console.log(`  2. Upload generator script to ${REMOTE_SCRIPT_PATH}`);
    console.log(`  3. Run: node ${REMOTE_SCRIPT_PATH} ${dbContainer} ${REMOTE_OUTPUT_PATH}`);
    console.log(`  4. Download ${REMOTE_OUTPUT_PATH} to ${output}`);
    console.log(`  5. Clean up ${REMOTE_DIR} on server`);
    return;
  }

  console.log("supatypes\n");
  console.log(`  Server:    ${server}`);
  console.log(`  Container: ${dbContainer}`);
  console.log(`  Output:    ${output}`);
  console.log(`  Auth:      ${sshKey ? "SSH key" : "password"}\n`);

  try {
    // Step 1: Create remote directory
    process.stdout.write("Creating remote workspace...");
    ssh(sshBase, `mkdir -p ${REMOTE_DIR}`);
    console.log(" done");

    // Step 2: Upload generator script
    process.stdout.write("Uploading generator...");
    scp(scpBase, REMOTE_SCRIPT, `${remoteHost}:${REMOTE_SCRIPT_PATH}`);
    console.log(" done");

    // Step 3: Run generator on server
    process.stdout.write("Generating types...");
    const remoteOutput = ssh(
      sshBase,
      `cd ${REMOTE_DIR} && node generator.js ${dbContainer} ${REMOTE_OUTPUT_PATH}`
    );
    console.log(" done");

    // The generator prints a __STATS__<json> marker as its final line.
    const statsMatch = remoteOutput.match(/__STATS__(.+)/);
    if (statsMatch) {
      const stats = JSON.parse(statsMatch[1]);
      console.log(`\n  Tables:    ${stats.tables}`);
      console.log(`  Views:     ${stats.views}`);
      console.log(`  Functions: ${stats.functions}`);
    }

    // Step 4: Download result (create the local output directory if needed)
    process.stdout.write("\nDownloading types...");
    const outputDir = path.dirname(output);
    if (!fs.existsSync(outputDir)) {
      fs.mkdirSync(outputDir, { recursive: true });
    }
    scp(scpBase, `${remoteHost}:${REMOTE_OUTPUT_PATH}`, output);
    console.log(" done");

    // Step 5: Clean up
    process.stdout.write("Cleaning up remote files...");
    ssh(sshBase, `rm -rf ${REMOTE_DIR}`);
    console.log(" done");

    const fileSize = fs.statSync(output).size;
    console.log(`\nTypes saved to ${output} (${formatBytes(fileSize)})`);
  } catch (err) {
    console.error(`\n\nFailed: ${err.message}`);

    // Best-effort removal of the remote scratch directory even on failure.
    try {
      ssh(sshBase, `rm -rf ${REMOTE_DIR}`);
    } catch {
      // ignore cleanup errors
    }

    process.exit(1);
  }
}
|
|
112
|
+
|
|
113
|
+
// Expand a leading "~" or "~/" to the user's home directory so SSH key paths
// from the config work without shell expansion. Previously a bare "~" was
// returned unchanged; now it resolves to the home directory itself.
// Paths without a tilde prefix are returned as-is.
function expandHome(p) {
  const home = process.env.HOME || process.env.USERPROFILE || "";
  if (p === "~") return home;
  if (p.startsWith("~/")) return path.join(home, p.slice(2));
  return p;
}
|
|
119
|
+
|
|
120
|
+
// Build the argv prefix for running a remote command over ssh.
// Shared options: auto-accept unknown host keys on first connect and fail
// fast when the server is unreachable. Password auth wraps ssh in sshpass;
// key auth passes -i; otherwise ssh falls back to its default identities.
function buildSshBase(server, sshKey, sshPassword) {
  const commonOpts = ["-o", "StrictHostKeyChecking=accept-new", "-o", "ConnectTimeout=10"];

  if (sshPassword) {
    return ["sshpass", "-p", sshPassword, "ssh", ...commonOpts, server];
  }
  if (sshKey) {
    return ["ssh", "-i", sshKey, ...commonOpts, server];
  }
  return ["ssh", ...commonOpts, server];
}
|
|
135
|
+
|
|
136
|
+
// Build the argv prefix for scp transfers. Unlike buildSshBase, the server is
// NOT appended here — callers supply source and destination (which embed the
// host) per transfer. Auth selection mirrors buildSshBase.
function buildScpBase(server, sshKey, sshPassword) {
  const commonOpts = ["-o", "StrictHostKeyChecking=accept-new", "-o", "ConnectTimeout=10"];

  if (sshPassword) {
    return ["sshpass", "-p", sshPassword, "scp", ...commonOpts];
  }
  if (sshKey) {
    return ["scp", "-i", sshKey, ...commonOpts];
  }
  return ["scp", ...commonOpts];
}
|
|
149
|
+
|
|
150
|
+
// Run a remote command through the pre-built ssh argv prefix and return its
// stdout. Each piece is escaped for the local shell before joining; the
// generous maxBuffer accommodates large generator output.
function ssh(base, command) {
  const escaped = [...base, command].map(shellEscape);
  return execSync(escaped.join(" "), { encoding: "utf8", maxBuffer: 50 * 1024 * 1024 });
}
|
|
154
|
+
|
|
155
|
+
// Perform one scp transfer (upload or download) using the pre-built argv
// prefix plus shell-escaped source and destination.
function scp(base, src, dest) {
  const parts = [...base, src, dest].map(shellEscape);
  execSync(parts.join(" "), { encoding: "utf8" });
}
|
|
159
|
+
|
|
160
|
+
// Quote an argument for POSIX shells. Tokens made only of clearly safe
// characters pass through untouched (keeps command lines readable); anything
// else is single-quoted, with embedded single quotes handled by the standard
// close-quote / escaped-quote / reopen-quote trick.
function shellEscape(arg) {
  const SAFE_TOKEN = /^[a-zA-Z0-9_./:@~=-]+$/;
  if (SAFE_TOKEN.test(arg)) {
    return arg;
  }
  const quoted = arg.replace(/'/g, "'\\''");
  return `'${quoted}'`;
}
|
|
165
|
+
|
|
166
|
+
// Format a byte count for display: exact bytes below 1 KB, otherwise one
// decimal place in KB or MB.
function formatBytes(bytes) {
  const KB = 1024;
  const MB = KB * 1024;
  if (bytes >= MB) return `${(bytes / MB).toFixed(1)} MB`;
  if (bytes >= KB) return `${(bytes / KB).toFixed(1)} KB`;
  return `${bytes} B`;
}
|
package/lib/init.js
ADDED
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
import fs from "node:fs";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import readline from "node:readline";
|
|
4
|
+
|
|
5
|
+
const CONFIG_FILE = ".supatypes.json";
|
|
6
|
+
|
|
7
|
+
// Prompt on the given readline interface, showing the default (if any) in
// parentheses. Resolves with the trimmed answer, falling back to the default
// value and finally to the empty string.
function ask(rl, question, defaultValue) {
  const hint = defaultValue ? ` (${defaultValue})` : "";
  return new Promise((resolve) => {
    rl.question(`${question}${hint}: `, (answer) => {
      const trimmed = answer.trim();
      resolve(trimmed !== "" ? trimmed : defaultValue || "");
    });
  });
}
|
|
15
|
+
|
|
16
|
+
/**
 * Interactively create a .supatypes.json config in the current directory and,
 * when a .gitignore exists, make sure the config file is git-ignored (it can
 * contain server credentials).
 *
 * Prompts for server, auth method (key or password), container name, and
 * output path. An existing config is only overwritten after confirmation.
 */
export async function init() {
  const configPath = path.resolve(CONFIG_FILE);

  if (fs.existsSync(configPath)) {
    console.log(`Config file already exists: ${configPath}`);
    const rl = readline.createInterface({ input: process.stdin, output: process.stdout });
    const overwrite = await ask(rl, "Overwrite? (y/N)", "N");
    rl.close();
    // Accept "y"/"yes" in any case (previously only an exact "y" matched).
    if (!overwrite.trim().toLowerCase().startsWith("y")) {
      console.log("Aborted.");
      return;
    }
  }

  const rl = readline.createInterface({ input: process.stdin, output: process.stdout });

  console.log("\nSupaTypes — Configuration\n");

  const server = await ask(rl, "SSH server (e.g. root@1.2.3.4)", "");
  const authMethod = await ask(rl, "Auth method: key or password?", "key");

  let sshKey = "";
  let sshPassword = "";

  // Case-insensitive match so "Password"/"PASSWORD" do not silently fall
  // through to key auth (previous comparison was exact lowercase).
  if (authMethod.trim().toLowerCase() === "password") {
    sshPassword = await ask(rl, "SSH password", "");
  } else {
    sshKey = await ask(rl, "SSH key path", "~/.ssh/id_rsa");
  }

  const dbContainer = await ask(rl, "Docker container name (e.g. supabase-db-abc123)", "");
  const output = await ask(rl, "Output file path", "./database.types.ts");

  rl.close();

  // Only include the auth field that was actually chosen.
  const config = { server, dbContainer, output };
  if (sshKey) config.sshKey = sshKey;
  if (sshPassword) config.sshPassword = sshPassword;

  fs.writeFileSync(configPath, JSON.stringify(config, null, 2) + "\n");

  console.log(`\nConfig saved to ${configPath}`);
  console.log('Run "supatypes generate" to generate types.\n');

  // Best-effort: append to an existing .gitignore so the credentials file is
  // not committed. Projects without a .gitignore are left alone.
  const gitignorePath = path.resolve(".gitignore");
  if (fs.existsSync(gitignorePath)) {
    const content = fs.readFileSync(gitignorePath, "utf8");
    if (!content.includes(CONFIG_FILE)) {
      fs.appendFileSync(gitignorePath, `\n# SupaTypes config (contains server credentials)\n${CONFIG_FILE}\n`);
      console.log(`Added ${CONFIG_FILE} to .gitignore`);
    }
  }
}
|
|
@@ -0,0 +1,397 @@
|
|
|
1
|
+
#!/usr/bin/env node

// ============================================
// Remote Generator Script
// This file is uploaded to the server and executed there.
// It dumps the PostgreSQL schema and generates TypeScript types.
// ============================================

// CommonJS on purpose: this file runs standalone on the remote host via
// plain `node generator.js`, outside the package's ESM context.
const { execSync } = require("child_process");
const fs = require("fs");

// argv[2]: Docker container running PostgreSQL; argv[3]: path on the remote
// host where the generated .ts file is written.
const DB_CONTAINER = process.argv[2];
const OUTPUT_FILE = process.argv[3] || "/tmp/supatypes-output.ts";

if (!DB_CONTAINER) {
  console.error("Usage: node remote-generator.js <db_container> [output_file]");
  process.exit(1);
}

// Step 1: Dump schema
// NOTE(review): assumes the container has a `postgres` user and database —
// standard for self-hosted Supabase images; confirm for other setups.
const schema = execSync(
  `docker exec ${DB_CONTAINER} pg_dump -U postgres -d postgres --schema-only --schema=public`,
  { encoding: "utf8", maxBuffer: 50 * 1024 * 1024 }
);

// Step 2: Parse tables
// tables maps table name -> array of { name, type, nullable, hasDefault }.
const tables = {};
const tableRegex = /CREATE TABLE (?:public\.)?(\w+) \(([\s\S]*?)\);/g;
let match;

while ((match = tableRegex.exec(schema)) !== null) {
  const tableName = match[1];
  const columnDefs = match[2];
  const columns = [];
  // pg_dump writes one column definition per line, separated by ",\n".
  const columnLines = columnDefs.split(",\n").map((l) => l.trim());

  for (const line of columnLines) {
    // Table-level constraints are not columns.
    if (line.startsWith("CONSTRAINT") || line.startsWith("CHECK")) continue;

    // Capture: column name, a (possibly multi-word) type with optional []
    // suffix, then the remaining constraint text (NOT NULL, DEFAULT ...).
    const colMatch = line.match(
      /^"?(\w+)"?\s+((?:character varying|double precision|timestamp with(?:out)? time zone|\w+)(?:\[\])?)(?:\s+(.*))?/
    );
    if (colMatch) {
      const [, name, type, constraints = ""] = colMatch;
      columns.push({
        name,
        type: mapPostgresType(type),
        nullable: !constraints.includes("NOT NULL"),
        hasDefault: constraints.includes("DEFAULT"),
      });
    }
  }

  if (columns.length > 0) {
    tables[tableName] = columns;
  }
}

// Step 3: Parse views
// views maps view name -> array of { name, type, nullable }.
const views = {};
// Captures the view name and its full SELECT body up to the terminating
// semicolon; the lookahead stops at the next statement or SQL comment.
const fullViewRegex =
  /CREATE (?:OR REPLACE )?(?:MATERIALIZED )?VIEW (?:public\.)?(\w+)\s+AS\s+([\s\S]*?);(?=\s*(?:CREATE|ALTER|--|$))/g;

while ((match = fullViewRegex.exec(schema)) !== null) {
  const viewName = match[1];
  let viewDefinition = match[2];

  // For WITH (CTE) definitions, walk past the CTE list by tracking
  // parenthesis depth until the outer SELECT is reached.
  if (viewDefinition.trim().toUpperCase().startsWith("WITH")) {
    let depth = 0;
    let inCTE = false;
    for (let i = 0; i < viewDefinition.length; i++) {
      const char = viewDefinition[i];
      if (char === "(") { depth++; if (!inCTE && depth === 1) inCTE = true; }
      else if (char === ")") { depth--; if (inCTE && depth === 0) inCTE = false; }

      if (depth === 0 && !inCTE) {
        const remaining = viewDefinition.substring(i).trim();
        if (remaining.toUpperCase().startsWith("SELECT")) {
          viewDefinition = viewDefinition.substring(i);
          break;
        }
      }
    }
  }

  const selectMatch = viewDefinition.match(/SELECT\s+([\s\S]*?)\s+FROM/i);
  if (!selectMatch) continue;

  const columns = [];
  const columnExpressions = splitSelectColumns(selectMatch[1]);

  for (const expr of columnExpressions) {
    const trimmed = expr.trim();
    if (!trimmed) continue;

    // Column name resolution: explicit alias ("... AS x"), qualified
    // reference ("t.x"), or the expression's last word as a fallback.
    let name;
    const asMatch = trimmed.match(/\s+[Aa][Ss]\s+(\w+)\s*$/);
    if (asMatch) {
      name = asMatch[1];
    } else {
      const simpleMatch = trimmed.match(/^(\w+)\.(\w+)$/);
      if (simpleMatch) name = simpleMatch[2];
      else {
        const lastWord = trimmed.match(/(\w+)\s*$/);
        if (lastWord) name = lastWord[1];
      }
    }

    if (name) {
      // NOTE(review): view column types are a heuristic guess from the
      // column NAME (count/total/points/rank -> number, everything else ->
      // string), because the schema dump carries no view column types.
      let type = "string";
      const lower = name.toLowerCase();
      if (
        lower.includes("count") || lower.includes("total") ||
        lower.includes("points") || lower.includes("rank")
      ) {
        type = "number";
      }
      columns.push({ name, type, nullable: true });
    }
  }

  // Deduplicate by column name (later occurrences win).
  const unique = Array.from(new Map(columns.map((c) => [c.name, c])).values());
  if (unique.length > 0) views[viewName] = unique;
}

// Step 4: Parse functions (RPC)
// functions maps function name -> { params: [{ name, type }], returnType }.
const functions = {};
const funcRegex =
  /CREATE (?:OR REPLACE )?FUNCTION (?:public\.)?(\w+)\(([^)]*)\)\s*\n?\s*RETURNS\s+(\w+)/g;

while ((match = funcRegex.exec(schema)) !== null) {
  const funcName = match[1];
  const paramsStr = match[2].trim();
  const returnType = match[3].toLowerCase();

  // Trigger functions are not callable through PostgREST RPC.
  if (returnType === "trigger") continue;
  // NOTE(review): hard-coded skip of one helper function — looks
  // project-specific; confirm this exclusion is still wanted.
  if (funcName === "update_updated_at_column") continue;

  const params = [];
  if (paramsStr) {
    for (const part of paramsStr.split(",").map((p) => p.trim())) {
      // Strip parameter mode keywords before matching "name type".
      const cleaned = part.replace(/^(?:IN|OUT|INOUT)\s+/i, "");
      const paramMatch = cleaned.match(/^(\w+)\s+(\w+(?:\[\])?)$/);
      if (paramMatch) {
        params.push({
          name: paramMatch[1],
          type: mapPostgresType(paramMatch[2]),
        });
      }
    }
  }

  functions[funcName] = {
    params,
    returnType: returnType === "void" ? "undefined" : mapPostgresType(returnType),
  };
}
|
|
158
|
+
|
|
159
|
+
// Step 5: Generate TypeScript output
// Header: the Json helper type plus the opening of the Database type, in the
// same shape the official Supabase CLI emits.
let output = `export type Json =
  | string
  | number
  | boolean
  | null
  | { [key: string]: Json | undefined }
  | Json[]

export type Database = {
  __InternalSupabase: {
    PostgrestVersion: "12.2.12"
  }
  public: {
    Tables: {
`;

// Tables: Row mirrors storage; Insert makes columns with defaults or
// nullables optional; Update makes every column optional.
for (const [tableName, columns] of Object.entries(tables)) {
  output += `      ${tableName}: {\n        Row: {\n`;
  for (const col of columns) {
    output += `          ${col.name}: ${col.type}${col.nullable ? " | null" : ""}\n`;
  }
  output += `        }\n        Insert: {\n`;
  for (const col of columns) {
    const opt = col.hasDefault || col.nullable ? "?" : "";
    output += `          ${col.name}${opt}: ${col.type}${col.nullable ? " | null" : ""}\n`;
  }
  output += `        }\n        Update: {\n`;
  for (const col of columns) {
    output += `          ${col.name}?: ${col.type}${col.nullable ? " | null" : ""}\n`;
  }
  output += `        }\n        Relationships: []\n      }\n`;
}

output += `    }\n    Views: {\n`;

// Views are read-only: Insert/Update are empty mapped types.
for (const [viewName, columns] of Object.entries(views)) {
  output += `      ${viewName}: {\n        Row: {\n`;
  for (const col of columns) {
    output += `          ${col.name}: ${col.type}${col.nullable ? " | null" : ""}\n`;
  }
  output += `        }\n        Insert: {\n          [_ in never]: never\n        }\n`;
  output += `        Update: {\n          [_ in never]: never\n        }\n`;
  output += `        Relationships: []\n      }\n`;
}

output += `    }\n    Functions: {\n`;

const funcEntries = Object.entries(functions);
if (funcEntries.length === 0) {
  output += `      [_ in never]: never\n`;
} else {
  for (const [funcName, func] of funcEntries) {
    output += `      ${funcName}: {\n        Args: {\n`;
    for (const param of func.params) {
      output += `          ${param.name}: ${param.type}\n`;
    }
    output += `        }\n        Returns: ${func.returnType}\n      }\n`;
  }
}

// Footer: close the Database type and append the standard Supabase helper
// types (Tables, TablesInsert, TablesUpdate, Enums, CompositeTypes) plus the
// Constants object — static boilerplate matching the official CLI output.
output += `    }
    Enums: {
      [_ in never]: never
    }
    CompositeTypes: {
      [_ in never]: never
    }
  }
}

type DatabaseWithoutInternals = Omit<Database, "__InternalSupabase">

type DefaultSchema = DatabaseWithoutInternals[Extract<keyof Database, "public">]

export type Tables<
  DefaultSchemaTableNameOrOptions extends
    | keyof (DefaultSchema["Tables"] & DefaultSchema["Views"])
    | { schema: keyof DatabaseWithoutInternals },
  TableName extends DefaultSchemaTableNameOrOptions extends {
    schema: keyof DatabaseWithoutInternals
  }
    ? keyof (DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] &
        DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Views"])
    : never = never,
> = DefaultSchemaTableNameOrOptions extends {
  schema: keyof DatabaseWithoutInternals
}
  ? (DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] &
      DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Views"])[TableName] extends {
      Row: infer R
    }
    ? R
    : never
  : DefaultSchemaTableNameOrOptions extends keyof (DefaultSchema["Tables"] &
        DefaultSchema["Views"])
    ? (DefaultSchema["Tables"] &
        DefaultSchema["Views"])[DefaultSchemaTableNameOrOptions] extends {
        Row: infer R
      }
      ? R
      : never
    : never

export type TablesInsert<
  DefaultSchemaTableNameOrOptions extends
    | keyof DefaultSchema["Tables"]
    | { schema: keyof DatabaseWithoutInternals },
  TableName extends DefaultSchemaTableNameOrOptions extends {
    schema: keyof DatabaseWithoutInternals
  }
    ? keyof DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"]
    : never = never,
> = DefaultSchemaTableNameOrOptions extends {
  schema: keyof DatabaseWithoutInternals
}
  ? DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends {
      Insert: infer I
    }
    ? I
    : never
  : DefaultSchemaTableNameOrOptions extends keyof DefaultSchema["Tables"]
    ? DefaultSchema["Tables"][DefaultSchemaTableNameOrOptions] extends {
        Insert: infer I
      }
      ? I
      : never
    : never

export type TablesUpdate<
  DefaultSchemaTableNameOrOptions extends
    | keyof DefaultSchema["Tables"]
    | { schema: keyof DatabaseWithoutInternals },
  TableName extends DefaultSchemaTableNameOrOptions extends {
    schema: keyof DatabaseWithoutInternals
  }
    ? keyof DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"]
    : never = never,
> = DefaultSchemaTableNameOrOptions extends {
  schema: keyof DatabaseWithoutInternals
}
  ? DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends {
      Update: infer U
    }
    ? U
    : never
  : DefaultSchemaTableNameOrOptions extends keyof DefaultSchema["Tables"]
    ? DefaultSchema["Tables"][DefaultSchemaTableNameOrOptions] extends {
        Update: infer U
      }
      ? U
      : never
    : never

export type Enums<
  DefaultSchemaEnumNameOrOptions extends
    | keyof DefaultSchema["Enums"]
    | { schema: keyof DatabaseWithoutInternals },
  EnumName extends DefaultSchemaEnumNameOrOptions extends {
    schema: keyof DatabaseWithoutInternals
  }
    ? keyof DatabaseWithoutInternals[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"]
    : never = never,
> = DefaultSchemaEnumNameOrOptions extends {
  schema: keyof DatabaseWithoutInternals
}
  ? DatabaseWithoutInternals[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"][EnumName]
  : DefaultSchemaEnumNameOrOptions extends keyof DefaultSchema["Enums"]
    ? DefaultSchema["Enums"][DefaultSchemaEnumNameOrOptions]
    : never

export type CompositeTypes<
  PublicCompositeTypeNameOrOptions extends
    | keyof DefaultSchema["CompositeTypes"]
    | { schema: keyof DatabaseWithoutInternals },
  CompositeTypeName extends PublicCompositeTypeNameOrOptions extends {
    schema: keyof DatabaseWithoutInternals
  }
    ? keyof DatabaseWithoutInternals[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"]
    : never = never,
> = PublicCompositeTypeNameOrOptions extends {
  schema: keyof DatabaseWithoutInternals
}
  ? DatabaseWithoutInternals[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"][CompositeTypeName]
  : PublicCompositeTypeNameOrOptions extends keyof DefaultSchema["CompositeTypes"]
    ? DefaultSchema["CompositeTypes"][PublicCompositeTypeNameOrOptions]
    : never

export const Constants = {
  public: {
    Enums: {},
  },
} as const
`;

fs.writeFileSync(OUTPUT_FILE, output);

// Output stats as JSON for the CLI to parse
// The __STATS__ marker is matched by generate.js on the local side.
const stats = {
  tables: Object.keys(tables).length,
  views: Object.keys(views).length,
  functions: Object.keys(functions).length,
};
console.log("__STATS__" + JSON.stringify(stats));
|
|
363
|
+
|
|
364
|
+
// Helper functions
|
|
365
|
+
|
|
366
|
+
// Translate a PostgreSQL type name to its TypeScript equivalent.
// Arrays appear either with a "[]" suffix (SQL syntax) or a leading
// underscore (internal catalog naming, e.g. "_text"); either way the element
// type is mapped and "[]" is appended.
function mapPostgresType(pgType) {
  const lowered = pgType.toLowerCase();
  const isArrayType = lowered.endsWith("[]") || lowered.startsWith("_");
  const elementType = lowered.replace("[]", "").replace(/^_/, "");
  const tsType = mapBaseType(elementType);
  return isArrayType ? `${tsType}[]` : tsType;
}
|
|
373
|
+
|
|
374
|
+
// Map a lower-cased, non-array PostgreSQL base type to a TypeScript type.
// Falls back to "string" for anything unrecognized.
//
// Temporal types are checked FIRST: previously the broad includes("int")
// check ran before them, so "interval" was wrongly mapped to number. The
// checks remain substring-based to keep multi-word dump types working
// ("timestamp with time zone", "character varying", etc.).
function mapBaseType(type) {
  if (type.includes("timestamp") || type.includes("date") || type.includes("time") || type === "interval") return "string";
  if (type.includes("bool")) return "boolean";
  if (type.includes("json")) return "Json";
  if (type.includes("uuid")) return "string";
  if (type.includes("int") || type.includes("serial")) return "number";
  if (type.includes("numeric") || type.includes("decimal") || type.includes("real") || type.includes("double")) return "number";
  if (type.includes("text") || type.includes("char") || type.includes("varchar")) return "string";
  return "string";
}
|
|
384
|
+
|
|
385
|
+
// Split a SELECT list on top-level commas only, so commas inside function
// calls like COALESCE(a, b) do not break a single column expression apart.
// Each returned piece is trimmed; a trailing empty piece is dropped.
function splitSelectColumns(selectClause) {
  const result = [];
  let buffer = "";
  let parenDepth = 0;

  for (const ch of selectClause) {
    if (ch === "," && parenDepth === 0) {
      result.push(buffer.trim());
      buffer = "";
      continue;
    }
    if (ch === "(") parenDepth++;
    else if (ch === ")") parenDepth--;
    buffer += ch;
  }

  const tail = buffer.trim();
  if (tail) result.push(tail);
  return result;
}
|
package/package.json
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "supatypes",
|
|
3
|
+
"version": "1.0.0",
|
|
4
|
+
"description": "Generate TypeScript types from a remote Supabase PostgreSQL database via SSH",
|
|
5
|
+
"license": "MIT",
|
|
6
|
+
"author": "MadStoneDev",
|
|
7
|
+
"type": "module",
|
|
8
|
+
"bin": {
|
|
9
|
+
"supatypes": "bin/cli.js"
|
|
10
|
+
},
|
|
11
|
+
"files": [
|
|
12
|
+
"bin/",
|
|
13
|
+
"lib/",
|
|
14
|
+
"README.md"
|
|
15
|
+
],
|
|
16
|
+
"keywords": [
|
|
17
|
+
"supabase",
|
|
18
|
+
"typescript",
|
|
19
|
+
"types",
|
|
20
|
+
"codegen",
|
|
21
|
+
"postgresql",
|
|
22
|
+
"database"
|
|
23
|
+
],
|
|
24
|
+
"engines": {
|
|
25
|
+
"node": ">=18"
|
|
26
|
+
}
|
|
27
|
+
}
|