@prisma-next/adapter-postgres 0.3.0-dev.3 → 0.3.0-dev.30
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{exports/chunk-B5SU5BVC.js → chunk-HD5YISNQ.js} +1 -1
- package/dist/chunk-HD5YISNQ.js.map +1 -0
- package/dist/{exports/chunk-CPAKRHXM.js → chunk-J3XSOAM2.js} +1 -1
- package/dist/chunk-J3XSOAM2.js.map +1 -0
- package/dist/{exports/chunk-ZHJOVBWT.js → chunk-T6S3A6VT.js} +2 -2
- package/dist/chunk-T6S3A6VT.js.map +1 -0
- package/dist/core/adapter.d.ts +19 -0
- package/dist/core/adapter.d.ts.map +1 -0
- package/dist/core/codecs.d.ts +110 -0
- package/dist/core/codecs.d.ts.map +1 -0
- package/dist/core/control-adapter.d.ts +33 -0
- package/dist/core/control-adapter.d.ts.map +1 -0
- package/dist/core/descriptor-meta.d.ts +72 -0
- package/dist/core/descriptor-meta.d.ts.map +1 -0
- package/dist/core/types.d.ts +16 -0
- package/dist/core/types.d.ts.map +1 -0
- package/dist/exports/adapter.d.ts +2 -21
- package/dist/exports/adapter.d.ts.map +1 -0
- package/dist/exports/adapter.js +2 -2
- package/dist/exports/codec-types.d.ts +7 -34
- package/dist/exports/codec-types.d.ts.map +1 -0
- package/dist/exports/codec-types.js +1 -1
- package/dist/exports/column-types.d.ts +11 -14
- package/dist/exports/column-types.d.ts.map +1 -0
- package/dist/exports/control.d.ts +4 -5
- package/dist/exports/control.d.ts.map +1 -0
- package/dist/exports/control.js +1 -1
- package/dist/exports/runtime.d.ts +6 -8
- package/dist/exports/runtime.d.ts.map +1 -0
- package/dist/exports/runtime.js +3 -3
- package/dist/exports/types.d.ts +2 -19
- package/dist/exports/types.d.ts.map +1 -0
- package/package.json +24 -23
- package/src/core/adapter.ts +429 -0
- package/src/core/codecs.ts +194 -0
- package/src/core/control-adapter.ts +375 -0
- package/src/core/descriptor-meta.ts +41 -0
- package/src/core/types.ts +53 -0
- package/src/exports/adapter.ts +1 -0
- package/src/exports/codec-types.ts +11 -0
- package/src/exports/column-types.ts +53 -0
- package/src/exports/control.ts +20 -0
- package/src/exports/runtime.ts +32 -0
- package/src/exports/types.ts +14 -0
- package/dist/exports/chunk-B5SU5BVC.js.map +0 -1
- package/dist/exports/chunk-CPAKRHXM.js.map +0 -1
- package/dist/exports/chunk-ZHJOVBWT.js.map +0 -1
|
@@ -0,0 +1,194 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Unified codec definitions for Postgres adapter.
|
|
3
|
+
*
|
|
4
|
+
* This file contains a single source of truth for all codec information:
|
|
5
|
+
* - Scalar names
|
|
6
|
+
* - Type IDs
|
|
7
|
+
* - Codec implementations (runtime)
|
|
8
|
+
* - Type information (compile-time)
|
|
9
|
+
*
|
|
10
|
+
* This structure is used both at runtime (to populate the registry) and
|
|
11
|
+
* at compile time (to derive CodecTypes).
|
|
12
|
+
*/
|
|
13
|
+
|
|
14
|
+
import { codec, defineCodecs } from '@prisma-next/sql-relational-core/ast';
|
|
15
|
+
|
|
16
|
+
// Create individual codec instances
|
|
17
|
+
const pgTextCodec = codec({
|
|
18
|
+
typeId: 'pg/text@1',
|
|
19
|
+
targetTypes: ['text'],
|
|
20
|
+
encode: (value: string): string => value,
|
|
21
|
+
decode: (wire: string): string => wire,
|
|
22
|
+
meta: {
|
|
23
|
+
db: {
|
|
24
|
+
sql: {
|
|
25
|
+
postgres: {
|
|
26
|
+
nativeType: 'text',
|
|
27
|
+
},
|
|
28
|
+
},
|
|
29
|
+
},
|
|
30
|
+
},
|
|
31
|
+
});
|
|
32
|
+
|
|
33
|
+
// Codec for Postgres `integer` (int4): 32-bit values fit a JS number exactly,
// so encode/decode are identities.
const pgInt4Codec = codec<'pg/int4@1', number, number>({
  typeId: 'pg/int4@1',
  targetTypes: ['int4'],
  encode: (value) => value,
  decode: (wire) => wire,
  meta: {
    db: {
      sql: {
        postgres: {
          nativeType: 'integer',
        },
      },
    },
  },
});

// Codec for Postgres `smallint` (int2): 16-bit values fit a JS number exactly.
const pgInt2Codec = codec<'pg/int2@1', number, number>({
  typeId: 'pg/int2@1',
  targetTypes: ['int2'],
  encode: (value) => value,
  decode: (wire) => wire,
  meta: {
    db: {
      sql: {
        postgres: {
          nativeType: 'smallint',
        },
      },
    },
  },
});

// Codec for Postgres `bigint` (int8).
// NOTE(review): int8 is a 64-bit integer but is modeled here as a JS number —
// values beyond Number.MAX_SAFE_INTEGER (2^53 - 1) silently lose precision.
// Confirm whether `bigint` support is intended; changing the type parameter
// would be a breaking interface change, so this is flagged, not fixed.
const pgInt8Codec = codec<'pg/int8@1', number, number>({
  typeId: 'pg/int8@1',
  targetTypes: ['int8'],
  encode: (value) => value,
  decode: (wire) => wire,
  meta: {
    db: {
      sql: {
        postgres: {
          nativeType: 'bigint',
        },
      },
    },
  },
});

// Codec for Postgres `real` (float4): JS numbers are float64, which can
// represent every float32 value, so identity transforms are lossless here.
const pgFloat4Codec = codec<'pg/float4@1', number, number>({
  typeId: 'pg/float4@1',
  targetTypes: ['float4'],
  encode: (value) => value,
  decode: (wire) => wire,
  meta: {
    db: {
      sql: {
        postgres: {
          nativeType: 'real',
        },
      },
    },
  },
});

// Codec for Postgres `double precision` (float8): same representation as a
// JS number, so encode/decode are identities.
const pgFloat8Codec = codec<'pg/float8@1', number, number>({
  typeId: 'pg/float8@1',
  targetTypes: ['float8'],
  encode: (value) => value,
  decode: (wire) => wire,
  meta: {
    db: {
      sql: {
        postgres: {
          nativeType: 'double precision',
        },
      },
    },
  },
});
|
|
112
|
+
|
|
113
|
+
// Codec for Postgres `timestamp without time zone`.
// Accepts either a string or a Date on the app side and normalizes Date
// inputs to ISO-8601 via toISOString(); strings pass through unchanged.
// NOTE(review): decode is annotated `(wire: string | Date)` even though the
// codec's wire type parameter is `string` — presumably a defensive measure
// because some drivers hand back Date objects for timestamp columns. Confirm
// against the driver layer; the String(...) fallbacks are unreachable for the
// declared union but guard against untyped runtime values.
const pgTimestampCodec = codec<'pg/timestamp@1', string | Date, string>({
  typeId: 'pg/timestamp@1',
  targetTypes: ['timestamp'],
  encode: (value: string | Date): string => {
    if (value instanceof Date) return value.toISOString();
    if (typeof value === 'string') return value;
    return String(value);
  },
  decode: (wire: string | Date): string => {
    if (typeof wire === 'string') return wire;
    if (wire instanceof Date) return wire.toISOString();
    return String(wire);
  },
  meta: {
    db: {
      sql: {
        postgres: {
          nativeType: 'timestamp without time zone',
        },
      },
    },
  },
});

// Codec for Postgres `timestamp with time zone`.
// Identical normalization strategy to pgTimestampCodec above: Date values are
// converted to ISO-8601 (UTC) strings, string values pass through as-is.
const pgTimestamptzCodec = codec<'pg/timestamptz@1', string | Date, string>({
  typeId: 'pg/timestamptz@1',
  targetTypes: ['timestamptz'],
  encode: (value: string | Date): string => {
    if (value instanceof Date) return value.toISOString();
    if (typeof value === 'string') return value;
    return String(value);
  },
  decode: (wire: string | Date): string => {
    if (typeof wire === 'string') return wire;
    if (wire instanceof Date) return wire.toISOString();
    return String(wire);
  },
  meta: {
    db: {
      sql: {
        postgres: {
          nativeType: 'timestamp with time zone',
        },
      },
    },
  },
});
|
|
160
|
+
|
|
161
|
+
// Codec for Postgres `boolean` (bool): JS booleans map 1:1, so encode/decode
// are identities.
const pgBoolCodec = codec<'pg/bool@1', boolean, boolean>({
  typeId: 'pg/bool@1',
  targetTypes: ['bool'],
  encode: (value) => value,
  decode: (wire) => wire,
  meta: {
    db: {
      sql: {
        postgres: {
          nativeType: 'boolean',
        },
      },
    },
  },
});
|
|
176
|
+
|
|
177
|
+
// Build codec definitions using the builder DSL.
// The string key passed to .add() is the scalar name exposed to contracts;
// each key must match the codec's targetTypes entry declared above.
const codecs = defineCodecs()
  .add('text', pgTextCodec)
  .add('int4', pgInt4Codec)
  .add('int2', pgInt2Codec)
  .add('int8', pgInt8Codec)
  .add('float4', pgFloat4Codec)
  .add('float8', pgFloat8Codec)
  .add('timestamp', pgTimestampCodec)
  .add('timestamptz', pgTimestamptzCodec)
  .add('bool', pgBoolCodec);

// Export derived structures directly from codecs builder.
// codecDefinitions feeds the runtime registry; dataTypes is the scalar-name
// table consumed by the exports/column-types surface.
export const codecDefinitions = codecs.codecDefinitions;
export const dataTypes = codecs.dataTypes;

// Export types derived from codecs builder.
// CodecTypes is the compile-time mirror of the runtime registry above and is
// what contract.d.ts files import (see exports/codec-types).
export type CodecTypes = typeof codecs.CodecTypes;
|
|
@@ -0,0 +1,375 @@
|
|
|
1
|
+
import type { ControlDriverInstance } from '@prisma-next/core-control-plane/types';
|
|
2
|
+
import type { SqlControlAdapter } from '@prisma-next/family-sql/control-adapter';
|
|
3
|
+
import type {
|
|
4
|
+
PrimaryKey,
|
|
5
|
+
SqlColumnIR,
|
|
6
|
+
SqlForeignKeyIR,
|
|
7
|
+
SqlIndexIR,
|
|
8
|
+
SqlSchemaIR,
|
|
9
|
+
SqlTableIR,
|
|
10
|
+
SqlUniqueIR,
|
|
11
|
+
} from '@prisma-next/sql-schema-ir/types';
|
|
12
|
+
|
|
13
|
+
/**
 * Postgres control plane adapter for control-plane operations like introspection.
 * Provides target-specific implementations for control-plane domain actions.
 */
export class PostgresControlAdapter implements SqlControlAdapter<'postgres'> {
  readonly familyId = 'sql' as const;
  readonly targetId = 'postgres' as const;
  /**
   * @deprecated Use targetId instead
   */
  readonly target = 'postgres' as const;

  /**
   * Introspects a Postgres database schema and returns a raw SqlSchemaIR.
   *
   * This is a pure schema discovery operation that queries the Postgres catalog
   * and returns the schema structure without type mapping or contract enrichment.
   * Type mapping and enrichment are handled separately by enrichment helpers.
   *
   * NOTE(review): this issues five queries per table sequentially (columns, PK,
   * FKs, uniques, indexes) — an N+1 pattern that may be slow on large schemas;
   * confirm whether batching is worth it before optimizing.
   *
   * @param driver - ControlDriverInstance<'sql', 'postgres'> instance for executing queries
   * @param contractIR - Optional contract IR for contract-guided introspection (filtering, optimization)
   *   (currently unused — accepted but ignored; see `_contractIR`)
   * @param schema - Schema name to introspect (defaults to 'public')
   * @returns Promise resolving to SqlSchemaIR representing the live database schema
   */
  async introspect(
    driver: ControlDriverInstance<'sql', 'postgres'>,
    _contractIR?: unknown,
    schema = 'public',
  ): Promise<SqlSchemaIR> {
    // Query tables — only ordinary tables ('BASE TABLE'); views, foreign
    // tables, and partitioned children reported as views are excluded.
    const tablesResult = await driver.query<{
      table_name: string;
    }>(
      `SELECT table_name
       FROM information_schema.tables
       WHERE table_schema = $1
         AND table_type = 'BASE TABLE'
       ORDER BY table_name`,
      [schema],
    );

    const tables: Record<string, SqlTableIR> = {};

    for (const tableRow of tablesResult.rows) {
      const tableName = tableRow.table_name;

      // Query columns for this table (ordinal_position preserves declaration order).
      const columnsResult = await driver.query<{
        column_name: string;
        data_type: string;
        udt_name: string;
        is_nullable: string;
        character_maximum_length: number | null;
        numeric_precision: number | null;
        numeric_scale: number | null;
      }>(
        `SELECT
           column_name,
           data_type,
           udt_name,
           is_nullable,
           character_maximum_length,
           numeric_precision,
           numeric_scale
         FROM information_schema.columns
         WHERE table_schema = $1
           AND table_name = $2
         ORDER BY ordinal_position`,
        [schema, tableName],
      );

      const columns: Record<string, SqlColumnIR> = {};
      for (const colRow of columnsResult.rows) {
        // Build native type string from catalog data.
        // Strategy: prefer udt_name (e.g. 'int4', 'text'), but for types that
        // carry length/precision modifiers reconstruct the full spelling
        // (e.g. 'character varying(255)', 'numeric(10,2)') from data_type.
        let nativeType = colRow.udt_name;
        if (colRow.data_type === 'character varying' || colRow.data_type === 'character') {
          if (colRow.character_maximum_length) {
            nativeType = `${colRow.data_type}(${colRow.character_maximum_length})`;
          } else {
            nativeType = colRow.data_type;
          }
        } else if (colRow.data_type === 'numeric' || colRow.data_type === 'decimal') {
          // scale may legitimately be 0, hence the explicit !== null check.
          if (colRow.numeric_precision && colRow.numeric_scale !== null) {
            nativeType = `${colRow.data_type}(${colRow.numeric_precision},${colRow.numeric_scale})`;
          } else if (colRow.numeric_precision) {
            nativeType = `${colRow.data_type}(${colRow.numeric_precision})`;
          } else {
            nativeType = colRow.data_type;
          }
        } else {
          nativeType = colRow.udt_name || colRow.data_type;
        }

        columns[colRow.column_name] = {
          name: colRow.column_name,
          nativeType,
          nullable: colRow.is_nullable === 'YES',
        };
      }

      // Query primary key (columns joined via key_column_usage, in key order).
      const pkResult = await driver.query<{
        constraint_name: string;
        column_name: string;
        ordinal_position: number;
      }>(
        `SELECT
           tc.constraint_name,
           kcu.column_name,
           kcu.ordinal_position
         FROM information_schema.table_constraints tc
         JOIN information_schema.key_column_usage kcu
           ON tc.constraint_name = kcu.constraint_name
          AND tc.table_schema = kcu.table_schema
          AND tc.table_name = kcu.table_name
         WHERE tc.table_schema = $1
           AND tc.table_name = $2
           AND tc.constraint_type = 'PRIMARY KEY'
         ORDER BY kcu.ordinal_position`,
        [schema, tableName],
      );

      // NOTE(review): .sort() mutates pkResult.rows in place and is redundant
      // with the ORDER BY above — harmless, but worth confirming intent.
      const primaryKeyColumns = pkResult.rows
        .sort((a, b) => a.ordinal_position - b.ordinal_position)
        .map((row) => row.column_name);
      // Omit the optional `name` key entirely (rather than set undefined) when
      // the constraint name is absent.
      const primaryKey: PrimaryKey | undefined =
        primaryKeyColumns.length > 0
          ? {
              columns: primaryKeyColumns,
              ...(pkResult.rows[0]?.constraint_name
                ? { name: pkResult.rows[0].constraint_name }
                : {}),
            }
          : undefined;

      // Query foreign keys.
      // NOTE(review): information_schema.constraint_column_usage does not
      // guarantee row ordering that pairs referencing and referenced columns
      // for MULTI-column FKs — the row-by-row pairing below may mis-associate
      // columns for composite keys. Confirm against pg_constraint.conkey/confkey
      // if composite FKs must be supported. referenced_table_schema is selected
      // but currently unused.
      const fkResult = await driver.query<{
        constraint_name: string;
        column_name: string;
        ordinal_position: number;
        referenced_table_schema: string;
        referenced_table_name: string;
        referenced_column_name: string;
      }>(
        `SELECT
           tc.constraint_name,
           kcu.column_name,
           kcu.ordinal_position,
           ccu.table_schema AS referenced_table_schema,
           ccu.table_name AS referenced_table_name,
           ccu.column_name AS referenced_column_name
         FROM information_schema.table_constraints tc
         JOIN information_schema.key_column_usage kcu
           ON tc.constraint_name = kcu.constraint_name
          AND tc.table_schema = kcu.table_schema
          AND tc.table_name = kcu.table_name
         JOIN information_schema.constraint_column_usage ccu
           ON ccu.constraint_name = tc.constraint_name
          AND ccu.table_schema = tc.table_schema
         WHERE tc.table_schema = $1
           AND tc.table_name = $2
           AND tc.constraint_type = 'FOREIGN KEY'
         ORDER BY tc.constraint_name, kcu.ordinal_position`,
        [schema, tableName],
      );

      // Group FK rows by constraint name so multi-column keys collapse into
      // one entry with parallel columns/referencedColumns arrays.
      const foreignKeysMap = new Map<
        string,
        {
          columns: string[];
          referencedTable: string;
          referencedColumns: string[];
          name: string;
        }
      >();
      for (const fkRow of fkResult.rows) {
        const existing = foreignKeysMap.get(fkRow.constraint_name);
        if (existing) {
          // Multi-column FK - add column
          existing.columns.push(fkRow.column_name);
          existing.referencedColumns.push(fkRow.referenced_column_name);
        } else {
          foreignKeysMap.set(fkRow.constraint_name, {
            columns: [fkRow.column_name],
            referencedTable: fkRow.referenced_table_name,
            referencedColumns: [fkRow.referenced_column_name],
            name: fkRow.constraint_name,
          });
        }
      }
      // Freeze copies so the IR arrays cannot be mutated downstream.
      const foreignKeys: readonly SqlForeignKeyIR[] = Array.from(foreignKeysMap.values()).map(
        (fk) => ({
          columns: Object.freeze([...fk.columns]) as readonly string[],
          referencedTable: fk.referencedTable,
          referencedColumns: Object.freeze([...fk.referencedColumns]) as readonly string[],
          name: fk.name,
        }),
      );

      // Query unique constraints (excluding PK — a PK is reported separately).
      const uniqueResult = await driver.query<{
        constraint_name: string;
        column_name: string;
        ordinal_position: number;
      }>(
        `SELECT
           tc.constraint_name,
           kcu.column_name,
           kcu.ordinal_position
         FROM information_schema.table_constraints tc
         JOIN information_schema.key_column_usage kcu
           ON tc.constraint_name = kcu.constraint_name
          AND tc.table_schema = kcu.table_schema
          AND tc.table_name = kcu.table_name
         WHERE tc.table_schema = $1
           AND tc.table_name = $2
           AND tc.constraint_type = 'UNIQUE'
           AND tc.constraint_name NOT IN (
             SELECT constraint_name
             FROM information_schema.table_constraints
             WHERE table_schema = $1
               AND table_name = $2
               AND constraint_type = 'PRIMARY KEY'
           )
         ORDER BY tc.constraint_name, kcu.ordinal_position`,
        [schema, tableName],
      );

      // Group unique-constraint rows by constraint name (multi-column support).
      const uniquesMap = new Map<
        string,
        {
          columns: string[];
          name: string;
        }
      >();
      for (const uniqueRow of uniqueResult.rows) {
        const existing = uniquesMap.get(uniqueRow.constraint_name);
        if (existing) {
          existing.columns.push(uniqueRow.column_name);
        } else {
          uniquesMap.set(uniqueRow.constraint_name, {
            columns: [uniqueRow.column_name],
            name: uniqueRow.constraint_name,
          });
        }
      }
      const uniques: readonly SqlUniqueIR[] = Array.from(uniquesMap.values()).map((uq) => ({
        columns: Object.freeze([...uq.columns]) as readonly string[],
        name: uq.name,
      }));

      // Query indexes (excluding PK and unique constraints — any index whose
      // name matches a constraint on this table is filtered via NOT EXISTS).
      // Uses the pg_catalog tables because information_schema has no index view.
      const indexResult = await driver.query<{
        indexname: string;
        indisunique: boolean;
        attname: string;
        attnum: number;
      }>(
        `SELECT
           i.indexname,
           ix.indisunique,
           a.attname,
           a.attnum
         FROM pg_indexes i
         JOIN pg_class ic ON ic.relname = i.indexname
         JOIN pg_namespace ins ON ins.oid = ic.relnamespace AND ins.nspname = $1
         JOIN pg_index ix ON ix.indexrelid = ic.oid
         JOIN pg_class t ON t.oid = ix.indrelid
         JOIN pg_namespace tn ON tn.oid = t.relnamespace AND tn.nspname = $1
         LEFT JOIN pg_attribute a ON a.attrelid = t.oid AND a.attnum = ANY(ix.indkey) AND a.attnum > 0
         WHERE i.schemaname = $1
           AND i.tablename = $2
           AND NOT EXISTS (
             SELECT 1
             FROM information_schema.table_constraints tc
             WHERE tc.table_schema = $1
               AND tc.table_name = $2
               AND tc.constraint_name = i.indexname
           )
         ORDER BY i.indexname, a.attnum`,
        [schema, tableName],
      );

      // Group index rows by index name (multi-column support).
      // NOTE(review): ordering by a.attnum yields table-column order, which is
      // not necessarily the index key order (ix.indkey order) — confirm whether
      // key order matters to consumers of SqlIndexIR.
      const indexesMap = new Map<
        string,
        {
          columns: string[];
          name: string;
          unique: boolean;
        }
      >();
      for (const idxRow of indexResult.rows) {
        // Skip rows where attname is null (system columns or invalid attnum)
        if (!idxRow.attname) {
          continue;
        }
        const existing = indexesMap.get(idxRow.indexname);
        if (existing) {
          existing.columns.push(idxRow.attname);
        } else {
          indexesMap.set(idxRow.indexname, {
            columns: [idxRow.attname],
            name: idxRow.indexname,
            unique: idxRow.indisunique,
          });
        }
      }
      const indexes: readonly SqlIndexIR[] = Array.from(indexesMap.values()).map((idx) => ({
        columns: Object.freeze([...idx.columns]) as readonly string[],
        name: idx.name,
        unique: idx.unique,
      }));

      // Assemble the per-table IR; primaryKey is spread in only when present.
      tables[tableName] = {
        name: tableName,
        columns,
        ...(primaryKey ? { primaryKey } : {}),
        foreignKeys,
        uniques,
        indexes,
      };
    }

    // Query extensions.
    // NOTE(review): pg_extension is database-wide, not filtered by the
    // requested schema — confirm that listing all installed extensions
    // regardless of schema is intended.
    const extensionsResult = await driver.query<{
      extname: string;
    }>(
      `SELECT extname
       FROM pg_extension
       ORDER BY extname`,
      [],
    );

    const extensions = extensionsResult.rows.map((row) => row.extname);

    // Build annotations with Postgres-specific metadata (introspected schema
    // name and server version).
    const annotations = {
      pg: {
        schema,
        version: await this.getPostgresVersion(driver),
      },
    };

    return {
      tables,
      extensions,
      annotations,
    };
  }

  /**
   * Gets the Postgres version from the database.
   *
   * Parses the major.minor number out of the `version()` banner
   * (e.g. "PostgreSQL 15.1 on x86_64..." -> "15.1"); returns 'unknown'
   * when the banner does not match the expected format.
   */
  private async getPostgresVersion(
    driver: ControlDriverInstance<'sql', 'postgres'>,
  ): Promise<string> {
    const result = await driver.query<{ version: string }>('SELECT version() AS version', []);
    const versionString = result.rows[0]?.version ?? '';
    // Extract version number from "PostgreSQL 15.1 ..." format
    const match = versionString.match(/PostgreSQL (\d+\.\d+)/);
    return match?.[1] ?? 'unknown';
  }
}
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
/**
 * Static descriptor metadata for the Postgres adapter.
 *
 * Declared `as const` so the full literal shape is preserved in the exported
 * type — consumers rely on the exact literal values (typeIds, capability
 * flags), so the structure of this object is part of the public interface.
 */
export const postgresAdapterDescriptorMeta = {
  kind: 'adapter',
  familyId: 'sql',
  targetId: 'postgres',
  id: 'postgres',
  // NOTE(review): hardcoded '0.0.1' does not track the package version
  // (0.3.0-dev.x) — confirm whether this descriptor version is independent
  // or should be kept in sync.
  version: '0.0.1',
  capabilities: {
    postgres: {
      orderBy: true,
      limit: true,
      lateral: true,
      jsonAgg: true,
      returning: true,
    },
  },
  types: {
    // Where codegen should import the compile-time codec types from.
    codecTypes: {
      import: {
        package: '@prisma-next/adapter-postgres/codec-types',
        named: 'CodecTypes',
        alias: 'PgTypes',
      },
    },
    // Storage-level type mapping: one entry per codec typeId.
    // NOTE(review): nativeType here uses catalog short names ('int4', 'int2',
    // 'bool', ...) while the codec meta in core/codecs.ts uses long SQL names
    // ('integer', 'smallint', 'boolean', ...) — presumably two different
    // naming layers; verify this divergence is intentional.
    storage: [
      { typeId: 'pg/text@1', familyId: 'sql', targetId: 'postgres', nativeType: 'text' },
      { typeId: 'pg/int4@1', familyId: 'sql', targetId: 'postgres', nativeType: 'int4' },
      { typeId: 'pg/int2@1', familyId: 'sql', targetId: 'postgres', nativeType: 'int2' },
      { typeId: 'pg/int8@1', familyId: 'sql', targetId: 'postgres', nativeType: 'int8' },
      { typeId: 'pg/float4@1', familyId: 'sql', targetId: 'postgres', nativeType: 'float4' },
      { typeId: 'pg/float8@1', familyId: 'sql', targetId: 'postgres', nativeType: 'float8' },
      { typeId: 'pg/timestamp@1', familyId: 'sql', targetId: 'postgres', nativeType: 'timestamp' },
      {
        typeId: 'pg/timestamptz@1',
        familyId: 'sql',
        targetId: 'postgres',
        nativeType: 'timestamptz',
      },
      { typeId: 'pg/bool@1', familyId: 'sql', targetId: 'postgres', nativeType: 'bool' },
    ],
  },
} as const;
|
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
import type {
|
|
2
|
+
SqlContract,
|
|
3
|
+
SqlStorage,
|
|
4
|
+
StorageColumn,
|
|
5
|
+
StorageTable,
|
|
6
|
+
} from '@prisma-next/sql-contract/types';
|
|
7
|
+
import type {
|
|
8
|
+
BinaryExpr,
|
|
9
|
+
ColumnRef,
|
|
10
|
+
DeleteAst,
|
|
11
|
+
Direction,
|
|
12
|
+
InsertAst,
|
|
13
|
+
JoinAst,
|
|
14
|
+
LiteralExpr,
|
|
15
|
+
LoweredStatement,
|
|
16
|
+
OperationExpr,
|
|
17
|
+
ParamRef,
|
|
18
|
+
QueryAst,
|
|
19
|
+
SelectAst,
|
|
20
|
+
UpdateAst,
|
|
21
|
+
} from '@prisma-next/sql-relational-core/ast';
|
|
22
|
+
|
|
23
|
+
/**
 * Construction options for the Postgres adapter.
 */
export interface PostgresAdapterOptions {
  // Optional profile identifier. NOTE(review): semantics not visible in this
  // file — presumably selects a capability/lowering profile; confirm against
  // createPostgresAdapter in core/adapter.ts.
  readonly profileId?: string;
}

/**
 * A SQL contract narrowed to the 'postgres' target.
 */
export type PostgresContract = SqlContract<SqlStorage> & { readonly target: 'postgres' };

/**
 * Expression forms handled by this adapter's lowering: column references and
 * bound parameter references.
 */
export type Expr = ColumnRef | ParamRef;

/**
 * A single ORDER BY term: the column to sort by and its direction.
 */
export interface OrderClause {
  readonly expr: ColumnRef;
  readonly dir: Direction;
}

/**
 * Alias for the family-level lowered statement type; Postgres adds no extra
 * fields of its own (kept as a named alias for forward compatibility).
 */
export type PostgresLoweredStatement = LoweredStatement;

// Re-export the AST and contract-storage types that adapter consumers need,
// so they can be imported from this package without reaching into
// sql-relational-core / sql-contract directly.
export type {
  BinaryExpr,
  ColumnRef,
  DeleteAst,
  Direction,
  InsertAst,
  JoinAst,
  LiteralExpr,
  OperationExpr,
  ParamRef,
  QueryAst,
  SelectAst,
  StorageColumn,
  StorageTable,
  UpdateAst,
};
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Public entry point for the adapter subpath export: re-exports the factory
// from the core implementation so consumers never import from core/ directly.
export { createPostgresAdapter } from '../core/adapter';
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
/**
 * Codec type definitions for Postgres adapter.
 *
 * This file exports type-only definitions for codec input/output types.
 * These types are imported by contract.d.ts files for compile-time type inference.
 *
 * Runtime codec implementations are provided by the adapter's codec registry.
 */

export type { CodecTypes } from '../core/codecs';
// NOTE(review): dataTypes is a runtime (value) export in a file documented as
// type-only — confirm this is intentional, since importing it pulls the codec
// module into the runtime bundle.
export { dataTypes } from '../core/codecs';
|