@prisma-next/adapter-postgres 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +259 -0
- package/dist/exports/adapter.d.ts +19 -0
- package/dist/exports/adapter.js +8 -0
- package/dist/exports/adapter.js.map +1 -0
- package/dist/exports/chunk-CPAKRHXM.js +162 -0
- package/dist/exports/chunk-CPAKRHXM.js.map +1 -0
- package/dist/exports/chunk-NOYZK3LL.js +288 -0
- package/dist/exports/chunk-NOYZK3LL.js.map +1 -0
- package/dist/exports/codec-types.d.ts +38 -0
- package/dist/exports/codec-types.js +7 -0
- package/dist/exports/codec-types.js.map +1 -0
- package/dist/exports/control.d.ts +96 -0
- package/dist/exports/control.js +326 -0
- package/dist/exports/control.js.map +1 -0
- package/dist/exports/runtime.d.ts +17 -0
- package/dist/exports/runtime.js +64 -0
- package/dist/exports/runtime.js.map +1 -0
- package/dist/exports/types.d.ts +19 -0
- package/dist/exports/types.js +1 -0
- package/dist/exports/types.js.map +1 -0
- package/package.json +56 -0
package/dist/exports/control.js
ADDED
@@ -0,0 +1,326 @@
// src/exports/control.ts
import { readFileSync } from "fs";
import { dirname, join } from "path";
import { fileURLToPath } from "url";
import { type } from "arktype";

// src/core/control-adapter.ts
var PostgresControlAdapter = class {
  familyId = "sql";
  targetId = "postgres";
  /**
   * @deprecated Use targetId instead
   */
  target = "postgres";
  /**
   * Introspects a Postgres database schema and returns a raw SqlSchemaIR.
   *
   * This is a pure schema discovery operation that queries the Postgres catalog
   * and returns the schema structure without type mapping or contract enrichment.
   * Type mapping and enrichment are handled separately by enrichment helpers.
   *
   * @param driver - ControlDriverInstance<'postgres'> instance for executing queries
   * @param contractIR - Optional contract IR for contract-guided introspection (filtering, optimization)
   * @param schema - Schema name to introspect (defaults to 'public')
   * @returns Promise resolving to SqlSchemaIR representing the live database schema
   */
  async introspect(driver, _contractIR, schema = "public") {
    const tablesResult = await driver.query(
      `SELECT table_name
       FROM information_schema.tables
       WHERE table_schema = $1
         AND table_type = 'BASE TABLE'
       ORDER BY table_name`,
      [schema]
    );
    const tables = {};
    for (const tableRow of tablesResult.rows) {
      const tableName = tableRow.table_name;
      const columnsResult = await driver.query(
        `SELECT
           column_name,
           data_type,
           udt_name,
           is_nullable,
           character_maximum_length,
           numeric_precision,
           numeric_scale
         FROM information_schema.columns
         WHERE table_schema = $1
           AND table_name = $2
         ORDER BY ordinal_position`,
        [schema, tableName]
      );
      const columns = {};
      for (const colRow of columnsResult.rows) {
        let nativeType = colRow.udt_name;
        if (colRow.data_type === "character varying" || colRow.data_type === "character") {
          if (colRow.character_maximum_length) {
            nativeType = `${colRow.data_type}(${colRow.character_maximum_length})`;
          } else {
            nativeType = colRow.data_type;
          }
        } else if (colRow.data_type === "numeric" || colRow.data_type === "decimal") {
          if (colRow.numeric_precision && colRow.numeric_scale !== null) {
            nativeType = `${colRow.data_type}(${colRow.numeric_precision},${colRow.numeric_scale})`;
          } else if (colRow.numeric_precision) {
            nativeType = `${colRow.data_type}(${colRow.numeric_precision})`;
          } else {
            nativeType = colRow.data_type;
          }
        } else {
          nativeType = colRow.udt_name || colRow.data_type;
        }
        columns[colRow.column_name] = {
          name: colRow.column_name,
          // Placeholder typeId - enrichment will replace this with actual codec ID
          typeId: "unmapped",
          nativeType,
          nullable: colRow.is_nullable === "YES"
        };
      }
      const pkResult = await driver.query(
        `SELECT
           tc.constraint_name,
           kcu.column_name,
           kcu.ordinal_position
         FROM information_schema.table_constraints tc
         JOIN information_schema.key_column_usage kcu
           ON tc.constraint_name = kcu.constraint_name
           AND tc.table_schema = kcu.table_schema
           AND tc.table_name = kcu.table_name
         WHERE tc.table_schema = $1
           AND tc.table_name = $2
           AND tc.constraint_type = 'PRIMARY KEY'
         ORDER BY kcu.ordinal_position`,
        [schema, tableName]
      );
      const primaryKeyColumns = pkResult.rows.sort((a, b) => a.ordinal_position - b.ordinal_position).map((row) => row.column_name);
      const primaryKey = primaryKeyColumns.length > 0 ? {
        columns: primaryKeyColumns,
        ...pkResult.rows[0]?.constraint_name ? { name: pkResult.rows[0].constraint_name } : {}
      } : void 0;
      const fkResult = await driver.query(
        `SELECT
           tc.constraint_name,
           kcu.column_name,
           kcu.ordinal_position,
           ccu.table_schema AS referenced_table_schema,
           ccu.table_name AS referenced_table_name,
           ccu.column_name AS referenced_column_name
         FROM information_schema.table_constraints tc
         JOIN information_schema.key_column_usage kcu
           ON tc.constraint_name = kcu.constraint_name
           AND tc.table_schema = kcu.table_schema
           AND tc.table_name = kcu.table_name
         JOIN information_schema.constraint_column_usage ccu
           ON ccu.constraint_name = tc.constraint_name
           AND ccu.table_schema = tc.table_schema
         WHERE tc.table_schema = $1
           AND tc.table_name = $2
           AND tc.constraint_type = 'FOREIGN KEY'
         ORDER BY tc.constraint_name, kcu.ordinal_position`,
        [schema, tableName]
      );
      const foreignKeysMap = /* @__PURE__ */ new Map();
      for (const fkRow of fkResult.rows) {
        const existing = foreignKeysMap.get(fkRow.constraint_name);
        if (existing) {
          existing.columns.push(fkRow.column_name);
          existing.referencedColumns.push(fkRow.referenced_column_name);
        } else {
          foreignKeysMap.set(fkRow.constraint_name, {
            columns: [fkRow.column_name],
            referencedTable: fkRow.referenced_table_name,
            referencedColumns: [fkRow.referenced_column_name],
            name: fkRow.constraint_name
          });
        }
      }
      const foreignKeys = Array.from(foreignKeysMap.values()).map(
        (fk) => ({
          columns: Object.freeze([...fk.columns]),
          referencedTable: fk.referencedTable,
          referencedColumns: Object.freeze([...fk.referencedColumns]),
          name: fk.name
        })
      );
      const uniqueResult = await driver.query(
        `SELECT
           tc.constraint_name,
           kcu.column_name,
           kcu.ordinal_position
         FROM information_schema.table_constraints tc
         JOIN information_schema.key_column_usage kcu
           ON tc.constraint_name = kcu.constraint_name
           AND tc.table_schema = kcu.table_schema
           AND tc.table_name = kcu.table_name
         WHERE tc.table_schema = $1
           AND tc.table_name = $2
           AND tc.constraint_type = 'UNIQUE'
           AND tc.constraint_name NOT IN (
             SELECT constraint_name
             FROM information_schema.table_constraints
             WHERE table_schema = $1
               AND table_name = $2
               AND constraint_type = 'PRIMARY KEY'
           )
         ORDER BY tc.constraint_name, kcu.ordinal_position`,
        [schema, tableName]
      );
      const uniquesMap = /* @__PURE__ */ new Map();
      for (const uniqueRow of uniqueResult.rows) {
        const existing = uniquesMap.get(uniqueRow.constraint_name);
        if (existing) {
          existing.columns.push(uniqueRow.column_name);
        } else {
          uniquesMap.set(uniqueRow.constraint_name, {
            columns: [uniqueRow.column_name],
            name: uniqueRow.constraint_name
          });
        }
      }
      const uniques = Array.from(uniquesMap.values()).map((uq) => ({
        columns: Object.freeze([...uq.columns]),
        name: uq.name
      }));
      const indexResult = await driver.query(
        `SELECT
           i.indexname,
           ix.indisunique,
           a.attname,
           a.attnum
         FROM pg_indexes i
         JOIN pg_class ic ON ic.relname = i.indexname
         JOIN pg_namespace ins ON ins.oid = ic.relnamespace AND ins.nspname = $1
         JOIN pg_index ix ON ix.indexrelid = ic.oid
         JOIN pg_class t ON t.oid = ix.indrelid
         JOIN pg_namespace tn ON tn.oid = t.relnamespace AND tn.nspname = $1
         LEFT JOIN pg_attribute a ON a.attrelid = t.oid AND a.attnum = ANY(ix.indkey) AND a.attnum > 0
         WHERE i.schemaname = $1
           AND i.tablename = $2
           AND NOT EXISTS (
             SELECT 1
             FROM information_schema.table_constraints tc
             WHERE tc.table_schema = $1
               AND tc.table_name = $2
               AND tc.constraint_name = i.indexname
           )
         ORDER BY i.indexname, a.attnum`,
        [schema, tableName]
      );
      const indexesMap = /* @__PURE__ */ new Map();
      for (const idxRow of indexResult.rows) {
        if (!idxRow.attname) {
          continue;
        }
        const existing = indexesMap.get(idxRow.indexname);
        if (existing) {
          existing.columns.push(idxRow.attname);
        } else {
          indexesMap.set(idxRow.indexname, {
            columns: [idxRow.attname],
            name: idxRow.indexname,
            unique: idxRow.indisunique
          });
        }
      }
      const indexes = Array.from(indexesMap.values()).map((idx) => ({
        columns: Object.freeze([...idx.columns]),
        name: idx.name,
        unique: idx.unique
      }));
      tables[tableName] = {
        name: tableName,
        columns,
        ...primaryKey ? { primaryKey } : {},
        foreignKeys,
        uniques,
        indexes
      };
    }
    const extensionsResult = await driver.query(
      `SELECT extname
       FROM pg_extension
       ORDER BY extname`,
      []
    );
    const extensions = extensionsResult.rows.map((row) => row.extname);
    const annotations = {
      pg: {
        schema,
        version: await this.getPostgresVersion(driver)
      }
    };
    return {
      tables,
      extensions,
      annotations
    };
  }
  /**
   * Gets the Postgres version from the database.
   */
  async getPostgresVersion(driver) {
    const result = await driver.query("SELECT version() AS version", []);
    const versionString = result.rows[0]?.version ?? "";
    const match = versionString.match(/PostgreSQL (\d+\.\d+)/);
    return match?.[1] ?? "unknown";
  }
};

// src/exports/control.ts
var __filename = fileURLToPath(import.meta.url);
var __dirname = dirname(__filename);
var TypesImportSpecSchema = type({
  package: "string",
  named: "string",
  alias: "string"
});
var StorageTypeMetadataSchema = type({
  typeId: "string",
  familyId: "string",
  targetId: "string",
  "nativeType?": "string"
});
var ExtensionPackManifestSchema = type({
  id: "string",
  version: "string",
  "targets?": type({ "[string]": type({ "minVersion?": "string" }) }),
  "capabilities?": "Record<string, unknown>",
  "types?": type({
    "codecTypes?": type({
      import: TypesImportSpecSchema
    }),
    "operationTypes?": type({
      import: TypesImportSpecSchema
    }),
    "storage?": StorageTypeMetadataSchema.array()
  }),
  "operations?": "unknown[]"
});
function loadAdapterManifest() {
  const manifestPath = join(__dirname, "../../packs/manifest.json");
  const manifestJson = JSON.parse(readFileSync(manifestPath, "utf-8"));
  const result = ExtensionPackManifestSchema(manifestJson);
  if (result instanceof type.errors) {
    const messages = result.map((p) => p.message).join("; ");
    throw new Error(`Invalid adapter manifest structure at ${manifestPath}: ${messages}`);
  }
  return result;
}
var postgresAdapterDescriptor = {
  kind: "adapter",
  familyId: "sql",
  targetId: "postgres",
  id: "postgres",
  manifest: loadAdapterManifest(),
  create() {
    return new PostgresControlAdapter();
  }
};
var control_default = postgresAdapterDescriptor;
export {
  control_default as default
};
//# sourceMappingURL=control.js.map
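The introspection entry point above only relies on a driver object exposing query(sql, params) that resolves to an object with rows. A minimal usage sketch follows; the pg Pool wiring and the structural stand-in for ControlDriverInstance<'postgres'> are illustrative assumptions, not part of this package.

// Usage sketch (assumed wiring, not shipped with this package).
import { Pool } from "pg";
import postgresControlDescriptor from "@prisma-next/adapter-postgres/control";

const pool = new Pool({ connectionString: process.env.DATABASE_URL });

// Structural stand-in for the control driver: the adapter only calls query(sql, params) and reads .rows.
const driver = {
  query: async (sql: string, params: unknown[]) => {
    const result = await pool.query(sql, params);
    return { rows: result.rows };
  },
};

const adapter = postgresControlDescriptor.create();
// introspect(driver, contractIR?, schema?) returns the raw SqlSchemaIR with placeholder typeIds.
const schemaIR = await adapter.introspect(
  driver as Parameters<typeof adapter.introspect>[0],
  undefined,
  "public",
);
console.log(Object.keys(schemaIR.tables), schemaIR.extensions);
await pool.end();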
package/dist/exports/control.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../../src/exports/control.ts","../../src/core/control-adapter.ts"],"sourcesContent":["import { readFileSync } from 'node:fs';\nimport { dirname, join } from 'node:path';\nimport { fileURLToPath } from 'node:url';\nimport type { ExtensionPackManifest } from '@prisma-next/contract/pack-manifest-types';\nimport type { ControlAdapterDescriptor } from '@prisma-next/core-control-plane/types';\nimport type { SqlControlAdapter } from '@prisma-next/family-sql/control-adapter';\nimport { type } from 'arktype';\nimport { PostgresControlAdapter } from '../core/control-adapter';\n\nconst __filename = fileURLToPath(import.meta.url);\nconst __dirname = dirname(__filename);\n\nconst TypesImportSpecSchema = type({\n package: 'string',\n named: 'string',\n alias: 'string',\n});\n\nconst StorageTypeMetadataSchema = type({\n typeId: 'string',\n familyId: 'string',\n targetId: 'string',\n 'nativeType?': 'string',\n});\n\nconst ExtensionPackManifestSchema = type({\n id: 'string',\n version: 'string',\n 'targets?': type({ '[string]': type({ 'minVersion?': 'string' }) }),\n 'capabilities?': 'Record<string, unknown>',\n 'types?': type({\n 'codecTypes?': type({\n import: TypesImportSpecSchema,\n }),\n 'operationTypes?': type({\n import: TypesImportSpecSchema,\n }),\n 'storage?': StorageTypeMetadataSchema.array(),\n }),\n 'operations?': 'unknown[]',\n});\n\n/**\n * Loads the adapter manifest from packs/manifest.json.\n */\nfunction loadAdapterManifest(): ExtensionPackManifest {\n const manifestPath = join(__dirname, '../../packs/manifest.json');\n const manifestJson = JSON.parse(readFileSync(manifestPath, 'utf-8'));\n\n const result = ExtensionPackManifestSchema(manifestJson);\n if (result instanceof type.errors) {\n const messages = result.map((p: { message: string }) => p.message).join('; ');\n throw new Error(`Invalid adapter manifest structure at ${manifestPath}: ${messages}`);\n }\n\n return result as ExtensionPackManifest;\n}\n\n/**\n * Postgres adapter descriptor for CLI config.\n */\nconst postgresAdapterDescriptor: ControlAdapterDescriptor<\n 'sql',\n 'postgres',\n SqlControlAdapter<'postgres'>\n> = {\n kind: 'adapter',\n familyId: 'sql',\n targetId: 'postgres',\n id: 'postgres',\n manifest: loadAdapterManifest(),\n create(): SqlControlAdapter<'postgres'> {\n return new PostgresControlAdapter();\n },\n};\n\nexport default postgresAdapterDescriptor;\n","import type { ControlDriverInstance } from '@prisma-next/core-control-plane/types';\nimport type { SqlControlAdapter } from '@prisma-next/family-sql/control-adapter';\nimport type {\n PrimaryKey,\n SqlColumnIR,\n SqlForeignKeyIR,\n SqlIndexIR,\n SqlSchemaIR,\n SqlTableIR,\n SqlUniqueIR,\n} from '@prisma-next/sql-schema-ir/types';\n\n/**\n * Postgres control plane adapter for control-plane operations like introspection.\n * Provides target-specific implementations for control-plane domain actions.\n */\nexport class PostgresControlAdapter implements SqlControlAdapter<'postgres'> {\n readonly familyId = 'sql' as const;\n readonly targetId = 'postgres' as const;\n /**\n * @deprecated Use targetId instead\n */\n readonly target = 'postgres' as const;\n\n /**\n * Introspects a Postgres database schema and returns a raw SqlSchemaIR.\n *\n * This is a pure schema discovery operation that queries the Postgres catalog\n * and returns the schema structure without type mapping or contract enrichment.\n * Type mapping and enrichment are handled separately by enrichment helpers.\n *\n * @param driver - ControlDriverInstance<'postgres'> instance 
for executing queries\n * @param contractIR - Optional contract IR for contract-guided introspection (filtering, optimization)\n * @param schema - Schema name to introspect (defaults to 'public')\n * @returns Promise resolving to SqlSchemaIR representing the live database schema\n */\n async introspect(\n driver: ControlDriverInstance<'postgres'>,\n _contractIR?: unknown,\n schema = 'public',\n ): Promise<SqlSchemaIR> {\n // Query tables\n const tablesResult = await driver.query<{\n table_name: string;\n }>(\n `SELECT table_name\n FROM information_schema.tables\n WHERE table_schema = $1\n AND table_type = 'BASE TABLE'\n ORDER BY table_name`,\n [schema],\n );\n\n const tables: Record<string, SqlTableIR> = {};\n\n for (const tableRow of tablesResult.rows) {\n const tableName = tableRow.table_name;\n\n // Query columns for this table\n const columnsResult = await driver.query<{\n column_name: string;\n data_type: string;\n udt_name: string;\n is_nullable: string;\n character_maximum_length: number | null;\n numeric_precision: number | null;\n numeric_scale: number | null;\n }>(\n `SELECT\n column_name,\n data_type,\n udt_name,\n is_nullable,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n FROM information_schema.columns\n WHERE table_schema = $1\n AND table_name = $2\n ORDER BY ordinal_position`,\n [schema, tableName],\n );\n\n const columns: Record<string, SqlColumnIR> = {};\n for (const colRow of columnsResult.rows) {\n // Build native type string from catalog data\n let nativeType = colRow.udt_name;\n if (colRow.data_type === 'character varying' || colRow.data_type === 'character') {\n if (colRow.character_maximum_length) {\n nativeType = `${colRow.data_type}(${colRow.character_maximum_length})`;\n } else {\n nativeType = colRow.data_type;\n }\n } else if (colRow.data_type === 'numeric' || colRow.data_type === 'decimal') {\n if (colRow.numeric_precision && colRow.numeric_scale !== null) {\n nativeType = `${colRow.data_type}(${colRow.numeric_precision},${colRow.numeric_scale})`;\n } else if (colRow.numeric_precision) {\n nativeType = `${colRow.data_type}(${colRow.numeric_precision})`;\n } else {\n nativeType = colRow.data_type;\n }\n } else {\n nativeType = colRow.udt_name || colRow.data_type;\n }\n\n columns[colRow.column_name] = {\n name: colRow.column_name,\n // Placeholder typeId - enrichment will replace this with actual codec ID\n typeId: 'unmapped',\n nativeType,\n nullable: colRow.is_nullable === 'YES',\n };\n }\n\n // Query primary key\n const pkResult = await driver.query<{\n constraint_name: string;\n column_name: string;\n ordinal_position: number;\n }>(\n `SELECT\n tc.constraint_name,\n kcu.column_name,\n kcu.ordinal_position\n FROM information_schema.table_constraints tc\n JOIN information_schema.key_column_usage kcu\n ON tc.constraint_name = kcu.constraint_name\n AND tc.table_schema = kcu.table_schema\n AND tc.table_name = kcu.table_name\n WHERE tc.table_schema = $1\n AND tc.table_name = $2\n AND tc.constraint_type = 'PRIMARY KEY'\n ORDER BY kcu.ordinal_position`,\n [schema, tableName],\n );\n\n const primaryKeyColumns = pkResult.rows\n .sort((a, b) => a.ordinal_position - b.ordinal_position)\n .map((row) => row.column_name);\n const primaryKey: PrimaryKey | undefined =\n primaryKeyColumns.length > 0\n ? {\n columns: primaryKeyColumns,\n ...(pkResult.rows[0]?.constraint_name\n ? 
{ name: pkResult.rows[0].constraint_name }\n : {}),\n }\n : undefined;\n\n // Query foreign keys\n const fkResult = await driver.query<{\n constraint_name: string;\n column_name: string;\n ordinal_position: number;\n referenced_table_schema: string;\n referenced_table_name: string;\n referenced_column_name: string;\n }>(\n `SELECT\n tc.constraint_name,\n kcu.column_name,\n kcu.ordinal_position,\n ccu.table_schema AS referenced_table_schema,\n ccu.table_name AS referenced_table_name,\n ccu.column_name AS referenced_column_name\n FROM information_schema.table_constraints tc\n JOIN information_schema.key_column_usage kcu\n ON tc.constraint_name = kcu.constraint_name\n AND tc.table_schema = kcu.table_schema\n AND tc.table_name = kcu.table_name\n JOIN information_schema.constraint_column_usage ccu\n ON ccu.constraint_name = tc.constraint_name\n AND ccu.table_schema = tc.table_schema\n WHERE tc.table_schema = $1\n AND tc.table_name = $2\n AND tc.constraint_type = 'FOREIGN KEY'\n ORDER BY tc.constraint_name, kcu.ordinal_position`,\n [schema, tableName],\n );\n\n const foreignKeysMap = new Map<\n string,\n {\n columns: string[];\n referencedTable: string;\n referencedColumns: string[];\n name: string;\n }\n >();\n for (const fkRow of fkResult.rows) {\n const existing = foreignKeysMap.get(fkRow.constraint_name);\n if (existing) {\n // Multi-column FK - add column\n existing.columns.push(fkRow.column_name);\n existing.referencedColumns.push(fkRow.referenced_column_name);\n } else {\n foreignKeysMap.set(fkRow.constraint_name, {\n columns: [fkRow.column_name],\n referencedTable: fkRow.referenced_table_name,\n referencedColumns: [fkRow.referenced_column_name],\n name: fkRow.constraint_name,\n });\n }\n }\n const foreignKeys: readonly SqlForeignKeyIR[] = Array.from(foreignKeysMap.values()).map(\n (fk) => ({\n columns: Object.freeze([...fk.columns]) as readonly string[],\n referencedTable: fk.referencedTable,\n referencedColumns: Object.freeze([...fk.referencedColumns]) as readonly string[],\n name: fk.name,\n }),\n );\n\n // Query unique constraints (excluding PK)\n const uniqueResult = await driver.query<{\n constraint_name: string;\n column_name: string;\n ordinal_position: number;\n }>(\n `SELECT\n tc.constraint_name,\n kcu.column_name,\n kcu.ordinal_position\n FROM information_schema.table_constraints tc\n JOIN information_schema.key_column_usage kcu\n ON tc.constraint_name = kcu.constraint_name\n AND tc.table_schema = kcu.table_schema\n AND tc.table_name = kcu.table_name\n WHERE tc.table_schema = $1\n AND tc.table_name = $2\n AND tc.constraint_type = 'UNIQUE'\n AND tc.constraint_name NOT IN (\n SELECT constraint_name\n FROM information_schema.table_constraints\n WHERE table_schema = $1\n AND table_name = $2\n AND constraint_type = 'PRIMARY KEY'\n )\n ORDER BY tc.constraint_name, kcu.ordinal_position`,\n [schema, tableName],\n );\n\n const uniquesMap = new Map<\n string,\n {\n columns: string[];\n name: string;\n }\n >();\n for (const uniqueRow of uniqueResult.rows) {\n const existing = uniquesMap.get(uniqueRow.constraint_name);\n if (existing) {\n existing.columns.push(uniqueRow.column_name);\n } else {\n uniquesMap.set(uniqueRow.constraint_name, {\n columns: [uniqueRow.column_name],\n name: uniqueRow.constraint_name,\n });\n }\n }\n const uniques: readonly SqlUniqueIR[] = Array.from(uniquesMap.values()).map((uq) => ({\n columns: Object.freeze([...uq.columns]) as readonly string[],\n name: uq.name,\n }));\n\n // Query indexes (excluding PK and unique constraints)\n const indexResult = await 
driver.query<{\n indexname: string;\n indisunique: boolean;\n attname: string;\n attnum: number;\n }>(\n `SELECT\n i.indexname,\n ix.indisunique,\n a.attname,\n a.attnum\n FROM pg_indexes i\n JOIN pg_class ic ON ic.relname = i.indexname\n JOIN pg_namespace ins ON ins.oid = ic.relnamespace AND ins.nspname = $1\n JOIN pg_index ix ON ix.indexrelid = ic.oid\n JOIN pg_class t ON t.oid = ix.indrelid\n JOIN pg_namespace tn ON tn.oid = t.relnamespace AND tn.nspname = $1\n LEFT JOIN pg_attribute a ON a.attrelid = t.oid AND a.attnum = ANY(ix.indkey) AND a.attnum > 0\n WHERE i.schemaname = $1\n AND i.tablename = $2\n AND NOT EXISTS (\n SELECT 1\n FROM information_schema.table_constraints tc\n WHERE tc.table_schema = $1\n AND tc.table_name = $2\n AND tc.constraint_name = i.indexname\n )\n ORDER BY i.indexname, a.attnum`,\n [schema, tableName],\n );\n\n const indexesMap = new Map<\n string,\n {\n columns: string[];\n name: string;\n unique: boolean;\n }\n >();\n for (const idxRow of indexResult.rows) {\n // Skip rows where attname is null (system columns or invalid attnum)\n if (!idxRow.attname) {\n continue;\n }\n const existing = indexesMap.get(idxRow.indexname);\n if (existing) {\n existing.columns.push(idxRow.attname);\n } else {\n indexesMap.set(idxRow.indexname, {\n columns: [idxRow.attname],\n name: idxRow.indexname,\n unique: idxRow.indisunique,\n });\n }\n }\n const indexes: readonly SqlIndexIR[] = Array.from(indexesMap.values()).map((idx) => ({\n columns: Object.freeze([...idx.columns]) as readonly string[],\n name: idx.name,\n unique: idx.unique,\n }));\n\n tables[tableName] = {\n name: tableName,\n columns,\n ...(primaryKey ? { primaryKey } : {}),\n foreignKeys,\n uniques,\n indexes,\n };\n }\n\n // Query extensions\n const extensionsResult = await driver.query<{\n extname: string;\n }>(\n `SELECT extname\n FROM pg_extension\n ORDER BY extname`,\n [],\n );\n\n const extensions = extensionsResult.rows.map((row) => row.extname);\n\n // Build annotations with Postgres-specific metadata\n const annotations = {\n pg: {\n schema,\n version: await this.getPostgresVersion(driver),\n },\n };\n\n return {\n tables,\n extensions,\n annotations,\n };\n }\n\n /**\n * Gets the Postgres version from the database.\n */\n private async getPostgresVersion(driver: ControlDriverInstance<'postgres'>): Promise<string> {\n const result = await driver.query<{ version: string }>('SELECT version() AS version', []);\n const versionString = result.rows[0]?.version ?? '';\n // Extract version number from \"PostgreSQL 15.1 ...\" format\n const match = versionString.match(/PostgreSQL (\\d+\\.\\d+)/);\n return match?.[1] ?? 
'unknown';\n }\n}\n"],"mappings":";AAAA,SAAS,oBAAoB;AAC7B,SAAS,SAAS,YAAY;AAC9B,SAAS,qBAAqB;AAI9B,SAAS,YAAY;;;ACUd,IAAM,yBAAN,MAAsE;AAAA,EAClE,WAAW;AAAA,EACX,WAAW;AAAA;AAAA;AAAA;AAAA,EAIX,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAclB,MAAM,WACJ,QACA,aACA,SAAS,UACa;AAEtB,UAAM,eAAe,MAAM,OAAO;AAAA,MAGhC;AAAA;AAAA;AAAA;AAAA;AAAA,MAKA,CAAC,MAAM;AAAA,IACT;AAEA,UAAM,SAAqC,CAAC;AAE5C,eAAW,YAAY,aAAa,MAAM;AACxC,YAAM,YAAY,SAAS;AAG3B,YAAM,gBAAgB,MAAM,OAAO;AAAA,QASjC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,QAYA,CAAC,QAAQ,SAAS;AAAA,MACpB;AAEA,YAAM,UAAuC,CAAC;AAC9C,iBAAW,UAAU,cAAc,MAAM;AAEvC,YAAI,aAAa,OAAO;AACxB,YAAI,OAAO,cAAc,uBAAuB,OAAO,cAAc,aAAa;AAChF,cAAI,OAAO,0BAA0B;AACnC,yBAAa,GAAG,OAAO,SAAS,IAAI,OAAO,wBAAwB;AAAA,UACrE,OAAO;AACL,yBAAa,OAAO;AAAA,UACtB;AAAA,QACF,WAAW,OAAO,cAAc,aAAa,OAAO,cAAc,WAAW;AAC3E,cAAI,OAAO,qBAAqB,OAAO,kBAAkB,MAAM;AAC7D,yBAAa,GAAG,OAAO,SAAS,IAAI,OAAO,iBAAiB,IAAI,OAAO,aAAa;AAAA,UACtF,WAAW,OAAO,mBAAmB;AACnC,yBAAa,GAAG,OAAO,SAAS,IAAI,OAAO,iBAAiB;AAAA,UAC9D,OAAO;AACL,yBAAa,OAAO;AAAA,UACtB;AAAA,QACF,OAAO;AACL,uBAAa,OAAO,YAAY,OAAO;AAAA,QACzC;AAEA,gBAAQ,OAAO,WAAW,IAAI;AAAA,UAC5B,MAAM,OAAO;AAAA;AAAA,UAEb,QAAQ;AAAA,UACR;AAAA,UACA,UAAU,OAAO,gBAAgB;AAAA,QACnC;AAAA,MACF;AAGA,YAAM,WAAW,MAAM,OAAO;AAAA,QAK5B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,QAaA,CAAC,QAAQ,SAAS;AAAA,MACpB;AAEA,YAAM,oBAAoB,SAAS,KAChC,KAAK,CAAC,GAAG,MAAM,EAAE,mBAAmB,EAAE,gBAAgB,EACtD,IAAI,CAAC,QAAQ,IAAI,WAAW;AAC/B,YAAM,aACJ,kBAAkB,SAAS,IACvB;AAAA,QACE,SAAS;AAAA,QACT,GAAI,SAAS,KAAK,CAAC,GAAG,kBAClB,EAAE,MAAM,SAAS,KAAK,CAAC,EAAE,gBAAgB,IACzC,CAAC;AAAA,MACP,IACA;AAGN,YAAM,WAAW,MAAM,OAAO;AAAA,QAQ5B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,QAmBA,CAAC,QAAQ,SAAS;AAAA,MACpB;AAEA,YAAM,iBAAiB,oBAAI,IAQzB;AACF,iBAAW,SAAS,SAAS,MAAM;AACjC,cAAM,WAAW,eAAe,IAAI,MAAM,eAAe;AACzD,YAAI,UAAU;AAEZ,mBAAS,QAAQ,KAAK,MAAM,WAAW;AACvC,mBAAS,kBAAkB,KAAK,MAAM,sBAAsB;AAAA,QAC9D,OAAO;AACL,yBAAe,IAAI,MAAM,iBAAiB;AAAA,YACxC,SAAS,CAAC,MAAM,WAAW;AAAA,YAC3B,iBAAiB,MAAM;AAAA,YACvB,mBAAmB,CAAC,MAAM,sBAAsB;AAAA,YAChD,MAAM,MAAM;AAAA,UACd,CAAC;AAAA,QACH;AAAA,MACF;AACA,YAAM,cAA0C,MAAM,KAAK,eAAe,OAAO,CAAC,EAAE;AAAA,QAClF,CAAC,QAAQ;AAAA,UACP,SAAS,OAAO,OAAO,CAAC,GAAG,GAAG,OAAO,CAAC;AAAA,UACtC,iBAAiB,GAAG;AAAA,UACpB,mBAAmB,OAAO,OAAO,CAAC,GAAG,GAAG,iBAAiB,CAAC;AAAA,UAC1D,MAAM,GAAG;AAAA,QACX;AAAA,MACF;AAGA,YAAM,eAAe,MAAM,OAAO;AAAA,QAKhC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,QAoBA,CAAC,QAAQ,SAAS;AAAA,MACpB;AAEA,YAAM,aAAa,oBAAI,IAMrB;AACF,iBAAW,aAAa,aAAa,MAAM;AACzC,cAAM,WAAW,WAAW,IAAI,UAAU,eAAe;AACzD,YAAI,UAAU;AACZ,mBAAS,QAAQ,KAAK,UAAU,WAAW;AAAA,QAC7C,OAAO;AACL,qBAAW,IAAI,UAAU,iBAAiB;AAAA,YACxC,SAAS,CAAC,UAAU,WAAW;AAAA,YAC/B,MAAM,UAAU;AAAA,UAClB,CAAC;AAAA,QACH;AAAA,MACF;AACA,YAAM,UAAkC,MAAM,KAAK,WAAW,OAAO,CAAC,EAAE,IAAI,CAAC,QAAQ;AAAA,QACnF,SAAS,OAAO,OAAO,CAAC,GAAG,GAAG,OAAO,CAAC;AAAA,QACtC,MAAM,GAAG;AAAA,MACX,EAAE;AAGF,YAAM,cAAc,MAAM,OAAO;AAAA,QAM/B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,QAsBA,CAAC,QAAQ,SAAS;AAAA,MACpB;AAEA,YAAM,aAAa,oBAAI,IAOrB;AACF,iBAAW,UAAU,YAAY,MAAM;AAErC,YAAI,CAAC,OAAO,SAAS;AACnB;AAAA,QACF;AACA,cAAM,WAAW,WAAW,IAAI,OAAO,SAAS;AAChD,YAAI,UAAU;AACZ,mBAAS,QAAQ,KAAK,OAAO,OAAO;AAAA,QACtC,OAAO;AACL,qBAAW,IAAI,OAAO,WAAW;AAAA,YAC/B,SAAS,CAAC,OAAO,OAAO;AAAA,YACxB,MAAM,OAAO;AAAA,YACb,QAAQ,OAAO;AAAA,UACjB,CAAC;AAAA,QACH;AAAA,MACF;AACA,YAAM,UAAiC,MAAM,KAAK,WAAW,OAAO,CAAC,EAAE,IAAI,CAAC,SAAS;AAAA,QACnF,SAAS,OA
AO,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC;AAAA,QACvC,MAAM,IAAI;AAAA,QACV,QAAQ,IAAI;AAAA,MACd,EAAE;AAEF,aAAO,SAAS,IAAI;AAAA,QAClB,MAAM;AAAA,QACN;AAAA,QACA,GAAI,aAAa,EAAE,WAAW,IAAI,CAAC;AAAA,QACnC;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAGA,UAAM,mBAAmB,MAAM,OAAO;AAAA,MAGpC;AAAA;AAAA;AAAA,MAGA,CAAC;AAAA,IACH;AAEA,UAAM,aAAa,iBAAiB,KAAK,IAAI,CAAC,QAAQ,IAAI,OAAO;AAGjE,UAAM,cAAc;AAAA,MAClB,IAAI;AAAA,QACF;AAAA,QACA,SAAS,MAAM,KAAK,mBAAmB,MAAM;AAAA,MAC/C;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,mBAAmB,QAA4D;AAC3F,UAAM,SAAS,MAAM,OAAO,MAA2B,+BAA+B,CAAC,CAAC;AACxF,UAAM,gBAAgB,OAAO,KAAK,CAAC,GAAG,WAAW;AAEjD,UAAM,QAAQ,cAAc,MAAM,uBAAuB;AACzD,WAAO,QAAQ,CAAC,KAAK;AAAA,EACvB;AACF;;;AD7WA,IAAM,aAAa,cAAc,YAAY,GAAG;AAChD,IAAM,YAAY,QAAQ,UAAU;AAEpC,IAAM,wBAAwB,KAAK;AAAA,EACjC,SAAS;AAAA,EACT,OAAO;AAAA,EACP,OAAO;AACT,CAAC;AAED,IAAM,4BAA4B,KAAK;AAAA,EACrC,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,UAAU;AAAA,EACV,eAAe;AACjB,CAAC;AAED,IAAM,8BAA8B,KAAK;AAAA,EACvC,IAAI;AAAA,EACJ,SAAS;AAAA,EACT,YAAY,KAAK,EAAE,YAAY,KAAK,EAAE,eAAe,SAAS,CAAC,EAAE,CAAC;AAAA,EAClE,iBAAiB;AAAA,EACjB,UAAU,KAAK;AAAA,IACb,eAAe,KAAK;AAAA,MAClB,QAAQ;AAAA,IACV,CAAC;AAAA,IACD,mBAAmB,KAAK;AAAA,MACtB,QAAQ;AAAA,IACV,CAAC;AAAA,IACD,YAAY,0BAA0B,MAAM;AAAA,EAC9C,CAAC;AAAA,EACD,eAAe;AACjB,CAAC;AAKD,SAAS,sBAA6C;AACpD,QAAM,eAAe,KAAK,WAAW,2BAA2B;AAChE,QAAM,eAAe,KAAK,MAAM,aAAa,cAAc,OAAO,CAAC;AAEnE,QAAM,SAAS,4BAA4B,YAAY;AACvD,MAAI,kBAAkB,KAAK,QAAQ;AACjC,UAAM,WAAW,OAAO,IAAI,CAAC,MAA2B,EAAE,OAAO,EAAE,KAAK,IAAI;AAC5E,UAAM,IAAI,MAAM,yCAAyC,YAAY,KAAK,QAAQ,EAAE;AAAA,EACtF;AAEA,SAAO;AACT;AAKA,IAAM,4BAIF;AAAA,EACF,MAAM;AAAA,EACN,UAAU;AAAA,EACV,UAAU;AAAA,EACV,IAAI;AAAA,EACJ,UAAU,oBAAoB;AAAA,EAC9B,SAAwC;AACtC,WAAO,IAAI,uBAAuB;AAAA,EACpC;AACF;AAEA,IAAO,kBAAQ;","names":[]}
package/dist/exports/runtime.d.ts
ADDED
@@ -0,0 +1,17 @@
import { RuntimeAdapterDescriptor, RuntimeAdapterInstance } from '@prisma-next/core-execution-plane/types';
import { Adapter, QueryAst } from '@prisma-next/sql-relational-core/ast';
import { PostgresContract, PostgresLoweredStatement } from './types.js';
import '@prisma-next/sql-contract/types';

/**
 * SQL runtime adapter interface for Postgres.
 * Extends RuntimeAdapterInstance with SQL-specific adapter methods.
 */
interface SqlRuntimeAdapter extends RuntimeAdapterInstance<'sql', 'postgres'>, Adapter<QueryAst, PostgresContract, PostgresLoweredStatement> {
}
/**
 * Postgres adapter descriptor for runtime plane.
 */
declare const postgresRuntimeAdapterDescriptor: RuntimeAdapterDescriptor<'sql', 'postgres', SqlRuntimeAdapter>;

export { type SqlRuntimeAdapter, postgresRuntimeAdapterDescriptor as default };
package/dist/exports/runtime.js
ADDED
@@ -0,0 +1,64 @@
import {
  createPostgresAdapter
} from "./chunk-NOYZK3LL.js";
import "./chunk-CPAKRHXM.js";

// src/exports/runtime.ts
import { readFileSync } from "fs";
import { dirname, join } from "path";
import { fileURLToPath } from "url";
import { type } from "arktype";
var __filename = fileURLToPath(import.meta.url);
var __dirname = dirname(__filename);
var TypesImportSpecSchema = type({
  package: "string",
  named: "string",
  alias: "string"
});
var StorageTypeMetadataSchema = type({
  typeId: "string",
  familyId: "string",
  targetId: "string",
  "nativeType?": "string"
});
var ExtensionPackManifestSchema = type({
  id: "string",
  version: "string",
  "targets?": type({ "[string]": type({ "minVersion?": "string" }) }),
  "capabilities?": "Record<string, unknown>",
  "types?": type({
    "codecTypes?": type({
      import: TypesImportSpecSchema
    }),
    "operationTypes?": type({
      import: TypesImportSpecSchema
    }),
    "storage?": StorageTypeMetadataSchema.array()
  }),
  "operations?": "unknown[]"
});
function loadAdapterManifest() {
  const manifestPath = join(__dirname, "../../packs/manifest.json");
  const manifestJson = JSON.parse(readFileSync(manifestPath, "utf-8"));
  const result = ExtensionPackManifestSchema(manifestJson);
  if (result instanceof type.errors) {
    const messages = result.map((p) => p.message).join("; ");
    throw new Error(`Invalid adapter manifest structure at ${manifestPath}: ${messages}`);
  }
  return result;
}
var postgresRuntimeAdapterDescriptor = {
  kind: "adapter",
  familyId: "sql",
  targetId: "postgres",
  id: "postgres",
  manifest: loadAdapterManifest(),
  create() {
    return createPostgresAdapter();
  }
};
var runtime_default = postgresRuntimeAdapterDescriptor;
export {
  runtime_default as default
};
//# sourceMappingURL=runtime.js.map
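Both the control and runtime entry points validate packs/manifest.json with the same arktype schema before exposing a descriptor; only id and version are required, while targets, capabilities, types and operations are optional. A hypothetical minimal manifest payload that would pass that validation (the field values here are assumptions for illustration, not the package's actual manifest):

// Hypothetical minimal packs/manifest.json content, shown as a TypeScript literal.
const exampleManifest = {
  id: "postgres",
  version: "0.0.1",
  // Optional: per-target constraints keyed by target id, each with an optional minVersion.
  targets: { postgres: { minVersion: "14" } },
};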
package/dist/exports/runtime.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../../src/exports/runtime.ts"],"sourcesContent":["import { readFileSync } from 'node:fs';\nimport { dirname, join } from 'node:path';\nimport { fileURLToPath } from 'node:url';\nimport type { ExtensionPackManifest } from '@prisma-next/contract/pack-manifest-types';\nimport type {\n RuntimeAdapterDescriptor,\n RuntimeAdapterInstance,\n} from '@prisma-next/core-execution-plane/types';\nimport type { Adapter, QueryAst } from '@prisma-next/sql-relational-core/ast';\nimport { type } from 'arktype';\nimport { createPostgresAdapter } from '../core/adapter';\nimport type { PostgresContract, PostgresLoweredStatement } from '../core/types';\n\nconst __filename = fileURLToPath(import.meta.url);\nconst __dirname = dirname(__filename);\n\nconst TypesImportSpecSchema = type({\n package: 'string',\n named: 'string',\n alias: 'string',\n});\n\nconst StorageTypeMetadataSchema = type({\n typeId: 'string',\n familyId: 'string',\n targetId: 'string',\n 'nativeType?': 'string',\n});\n\nconst ExtensionPackManifestSchema = type({\n id: 'string',\n version: 'string',\n 'targets?': type({ '[string]': type({ 'minVersion?': 'string' }) }),\n 'capabilities?': 'Record<string, unknown>',\n 'types?': type({\n 'codecTypes?': type({\n import: TypesImportSpecSchema,\n }),\n 'operationTypes?': type({\n import: TypesImportSpecSchema,\n }),\n 'storage?': StorageTypeMetadataSchema.array(),\n }),\n 'operations?': 'unknown[]',\n});\n\n/**\n * Loads the adapter manifest from packs/manifest.json.\n */\nfunction loadAdapterManifest(): ExtensionPackManifest {\n const manifestPath = join(__dirname, '../../packs/manifest.json');\n const manifestJson = JSON.parse(readFileSync(manifestPath, 'utf-8'));\n\n const result = ExtensionPackManifestSchema(manifestJson);\n if (result instanceof type.errors) {\n const messages = result.map((p: { message: string }) => p.message).join('; ');\n throw new Error(`Invalid adapter manifest structure at ${manifestPath}: ${messages}`);\n }\n\n return result as ExtensionPackManifest;\n}\n\n/**\n * SQL runtime adapter interface for Postgres.\n * Extends RuntimeAdapterInstance with SQL-specific adapter methods.\n */\nexport interface SqlRuntimeAdapter\n extends RuntimeAdapterInstance<'sql', 'postgres'>,\n Adapter<QueryAst, PostgresContract, PostgresLoweredStatement> {}\n\n/**\n * Postgres adapter descriptor for runtime plane.\n */\nconst postgresRuntimeAdapterDescriptor: RuntimeAdapterDescriptor<\n 'sql',\n 'postgres',\n SqlRuntimeAdapter\n> = {\n kind: 'adapter',\n familyId: 'sql',\n targetId: 'postgres',\n id: 'postgres',\n manifest: loadAdapterManifest(),\n create(): SqlRuntimeAdapter {\n return createPostgresAdapter() as unknown as SqlRuntimeAdapter;\n },\n};\n\nexport default 
postgresRuntimeAdapterDescriptor;\n"],"mappings":";;;;;;AAAA,SAAS,oBAAoB;AAC7B,SAAS,SAAS,YAAY;AAC9B,SAAS,qBAAqB;AAO9B,SAAS,YAAY;AAIrB,IAAM,aAAa,cAAc,YAAY,GAAG;AAChD,IAAM,YAAY,QAAQ,UAAU;AAEpC,IAAM,wBAAwB,KAAK;AAAA,EACjC,SAAS;AAAA,EACT,OAAO;AAAA,EACP,OAAO;AACT,CAAC;AAED,IAAM,4BAA4B,KAAK;AAAA,EACrC,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,UAAU;AAAA,EACV,eAAe;AACjB,CAAC;AAED,IAAM,8BAA8B,KAAK;AAAA,EACvC,IAAI;AAAA,EACJ,SAAS;AAAA,EACT,YAAY,KAAK,EAAE,YAAY,KAAK,EAAE,eAAe,SAAS,CAAC,EAAE,CAAC;AAAA,EAClE,iBAAiB;AAAA,EACjB,UAAU,KAAK;AAAA,IACb,eAAe,KAAK;AAAA,MAClB,QAAQ;AAAA,IACV,CAAC;AAAA,IACD,mBAAmB,KAAK;AAAA,MACtB,QAAQ;AAAA,IACV,CAAC;AAAA,IACD,YAAY,0BAA0B,MAAM;AAAA,EAC9C,CAAC;AAAA,EACD,eAAe;AACjB,CAAC;AAKD,SAAS,sBAA6C;AACpD,QAAM,eAAe,KAAK,WAAW,2BAA2B;AAChE,QAAM,eAAe,KAAK,MAAM,aAAa,cAAc,OAAO,CAAC;AAEnE,QAAM,SAAS,4BAA4B,YAAY;AACvD,MAAI,kBAAkB,KAAK,QAAQ;AACjC,UAAM,WAAW,OAAO,IAAI,CAAC,MAA2B,EAAE,OAAO,EAAE,KAAK,IAAI;AAC5E,UAAM,IAAI,MAAM,yCAAyC,YAAY,KAAK,QAAQ,EAAE;AAAA,EACtF;AAEA,SAAO;AACT;AAaA,IAAM,mCAIF;AAAA,EACF,MAAM;AAAA,EACN,UAAU;AAAA,EACV,UAAU;AAAA,EACV,IAAI;AAAA,EACJ,UAAU,oBAAoB;AAAA,EAC9B,SAA4B;AAC1B,WAAO,sBAAsB;AAAA,EAC/B;AACF;AAEA,IAAO,kBAAQ;","names":[]}
package/dist/exports/types.d.ts
ADDED
@@ -0,0 +1,19 @@
import { SqlContract, SqlStorage } from '@prisma-next/sql-contract/types';
export { StorageColumn, StorageTable } from '@prisma-next/sql-contract/types';
import { LoweredStatement, ColumnRef, ParamRef, Direction } from '@prisma-next/sql-relational-core/ast';
export { BinaryExpr, ColumnRef, Direction, ParamRef, SelectAst } from '@prisma-next/sql-relational-core/ast';

interface PostgresAdapterOptions {
  readonly profileId?: string;
}
type PostgresContract = SqlContract<SqlStorage> & {
  readonly target: 'postgres';
};
type Expr = ColumnRef | ParamRef;
interface OrderClause {
  readonly expr: ColumnRef;
  readonly dir: Direction;
}
type PostgresLoweredStatement = LoweredStatement;

export type { Expr, OrderClause, PostgresAdapterOptions, PostgresContract, PostgresLoweredStatement };
package/dist/exports/types.js
ADDED
@@ -0,0 +1 @@
//# sourceMappingURL=types.js.map
package/dist/exports/types.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":[],"sourcesContent":[],"mappings":"","names":[]}
package/package.json
ADDED
@@ -0,0 +1,56 @@
{
  "name": "@prisma-next/adapter-postgres",
  "version": "0.0.1",
  "type": "module",
  "sideEffects": false,
  "files": [
    "dist"
  ],
  "dependencies": {
    "arktype": "^2.0.0",
    "@prisma-next/core-execution-plane": "0.0.1",
    "@prisma-next/cli": "0.0.1",
    "@prisma-next/family-sql": "0.0.1",
    "@prisma-next/sql-contract": "0.0.1",
    "@prisma-next/sql-operations": "0.0.1",
    "@prisma-next/sql-schema-ir": "0.0.1",
    "@prisma-next/sql-contract-ts": "0.0.1",
    "@prisma-next/sql-relational-core": "0.0.1"
  },
  "devDependencies": {
    "tsup": "^8.3.0",
    "typescript": "^5.9.3",
    "vitest": "^2.1.1",
    "@prisma-next/test-utils": "0.0.1"
  },
  "exports": {
    "./adapter": {
      "types": "./dist/exports/adapter.d.ts",
      "import": "./dist/exports/adapter.js"
    },
    "./types": {
      "types": "./dist/exports/types.d.ts",
      "import": "./dist/exports/types.js"
    },
    "./codec-types": {
      "types": "./dist/exports/codec-types.d.ts",
      "import": "./dist/exports/codec-types.js"
    },
    "./control": {
      "types": "./dist/exports/control.d.ts",
      "import": "./dist/exports/control.js"
    },
    "./runtime": {
      "types": "./dist/exports/runtime.d.ts",
      "import": "./dist/exports/runtime.js"
    }
  },
  "scripts": {
    "build": "tsup --config tsup.config.ts",
    "test": "vitest run",
    "test:coverage": "vitest run --coverage",
    "typecheck": "tsc --project tsconfig.json --noEmit",
    "lint": "biome check . --config-path ../../../biome.json --error-on-warnings",
    "clean": "node ../../../scripts/clean.mjs"
  }
}
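The exports map above exposes subpath entry points only (there is no "." root export), so consumers reach the control and runtime descriptors and the type-only entry directly. A short sketch of the imports implied by that map:

// Subpath imports implied by the exports map above (ESM only; the package declares "type": "module").
import postgresControlDescriptor from "@prisma-next/adapter-postgres/control";
import postgresRuntimeAdapterDescriptor from "@prisma-next/adapter-postgres/runtime";
import type { PostgresContract, PostgresAdapterOptions } from "@prisma-next/adapter-postgres/types";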