postgresdk 0.4.0 → 0.5.1-alpha.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cli.js CHANGED
@@ -569,6 +569,27 @@ export default {
  */
  // dateType: "date",
 
+ /**
+ * Server framework for generated API routes
+ * - "hono": Lightweight, edge-compatible web framework (default)
+ * - "express": Traditional Node.js framework (planned)
+ * - "fastify": High-performance Node.js framework (planned)
+ * @default "hono"
+ */
+ // serverFramework: "hono",
+
+ /**
+ * Use .js extensions in server imports (for Vercel Edge, Deno, etc.)
+ * @default false
+ */
+ // useJsExtensions: false,
+
+ /**
+ * Use .js extensions in client SDK imports (rarely needed)
+ * @default false
+ */
+ // useJsExtensionsClient: false,
+
  // ========== AUTHENTICATION ==========
 
  /**
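
Taken together, the new commented defaults above describe three opt-in settings. A minimal sketch of what enabling them might look like in a postgresdk config module, assuming the same "export default { ... }" shape the hunk header shows; keys not visible in this diff are omitted and the values are illustrative:

    export default {
      // Only "hono" output is generated in 0.5.1-alpha.1; other values throw at generate time.
      serverFramework: "hono",
      // Append ".js" to relative imports in the generated server code (for Vercel Edge, Deno, etc.).
      useJsExtensions: true,
      // Same switch for the generated client SDK imports; rarely needed.
      useJsExtensionsClient: false,
    };
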
@@ -1009,8 +1030,8 @@ export type Update${Type} = z.infer<typeof Update${Type}Schema>;
  `;
  }
 
- // src/emit-routes.ts
- function emitRoutes(table, _graph, opts) {
+ // src/emit-routes-hono.ts
+ function emitHonoRoutes(table, _graph, opts) {
  const fileTableName = table.name;
  const Type = pascal(table.name);
  const rawPk = table.pk;
@@ -1019,34 +1040,36 @@ function emitRoutes(table, _graph, opts) {
  const hasCompositePk = safePkCols.length > 1;
  const pkPath = hasCompositePk ? safePkCols.map((c) => `:${c}`).join("/") : `:${safePkCols[0]}`;
  const softDel = opts.softDeleteColumn && table.columns.some((c) => c.name === opts.softDeleteColumn) ? opts.softDeleteColumn : null;
- const wherePkSql = hasCompositePk ? safePkCols.map((c, i) => `"${c}" = $${i + 1}`).join(" AND ") : `"${safePkCols[0]}" = $1`;
  const getPkParams = hasCompositePk ? `const pkValues = [${safePkCols.map((c) => `c.req.param("${c}")`).join(", ")}];` : `const pkValues = [c.req.param("${safePkCols[0]}")];`;
- const updateSetSql = hasCompositePk ? `Object.keys(updateData).map((k, i) => \`"\${k}" = $\${i + ${safePkCols.length} + 1}\`).join(", ")` : `Object.keys(updateData).map((k, i) => \`"\${k}" = $\${i + 2}\`).join(", ")`;
- const pkFilter = safePkCols.length ? `const updateData = Object.fromEntries(Object.entries(parsed.data).filter(([k]) => !new Set(${JSON.stringify(safePkCols)}).has(k)));` : `const updateData = parsed.data;`;
  const hasAuth = opts.authStrategy && opts.authStrategy !== "none";
- const authImport = hasAuth ? `import { authMiddleware } from "../auth";` : "";
+ const ext = opts.useJsExtensions ? ".js" : "";
+ const authImport = hasAuth ? `import { authMiddleware } from "../auth${ext}";` : "";
  return `/* Generated. Do not edit. */
  import { Hono } from "hono";
  import { z } from "zod";
- import { Insert${Type}Schema, Update${Type}Schema } from "../zod/${fileTableName}";
- import { loadIncludes } from "../include-loader";
+ import { Insert${Type}Schema, Update${Type}Schema } from "../zod/${fileTableName}${ext}";
+ import { loadIncludes } from "../include-loader${ext}";
+ import * as coreOps from "../core/operations${ext}";
  ${authImport}
 
- const DEBUG = process.env.SDK_DEBUG === "1" || process.env.SDK_DEBUG === "true";
- const log = {
- debug: (...args: any[]) => { if (DEBUG) console.debug("[sdk]", ...args); },
- error: (...args: any[]) => console.error("[sdk]", ...args),
- };
-
  const listSchema = z.object({
- include: z.any().optional(), // TODO: typed include spec in later pass
+ include: z.any().optional(),
  limit: z.number().int().positive().max(100).optional(),
  offset: z.number().int().min(0).optional(),
- orderBy: z.any().optional() // TODO: typed orderBy in a later pass
+ orderBy: z.any().optional()
  });
 
  export function register${Type}Routes(app: Hono, deps: { pg: { query: (text: string, params?: any[]) => Promise<{ rows: any[] }> } }) {
  const base = "/v1/${fileTableName}";
+
+ // Create operation context
+ const ctx: coreOps.OperationContext = {
+ pg: deps.pg,
+ table: "${fileTableName}",
+ pkColumns: ${JSON.stringify(safePkCols)},
+ softDeleteColumn: ${softDel ? `"${softDel}"` : "null"},
+ includeDepthLimit: ${opts.includeDepthLimit}
+ };
  ${hasAuth ? `
  // \uD83D\uDD10 Auth: protect all routes for this table
  app.use(base, authMiddleware);
@@ -1054,159 +1077,132 @@ ${hasAuth ? `
 
  // CREATE
  app.post(base, async (c) => {
- try {
- const body = await c.req.json().catch(() => ({}));
- log.debug("POST ${fileTableName} body:", body);
- const parsed = Insert${Type}Schema.safeParse(body);
- if (!parsed.success) {
- const issues = parsed.error.flatten();
- log.debug("POST ${fileTableName} invalid:", issues);
- return c.json({ error: "Invalid body", issues }, 400);
- }
-
- const data = parsed.data;
- const cols = Object.keys(data);
- const vals = Object.values(data);
- if (!cols.length) return c.json({ error: "No fields provided" }, 400);
-
- const placeholders = cols.map((_, i) => '$' + (i + 1)).join(", ");
- const text = \`INSERT INTO "${fileTableName}" (\${cols.map(c => '"' + c + '"').join(", ")})
- VALUES (\${placeholders})
- RETURNING *\`;
- log.debug("SQL:", text, "vals:", vals);
- const { rows } = await deps.pg.query(text, vals);
- return c.json(rows[0] ?? null, rows[0] ? 201 : 500);
- } catch (e: any) {
- log.error("POST ${fileTableName} error:", e?.stack ?? e);
- return c.json({ error: e?.message ?? "Internal error", ...(DEBUG ? { stack: e?.stack } : {}) }, 500);
+ const body = await c.req.json().catch(() => ({}));
+ const parsed = Insert${Type}Schema.safeParse(body);
+
+ if (!parsed.success) {
+ const issues = parsed.error.flatten();
+ return c.json({ error: "Invalid body", issues }, 400);
+ }
+
+ const result = await coreOps.createRecord(ctx, parsed.data);
+
+ if (result.error) {
+ return c.json({ error: result.error }, result.status as any);
  }
+
+ return c.json(result.data, result.status as any);
  });
 
  // GET BY PK
  app.get(\`\${base}/${pkPath}\`, async (c) => {
- try {
- ${getPkParams}
- const text = \`SELECT * FROM "${fileTableName}" WHERE ${wherePkSql} LIMIT 1\`;
- log.debug("GET ${fileTableName} by PK:", pkValues, "SQL:", text);
- const { rows } = await deps.pg.query(text, pkValues);
- if (!rows[0]) return c.json(null, 404);
- return c.json(rows[0]);
- } catch (e: any) {
- log.error("GET ${fileTableName} error:", e?.stack ?? e);
- return c.json({ error: e?.message ?? "Internal error", ...(DEBUG ? { stack: e?.stack } : {}) }, 500);
+ ${getPkParams}
+ const result = await coreOps.getByPk(ctx, pkValues);
+
+ if (result.error) {
+ return c.json({ error: result.error }, result.status as any);
  }
+
+ return c.json(result.data, result.status as any);
  });
 
  // LIST
  app.post(\`\${base}/list\`, async (c) => {
- try {
- const body = listSchema.safeParse(await c.req.json().catch(() => ({})));
- if (!body.success) {
- const issues = body.error.flatten();
- log.debug("LIST ${fileTableName} invalid:", issues);
- return c.json({ error: "Invalid body", issues }, 400);
- }
- const { include, limit = 50, offset = 0 } = body.data;
-
- const where = ${softDel ? `\`WHERE "${softDel}" IS NULL\`` : `""`};
- const text = \`SELECT * FROM "${fileTableName}" \${where} LIMIT $1 OFFSET $2\`;
- log.debug("LIST ${fileTableName} SQL:", text, "params:", [limit, offset]);
- const { rows } = await deps.pg.query(text, [limit, offset]);
-
- if (!include) {
- log.debug("LIST ${fileTableName} rows:", rows.length);
- return c.json(rows);
- }
-
- // Attempt include stitching with explicit error handling
- log.debug("LIST ${fileTableName} include spec:", include);
+ const body = listSchema.safeParse(await c.req.json().catch(() => ({})));
+
+ if (!body.success) {
+ const issues = body.error.flatten();
+ return c.json({ error: "Invalid body", issues }, 400);
+ }
+
+ const result = await coreOps.listRecords(ctx, body.data);
+
+ if (result.error) {
+ return c.json({ error: result.error }, result.status as any);
+ }
+
+ // Handle includes if needed
+ if (result.needsIncludes && result.includeSpec) {
  try {
- const stitched = await loadIncludes("${fileTableName}", rows, include, deps.pg, ${opts.includeDepthLimit});
- log.debug("LIST ${fileTableName} stitched count:", Array.isArray(stitched) ? stitched.length : "n/a");
+ const stitched = await loadIncludes(
+ "${fileTableName}",
+ result.data,
+ result.includeSpec,
+ deps.pg,
+ ${opts.includeDepthLimit}
+ );
  return c.json(stitched);
  } catch (e: any) {
- const strict = process.env.SDK_STRICT_INCLUDE === "1" || process.env.SDK_STRICT_INCLUDE === "true";
- const msg = e?.message ?? String(e);
- const stack = e?.stack;
- log.error("LIST ${fileTableName} include stitch FAILED:", msg, stack);
-
+ const strict = process.env.SDK_STRICT_INCLUDE === "1";
  if (strict) {
- return c.json({ error: "include-stitch-failed", message: msg, ...(DEBUG ? { stack: e?.stack } : {}) }, 500);
+ return c.json({
+ error: "include-stitch-failed",
+ message: e?.message,
+ ...(process.env.SDK_DEBUG === "1" ? { stack: e?.stack } : {})
+ }, 500);
  }
- // Non-strict fallback: return base rows plus error metadata
- return c.json({ data: rows, includeError: { message: msg, ...(DEBUG ? { stack: e?.stack } : {}) } }, 200);
+ // Non-strict: return base rows with error metadata
+ return c.json({
+ data: result.data,
+ includeError: {
+ message: e?.message,
+ ...(process.env.SDK_DEBUG === "1" ? { stack: e?.stack } : {})
+ }
+ }, 200);
  }
- } catch (e: any) {
- log.error("LIST ${fileTableName} error:", e?.stack ?? e);
- return c.json({ error: e?.message ?? "Internal error", ...(DEBUG ? { stack: e?.stack } : {}) }, 500);
  }
+
+ return c.json(result.data, result.status as any);
  });
 
  // UPDATE
  app.patch(\`\${base}/${pkPath}\`, async (c) => {
- try {
- ${getPkParams}
- const body = await c.req.json().catch(() => ({}));
- log.debug("PATCH ${fileTableName} pk:", pkValues, "patch:", body);
- const parsed = Update${Type}Schema.safeParse(body);
- if (!parsed.success) {
- const issues = parsed.error.flatten();
- log.debug("PATCH ${fileTableName} invalid:", issues);
- return c.json({ error: "Invalid body", issues: issues }, 400);
- }
-
- ${pkFilter}
- if (!Object.keys(updateData).length) return c.json({ error: "No updatable fields provided" }, 400);
-
- const setSql = ${updateSetSql};
- const text = \`UPDATE "${fileTableName}" SET \${setSql} WHERE ${wherePkSql} RETURNING *\`;
- const params = ${hasCompositePk ? `[...pkValues, ...Object.values(updateData)]` : `[pkValues[0], ...Object.values(updateData)]`};
- log.debug("PATCH ${fileTableName} SQL:", text, "params:", params);
- const { rows } = await deps.pg.query(text, params);
- if (!rows[0]) return c.json(null, 404);
- return c.json(rows[0]);
- } catch (e: any) {
- log.error("PATCH ${fileTableName} error:", e?.stack ?? e);
- return c.json({ error: e?.message ?? "Internal error", ...(DEBUG ? { stack: e?.stack } : {}) }, 500);
+ ${getPkParams}
+ const body = await c.req.json().catch(() => ({}));
+ const parsed = Update${Type}Schema.safeParse(body);
+
+ if (!parsed.success) {
+ const issues = parsed.error.flatten();
+ return c.json({ error: "Invalid body", issues }, 400);
+ }
+
+ const result = await coreOps.updateRecord(ctx, pkValues, parsed.data);
+
+ if (result.error) {
+ return c.json({ error: result.error }, result.status as any);
  }
+
+ return c.json(result.data, result.status as any);
  });
 
- // DELETE (soft or hard)
+ // DELETE
  app.delete(\`\${base}/${pkPath}\`, async (c) => {
- try {
- ${getPkParams}
- ${softDel ? `
- const text = \`UPDATE "${fileTableName}" SET "${softDel}" = NOW() WHERE ${wherePkSql} RETURNING *\`;
- log.debug("DELETE (soft) ${fileTableName} SQL:", text, "pk:", pkValues);
- const { rows } = await deps.pg.query(text, pkValues);
- if (!rows[0]) return c.json(null, 404);
- return c.json(rows[0]);` : `
- const text = \`DELETE FROM "${fileTableName}" WHERE ${wherePkSql} RETURNING *\`;
- log.debug("DELETE ${fileTableName} SQL:", text, "pk:", pkValues);
- const { rows } = await deps.pg.query(text, pkValues);
- if (!rows[0]) return c.json(null, 404);
- return c.json(rows[0]);`}
- } catch (e: any) {
- log.error("DELETE ${fileTableName} error:", e?.stack ?? e);
- return c.json({ error: e?.message ?? "Internal error", ...(DEBUG ? { stack: e?.stack } : {}) }, 500);
+ ${getPkParams}
+ const result = await coreOps.deleteRecord(ctx, pkValues);
+
+ if (result.error) {
+ return c.json({ error: result.error }, result.status as any);
  }
+
+ return c.json(result.data, result.status as any);
  });
  }
  `;
  }
 
  // src/emit-client.ts
- function emitClient(table) {
+ function emitClient(table, useJsExtensions) {
  const Type = pascal(table.name);
+ const ext = useJsExtensions ? ".js" : "";
  const pkCols = Array.isArray(table.pk) ? table.pk : table.pk ? [table.pk] : [];
  const safePk = pkCols.length ? pkCols : ["id"];
  const hasCompositePk = safePk.length > 1;
  const pkType = hasCompositePk ? `{ ${safePk.map((c) => `${c}: string`).join("; ")} }` : `string`;
  const pkPathExpr = hasCompositePk ? safePk.map((c) => `pk.${c}`).join(` + "/" + `) : `pk`;
  return `/* Generated. Do not edit. */
- import { BaseClient } from "./base-client";
- import type { ${Type}IncludeSpec } from "./include-spec";
- import type { Insert${Type}, Update${Type}, Select${Type} } from "./types/${table.name}";
+ import { BaseClient } from "./base-client${ext}";
+ import type { ${Type}IncludeSpec } from "./include-spec${ext}";
+ import type { Insert${Type}, Update${Type}, Select${Type} } from "./types/${table.name}${ext}";
 
  /**
  * Client for ${table.name} table operations
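
The generated Hono route modules now validate with the zod schemas, build one coreOps.OperationContext per table, and delegate the SQL to core/operations instead of inlining it. A sketch of mounting one generated module, assuming a table named "users" and node-postgres; the import paths and table name are illustrative, and only the deps shape ({ pg: { query } }) comes from the generated signature shown above:

    // Illustrative wiring; output paths and the "users" table are assumptions.
    import { Hono } from "hono";
    import { Pool } from "pg";
    import { registerUsersRoutes } from "./generated/server/routes/users";

    const pool = new Pool({ connectionString: process.env.DATABASE_URL });
    const app = new Hono();

    // deps.pg only has to provide query(text, params) => Promise<{ rows }>,
    // so a thin wrapper around Pool.query is enough.
    registerUsersRoutes(app, {
      pg: { query: (text, params) => pool.query(text, params) },
    });

    export default app;
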
@@ -1246,19 +1242,20 @@ export class ${Type}Client extends BaseClient {
  }
  `;
  }
- function emitClientIndex(tables) {
+ function emitClientIndex(tables, useJsExtensions) {
+ const ext = useJsExtensions ? ".js" : "";
  let out = `/* Generated. Do not edit. */
  `;
- out += `import { BaseClient, type AuthConfig } from "./base-client";
+ out += `import { BaseClient, type AuthConfig } from "./base-client${ext}";
  `;
  for (const t of tables) {
- out += `import { ${pascal(t.name)}Client } from "./${t.name}";
+ out += `import { ${pascal(t.name)}Client } from "./${t.name}${ext}";
  `;
  }
  out += `
  // Re-export auth types for convenience
  `;
- out += `export type { AuthConfig as SDKAuth, AuthConfig, HeaderMap, AuthHeadersProvider } from "./base-client";
+ out += `export type { AuthConfig as SDKAuth, AuthConfig, HeaderMap, AuthHeadersProvider } from "./base-client${ext}";
 
  `;
  out += `/**
@@ -1290,18 +1287,18 @@ function emitClientIndex(tables) {
  out += `// Export individual table clients
  `;
  for (const t of tables) {
- out += `export { ${pascal(t.name)}Client } from "./${t.name}";
+ out += `export { ${pascal(t.name)}Client } from "./${t.name}${ext}";
  `;
  }
  out += `
  // Export base client for custom extensions
  `;
- out += `export { BaseClient } from "./base-client";
+ out += `export { BaseClient } from "./base-client${ext}";
  `;
  out += `
  // Export include specifications
  `;
- out += `export * from "./include-spec";
+ out += `export * from "./include-spec${ext}";
  `;
  return out;
  }
@@ -1449,13 +1446,14 @@ export abstract class BaseClient {
  }
 
  // src/emit-include-loader.ts
- function emitIncludeLoader(graph, model, maxDepth) {
+ function emitIncludeLoader(graph, model, maxDepth, useJsExtensions) {
  const fkIndex = {};
  for (const t of Object.values(model.tables)) {
  fkIndex[t.name] = t.fks.map((f) => ({ from: f.from, toTable: f.toTable, to: f.to }));
  }
+ const ext = useJsExtensions ? ".js" : "";
  return `/* Generated. Do not edit. */
- import { RELATION_GRAPH } from "./include-builder";
+ import { RELATION_GRAPH } from "./include-builder${ext}";
 
  // Minimal types to keep the file self-contained
  type Graph = typeof RELATION_GRAPH;
@@ -1974,12 +1972,13 @@ export async function authMiddleware(c: Context, next: Next) {
  `;
  }
 
- // src/emit-router.ts
- function emitRouter(tables, hasAuth) {
+ // src/emit-router-hono.ts
+ function emitHonoRouter(tables, hasAuth, useJsExtensions) {
  const tableNames = tables.map((t) => t.name).sort();
+ const ext = useJsExtensions ? ".js" : "";
  const imports = tableNames.map((name) => {
  const Type = pascal(name);
- return `import { register${Type}Routes } from "./routes/${name}";`;
+ return `import { register${Type}Routes } from "./routes/${name}${ext}";`;
  }).join(`
  `);
  const registrations = tableNames.map((name) => {
@@ -1989,14 +1988,14 @@ function emitRouter(tables, hasAuth) {
  `);
  const reExports = tableNames.map((name) => {
  const Type = pascal(name);
- return `export { register${Type}Routes } from "./routes/${name}";`;
+ return `export { register${Type}Routes } from "./routes/${name}${ext}";`;
  }).join(`
  `);
  return `/* Generated. Do not edit. */
  import { Hono } from "hono";
- import { SDK_MANIFEST } from "./sdk-bundle";
+ import { SDK_MANIFEST } from "./sdk-bundle${ext}";
  ${imports}
- ${hasAuth ? `export { authMiddleware } from "./auth";` : ""}
+ ${hasAuth ? `export { authMiddleware } from "./auth${ext}";` : ""}
 
  /**
  * Creates a Hono router with all generated routes that can be mounted into your existing app.
@@ -2085,7 +2084,7 @@ ${registrations.replace(/router/g, "app")}
  ${reExports}
 
  // Re-export types and schemas for convenience
- export * from "./include-spec";
+ export * from "./include-spec${ext}";
  `;
  }
 
@@ -2112,6 +2111,222 @@ export const SDK_MANIFEST = {
  `;
  }
 
+ // src/emit-core-operations.ts
+ function emitCoreOperations() {
+ return `/**
+ * Core database operations that are framework-agnostic.
+ * These functions handle the actual database logic and can be used by any framework adapter.
+ */
+
+ import type { z } from "zod";
+
+ export interface DatabaseClient {
+ query: (text: string, params?: any[]) => Promise<{ rows: any[] }>;
+ }
+
+ export interface OperationContext {
+ pg: DatabaseClient;
+ table: string;
+ pkColumns: string[];
+ softDeleteColumn?: string | null;
+ includeDepthLimit: number;
+ }
+
+ const DEBUG = process.env.SDK_DEBUG === "1" || process.env.SDK_DEBUG === "true";
+ const log = {
+ debug: (...args: any[]) => { if (DEBUG) console.debug("[sdk]", ...args); },
+ error: (...args: any[]) => console.error("[sdk]", ...args),
+ };
+
+ /**
+ * CREATE operation - Insert a new record
+ */
+ export async function createRecord(
+ ctx: OperationContext,
+ data: Record<string, any>
+ ): Promise<{ data?: any; error?: string; issues?: any; status: number }> {
+ try {
+ const cols = Object.keys(data);
+ const vals = Object.values(data);
+
+ if (!cols.length) {
+ return { error: "No fields provided", status: 400 };
+ }
+
+ const placeholders = cols.map((_, i) => '$' + (i + 1)).join(", ");
+ const text = \`INSERT INTO "\${ctx.table}" (\${cols.map(c => '"' + c + '"').join(", ")})
+ VALUES (\${placeholders})
+ RETURNING *\`;
+
+ log.debug("SQL:", text, "vals:", vals);
+ const { rows } = await ctx.pg.query(text, vals);
+
+ return { data: rows[0] ?? null, status: rows[0] ? 201 : 500 };
+ } catch (e: any) {
+ log.error(\`POST \${ctx.table} error:\`, e?.stack ?? e);
+ return {
+ error: e?.message ?? "Internal error",
+ ...(DEBUG ? { stack: e?.stack } : {}),
+ status: 500
+ };
+ }
+ }
+
+ /**
+ * READ operation - Get a record by primary key
+ */
+ export async function getByPk(
+ ctx: OperationContext,
+ pkValues: any[]
+ ): Promise<{ data?: any; error?: string; status: number }> {
+ try {
+ const hasCompositePk = ctx.pkColumns.length > 1;
+ const wherePkSql = hasCompositePk
+ ? ctx.pkColumns.map((c, i) => \`"\${c}" = $\${i + 1}\`).join(" AND ")
+ : \`"\${ctx.pkColumns[0]}" = $1\`;
+
+ const text = \`SELECT * FROM "\${ctx.table}" WHERE \${wherePkSql} LIMIT 1\`;
+ log.debug(\`GET \${ctx.table} by PK:\`, pkValues, "SQL:", text);
+
+ const { rows } = await ctx.pg.query(text, pkValues);
+
+ if (!rows[0]) {
+ return { data: null, status: 404 };
+ }
+
+ return { data: rows[0], status: 200 };
+ } catch (e: any) {
+ log.error(\`GET \${ctx.table} error:\`, e?.stack ?? e);
+ return {
+ error: e?.message ?? "Internal error",
+ ...(DEBUG ? { stack: e?.stack } : {}),
+ status: 500
+ };
+ }
+ }
+
+ /**
+ * LIST operation - Get multiple records with optional filters
+ */
+ export async function listRecords(
+ ctx: OperationContext,
+ params: { limit?: number; offset?: number; include?: any }
+ ): Promise<{ data?: any; error?: string; issues?: any; status: number; needsIncludes?: boolean; includeSpec?: any }> {
+ try {
+ const { limit = 50, offset = 0, include } = params;
+
+ const where = ctx.softDeleteColumn
+ ? \`WHERE "\${ctx.softDeleteColumn}" IS NULL\`
+ : "";
+
+ const text = \`SELECT * FROM "\${ctx.table}" \${where} LIMIT $1 OFFSET $2\`;
+ log.debug(\`LIST \${ctx.table} SQL:\`, text, "params:", [limit, offset]);
+
+ const { rows } = await ctx.pg.query(text, [limit, offset]);
+
+ if (!include) {
+ log.debug(\`LIST \${ctx.table} rows:\`, rows.length);
+ return { data: rows, status: 200 };
+ }
+
+ // Include logic will be handled by the include-loader
+ // For now, just return the rows with a note that includes need to be applied
+ log.debug(\`LIST \${ctx.table} include spec:\`, include);
+ return { data: rows, needsIncludes: true, includeSpec: include, status: 200 };
+ } catch (e: any) {
+ log.error(\`LIST \${ctx.table} error:\`, e?.stack ?? e);
+ return {
+ error: e?.message ?? "Internal error",
+ ...(DEBUG ? { stack: e?.stack } : {}),
+ status: 500
+ };
+ }
+ }
+
+ /**
+ * UPDATE operation - Update a record by primary key
+ */
+ export async function updateRecord(
+ ctx: OperationContext,
+ pkValues: any[],
+ updateData: Record<string, any>
+ ): Promise<{ data?: any; error?: string; issues?: any; status: number }> {
+ try {
+ // Filter out PK columns from update data
+ const filteredData = Object.fromEntries(
+ Object.entries(updateData).filter(([k]) => !ctx.pkColumns.includes(k))
+ );
+
+ if (!Object.keys(filteredData).length) {
+ return { error: "No updatable fields provided", status: 400 };
+ }
+
+ const hasCompositePk = ctx.pkColumns.length > 1;
+ const wherePkSql = hasCompositePk
+ ? ctx.pkColumns.map((c, i) => \`"\${c}" = $\${i + 1}\`).join(" AND ")
+ : \`"\${ctx.pkColumns[0]}" = $1\`;
+
+ const setSql = Object.keys(filteredData)
+ .map((k, i) => \`"\${k}" = $\${i + pkValues.length + 1}\`)
+ .join(", ");
+
+ const text = \`UPDATE "\${ctx.table}" SET \${setSql} WHERE \${wherePkSql} RETURNING *\`;
+ const params = [...pkValues, ...Object.values(filteredData)];
+
+ log.debug(\`PATCH \${ctx.table} SQL:\`, text, "params:", params);
+ const { rows } = await ctx.pg.query(text, params);
+
+ if (!rows[0]) {
+ return { data: null, status: 404 };
+ }
+
+ return { data: rows[0], status: 200 };
+ } catch (e: any) {
+ log.error(\`PATCH \${ctx.table} error:\`, e?.stack ?? e);
+ return {
+ error: e?.message ?? "Internal error",
+ ...(DEBUG ? { stack: e?.stack } : {}),
+ status: 500
+ };
+ }
+ }
+
+ /**
+ * DELETE operation - Delete or soft-delete a record by primary key
+ */
+ export async function deleteRecord(
+ ctx: OperationContext,
+ pkValues: any[]
+ ): Promise<{ data?: any; error?: string; status: number }> {
+ try {
+ const hasCompositePk = ctx.pkColumns.length > 1;
+ const wherePkSql = hasCompositePk
+ ? ctx.pkColumns.map((c, i) => \`"\${c}" = $\${i + 1}\`).join(" AND ")
+ : \`"\${ctx.pkColumns[0]}" = $1\`;
+
+ const text = ctx.softDeleteColumn
+ ? \`UPDATE "\${ctx.table}" SET "\${ctx.softDeleteColumn}" = NOW() WHERE \${wherePkSql} RETURNING *\`
+ : \`DELETE FROM "\${ctx.table}" WHERE \${wherePkSql} RETURNING *\`;
+
+ log.debug(\`DELETE \${ctx.softDeleteColumn ? '(soft)' : ''} \${ctx.table} SQL:\`, text, "pk:", pkValues);
+ const { rows } = await ctx.pg.query(text, pkValues);
+
+ if (!rows[0]) {
+ return { data: null, status: 404 };
+ }
+
+ return { data: rows[0], status: 200 };
+ } catch (e: any) {
+ log.error(\`DELETE \${ctx.table} error:\`, e?.stack ?? e);
+ return {
+ error: e?.message ?? "Internal error",
+ ...(DEBUG ? { stack: e?.stack } : {}),
+ status: 500
+ };
+ }
+ }`;
+ }
+
  // src/types.ts
  function normalizeAuthConfig(input) {
  if (!input)
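
Because the emitted core/operations.ts imports nothing from Hono, the same helpers could back the "express" and "fastify" adapters the config comments mark as planned. A hypothetical hand-written Express adapter, shown only to illustrate that contract; nothing like it is generated in this version, and the paths, table name, and routes below are assumptions:

    // Hypothetical adapter reusing the generated OperationContext and helpers.
    import express from "express";
    import { Pool } from "pg";
    import * as coreOps from "./generated/server/core/operations";

    const pool = new Pool();
    const ctx: coreOps.OperationContext = {
      pg: { query: (text, params) => pool.query(text, params) },
      table: "users",          // illustrative table name
      pkColumns: ["id"],
      softDeleteColumn: null,
      includeDepthLimit: 3,
    };

    const app = express();
    app.use(express.json());

    app.post("/v1/users", async (req, res) => {
      // Validation with the generated zod schemas would go here, as the Hono routes do.
      const result = await coreOps.createRecord(ctx, req.body ?? {});
      res.status(result.status).json(result.error ? { error: result.error } : result.data);
    });

    app.get("/v1/users/:id", async (req, res) => {
      const result = await coreOps.getByPk(ctx, [req.params.id]);
      res.status(result.status).json(result.error ? { error: result.error } : result.data);
    });
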
@@ -2168,6 +2383,7 @@ async function generate(configPath) {
  clientDir = join(originalClientDir, "sdk");
  }
  const normDateType = cfg.dateType === "string" ? "string" : "date";
+ const serverFramework = cfg.serverFramework || "hono";
  console.log("\uD83D\uDCC1 Creating directories...");
  await ensureDirs([
  serverDir,
@@ -2188,12 +2404,16 @@ async function generate(configPath) {
  });
  files.push({
  path: join(serverDir, "include-loader.ts"),
- content: emitIncludeLoader(graph, model, cfg.includeDepthLimit || 3)
+ content: emitIncludeLoader(graph, model, cfg.includeDepthLimit || 3, cfg.useJsExtensions)
  });
  files.push({ path: join(serverDir, "logger.ts"), content: emitLogger() });
  if (normalizedAuth?.strategy && normalizedAuth.strategy !== "none") {
  files.push({ path: join(serverDir, "auth.ts"), content: emitAuth(normalizedAuth) });
  }
+ files.push({
+ path: join(serverDir, "core", "operations.ts"),
+ content: emitCoreOperations()
+ });
  for (const table of Object.values(model.tables)) {
  const typesSrc = emitTypes(table, { dateType: normDateType, numericMode: "string" });
  files.push({ path: join(serverDir, "types", `${table.name}.ts`), content: typesSrc });
@@ -2202,27 +2422,36 @@ async function generate(configPath) {
  path: join(serverDir, "zod", `${table.name}.ts`),
  content: emitZod(table, { dateType: normDateType, numericMode: "string" })
  });
- files.push({
- path: join(serverDir, "routes", `${table.name}.ts`),
- content: emitRoutes(table, graph, {
+ let routeContent;
+ if (serverFramework === "hono") {
+ routeContent = emitHonoRoutes(table, graph, {
  softDeleteColumn: cfg.softDeleteColumn || null,
  includeDepthLimit: cfg.includeDepthLimit || 3,
- authStrategy: normalizedAuth?.strategy
- })
+ authStrategy: normalizedAuth?.strategy,
+ useJsExtensions: cfg.useJsExtensions
+ });
+ } else {
+ throw new Error(`Framework "${serverFramework}" is not yet supported. Currently only "hono" is available.`);
+ }
+ files.push({
+ path: join(serverDir, "routes", `${table.name}.ts`),
+ content: routeContent
  });
  files.push({
  path: join(clientDir, `${table.name}.ts`),
- content: emitClient(table)
+ content: emitClient(table, cfg.useJsExtensionsClient)
  });
  }
  files.push({
  path: join(clientDir, "index.ts"),
- content: emitClientIndex(Object.values(model.tables))
- });
- files.push({
- path: join(serverDir, "router.ts"),
- content: emitRouter(Object.values(model.tables), !!normalizedAuth?.strategy && normalizedAuth.strategy !== "none")
+ content: emitClientIndex(Object.values(model.tables), cfg.useJsExtensionsClient)
  });
+ if (serverFramework === "hono") {
+ files.push({
+ path: join(serverDir, "router.ts"),
+ content: emitHonoRouter(Object.values(model.tables), !!normalizedAuth?.strategy && normalizedAuth.strategy !== "none", cfg.useJsExtensions)
+ });
+ }
  const clientFiles = files.filter((f) => {
  return f.path.includes(clientDir);
  });