postgresdk 0.16.6 → 0.16.7

package/README.md CHANGED
@@ -163,6 +163,9 @@ export default {
   }
   },
 
+  // SDK endpoint protection (optional)
+  pullToken: "env:POSTGRESDK_PULL_TOKEN", // Protect /_psdk/* endpoints (if not set, public)
+
   // Test generation (optional)
   tests: {
     generate: true, // Generate test files
@@ -515,7 +518,7 @@ export default {
   pull: {
     from: "https://api.myapp.com",
     output: "./src/sdk",
-    token: process.env.API_TOKEN // Optional auth for protected APIs
+    pullToken: "env:POSTGRESDK_PULL_TOKEN" // Optional: if server has pullToken set
   }
 };
 ```
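
The README changes above introduce a server-side `pullToken` and a matching `pull.pullToken` on the consumer side. As a rough sketch (names and export style are illustrative, assuming the token is supplied via `POSTGRESDK_PULL_TOKEN` on both machines), the pairing looks like this:

```ts
// Sketch only — not copied from the package. Assumes POSTGRESDK_PULL_TOKEN is set on both sides.

// Server-side postgresdk.config.ts: protect the /_psdk/* SDK-distribution endpoints.
export const serverConfig = {
  pullToken: "env:POSTGRESDK_PULL_TOKEN", // omit to leave /_psdk/* public
};

// Consumer-side postgresdk.config.ts: send the same token when running `postgresdk pull`.
export const sdkConfig = {
  pull: {
    from: "https://api.myapp.com",
    output: "./src/sdk",
    pullToken: "env:POSTGRESDK_PULL_TOKEN", // must match the server's pullToken
  },
};
```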
package/dist/cli.js CHANGED
@@ -2242,13 +2242,14 @@ async function initCommand(args) {
       console.log(" DATABASE_URL=postgres://user:pass@localhost:5432/mydb");
       console.log(" API_KEY=your-secret-key");
       console.log(" JWT_SECRET=your-jwt-secret");
+      console.log(" POSTGRESDK_PULL_TOKEN=your-pull-token");
     }
     console.log(" 3. Run 'postgresdk generate' to create your SDK");
   } else {
     console.log(" 1. Edit postgresdk.config.ts with your API URL in pull.from");
     if (!hasEnv) {
       console.log(" 2. Consider creating a .env file if you need authentication:");
-      console.log(" API_TOKEN=your-api-token");
+      console.log(" POSTGRESDK_PULL_TOKEN=your-pull-token");
     }
     console.log(" 3. Run 'postgresdk pull' to fetch your SDK");
   }
@@ -2395,6 +2396,21 @@ export default {
   //   audience: "my-api", // Optional: validate 'aud' claim
   // }
   // },
+
+  // ========== SDK ENDPOINT PROTECTION ==========
+
+  /**
+   * Token for protecting /_psdk/* endpoints (SDK distribution and contract endpoints)
+   *
+   * When set, clients must provide this token via Authorization header when pulling SDK.
+   * If not set, /_psdk/* endpoints are publicly accessible.
+   *
+   * This is separate from the main auth strategy (JWT/API key) used for CRUD operations.
+   *
+   * Use "env:" prefix to read from environment variables:
+   *   pullToken: "env:POSTGRESDK_PULL_TOKEN"
+   */
+  // pullToken: "env:POSTGRESDK_PULL_TOKEN",
 };
 `, CONFIG_TEMPLATE_SDK = `/**
  * PostgreSDK Configuration (SDK-Side)
@@ -2420,9 +2436,17 @@ export default {
    * Configuration for pulling SDK from a remote API
    */
   pull: {
-    from: "https://api.myapp.com", // API URL to pull SDK from
-    output: "./src/sdk", // Local directory for pulled SDK
-    // token: process.env.API_TOKEN, // Optional authentication token
+    from: "https://api.myapp.com", // API URL to pull SDK from
+    output: "./src/sdk", // Local directory for pulled SDK
+
+    /**
+     * Authentication token for protected /_psdk/* endpoints
+     * Should match the server's pullToken configuration
+     *
+     * Use "env:" prefix to read from environment variables:
+     *   pullToken: "env:POSTGRESDK_PULL_TOKEN"
+     */
+    // pullToken: "env:POSTGRESDK_PULL_TOKEN",
   },
 };
 `;
@@ -2463,7 +2487,7 @@ async function pullCommand(args) {
   const cliConfig = {
     from: args.find((a) => a.startsWith("--from="))?.split("=")[1],
     output: args.find((a) => a.startsWith("--output="))?.split("=")[1],
-    token: args.find((a) => a.startsWith("--token="))?.split("=")[1]
+    pullToken: args.find((a) => a.startsWith("--pullToken="))?.split("=")[1]
   };
   const config = {
     output: "./src/sdk",
@@ -2479,10 +2503,19 @@ Options:`);
     console.error("  (then edit postgresdk.config.ts and run 'postgresdk pull')");
     process.exit(1);
   }
+  let resolvedToken = config.pullToken;
+  if (resolvedToken?.startsWith("env:")) {
+    const envVarName = resolvedToken.slice(4);
+    resolvedToken = process.env[envVarName];
+    if (!resolvedToken) {
+      console.error(`❌ Environment variable "${envVarName}" not set (referenced in pullToken config)`);
+      process.exit(1);
+    }
+  }
   console.log(`\uD83D\uDD04 Pulling SDK from ${config.from}`);
   console.log(`\uD83D\uDCC1 Output directory: ${config.output}`);
   try {
-    const headers = config.token ? { Authorization: `Bearer ${config.token}` } : {};
+    const headers = resolvedToken ? { Authorization: `Bearer ${resolvedToken}` } : {};
     const manifestRes = await fetch(`${config.from}/_psdk/sdk/manifest`, { headers });
     if (!manifestRes.ok) {
       throw new Error(`Failed to fetch SDK manifest: ${manifestRes.status} ${manifestRes.statusText}`);
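
The `env:` convention resolved in `pullCommand` above boils down to: read the value after the prefix from the environment, fail loudly if it is missing, and send the result as a Bearer token. A standalone sketch of that convention (an illustration, not an export of postgresdk):

```ts
// Illustrative helper mirroring the "env:" resolution in pullCommand; not part of postgresdk's API.
function resolvePullToken(configured?: string): string | undefined {
  if (!configured) return undefined;
  if (!configured.startsWith("env:")) return configured; // literal token
  const envVarName = configured.slice(4); // drop the "env:" prefix
  const value = process.env[envVarName];
  if (!value) {
    throw new Error(`Environment variable "${envVarName}" not set (referenced in pullToken config)`);
  }
  return value;
}

// The resolved token is then attached to every /_psdk/* request:
const token = resolvePullToken("env:POSTGRESDK_PULL_TOKEN");
const headers: Record<string, string> = token ? { Authorization: `Bearer ${token}` } : {};
```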
@@ -2815,6 +2848,8 @@ function emitZod(table, opts, enums) {
     return `z.unknown()`;
   if (t === "date" || t.startsWith("timestamp"))
     return `z.string()`;
+  if (t === "vector" || t === "halfvec" || t === "sparsevec" || t === "bit")
+    return `z.array(z.number())`;
   if (t.startsWith("_"))
     return `z.array(${zFor(t.slice(1))})`;
   return `z.string()`;
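
This maps pgvector-style column types (`vector`, `halfvec`, `sparsevec`, `bit`) to numeric-array Zod schemas instead of the previous `z.string()` fallback. For a hypothetical `documents` table with an `embedding vector(3)` column, the generated schema would now look roughly like:

```ts
import { z } from "zod";

// Approximate shape of the generated schema for a hypothetical table with a vector column.
// Column names are illustrative; the real output depends on your schema.
export const DocumentSchema = z.object({
  id: z.string(),
  embedding: z.array(z.number()), // vector/halfvec/sparsevec/bit columns now validate as number[]
});

export type Document = z.infer<typeof DocumentSchema>; // embedding: number[]
```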
@@ -2948,6 +2983,7 @@ function emitHonoRoutes(table, _graph, opts) {
   const fileTableName = table.name;
   const Type = pascal(table.name);
   const hasVectorColumns = table.columns.some((c) => isVectorType(c.pgType));
+  const vectorColumns = table.columns.filter((c) => isVectorType(c.pgType)).map((c) => c.name);
   const rawPk = table.pk;
   const pkCols = Array.isArray(rawPk) ? rawPk : rawPk ? [rawPk] : [];
   const safePkCols = pkCols.length ? pkCols : ["id"];
@@ -3008,7 +3044,8 @@ export function register${Type}Routes(app: Hono, deps: { pg: { query: (text: str
     table: "${fileTableName}",
     pkColumns: ${JSON.stringify(safePkCols)},
     softDeleteColumn: ${softDel ? `"${softDel}"` : "null"},
-    includeMethodsDepth: ${opts.includeMethodsDepth}
+    includeMethodsDepth: ${opts.includeMethodsDepth}${vectorColumns.length > 0 ? `,
+    vectorColumns: ${JSON.stringify(vectorColumns)}` : ""}
   };
 ${hasAuth ? `
   // \uD83D\uDD10 Auth: protect all routes for this table
@@ -3990,6 +4027,8 @@ function tsTypeFor(pgType, opts, enums) {
     return "string";
   if (t === "json" || t === "jsonb")
     return "unknown";
+  if (t === "vector" || t === "halfvec" || t === "sparsevec" || t === "bit")
+    return "number[]";
   return "string";
 }
 function isJsonbType2(pgType) {
@@ -4438,9 +4477,18 @@ export async function authMiddleware(c: Context, next: Next) {
 
 // src/emit-router-hono.ts
 init_utils();
-function emitHonoRouter(tables, hasAuth, useJsExtensions) {
+function emitHonoRouter(tables, hasAuth, useJsExtensions, pullToken) {
   const tableNames = tables.map((t) => t.name).sort();
   const ext = useJsExtensions ? ".js" : "";
+  let resolvedPullToken;
+  if (pullToken) {
+    if (pullToken.startsWith("env:")) {
+      const envVarName = pullToken.slice(4);
+      resolvedPullToken = `process.env.${envVarName}`;
+    } else {
+      resolvedPullToken = JSON.stringify(pullToken);
+    }
+  }
   const imports = tableNames.map((name) => {
     const Type = pascal(name);
     return `import { register${Type}Routes } from "./routes/${name}${ext}";`;
@@ -4527,10 +4575,33 @@ export function createRouter(
   }
 ): Hono {
   const router = new Hono();
-
+
   // Register table routes
 ${registrations}
+  ${pullToken ? `
+  // \uD83D\uDD10 Protect /_psdk/* endpoints with pullToken
+  router.use("/_psdk/*", async (c, next) => {
+    const authHeader = c.req.header("Authorization");
+    const expectedToken = ${resolvedPullToken};
 
+    if (!expectedToken) {
+      // Token not configured in environment - reject request
+      return c.json({ error: "SDK endpoints are protected but token not configured" }, 500);
+    }
+
+    if (!authHeader || !authHeader.startsWith("Bearer ")) {
+      return c.json({ error: "Missing or invalid Authorization header" }, 401);
+    }
+
+    const providedToken = authHeader.slice(7); // Remove "Bearer " prefix
+
+    if (providedToken !== expectedToken) {
+      return c.json({ error: "Invalid pull token" }, 401);
+    }
+
+    await next();
+  });
+  ` : ""}
   // SDK distribution endpoints
   router.get("/_psdk/sdk/manifest", (c) => {
     return c.json({
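
With `pullToken` set, the emitted router rejects unauthenticated requests to `/_psdk/*` before the SDK distribution endpoints run. A client pulling the SDK would authenticate along these lines (sketch; the manifest path matches the one fetched by `postgresdk pull` above, and the environment variable name is only an assumption):

```ts
// Sketch of a client hitting a generated router that has pullToken configured.
// Assumes POSTGRESDK_PULL_TOKEN holds the same value the server reads at runtime.
const token = process.env.POSTGRESDK_PULL_TOKEN;

const res = await fetch("https://api.myapp.com/_psdk/sdk/manifest", {
  headers: token ? { Authorization: `Bearer ${token}` } : {},
});

if (res.status === 401) {
  // Missing/invalid Authorization header or wrong pull token
  throw new Error("Pull token missing or rejected by the /_psdk/* middleware");
}
const manifest = await res.json();
```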
@@ -4674,6 +4745,7 @@ export interface OperationContext {
   pkColumns: string[];
   softDeleteColumn?: string | null;
   includeMethodsDepth: number;
+  vectorColumns?: string[];
 }
 
 const DEBUG = process.env.SDK_DEBUG === "1" || process.env.SDK_DEBUG === "true";
@@ -4698,6 +4770,30 @@ function prepareParams(params: any[]): any[] {
   });
 }
 
+/**
+ * Parse vector columns in retrieved rows.
+ * pgvector returns vectors as strings (e.g., "[1.5,2.5,3.5]") which need to be
+ * parsed back to number[] to match TypeScript types.
+ */
+function parseVectorColumns(rows: any[], vectorColumns?: string[]): any[] {
+  if (!vectorColumns || vectorColumns.length === 0) return rows;
+
+  return rows.map(row => {
+    const parsed = { ...row };
+    for (const col of vectorColumns) {
+      if (parsed[col] !== null && parsed[col] !== undefined && typeof parsed[col] === 'string') {
+        try {
+          parsed[col] = JSON.parse(parsed[col]);
+        } catch (e) {
+          // If parsing fails, leave as string (shouldn't happen with valid vectors)
+          log.error(\`Failed to parse vector column "\${col}":, e\`);
+        }
+      }
+    }
+    return parsed;
+  });
+}
+
 
 /**
  * CREATE operation - Insert a new record
@@ -4720,8 +4816,9 @@ export async function createRecord(
 
     log.debug("SQL:", text, "vals:", vals);
     const { rows } = await ctx.pg.query(text, prepareParams(vals));
+    const parsedRows = parseVectorColumns(rows, ctx.vectorColumns);
 
-    return { data: rows[0] ?? null, status: rows[0] ? 201 : 500 };
+    return { data: parsedRows[0] ?? null, status: parsedRows[0] ? 201 : 500 };
   } catch (e: any) {
     // Enhanced logging for JSON validation errors
     const errorMsg = e?.message ?? "";
@@ -4760,12 +4857,13 @@ export async function getByPk(
     log.debug(\`GET \${ctx.table} by PK:\`, pkValues, "SQL:", text);
 
     const { rows } = await ctx.pg.query(text, prepareParams(pkValues));
-
-    if (!rows[0]) {
+    const parsedRows = parseVectorColumns(rows, ctx.vectorColumns);
+
+    if (!parsedRows[0]) {
       return { data: null, status: 404 };
     }
-
-    return { data: rows[0], status: 200 };
+
+    return { data: parsedRows[0], status: 200 };
   } catch (e: any) {
     log.error(\`GET \${ctx.table} error:\`, e?.stack ?? e);
     return {
@@ -5151,12 +5249,13 @@ export async function listRecords(
     log.debug(\`LIST \${ctx.table} SQL:\`, text, "params:", allParams);
 
     const { rows } = await ctx.pg.query(text, prepareParams(allParams));
+    const parsedRows = parseVectorColumns(rows, ctx.vectorColumns);
 
     // Calculate hasMore
     const hasMore = offset + limit < total;
 
     const metadata = {
-      data: rows,
+      data: parsedRows,
       total,
       limit,
       offset,
@@ -5221,12 +5320,13 @@ export async function updateRecord(
 
     log.debug(\`PATCH \${ctx.table} SQL:\`, text, "params:", params);
     const { rows } = await ctx.pg.query(text, prepareParams(params));
-
-    if (!rows[0]) {
+    const parsedRows = parseVectorColumns(rows, ctx.vectorColumns);
+
+    if (!parsedRows[0]) {
       return { data: null, status: 404 };
     }
-
-    return { data: rows[0], status: 200 };
+
+    return { data: parsedRows[0], status: 200 };
   } catch (e: any) {
     // Enhanced logging for JSON validation errors
     const errorMsg = e?.message ?? "";
@@ -5270,12 +5370,13 @@ export async function deleteRecord(
 
     log.debug(\`DELETE \${ctx.softDeleteColumn ? '(soft)' : ''} \${ctx.table} SQL:\`, text, "pk:", pkValues);
     const { rows } = await ctx.pg.query(text, prepareParams(pkValues));
-
-    if (!rows[0]) {
+    const parsedRows = parseVectorColumns(rows, ctx.vectorColumns);
+
+    if (!parsedRows[0]) {
       return { data: null, status: 404 };
     }
-
-    return { data: rows[0], status: 200 };
+
+    return { data: parsedRows[0], status: 200 };
   } catch (e: any) {
     log.error(\`DELETE \${ctx.table} error:\`, e?.stack ?? e);
     return {
@@ -6145,7 +6246,7 @@ async function generate(configPath) {
   if (serverFramework === "hono") {
     files.push({
       path: join(serverDir, "router.ts"),
-      content: emitHonoRouter(Object.values(model.tables), getAuthStrategy(normalizedAuth) !== "none", cfg.useJsExtensions)
+      content: emitHonoRouter(Object.values(model.tables), getAuthStrategy(normalizedAuth) !== "none", cfg.useJsExtensions, cfg.pullToken)
     });
   }
   const { generateUnifiedContract: generateUnifiedContract2, generateUnifiedContractMarkdown: generateUnifiedContractMarkdown2 } = await Promise.resolve().then(() => (init_emit_sdk_contract(), exports_emit_sdk_contract));
@@ -2,4 +2,4 @@ import type { Table } from "./introspect";
 /**
  * Emits the Hono server router file that exports helper functions for route registration
  */
-export declare function emitHonoRouter(tables: Table[], hasAuth: boolean, useJsExtensions?: boolean): string;
+export declare function emitHonoRouter(tables: Table[], hasAuth: boolean, useJsExtensions?: boolean, pullToken?: string): string;
package/dist/index.js CHANGED
@@ -1986,6 +1986,8 @@ function emitZod(table, opts, enums) {
     return `z.unknown()`;
   if (t === "date" || t.startsWith("timestamp"))
     return `z.string()`;
+  if (t === "vector" || t === "halfvec" || t === "sparsevec" || t === "bit")
+    return `z.array(z.number())`;
   if (t.startsWith("_"))
     return `z.array(${zFor(t.slice(1))})`;
   return `z.string()`;
@@ -2119,6 +2121,7 @@ function emitHonoRoutes(table, _graph, opts) {
   const fileTableName = table.name;
   const Type = pascal(table.name);
   const hasVectorColumns = table.columns.some((c) => isVectorType(c.pgType));
+  const vectorColumns = table.columns.filter((c) => isVectorType(c.pgType)).map((c) => c.name);
   const rawPk = table.pk;
   const pkCols = Array.isArray(rawPk) ? rawPk : rawPk ? [rawPk] : [];
   const safePkCols = pkCols.length ? pkCols : ["id"];
@@ -2179,7 +2182,8 @@ export function register${Type}Routes(app: Hono, deps: { pg: { query: (text: str
     table: "${fileTableName}",
     pkColumns: ${JSON.stringify(safePkCols)},
     softDeleteColumn: ${softDel ? `"${softDel}"` : "null"},
-    includeMethodsDepth: ${opts.includeMethodsDepth}
+    includeMethodsDepth: ${opts.includeMethodsDepth}${vectorColumns.length > 0 ? `,
+    vectorColumns: ${JSON.stringify(vectorColumns)}` : ""}
   };
 ${hasAuth ? `
   // \uD83D\uDD10 Auth: protect all routes for this table
@@ -3161,6 +3165,8 @@ function tsTypeFor(pgType, opts, enums) {
     return "string";
   if (t === "json" || t === "jsonb")
     return "unknown";
+  if (t === "vector" || t === "halfvec" || t === "sparsevec" || t === "bit")
+    return "number[]";
   return "string";
 }
 function isJsonbType2(pgType) {
@@ -3609,9 +3615,18 @@ export async function authMiddleware(c: Context, next: Next) {
 
 // src/emit-router-hono.ts
 init_utils();
-function emitHonoRouter(tables, hasAuth, useJsExtensions) {
+function emitHonoRouter(tables, hasAuth, useJsExtensions, pullToken) {
   const tableNames = tables.map((t) => t.name).sort();
   const ext = useJsExtensions ? ".js" : "";
+  let resolvedPullToken;
+  if (pullToken) {
+    if (pullToken.startsWith("env:")) {
+      const envVarName = pullToken.slice(4);
+      resolvedPullToken = `process.env.${envVarName}`;
+    } else {
+      resolvedPullToken = JSON.stringify(pullToken);
+    }
+  }
   const imports = tableNames.map((name) => {
     const Type = pascal(name);
     return `import { register${Type}Routes } from "./routes/${name}${ext}";`;
@@ -3698,10 +3713,33 @@ export function createRouter(
   }
 ): Hono {
   const router = new Hono();
-
+
   // Register table routes
 ${registrations}
+  ${pullToken ? `
+  // \uD83D\uDD10 Protect /_psdk/* endpoints with pullToken
+  router.use("/_psdk/*", async (c, next) => {
+    const authHeader = c.req.header("Authorization");
+    const expectedToken = ${resolvedPullToken};
+
+    if (!expectedToken) {
+      // Token not configured in environment - reject request
+      return c.json({ error: "SDK endpoints are protected but token not configured" }, 500);
+    }
+
+    if (!authHeader || !authHeader.startsWith("Bearer ")) {
+      return c.json({ error: "Missing or invalid Authorization header" }, 401);
+    }
 
+    const providedToken = authHeader.slice(7); // Remove "Bearer " prefix
+
+    if (providedToken !== expectedToken) {
+      return c.json({ error: "Invalid pull token" }, 401);
+    }
+
+    await next();
+  });
+  ` : ""}
   // SDK distribution endpoints
   router.get("/_psdk/sdk/manifest", (c) => {
     return c.json({
@@ -3845,6 +3883,7 @@ export interface OperationContext {
   pkColumns: string[];
   softDeleteColumn?: string | null;
   includeMethodsDepth: number;
+  vectorColumns?: string[];
 }
 
 const DEBUG = process.env.SDK_DEBUG === "1" || process.env.SDK_DEBUG === "true";
@@ -3869,6 +3908,30 @@ function prepareParams(params: any[]): any[] {
   });
 }
 
+/**
+ * Parse vector columns in retrieved rows.
+ * pgvector returns vectors as strings (e.g., "[1.5,2.5,3.5]") which need to be
+ * parsed back to number[] to match TypeScript types.
+ */
+function parseVectorColumns(rows: any[], vectorColumns?: string[]): any[] {
+  if (!vectorColumns || vectorColumns.length === 0) return rows;
+
+  return rows.map(row => {
+    const parsed = { ...row };
+    for (const col of vectorColumns) {
+      if (parsed[col] !== null && parsed[col] !== undefined && typeof parsed[col] === 'string') {
+        try {
+          parsed[col] = JSON.parse(parsed[col]);
+        } catch (e) {
+          // If parsing fails, leave as string (shouldn't happen with valid vectors)
+          log.error(\`Failed to parse vector column "\${col}":, e\`);
+        }
+      }
+    }
+    return parsed;
+  });
+}
+
 /**
  * CREATE operation - Insert a new record
  */
@@ -3891,8 +3954,9 @@ export async function createRecord(
 
     log.debug("SQL:", text, "vals:", vals);
     const { rows } = await ctx.pg.query(text, prepareParams(vals));
+    const parsedRows = parseVectorColumns(rows, ctx.vectorColumns);
 
-    return { data: rows[0] ?? null, status: rows[0] ? 201 : 500 };
+    return { data: parsedRows[0] ?? null, status: parsedRows[0] ? 201 : 500 };
   } catch (e: any) {
     // Enhanced logging for JSON validation errors
     const errorMsg = e?.message ?? "";
@@ -3931,12 +3995,13 @@ export async function getByPk(
     log.debug(\`GET \${ctx.table} by PK:\`, pkValues, "SQL:", text);
 
     const { rows } = await ctx.pg.query(text, prepareParams(pkValues));
-
-    if (!rows[0]) {
+    const parsedRows = parseVectorColumns(rows, ctx.vectorColumns);
+
+    if (!parsedRows[0]) {
      return { data: null, status: 404 };
     }
-
-    return { data: rows[0], status: 200 };
+
+    return { data: parsedRows[0], status: 200 };
   } catch (e: any) {
     log.error(\`GET \${ctx.table} error:\`, e?.stack ?? e);
     return {
@@ -4322,12 +4387,13 @@ export async function listRecords(
     log.debug(\`LIST \${ctx.table} SQL:\`, text, "params:", allParams);
 
     const { rows } = await ctx.pg.query(text, prepareParams(allParams));
+    const parsedRows = parseVectorColumns(rows, ctx.vectorColumns);
 
     // Calculate hasMore
     const hasMore = offset + limit < total;
 
     const metadata = {
-      data: rows,
+      data: parsedRows,
       total,
       limit,
       offset,
@@ -4392,12 +4458,13 @@ export async function updateRecord(
 
     log.debug(\`PATCH \${ctx.table} SQL:\`, text, "params:", params);
     const { rows } = await ctx.pg.query(text, prepareParams(params));
-
-    if (!rows[0]) {
+    const parsedRows = parseVectorColumns(rows, ctx.vectorColumns);
+
+    if (!parsedRows[0]) {
       return { data: null, status: 404 };
     }
-
-    return { data: rows[0], status: 200 };
+
+    return { data: parsedRows[0], status: 200 };
   } catch (e: any) {
     // Enhanced logging for JSON validation errors
     const errorMsg = e?.message ?? "";
@@ -4441,12 +4508,13 @@ export async function deleteRecord(
 
     log.debug(\`DELETE \${ctx.softDeleteColumn ? '(soft)' : ''} \${ctx.table} SQL:\`, text, "pk:", pkValues);
     const { rows } = await ctx.pg.query(text, prepareParams(pkValues));
-
-    if (!rows[0]) {
+    const parsedRows = parseVectorColumns(rows, ctx.vectorColumns);
+
+    if (!parsedRows[0]) {
       return { data: null, status: 404 };
     }
-
-    return { data: rows[0], status: 200 };
+
+    return { data: parsedRows[0], status: 200 };
   } catch (e: any) {
     log.error(\`DELETE \${ctx.table} error:\`, e?.stack ?? e);
     return {
@@ -5316,7 +5384,7 @@ async function generate(configPath) {
   if (serverFramework === "hono") {
     files.push({
       path: join(serverDir, "router.ts"),
-      content: emitHonoRouter(Object.values(model.tables), getAuthStrategy(normalizedAuth) !== "none", cfg.useJsExtensions)
+      content: emitHonoRouter(Object.values(model.tables), getAuthStrategy(normalizedAuth) !== "none", cfg.useJsExtensions, cfg.pullToken)
     });
   }
   const { generateUnifiedContract: generateUnifiedContract2, generateUnifiedContractMarkdown: generateUnifiedContractMarkdown2 } = await Promise.resolve().then(() => (init_emit_sdk_contract(), exports_emit_sdk_contract));
package/dist/types.d.ts CHANGED
@@ -29,6 +29,7 @@ export interface Config {
   serverFramework?: "hono" | "express" | "fastify";
   apiPathPrefix?: string;
   auth?: AuthConfigInput;
+  pullToken?: string;
   pull?: PullConfig;
   useJsExtensions?: boolean;
   useJsExtensionsClient?: boolean;
@@ -41,6 +42,6 @@ export interface Config {
 export interface PullConfig {
   from: string;
   output?: string;
-  token?: string;
+  pullToken?: string;
 }
 export declare function normalizeAuthConfig(input: AuthConfigInput | undefined): AuthConfig | undefined;
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "postgresdk",
-  "version": "0.16.6",
+  "version": "0.16.7",
   "description": "Generate a typed server/client SDK from a Postgres schema (includes, Zod, Hono).",
   "type": "module",
   "bin": {