postgresdk 0.16.5 → 0.16.7

This diff shows the changes between two publicly available versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
package/README.md CHANGED
@@ -163,6 +163,9 @@ export default {
  }
  },

+ // SDK endpoint protection (optional)
+ pullToken: "env:POSTGRESDK_PULL_TOKEN", // Protect /_psdk/* endpoints (if not set, public)
+
  // Test generation (optional)
  tests: {
  generate: true, // Generate test files
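The new `pullToken` option belongs in the server-side config that generates the API. A minimal sketch of how it slots in (surrounding options elided; the object shape follows the README excerpt above):

```ts
// postgresdk.config.ts (server side) — illustrative sketch only
export default {
  // ...connection, auth, and output options omitted...

  // The "env:" prefix makes the generated router read POSTGRESDK_PULL_TOKEN from
  // process.env at runtime instead of baking the secret into generated code.
  // If pullToken is absent, the /_psdk/* endpoints remain public.
  pullToken: "env:POSTGRESDK_PULL_TOKEN",
};
```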
@@ -515,7 +518,7 @@ export default {
  pull: {
  from: "https://api.myapp.com",
  output: "./src/sdk",
- token: process.env.API_TOKEN // Optional auth for protected APIs
+ pullToken: "env:POSTGRESDK_PULL_TOKEN" // Optional: if server has pullToken set
  }
  };
  ```
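On the consuming project, the README now points `pull.pullToken` at the same environment variable (the old `pull.token` field is gone). A matching sketch, using the README's placeholder values:

```ts
// postgresdk.config.ts (SDK side) — illustrative sketch only
export default {
  pull: {
    from: "https://api.myapp.com",
    output: "./src/sdk",
    // Only needed when the server sets pullToken; with the "env:" prefix the CLI
    // reads POSTGRESDK_PULL_TOKEN from the environment when `postgresdk pull` runs.
    pullToken: "env:POSTGRESDK_PULL_TOKEN",
  },
};
```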
package/dist/cli.js CHANGED
@@ -2242,13 +2242,14 @@ async function initCommand(args) {
  console.log(" DATABASE_URL=postgres://user:pass@localhost:5432/mydb");
  console.log(" API_KEY=your-secret-key");
  console.log(" JWT_SECRET=your-jwt-secret");
+ console.log(" POSTGRESDK_PULL_TOKEN=your-pull-token");
  }
  console.log(" 3. Run 'postgresdk generate' to create your SDK");
  } else {
  console.log(" 1. Edit postgresdk.config.ts with your API URL in pull.from");
  if (!hasEnv) {
  console.log(" 2. Consider creating a .env file if you need authentication:");
- console.log(" API_TOKEN=your-api-token");
+ console.log(" POSTGRESDK_PULL_TOKEN=your-pull-token");
  }
  console.log(" 3. Run 'postgresdk pull' to fetch your SDK");
  }
@@ -2395,6 +2396,21 @@ export default {
  // audience: "my-api", // Optional: validate 'aud' claim
  // }
  // },
+
+ // ========== SDK ENDPOINT PROTECTION ==========
+
+ /**
+ * Token for protecting /_psdk/* endpoints (SDK distribution and contract endpoints)
+ *
+ * When set, clients must provide this token via Authorization header when pulling SDK.
+ * If not set, /_psdk/* endpoints are publicly accessible.
+ *
+ * This is separate from the main auth strategy (JWT/API key) used for CRUD operations.
+ *
+ * Use "env:" prefix to read from environment variables:
+ * pullToken: "env:POSTGRESDK_PULL_TOKEN"
+ */
+ // pullToken: "env:POSTGRESDK_PULL_TOKEN",
  };
  `, CONFIG_TEMPLATE_SDK = `/**
  * PostgreSDK Configuration (SDK-Side)
@@ -2420,9 +2436,17 @@ export default {
  * Configuration for pulling SDK from a remote API
  */
  pull: {
- from: "https://api.myapp.com", // API URL to pull SDK from
- output: "./src/sdk", // Local directory for pulled SDK
- // token: process.env.API_TOKEN, // Optional authentication token
+ from: "https://api.myapp.com", // API URL to pull SDK from
+ output: "./src/sdk", // Local directory for pulled SDK
+
+ /**
+ * Authentication token for protected /_psdk/* endpoints
+ * Should match the server's pullToken configuration
+ *
+ * Use "env:" prefix to read from environment variables:
+ * pullToken: "env:POSTGRESDK_PULL_TOKEN"
+ */
+ // pullToken: "env:POSTGRESDK_PULL_TOKEN",
  },
  };
  `;
@@ -2463,7 +2487,7 @@ async function pullCommand(args) {
  const cliConfig = {
  from: args.find((a) => a.startsWith("--from="))?.split("=")[1],
  output: args.find((a) => a.startsWith("--output="))?.split("=")[1],
- token: args.find((a) => a.startsWith("--token="))?.split("=")[1]
+ pullToken: args.find((a) => a.startsWith("--pullToken="))?.split("=")[1]
  };
  const config = {
  output: "./src/sdk",
@@ -2479,10 +2503,19 @@ Options:`);
  console.error(" (then edit postgresdk.config.ts and run 'postgresdk pull')");
  process.exit(1);
  }
+ let resolvedToken = config.pullToken;
+ if (resolvedToken?.startsWith("env:")) {
+ const envVarName = resolvedToken.slice(4);
+ resolvedToken = process.env[envVarName];
+ if (!resolvedToken) {
+ console.error(`❌ Environment variable "${envVarName}" not set (referenced in pullToken config)`);
+ process.exit(1);
+ }
+ }
  console.log(`\uD83D\uDD04 Pulling SDK from ${config.from}`);
  console.log(`\uD83D\uDCC1 Output directory: ${config.output}`);
  try {
- const headers = config.token ? { Authorization: `Bearer ${config.token}` } : {};
+ const headers = resolvedToken ? { Authorization: `Bearer ${resolvedToken}` } : {};
  const manifestRes = await fetch(`${config.from}/_psdk/sdk/manifest`, { headers });
  if (!manifestRes.ok) {
  throw new Error(`Failed to fetch SDK manifest: ${manifestRes.status} ${manifestRes.statusText}`);
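Note that the CLI flag also changes here, from `--token=` to `--pullToken=`. The token handling added to `pullCommand` is worth restating outside the bundled output; a standalone sketch of the same behavior (the helper name is ours, and the CLI itself calls `process.exit(1)` rather than throwing):

```ts
// Sketch of the "env:" resolution performed before any /_psdk request is made.
function resolvePullToken(pullToken?: string): string | undefined {
  if (!pullToken?.startsWith("env:")) return pullToken; // literal token or unset
  const envVarName = pullToken.slice(4);
  const value = process.env[envVarName];
  if (!value) {
    throw new Error(`Environment variable "${envVarName}" not set (referenced in pullToken config)`);
  }
  return value;
}

// The resolved token is sent as a Bearer token on the pull requests:
const token = resolvePullToken("env:POSTGRESDK_PULL_TOKEN");
const headers: Record<string, string> = token ? { Authorization: `Bearer ${token}` } : {};
// await fetch(`${config.from}/_psdk/sdk/manifest`, { headers });
```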
@@ -2815,6 +2848,8 @@ function emitZod(table, opts, enums) {
  return `z.unknown()`;
  if (t === "date" || t.startsWith("timestamp"))
  return `z.string()`;
+ if (t === "vector" || t === "halfvec" || t === "sparsevec" || t === "bit")
+ return `z.array(z.number())`;
  if (t.startsWith("_"))
  return `z.array(${zFor(t.slice(1))})`;
  return `z.string()`;
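The practical effect on generated validation: pgvector-family columns now validate as numeric arrays instead of falling through to the `z.string()` fallback visible above. A hypothetical generated schema for a table with an `embedding vector(3)` column (table and column names are ours):

```ts
import { z } from "zod";

// Hypothetical shape of the emitted schema for:
//   CREATE TABLE documents (id text, embedding vector(3));
const DocumentsSchema = z.object({
  id: z.string(),
  embedding: z.array(z.number()), // previously fell through to z.string()
});

DocumentsSchema.parse({ id: "doc-1", embedding: [0.1, 0.2, 0.3] }); // passes
```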
@@ -2948,6 +2983,7 @@ function emitHonoRoutes(table, _graph, opts) {
  const fileTableName = table.name;
  const Type = pascal(table.name);
  const hasVectorColumns = table.columns.some((c) => isVectorType(c.pgType));
+ const vectorColumns = table.columns.filter((c) => isVectorType(c.pgType)).map((c) => c.name);
  const rawPk = table.pk;
  const pkCols = Array.isArray(rawPk) ? rawPk : rawPk ? [rawPk] : [];
  const safePkCols = pkCols.length ? pkCols : ["id"];
@@ -3008,7 +3044,8 @@ export function register${Type}Routes(app: Hono, deps: { pg: { query: (text: str
  table: "${fileTableName}",
  pkColumns: ${JSON.stringify(safePkCols)},
  softDeleteColumn: ${softDel ? `"${softDel}"` : "null"},
- includeMethodsDepth: ${opts.includeMethodsDepth}
+ includeMethodsDepth: ${opts.includeMethodsDepth}${vectorColumns.length > 0 ? `,
+ vectorColumns: ${JSON.stringify(vectorColumns)}` : ""}
  };
  ${hasAuth ? `
  // \uD83D\uDD10 Auth: protect all routes for this table
@@ -3990,6 +4027,8 @@ function tsTypeFor(pgType, opts, enums) {
  return "string";
  if (t === "json" || t === "jsonb")
  return "unknown";
+ if (t === "vector" || t === "halfvec" || t === "sparsevec" || t === "bit")
+ return "number[]";
  return "string";
  }
  function isJsonbType2(pgType) {
@@ -4438,9 +4477,18 @@ export async function authMiddleware(c: Context, next: Next) {

  // src/emit-router-hono.ts
  init_utils();
- function emitHonoRouter(tables, hasAuth, useJsExtensions) {
+ function emitHonoRouter(tables, hasAuth, useJsExtensions, pullToken) {
  const tableNames = tables.map((t) => t.name).sort();
  const ext = useJsExtensions ? ".js" : "";
+ let resolvedPullToken;
+ if (pullToken) {
+ if (pullToken.startsWith("env:")) {
+ const envVarName = pullToken.slice(4);
+ resolvedPullToken = `process.env.${envVarName}`;
+ } else {
+ resolvedPullToken = JSON.stringify(pullToken);
+ }
+ }
  const imports = tableNames.map((name) => {
  const Type = pascal(name);
  return `import { register${Type}Routes } from "./routes/${name}${ext}";`;
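Because `emitHonoRouter` emits source text, `resolvedPullToken` is a code expression to splice into the generated router, not a secret value. Restated as a small standalone function (the function name is ours):

```ts
// What the emitter embeds as `expectedToken` in the generated router file.
function expectedTokenExpr(pullToken?: string): string | undefined {
  if (!pullToken) return undefined;
  if (pullToken.startsWith("env:")) {
    // Reference resolved when the generated server runs; the secret stays out of the file.
    return `process.env.${pullToken.slice(4)}`;
  }
  // Literal tokens are baked into the generated source as a JSON string literal.
  return JSON.stringify(pullToken);
}

expectedTokenExpr("env:POSTGRESDK_PULL_TOKEN"); // => 'process.env.POSTGRESDK_PULL_TOKEN'
expectedTokenExpr("s3cr3t");                    // => '"s3cr3t"'
```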
@@ -4527,10 +4575,33 @@ export function createRouter(
  }
  ): Hono {
  const router = new Hono();
-
+
  // Register table routes
  ${registrations}
+ ${pullToken ? `
+ // \uD83D\uDD10 Protect /_psdk/* endpoints with pullToken
+ router.use("/_psdk/*", async (c, next) => {
+ const authHeader = c.req.header("Authorization");
+ const expectedToken = ${resolvedPullToken};

+ if (!expectedToken) {
+ // Token not configured in environment - reject request
+ return c.json({ error: "SDK endpoints are protected but token not configured" }, 500);
+ }
+
+ if (!authHeader || !authHeader.startsWith("Bearer ")) {
+ return c.json({ error: "Missing or invalid Authorization header" }, 401);
+ }
+
+ const providedToken = authHeader.slice(7); // Remove "Bearer " prefix
+
+ if (providedToken !== expectedToken) {
+ return c.json({ error: "Invalid pull token" }, 401);
+ }
+
+ await next();
+ });
+ ` : ""}
  // SDK distribution endpoints
  router.get("/_psdk/sdk/manifest", (c) => {
  return c.json({
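From a client's point of view, the generated guard accepts a standard Bearer header and rejects everything else before the SDK endpoints run. An illustrative call (URL is the README placeholder, token comes from the environment):

```ts
// Pulling the SDK manifest from a server generated with pullToken set.
const res = await fetch("https://api.myapp.com/_psdk/sdk/manifest", {
  headers: { Authorization: `Bearer ${process.env.POSTGRESDK_PULL_TOKEN ?? ""}` },
});

if (res.status === 401) throw new Error("Pull token missing or invalid");
if (res.status === 500) throw new Error("Server protects /_psdk/* but has no token configured");
const manifest = await res.json();
```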
@@ -4674,6 +4745,7 @@ export interface OperationContext {
  pkColumns: string[];
  softDeleteColumn?: string | null;
  includeMethodsDepth: number;
+ vectorColumns?: string[];
  }

  const DEBUG = process.env.SDK_DEBUG === "1" || process.env.SDK_DEBUG === "true";
@@ -4682,6 +4754,46 @@ const log = {
  error: (...args: any[]) => console.error("[sdk]", ...args),
  };

+ /**
+ * Prepare query parameters for PostgreSQL.
+ * The pg library should handle JSONB automatically, but there are edge cases
+ * where explicit stringification is needed (e.g., certain pg versions or when
+ * objects have been through serialization/deserialization).
+ */
+ function prepareParams(params: any[]): any[] {
+ return params.map(p => {
+ if (p === null || p === undefined) return p;
+ // Stringify objects/arrays for JSONB - while pg should handle this automatically,
+ // we've observed cases where it fails without explicit stringification
+ if (typeof p === 'object') return JSON.stringify(p);
+ return p;
+ });
+ }
+
+ /**
+ * Parse vector columns in retrieved rows.
+ * pgvector returns vectors as strings (e.g., "[1.5,2.5,3.5]") which need to be
+ * parsed back to number[] to match TypeScript types.
+ */
+ function parseVectorColumns(rows: any[], vectorColumns?: string[]): any[] {
+ if (!vectorColumns || vectorColumns.length === 0) return rows;
+
+ return rows.map(row => {
+ const parsed = { ...row };
+ for (const col of vectorColumns) {
+ if (parsed[col] !== null && parsed[col] !== undefined && typeof parsed[col] === 'string') {
+ try {
+ parsed[col] = JSON.parse(parsed[col]);
+ } catch (e) {
+ // If parsing fails, leave as string (shouldn't happen with valid vectors)
+ log.error(\`Failed to parse vector column "\${col}":, e\`);
+ }
+ }
+ }
+ return parsed;
+ });
+ }
+
  /**
  * CREATE operation - Insert a new record
  */
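A quick illustration of the two helpers above, assuming they are in scope (sample values only):

```ts
// prepareParams: objects/arrays are stringified so they bind cleanly to JSONB parameters.
prepareParams([42, { tags: ["a", "b"] }, null]);
// => [42, '{"tags":["a","b"]}', null]

// parseVectorColumns: pgvector's text form "[1.5,2.5,3.5]" is turned back into number[].
parseVectorColumns([{ id: 1, embedding: "[1.5,2.5,3.5]" }], ["embedding"]);
// => [{ id: 1, embedding: [1.5, 2.5, 3.5] }]
```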
@@ -4701,11 +4813,12 @@ export async function createRecord(
  const text = \`INSERT INTO "\${ctx.table}" (\${cols.map(c => '"' + c + '"').join(", ")})
  VALUES (\${placeholders})
  RETURNING *\`;
-
+
  log.debug("SQL:", text, "vals:", vals);
- const { rows } = await ctx.pg.query(text, vals);
+ const { rows } = await ctx.pg.query(text, prepareParams(vals));
+ const parsedRows = parseVectorColumns(rows, ctx.vectorColumns);

- return { data: rows[0] ?? null, status: rows[0] ? 201 : 500 };
+ return { data: parsedRows[0] ?? null, status: parsedRows[0] ? 201 : 500 };
  } catch (e: any) {
  // Enhanced logging for JSON validation errors
  const errorMsg = e?.message ?? "";
@@ -4742,14 +4855,15 @@ export async function getByPk(

  const text = \`SELECT * FROM "\${ctx.table}" WHERE \${wherePkSql} LIMIT 1\`;
  log.debug(\`GET \${ctx.table} by PK:\`, pkValues, "SQL:", text);
-
- const { rows } = await ctx.pg.query(text, pkValues);
-
- if (!rows[0]) {
+
+ const { rows } = await ctx.pg.query(text, prepareParams(pkValues));
+ const parsedRows = parseVectorColumns(rows, ctx.vectorColumns);
+
+ if (!parsedRows[0]) {
  return { data: null, status: 404 };
  }
-
- return { data: rows[0], status: 200 };
+
+ return { data: parsedRows[0], status: 200 };
  } catch (e: any) {
  log.error(\`GET \${ctx.table} error:\`, e?.stack ?? e);
  return {
@@ -5127,20 +5241,21 @@ export async function listRecords(
  // Get total count for pagination
  const countText = \`SELECT COUNT(*) FROM "\${ctx.table}" \${countWhereSQL}\`;
  log.debug(\`LIST \${ctx.table} COUNT SQL:\`, countText, "params:", countParams);
- const countResult = await ctx.pg.query(countText, countParams);
+ const countResult = await ctx.pg.query(countText, prepareParams(countParams));
  const total = parseInt(countResult.rows[0].count, 10);

  // Get paginated data
  const text = \`SELECT \${selectClause} FROM "\${ctx.table}" \${whereSQL} \${orderBySQL} LIMIT \${limitParam} OFFSET \${offsetParam}\`;
  log.debug(\`LIST \${ctx.table} SQL:\`, text, "params:", allParams);

- const { rows } = await ctx.pg.query(text, allParams);
+ const { rows } = await ctx.pg.query(text, prepareParams(allParams));
+ const parsedRows = parseVectorColumns(rows, ctx.vectorColumns);

  // Calculate hasMore
  const hasMore = offset + limit < total;

  const metadata = {
- data: rows,
+ data: parsedRows,
  total,
  limit,
  offset,
@@ -5199,18 +5314,19 @@ export async function updateRecord(
  const setSql = Object.keys(filteredData)
  .map((k, i) => \`"\${k}" = $\${i + pkValues.length + 1}\`)
  .join(", ");
-
+
  const text = \`UPDATE "\${ctx.table}" SET \${setSql} WHERE \${wherePkSql} RETURNING *\`;
  const params = [...pkValues, ...Object.values(filteredData)];
-
+
  log.debug(\`PATCH \${ctx.table} SQL:\`, text, "params:", params);
- const { rows } = await ctx.pg.query(text, params);
-
- if (!rows[0]) {
+ const { rows } = await ctx.pg.query(text, prepareParams(params));
+ const parsedRows = parseVectorColumns(rows, ctx.vectorColumns);
+
+ if (!parsedRows[0]) {
  return { data: null, status: 404 };
  }
-
- return { data: rows[0], status: 200 };
+
+ return { data: parsedRows[0], status: 200 };
  } catch (e: any) {
  // Enhanced logging for JSON validation errors
  const errorMsg = e?.message ?? "";
@@ -5251,15 +5367,16 @@ export async function deleteRecord(
  const text = ctx.softDeleteColumn
  ? \`UPDATE "\${ctx.table}" SET "\${ctx.softDeleteColumn}" = NOW() WHERE \${wherePkSql} RETURNING *\`
  : \`DELETE FROM "\${ctx.table}" WHERE \${wherePkSql} RETURNING *\`;
-
+
  log.debug(\`DELETE \${ctx.softDeleteColumn ? '(soft)' : ''} \${ctx.table} SQL:\`, text, "pk:", pkValues);
- const { rows } = await ctx.pg.query(text, pkValues);
-
- if (!rows[0]) {
+ const { rows } = await ctx.pg.query(text, prepareParams(pkValues));
+ const parsedRows = parseVectorColumns(rows, ctx.vectorColumns);
+
+ if (!parsedRows[0]) {
  return { data: null, status: 404 };
  }
-
- return { data: rows[0], status: 200 };
+
+ return { data: parsedRows[0], status: 200 };
  } catch (e: any) {
  log.error(\`DELETE \${ctx.table} error:\`, e?.stack ?? e);
  return {
@@ -6129,7 +6246,7 @@ async function generate(configPath) {
  if (serverFramework === "hono") {
  files.push({
  path: join(serverDir, "router.ts"),
- content: emitHonoRouter(Object.values(model.tables), getAuthStrategy(normalizedAuth) !== "none", cfg.useJsExtensions)
+ content: emitHonoRouter(Object.values(model.tables), getAuthStrategy(normalizedAuth) !== "none", cfg.useJsExtensions, cfg.pullToken)
  });
  }
  const { generateUnifiedContract: generateUnifiedContract2, generateUnifiedContractMarkdown: generateUnifiedContractMarkdown2 } = await Promise.resolve().then(() => (init_emit_sdk_contract(), exports_emit_sdk_contract));
@@ -2,4 +2,4 @@ import type { Table } from "./introspect";
  /**
  * Emits the Hono server router file that exports helper functions for route registration
  */
- export declare function emitHonoRouter(tables: Table[], hasAuth: boolean, useJsExtensions?: boolean): string;
+ export declare function emitHonoRouter(tables: Table[], hasAuth: boolean, useJsExtensions?: boolean, pullToken?: string): string;
package/dist/index.js CHANGED
@@ -1986,6 +1986,8 @@ function emitZod(table, opts, enums) {
  return `z.unknown()`;
  if (t === "date" || t.startsWith("timestamp"))
  return `z.string()`;
+ if (t === "vector" || t === "halfvec" || t === "sparsevec" || t === "bit")
+ return `z.array(z.number())`;
  if (t.startsWith("_"))
  return `z.array(${zFor(t.slice(1))})`;
  return `z.string()`;
@@ -2119,6 +2121,7 @@ function emitHonoRoutes(table, _graph, opts) {
  const fileTableName = table.name;
  const Type = pascal(table.name);
  const hasVectorColumns = table.columns.some((c) => isVectorType(c.pgType));
+ const vectorColumns = table.columns.filter((c) => isVectorType(c.pgType)).map((c) => c.name);
  const rawPk = table.pk;
  const pkCols = Array.isArray(rawPk) ? rawPk : rawPk ? [rawPk] : [];
  const safePkCols = pkCols.length ? pkCols : ["id"];
@@ -2179,7 +2182,8 @@ export function register${Type}Routes(app: Hono, deps: { pg: { query: (text: str
  table: "${fileTableName}",
  pkColumns: ${JSON.stringify(safePkCols)},
  softDeleteColumn: ${softDel ? `"${softDel}"` : "null"},
- includeMethodsDepth: ${opts.includeMethodsDepth}
+ includeMethodsDepth: ${opts.includeMethodsDepth}${vectorColumns.length > 0 ? `,
+ vectorColumns: ${JSON.stringify(vectorColumns)}` : ""}
  };
  ${hasAuth ? `
  // \uD83D\uDD10 Auth: protect all routes for this table
@@ -3161,6 +3165,8 @@ function tsTypeFor(pgType, opts, enums) {
  return "string";
  if (t === "json" || t === "jsonb")
  return "unknown";
+ if (t === "vector" || t === "halfvec" || t === "sparsevec" || t === "bit")
+ return "number[]";
  return "string";
  }
  function isJsonbType2(pgType) {
@@ -3609,9 +3615,18 @@ export async function authMiddleware(c: Context, next: Next) {

  // src/emit-router-hono.ts
  init_utils();
- function emitHonoRouter(tables, hasAuth, useJsExtensions) {
+ function emitHonoRouter(tables, hasAuth, useJsExtensions, pullToken) {
  const tableNames = tables.map((t) => t.name).sort();
  const ext = useJsExtensions ? ".js" : "";
+ let resolvedPullToken;
+ if (pullToken) {
+ if (pullToken.startsWith("env:")) {
+ const envVarName = pullToken.slice(4);
+ resolvedPullToken = `process.env.${envVarName}`;
+ } else {
+ resolvedPullToken = JSON.stringify(pullToken);
+ }
+ }
  const imports = tableNames.map((name) => {
  const Type = pascal(name);
  return `import { register${Type}Routes } from "./routes/${name}${ext}";`;
@@ -3698,10 +3713,33 @@ export function createRouter(
  }
  ): Hono {
  const router = new Hono();
-
+
  // Register table routes
  ${registrations}
+ ${pullToken ? `
+ // \uD83D\uDD10 Protect /_psdk/* endpoints with pullToken
+ router.use("/_psdk/*", async (c, next) => {
+ const authHeader = c.req.header("Authorization");
+ const expectedToken = ${resolvedPullToken};
+
+ if (!expectedToken) {
+ // Token not configured in environment - reject request
+ return c.json({ error: "SDK endpoints are protected but token not configured" }, 500);
+ }

+ if (!authHeader || !authHeader.startsWith("Bearer ")) {
+ return c.json({ error: "Missing or invalid Authorization header" }, 401);
+ }
+
+ const providedToken = authHeader.slice(7); // Remove "Bearer " prefix
+
+ if (providedToken !== expectedToken) {
+ return c.json({ error: "Invalid pull token" }, 401);
+ }
+
+ await next();
+ });
+ ` : ""}
  // SDK distribution endpoints
  router.get("/_psdk/sdk/manifest", (c) => {
  return c.json({
@@ -3845,6 +3883,7 @@ export interface OperationContext {
  pkColumns: string[];
  softDeleteColumn?: string | null;
  includeMethodsDepth: number;
+ vectorColumns?: string[];
  }

  const DEBUG = process.env.SDK_DEBUG === "1" || process.env.SDK_DEBUG === "true";
@@ -3853,6 +3892,46 @@ const log = {
  error: (...args: any[]) => console.error("[sdk]", ...args),
  };

+ /**
+ * Prepare query parameters for PostgreSQL.
+ * The pg library should handle JSONB automatically, but there are edge cases
+ * where explicit stringification is needed (e.g., certain pg versions or when
+ * objects have been through serialization/deserialization).
+ */
+ function prepareParams(params: any[]): any[] {
+ return params.map(p => {
+ if (p === null || p === undefined) return p;
+ // Stringify objects/arrays for JSONB - while pg should handle this automatically,
+ // we've observed cases where it fails without explicit stringification
+ if (typeof p === 'object') return JSON.stringify(p);
+ return p;
+ });
+ }
+
+ /**
+ * Parse vector columns in retrieved rows.
+ * pgvector returns vectors as strings (e.g., "[1.5,2.5,3.5]") which need to be
+ * parsed back to number[] to match TypeScript types.
+ */
+ function parseVectorColumns(rows: any[], vectorColumns?: string[]): any[] {
+ if (!vectorColumns || vectorColumns.length === 0) return rows;
+
+ return rows.map(row => {
+ const parsed = { ...row };
+ for (const col of vectorColumns) {
+ if (parsed[col] !== null && parsed[col] !== undefined && typeof parsed[col] === 'string') {
+ try {
+ parsed[col] = JSON.parse(parsed[col]);
+ } catch (e) {
+ // If parsing fails, leave as string (shouldn't happen with valid vectors)
+ log.error(\`Failed to parse vector column "\${col}":, e\`);
+ }
+ }
+ }
+ return parsed;
+ });
+ }
+
  /**
  * CREATE operation - Insert a new record
  */
@@ -3872,11 +3951,12 @@ export async function createRecord(
  const text = \`INSERT INTO "\${ctx.table}" (\${cols.map(c => '"' + c + '"').join(", ")})
  VALUES (\${placeholders})
  RETURNING *\`;
-
+
  log.debug("SQL:", text, "vals:", vals);
- const { rows } = await ctx.pg.query(text, vals);
+ const { rows } = await ctx.pg.query(text, prepareParams(vals));
+ const parsedRows = parseVectorColumns(rows, ctx.vectorColumns);

- return { data: rows[0] ?? null, status: rows[0] ? 201 : 500 };
+ return { data: parsedRows[0] ?? null, status: parsedRows[0] ? 201 : 500 };
  } catch (e: any) {
  // Enhanced logging for JSON validation errors
  const errorMsg = e?.message ?? "";
@@ -3913,14 +3993,15 @@ export async function getByPk(

  const text = \`SELECT * FROM "\${ctx.table}" WHERE \${wherePkSql} LIMIT 1\`;
  log.debug(\`GET \${ctx.table} by PK:\`, pkValues, "SQL:", text);
-
- const { rows } = await ctx.pg.query(text, pkValues);
-
- if (!rows[0]) {
+
+ const { rows } = await ctx.pg.query(text, prepareParams(pkValues));
+ const parsedRows = parseVectorColumns(rows, ctx.vectorColumns);
+
+ if (!parsedRows[0]) {
  return { data: null, status: 404 };
  }
-
- return { data: rows[0], status: 200 };
+
+ return { data: parsedRows[0], status: 200 };
  } catch (e: any) {
  log.error(\`GET \${ctx.table} error:\`, e?.stack ?? e);
  return {
@@ -4298,20 +4379,21 @@ export async function listRecords(
  // Get total count for pagination
  const countText = \`SELECT COUNT(*) FROM "\${ctx.table}" \${countWhereSQL}\`;
  log.debug(\`LIST \${ctx.table} COUNT SQL:\`, countText, "params:", countParams);
- const countResult = await ctx.pg.query(countText, countParams);
+ const countResult = await ctx.pg.query(countText, prepareParams(countParams));
  const total = parseInt(countResult.rows[0].count, 10);

  // Get paginated data
  const text = \`SELECT \${selectClause} FROM "\${ctx.table}" \${whereSQL} \${orderBySQL} LIMIT \${limitParam} OFFSET \${offsetParam}\`;
  log.debug(\`LIST \${ctx.table} SQL:\`, text, "params:", allParams);

- const { rows } = await ctx.pg.query(text, allParams);
+ const { rows } = await ctx.pg.query(text, prepareParams(allParams));
+ const parsedRows = parseVectorColumns(rows, ctx.vectorColumns);

  // Calculate hasMore
  const hasMore = offset + limit < total;

  const metadata = {
- data: rows,
+ data: parsedRows,
  total,
  limit,
  offset,
@@ -4370,18 +4452,19 @@ export async function updateRecord(
  const setSql = Object.keys(filteredData)
  .map((k, i) => \`"\${k}" = $\${i + pkValues.length + 1}\`)
  .join(", ");
-
+
  const text = \`UPDATE "\${ctx.table}" SET \${setSql} WHERE \${wherePkSql} RETURNING *\`;
  const params = [...pkValues, ...Object.values(filteredData)];
-
+
  log.debug(\`PATCH \${ctx.table} SQL:\`, text, "params:", params);
- const { rows } = await ctx.pg.query(text, params);
-
- if (!rows[0]) {
+ const { rows } = await ctx.pg.query(text, prepareParams(params));
+ const parsedRows = parseVectorColumns(rows, ctx.vectorColumns);
+
+ if (!parsedRows[0]) {
  return { data: null, status: 404 };
  }
-
- return { data: rows[0], status: 200 };
+
+ return { data: parsedRows[0], status: 200 };
  } catch (e: any) {
  // Enhanced logging for JSON validation errors
  const errorMsg = e?.message ?? "";
@@ -4422,15 +4505,16 @@ export async function deleteRecord(
  const text = ctx.softDeleteColumn
  ? \`UPDATE "\${ctx.table}" SET "\${ctx.softDeleteColumn}" = NOW() WHERE \${wherePkSql} RETURNING *\`
  : \`DELETE FROM "\${ctx.table}" WHERE \${wherePkSql} RETURNING *\`;
-
+
  log.debug(\`DELETE \${ctx.softDeleteColumn ? '(soft)' : ''} \${ctx.table} SQL:\`, text, "pk:", pkValues);
- const { rows } = await ctx.pg.query(text, pkValues);
-
- if (!rows[0]) {
+ const { rows } = await ctx.pg.query(text, prepareParams(pkValues));
+ const parsedRows = parseVectorColumns(rows, ctx.vectorColumns);
+
+ if (!parsedRows[0]) {
  return { data: null, status: 404 };
  }
-
- return { data: rows[0], status: 200 };
+
+ return { data: parsedRows[0], status: 200 };
  } catch (e: any) {
  log.error(\`DELETE \${ctx.table} error:\`, e?.stack ?? e);
  return {
@@ -5300,7 +5384,7 @@ async function generate(configPath) {
  if (serverFramework === "hono") {
  files.push({
  path: join(serverDir, "router.ts"),
- content: emitHonoRouter(Object.values(model.tables), getAuthStrategy(normalizedAuth) !== "none", cfg.useJsExtensions)
+ content: emitHonoRouter(Object.values(model.tables), getAuthStrategy(normalizedAuth) !== "none", cfg.useJsExtensions, cfg.pullToken)
  });
  }
  const { generateUnifiedContract: generateUnifiedContract2, generateUnifiedContractMarkdown: generateUnifiedContractMarkdown2 } = await Promise.resolve().then(() => (init_emit_sdk_contract(), exports_emit_sdk_contract));
package/dist/types.d.ts CHANGED
@@ -29,6 +29,7 @@ export interface Config {
  serverFramework?: "hono" | "express" | "fastify";
  apiPathPrefix?: string;
  auth?: AuthConfigInput;
+ pullToken?: string;
  pull?: PullConfig;
  useJsExtensions?: boolean;
  useJsExtensionsClient?: boolean;
@@ -41,6 +42,6 @@ export interface Config {
  export interface PullConfig {
  from: string;
  output?: string;
- token?: string;
+ pullToken?: string;
  }
  export declare function normalizeAuthConfig(input: AuthConfigInput | undefined): AuthConfig | undefined;
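Putting the type changes together, a config touching only the fields added or renamed in this release might look like the sketch below. `Partial<Config>` is used because the rest of the interface is omitted, and the import path is an assumption (the interface is declared in dist/types.d.ts):

```ts
import type { Config } from "postgresdk"; // import path assumed

const config: Partial<Config> = {
  // Server side: protects /_psdk/* on the generated router.
  pullToken: "env:POSTGRESDK_PULL_TOKEN",
  // SDK side: pull.token is now pull.pullToken and should match the server's value.
  pull: {
    from: "https://api.myapp.com",
    output: "./src/sdk",
    pullToken: "env:POSTGRESDK_PULL_TOKEN",
  },
};

export default config;
```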
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "postgresdk",
- "version": "0.16.5",
+ "version": "0.16.7",
  "description": "Generate a typed server/client SDK from a Postgres schema (includes, Zod, Hono).",
  "type": "module",
  "bin": {