@neverinfamous/postgres-mcp 1.2.0 → 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (293)
  1. package/README.md +202 -148
  2. package/dist/__tests__/benchmarks/codemode.bench.d.ts +10 -0
  3. package/dist/__tests__/benchmarks/codemode.bench.d.ts.map +1 -0
  4. package/dist/__tests__/benchmarks/codemode.bench.js +159 -0
  5. package/dist/__tests__/benchmarks/codemode.bench.js.map +1 -0
  6. package/dist/__tests__/benchmarks/connection-pool.bench.d.ts +10 -0
  7. package/dist/__tests__/benchmarks/connection-pool.bench.d.ts.map +1 -0
  8. package/dist/__tests__/benchmarks/connection-pool.bench.js +123 -0
  9. package/dist/__tests__/benchmarks/connection-pool.bench.js.map +1 -0
  10. package/dist/__tests__/benchmarks/handler-dispatch.bench.d.ts +11 -0
  11. package/dist/__tests__/benchmarks/handler-dispatch.bench.d.ts.map +1 -0
  12. package/dist/__tests__/benchmarks/handler-dispatch.bench.js +199 -0
  13. package/dist/__tests__/benchmarks/handler-dispatch.bench.js.map +1 -0
  14. package/dist/__tests__/benchmarks/logger-sanitization.bench.d.ts +15 -0
  15. package/dist/__tests__/benchmarks/logger-sanitization.bench.d.ts.map +1 -0
  16. package/dist/__tests__/benchmarks/logger-sanitization.bench.js +155 -0
  17. package/dist/__tests__/benchmarks/logger-sanitization.bench.js.map +1 -0
  18. package/dist/__tests__/benchmarks/resource-prompts.bench.d.ts +10 -0
  19. package/dist/__tests__/benchmarks/resource-prompts.bench.d.ts.map +1 -0
  20. package/dist/__tests__/benchmarks/resource-prompts.bench.js +181 -0
  21. package/dist/__tests__/benchmarks/resource-prompts.bench.js.map +1 -0
  22. package/dist/__tests__/benchmarks/schema-parsing.bench.d.ts +11 -0
  23. package/dist/__tests__/benchmarks/schema-parsing.bench.d.ts.map +1 -0
  24. package/dist/__tests__/benchmarks/schema-parsing.bench.js +209 -0
  25. package/dist/__tests__/benchmarks/schema-parsing.bench.js.map +1 -0
  26. package/dist/__tests__/benchmarks/tool-filtering.bench.d.ts +9 -0
  27. package/dist/__tests__/benchmarks/tool-filtering.bench.d.ts.map +1 -0
  28. package/dist/__tests__/benchmarks/tool-filtering.bench.js +83 -0
  29. package/dist/__tests__/benchmarks/tool-filtering.bench.js.map +1 -0
  30. package/dist/__tests__/benchmarks/transport-auth.bench.d.ts +10 -0
  31. package/dist/__tests__/benchmarks/transport-auth.bench.d.ts.map +1 -0
  32. package/dist/__tests__/benchmarks/transport-auth.bench.js +128 -0
  33. package/dist/__tests__/benchmarks/transport-auth.bench.js.map +1 -0
  34. package/dist/__tests__/benchmarks/utilities.bench.d.ts +10 -0
  35. package/dist/__tests__/benchmarks/utilities.bench.d.ts.map +1 -0
  36. package/dist/__tests__/benchmarks/utilities.bench.js +164 -0
  37. package/dist/__tests__/benchmarks/utilities.bench.js.map +1 -0
  38. package/dist/adapters/DatabaseAdapter.d.ts.map +1 -1
  39. package/dist/adapters/DatabaseAdapter.js +12 -0
  40. package/dist/adapters/DatabaseAdapter.js.map +1 -1
  41. package/dist/adapters/postgresql/PostgresAdapter.d.ts.map +1 -1
  42. package/dist/adapters/postgresql/PostgresAdapter.js +56 -3
  43. package/dist/adapters/postgresql/PostgresAdapter.js.map +1 -1
  44. package/dist/adapters/postgresql/prompts/ltree.js +2 -2
  45. package/dist/adapters/postgresql/prompts/ltree.js.map +1 -1
  46. package/dist/adapters/postgresql/schemas/admin.d.ts +10 -5
  47. package/dist/adapters/postgresql/schemas/admin.d.ts.map +1 -1
  48. package/dist/adapters/postgresql/schemas/admin.js +10 -5
  49. package/dist/adapters/postgresql/schemas/admin.js.map +1 -1
  50. package/dist/adapters/postgresql/schemas/backup.d.ts +45 -27
  51. package/dist/adapters/postgresql/schemas/backup.d.ts.map +1 -1
  52. package/dist/adapters/postgresql/schemas/backup.js +64 -26
  53. package/dist/adapters/postgresql/schemas/backup.js.map +1 -1
  54. package/dist/adapters/postgresql/schemas/core.d.ts +53 -19
  55. package/dist/adapters/postgresql/schemas/core.d.ts.map +1 -1
  56. package/dist/adapters/postgresql/schemas/core.js +61 -17
  57. package/dist/adapters/postgresql/schemas/core.js.map +1 -1
  58. package/dist/adapters/postgresql/schemas/cron.d.ts +51 -32
  59. package/dist/adapters/postgresql/schemas/cron.d.ts.map +1 -1
  60. package/dist/adapters/postgresql/schemas/cron.js +64 -44
  61. package/dist/adapters/postgresql/schemas/cron.js.map +1 -1
  62. package/dist/adapters/postgresql/schemas/extensions.d.ts +224 -110
  63. package/dist/adapters/postgresql/schemas/extensions.d.ts.map +1 -1
  64. package/dist/adapters/postgresql/schemas/extensions.js +245 -96
  65. package/dist/adapters/postgresql/schemas/extensions.js.map +1 -1
  66. package/dist/adapters/postgresql/schemas/index.d.ts +7 -6
  67. package/dist/adapters/postgresql/schemas/index.d.ts.map +1 -1
  68. package/dist/adapters/postgresql/schemas/index.js +16 -8
  69. package/dist/adapters/postgresql/schemas/index.js.map +1 -1
  70. package/dist/adapters/postgresql/schemas/introspection.d.ts +445 -0
  71. package/dist/adapters/postgresql/schemas/introspection.d.ts.map +1 -0
  72. package/dist/adapters/postgresql/schemas/introspection.js +478 -0
  73. package/dist/adapters/postgresql/schemas/introspection.js.map +1 -0
  74. package/dist/adapters/postgresql/schemas/jsonb.d.ts +102 -42
  75. package/dist/adapters/postgresql/schemas/jsonb.d.ts.map +1 -1
  76. package/dist/adapters/postgresql/schemas/jsonb.js +125 -30
  77. package/dist/adapters/postgresql/schemas/jsonb.js.map +1 -1
  78. package/dist/adapters/postgresql/schemas/monitoring.d.ts +69 -36
  79. package/dist/adapters/postgresql/schemas/monitoring.d.ts.map +1 -1
  80. package/dist/adapters/postgresql/schemas/monitoring.js +98 -40
  81. package/dist/adapters/postgresql/schemas/monitoring.js.map +1 -1
  82. package/dist/adapters/postgresql/schemas/partitioning.d.ts +21 -24
  83. package/dist/adapters/postgresql/schemas/partitioning.d.ts.map +1 -1
  84. package/dist/adapters/postgresql/schemas/partitioning.js +26 -14
  85. package/dist/adapters/postgresql/schemas/partitioning.js.map +1 -1
  86. package/dist/adapters/postgresql/schemas/partman.d.ts +69 -0
  87. package/dist/adapters/postgresql/schemas/partman.d.ts.map +1 -1
  88. package/dist/adapters/postgresql/schemas/partman.js +46 -33
  89. package/dist/adapters/postgresql/schemas/partman.js.map +1 -1
  90. package/dist/adapters/postgresql/schemas/performance.d.ts +97 -49
  91. package/dist/adapters/postgresql/schemas/performance.d.ts.map +1 -1
  92. package/dist/adapters/postgresql/schemas/performance.js +139 -34
  93. package/dist/adapters/postgresql/schemas/performance.js.map +1 -1
  94. package/dist/adapters/postgresql/schemas/postgis.d.ts +20 -0
  95. package/dist/adapters/postgresql/schemas/postgis.d.ts.map +1 -1
  96. package/dist/adapters/postgresql/schemas/postgis.js +40 -0
  97. package/dist/adapters/postgresql/schemas/postgis.js.map +1 -1
  98. package/dist/adapters/postgresql/schemas/schema-mgmt.d.ts +50 -30
  99. package/dist/adapters/postgresql/schemas/schema-mgmt.d.ts.map +1 -1
  100. package/dist/adapters/postgresql/schemas/schema-mgmt.js +105 -33
  101. package/dist/adapters/postgresql/schemas/schema-mgmt.js.map +1 -1
  102. package/dist/adapters/postgresql/schemas/stats.d.ts +33 -20
  103. package/dist/adapters/postgresql/schemas/stats.d.ts.map +1 -1
  104. package/dist/adapters/postgresql/schemas/stats.js +36 -20
  105. package/dist/adapters/postgresql/schemas/stats.js.map +1 -1
  106. package/dist/adapters/postgresql/schemas/text-search.d.ts +34 -19
  107. package/dist/adapters/postgresql/schemas/text-search.d.ts.map +1 -1
  108. package/dist/adapters/postgresql/schemas/text-search.js +52 -13
  109. package/dist/adapters/postgresql/schemas/text-search.js.map +1 -1
  110. package/dist/adapters/postgresql/tools/admin.d.ts.map +1 -1
  111. package/dist/adapters/postgresql/tools/admin.js +272 -186
  112. package/dist/adapters/postgresql/tools/admin.js.map +1 -1
  113. package/dist/adapters/postgresql/tools/backup/dump.d.ts.map +1 -1
  114. package/dist/adapters/postgresql/tools/backup/dump.js +376 -350
  115. package/dist/adapters/postgresql/tools/backup/dump.js.map +1 -1
  116. package/dist/adapters/postgresql/tools/citext.d.ts.map +1 -1
  117. package/dist/adapters/postgresql/tools/citext.js +333 -243
  118. package/dist/adapters/postgresql/tools/citext.js.map +1 -1
  119. package/dist/adapters/postgresql/tools/codemode/index.d.ts.map +1 -1
  120. package/dist/adapters/postgresql/tools/codemode/index.js +2 -11
  121. package/dist/adapters/postgresql/tools/codemode/index.js.map +1 -1
  122. package/dist/adapters/postgresql/tools/core/convenience.d.ts +9 -1
  123. package/dist/adapters/postgresql/tools/core/convenience.d.ts.map +1 -1
  124. package/dist/adapters/postgresql/tools/core/convenience.js +101 -19
  125. package/dist/adapters/postgresql/tools/core/convenience.js.map +1 -1
  126. package/dist/adapters/postgresql/tools/core/error-helpers.d.ts +48 -0
  127. package/dist/adapters/postgresql/tools/core/error-helpers.d.ts.map +1 -0
  128. package/dist/adapters/postgresql/tools/core/error-helpers.js +256 -0
  129. package/dist/adapters/postgresql/tools/core/error-helpers.js.map +1 -0
  130. package/dist/adapters/postgresql/tools/core/health.d.ts.map +1 -1
  131. package/dist/adapters/postgresql/tools/core/health.js +18 -4
  132. package/dist/adapters/postgresql/tools/core/health.js.map +1 -1
  133. package/dist/adapters/postgresql/tools/core/indexes.d.ts.map +1 -1
  134. package/dist/adapters/postgresql/tools/core/indexes.js +48 -6
  135. package/dist/adapters/postgresql/tools/core/indexes.js.map +1 -1
  136. package/dist/adapters/postgresql/tools/core/objects.d.ts.map +1 -1
  137. package/dist/adapters/postgresql/tools/core/objects.js +104 -85
  138. package/dist/adapters/postgresql/tools/core/objects.js.map +1 -1
  139. package/dist/adapters/postgresql/tools/core/query.d.ts.map +1 -1
  140. package/dist/adapters/postgresql/tools/core/query.js +100 -42
  141. package/dist/adapters/postgresql/tools/core/query.js.map +1 -1
  142. package/dist/adapters/postgresql/tools/core/schemas.d.ts +51 -25
  143. package/dist/adapters/postgresql/tools/core/schemas.d.ts.map +1 -1
  144. package/dist/adapters/postgresql/tools/core/schemas.js +51 -25
  145. package/dist/adapters/postgresql/tools/core/schemas.js.map +1 -1
  146. package/dist/adapters/postgresql/tools/core/tables.d.ts.map +1 -1
  147. package/dist/adapters/postgresql/tools/core/tables.js +72 -32
  148. package/dist/adapters/postgresql/tools/core/tables.js.map +1 -1
  149. package/dist/adapters/postgresql/tools/cron.d.ts.map +1 -1
  150. package/dist/adapters/postgresql/tools/cron.js +333 -206
  151. package/dist/adapters/postgresql/tools/cron.js.map +1 -1
  152. package/dist/adapters/postgresql/tools/introspection.d.ts +15 -0
  153. package/dist/adapters/postgresql/tools/introspection.d.ts.map +1 -0
  154. package/dist/adapters/postgresql/tools/introspection.js +1682 -0
  155. package/dist/adapters/postgresql/tools/introspection.js.map +1 -0
  156. package/dist/adapters/postgresql/tools/jsonb/advanced.d.ts.map +1 -1
  157. package/dist/adapters/postgresql/tools/jsonb/advanced.js +394 -297
  158. package/dist/adapters/postgresql/tools/jsonb/advanced.js.map +1 -1
  159. package/dist/adapters/postgresql/tools/jsonb/basic.d.ts.map +1 -1
  160. package/dist/adapters/postgresql/tools/jsonb/basic.js +686 -398
  161. package/dist/adapters/postgresql/tools/jsonb/basic.js.map +1 -1
  162. package/dist/adapters/postgresql/tools/kcache.d.ts.map +1 -1
  163. package/dist/adapters/postgresql/tools/kcache.js +278 -246
  164. package/dist/adapters/postgresql/tools/kcache.js.map +1 -1
  165. package/dist/adapters/postgresql/tools/ltree.d.ts.map +1 -1
  166. package/dist/adapters/postgresql/tools/ltree.js +137 -38
  167. package/dist/adapters/postgresql/tools/ltree.js.map +1 -1
  168. package/dist/adapters/postgresql/tools/monitoring.d.ts.map +1 -1
  169. package/dist/adapters/postgresql/tools/monitoring.js +86 -55
  170. package/dist/adapters/postgresql/tools/monitoring.js.map +1 -1
  171. package/dist/adapters/postgresql/tools/partitioning.d.ts.map +1 -1
  172. package/dist/adapters/postgresql/tools/partitioning.js +79 -15
  173. package/dist/adapters/postgresql/tools/partitioning.js.map +1 -1
  174. package/dist/adapters/postgresql/tools/partman/management.d.ts.map +1 -1
  175. package/dist/adapters/postgresql/tools/partman/management.js +43 -56
  176. package/dist/adapters/postgresql/tools/partman/management.js.map +1 -1
  177. package/dist/adapters/postgresql/tools/partman/operations.d.ts.map +1 -1
  178. package/dist/adapters/postgresql/tools/partman/operations.js +137 -24
  179. package/dist/adapters/postgresql/tools/partman/operations.js.map +1 -1
  180. package/dist/adapters/postgresql/tools/performance/analysis.d.ts.map +1 -1
  181. package/dist/adapters/postgresql/tools/performance/analysis.js +276 -165
  182. package/dist/adapters/postgresql/tools/performance/analysis.js.map +1 -1
  183. package/dist/adapters/postgresql/tools/performance/explain.d.ts.map +1 -1
  184. package/dist/adapters/postgresql/tools/performance/explain.js +61 -21
  185. package/dist/adapters/postgresql/tools/performance/explain.js.map +1 -1
  186. package/dist/adapters/postgresql/tools/performance/monitoring.d.ts.map +1 -1
  187. package/dist/adapters/postgresql/tools/performance/monitoring.js +52 -12
  188. package/dist/adapters/postgresql/tools/performance/monitoring.js.map +1 -1
  189. package/dist/adapters/postgresql/tools/performance/optimization.d.ts.map +1 -1
  190. package/dist/adapters/postgresql/tools/performance/optimization.js +92 -81
  191. package/dist/adapters/postgresql/tools/performance/optimization.js.map +1 -1
  192. package/dist/adapters/postgresql/tools/performance/stats.d.ts.map +1 -1
  193. package/dist/adapters/postgresql/tools/performance/stats.js +182 -60
  194. package/dist/adapters/postgresql/tools/performance/stats.js.map +1 -1
  195. package/dist/adapters/postgresql/tools/pgcrypto.d.ts.map +1 -1
  196. package/dist/adapters/postgresql/tools/pgcrypto.js +277 -102
  197. package/dist/adapters/postgresql/tools/pgcrypto.js.map +1 -1
  198. package/dist/adapters/postgresql/tools/postgis/advanced.d.ts.map +1 -1
  199. package/dist/adapters/postgresql/tools/postgis/advanced.js +298 -230
  200. package/dist/adapters/postgresql/tools/postgis/advanced.js.map +1 -1
  201. package/dist/adapters/postgresql/tools/postgis/basic.d.ts.map +1 -1
  202. package/dist/adapters/postgresql/tools/postgis/basic.js +370 -251
  203. package/dist/adapters/postgresql/tools/postgis/basic.js.map +1 -1
  204. package/dist/adapters/postgresql/tools/postgis/standalone.d.ts.map +1 -1
  205. package/dist/adapters/postgresql/tools/postgis/standalone.js +135 -51
  206. package/dist/adapters/postgresql/tools/postgis/standalone.js.map +1 -1
  207. package/dist/adapters/postgresql/tools/schema.d.ts.map +1 -1
  208. package/dist/adapters/postgresql/tools/schema.js +580 -233
  209. package/dist/adapters/postgresql/tools/schema.js.map +1 -1
  210. package/dist/adapters/postgresql/tools/stats/advanced.d.ts.map +1 -1
  211. package/dist/adapters/postgresql/tools/stats/advanced.js +567 -506
  212. package/dist/adapters/postgresql/tools/stats/advanced.js.map +1 -1
  213. package/dist/adapters/postgresql/tools/stats/basic.d.ts.map +1 -1
  214. package/dist/adapters/postgresql/tools/stats/basic.js +340 -316
  215. package/dist/adapters/postgresql/tools/stats/basic.js.map +1 -1
  216. package/dist/adapters/postgresql/tools/text.d.ts.map +1 -1
  217. package/dist/adapters/postgresql/tools/text.js +690 -337
  218. package/dist/adapters/postgresql/tools/text.js.map +1 -1
  219. package/dist/adapters/postgresql/tools/transactions.d.ts.map +1 -1
  220. package/dist/adapters/postgresql/tools/transactions.js +157 -50
  221. package/dist/adapters/postgresql/tools/transactions.js.map +1 -1
  222. package/dist/adapters/postgresql/tools/vector/advanced.d.ts.map +1 -1
  223. package/dist/adapters/postgresql/tools/vector/advanced.js +18 -0
  224. package/dist/adapters/postgresql/tools/vector/advanced.js.map +1 -1
  225. package/dist/adapters/postgresql/tools/vector/basic.d.ts.map +1 -1
  226. package/dist/adapters/postgresql/tools/vector/basic.js +100 -53
  227. package/dist/adapters/postgresql/tools/vector/basic.js.map +1 -1
  228. package/dist/auth/auth-context.d.ts +28 -0
  229. package/dist/auth/auth-context.d.ts.map +1 -0
  230. package/dist/auth/auth-context.js +37 -0
  231. package/dist/auth/auth-context.js.map +1 -0
  232. package/dist/auth/scope-map.d.ts +20 -0
  233. package/dist/auth/scope-map.d.ts.map +1 -0
  234. package/dist/auth/scope-map.js +40 -0
  235. package/dist/auth/scope-map.js.map +1 -0
  236. package/dist/auth/scopes.d.ts.map +1 -1
  237. package/dist/auth/scopes.js +2 -0
  238. package/dist/auth/scopes.js.map +1 -1
  239. package/dist/cli.js +1 -1
  240. package/dist/cli.js.map +1 -1
  241. package/dist/codemode/api.d.ts +1 -0
  242. package/dist/codemode/api.d.ts.map +1 -1
  243. package/dist/codemode/api.js +35 -1
  244. package/dist/codemode/api.js.map +1 -1
  245. package/dist/codemode/index.d.ts +0 -2
  246. package/dist/codemode/index.d.ts.map +1 -1
  247. package/dist/codemode/index.js +0 -4
  248. package/dist/codemode/index.js.map +1 -1
  249. package/dist/codemode/sandbox.d.ts +14 -1
  250. package/dist/codemode/sandbox.d.ts.map +1 -1
  251. package/dist/codemode/sandbox.js +58 -19
  252. package/dist/codemode/sandbox.js.map +1 -1
  253. package/dist/codemode/types.d.ts.map +1 -1
  254. package/dist/codemode/types.js +3 -0
  255. package/dist/codemode/types.js.map +1 -1
  256. package/dist/constants/ServerInstructions.d.ts +5 -1
  257. package/dist/constants/ServerInstructions.d.ts.map +1 -1
  258. package/dist/constants/ServerInstructions.js +117 -31
  259. package/dist/constants/ServerInstructions.js.map +1 -1
  260. package/dist/filtering/ToolConstants.d.ts +22 -19
  261. package/dist/filtering/ToolConstants.d.ts.map +1 -1
  262. package/dist/filtering/ToolConstants.js +48 -37
  263. package/dist/filtering/ToolConstants.js.map +1 -1
  264. package/dist/filtering/ToolFilter.d.ts.map +1 -1
  265. package/dist/filtering/ToolFilter.js +10 -13
  266. package/dist/filtering/ToolFilter.js.map +1 -1
  267. package/dist/pool/ConnectionPool.js +1 -1
  268. package/dist/pool/ConnectionPool.js.map +1 -1
  269. package/dist/transports/http.d.ts +1 -0
  270. package/dist/transports/http.d.ts.map +1 -1
  271. package/dist/transports/http.js +75 -21
  272. package/dist/transports/http.js.map +1 -1
  273. package/dist/types/filtering.d.ts +2 -2
  274. package/dist/types/filtering.d.ts.map +1 -1
  275. package/dist/utils/icons.d.ts.map +1 -1
  276. package/dist/utils/icons.js +5 -0
  277. package/dist/utils/icons.js.map +1 -1
  278. package/dist/utils/where-clause.d.ts.map +1 -1
  279. package/dist/utils/where-clause.js +24 -0
  280. package/dist/utils/where-clause.js.map +1 -1
  281. package/package.json +20 -13
  282. package/dist/codemode/sandbox-factory.d.ts +0 -72
  283. package/dist/codemode/sandbox-factory.d.ts.map +0 -1
  284. package/dist/codemode/sandbox-factory.js +0 -88
  285. package/dist/codemode/sandbox-factory.js.map +0 -1
  286. package/dist/codemode/worker-sandbox.d.ts +0 -82
  287. package/dist/codemode/worker-sandbox.d.ts.map +0 -1
  288. package/dist/codemode/worker-sandbox.js +0 -244
  289. package/dist/codemode/worker-sandbox.js.map +0 -1
  290. package/dist/codemode/worker-script.d.ts +0 -8
  291. package/dist/codemode/worker-script.d.ts.map +0 -1
  292. package/dist/codemode/worker-script.js +0 -113
  293. package/dist/codemode/worker-script.js.map +0 -1
@@ -10,6 +10,8 @@ import { buildProgressContext, sendProgress, } from "../../../../utils/progress-
10
10
  import { CopyExportSchema, CopyExportSchemaBase, DumpSchemaSchema,
11
11
  // Output schemas
12
12
  DumpTableOutputSchema, DumpSchemaOutputSchema, CopyExportOutputSchema, CopyImportOutputSchema, } from "../../schemas/index.js";
13
+ import { formatPostgresError } from "../core/error-helpers.js";
14
+ import { sanitizeIdentifier, sanitizeIdentifiers, sanitizeTableName, } from "../../../../utils/identifiers.js";
13
15
  export function createDumpTableTool(adapter) {
14
16
  return {
15
17
  name: "pg_dump_table",
@@ -33,127 +35,128 @@ export function createDumpTableTool(adapter) {
33
35
  annotations: readOnly("Dump Table"),
34
36
  icons: getToolIcons("backup", readOnly("Dump Table")),
35
37
  handler: async (params, _context) => {
36
- const parsed = params;
37
- // Validate required table parameter
38
- if (!parsed.table || parsed.table.trim() === "") {
39
- throw new Error("table parameter is required");
40
- }
41
- // Parse schema.table format (e.g., 'public.users' -> schema='public', table='users')
42
- // If table contains a dot, always parse it as schema.table (embedded schema takes priority)
43
- let tableName = parsed.table;
44
- let schemaName = parsed.schema ?? "public";
45
- if (parsed.table.includes(".")) {
46
- const parts = parsed.table.split(".");
47
- if (parts.length === 2 && parts[0] && parts[1]) {
48
- schemaName = parts[0];
49
- tableName = parts[1];
38
+ try {
39
+ const parsed = params;
40
+ // Validate required table parameter
41
+ if (!parsed.table || parsed.table.trim() === "") {
42
+ throw new Error("table parameter is required");
50
43
  }
51
- }
52
- // Check if it's a sequence by querying pg_class
53
- const relkindResult = await adapter.executeQuery(`
44
+ // Parse schema.table format (e.g., 'public.users' -> schema='public', table='users')
45
+ // If table contains a dot, always parse it as schema.table (embedded schema takes priority)
46
+ let tableName = parsed.table;
47
+ let schemaName = parsed.schema ?? "public";
48
+ if (parsed.table.includes(".")) {
49
+ const parts = parsed.table.split(".");
50
+ if (parts.length === 2 && parts[0] && parts[1]) {
51
+ schemaName = parts[0];
52
+ tableName = parts[1];
53
+ }
54
+ }
55
+ // Check if it's a sequence by querying pg_class
56
+ const relkindResult = await adapter.executeQuery(`
54
57
  SELECT relkind FROM pg_class c
55
58
  JOIN pg_namespace n ON c.relnamespace = n.oid
56
- WHERE n.nspname = '${schemaName}' AND c.relname = '${tableName}'
57
- `);
58
- const relkind = relkindResult.rows?.[0]?.["relkind"];
59
- // relkind 'S' = sequence
60
- if (relkind === "S") {
61
- // Use pg_sequence system catalog (works in all PostgreSQL versions 10+)
62
- // Fallback to basic DDL if query fails
63
- try {
64
- const seqInfo = await adapter.executeQuery(`
59
+ WHERE n.nspname = $1 AND c.relname = $2
60
+ `, [schemaName, tableName]);
61
+ const relkind = relkindResult.rows?.[0]?.["relkind"];
62
+ // relkind 'S' = sequence
63
+ if (relkind === "S") {
64
+ // Use pg_sequence system catalog (works in all PostgreSQL versions 10+)
65
+ // Fallback to basic DDL if query fails
66
+ try {
67
+ const seqInfo = await adapter.executeQuery(`
65
68
  SELECT s.seqstart as start_value, s.seqincrement as increment_by,
66
69
  s.seqmin as min_value, s.seqmax as max_value, s.seqcycle as cycle
67
70
  FROM pg_sequence s
68
71
  JOIN pg_class c ON s.seqrelid = c.oid
69
72
  JOIN pg_namespace n ON c.relnamespace = n.oid
70
- WHERE n.nspname = '${schemaName}' AND c.relname = '${tableName}'
71
- `);
72
- const seq = seqInfo.rows?.[0];
73
- if (seq !== undefined) {
74
- const startVal = typeof seq["start_value"] === "number" ||
75
- typeof seq["start_value"] === "bigint"
76
- ? String(seq["start_value"])
77
- : null;
78
- const incrVal = typeof seq["increment_by"] === "number" ||
79
- typeof seq["increment_by"] === "bigint"
80
- ? Number(seq["increment_by"])
81
- : null;
82
- const minVal = typeof seq["min_value"] === "number" ||
83
- typeof seq["min_value"] === "bigint"
84
- ? String(seq["min_value"])
85
- : null;
86
- const maxVal = typeof seq["max_value"] === "number" ||
87
- typeof seq["max_value"] === "bigint"
88
- ? String(seq["max_value"])
89
- : null;
90
- const startValue = startVal !== null ? ` START ${startVal}` : "";
91
- const increment = incrVal !== null && incrVal !== 1
92
- ? ` INCREMENT ${String(incrVal)}`
93
- : "";
94
- const minValue = minVal !== null ? ` MINVALUE ${minVal}` : "";
95
- const maxValue = maxVal !== null ? ` MAXVALUE ${maxVal}` : "";
96
- const cycle = seq["cycle"] === true ? " CYCLE" : "";
97
- const ddl = `CREATE SEQUENCE "${schemaName}"."${tableName}"${startValue}${increment}${minValue}${maxValue}${cycle};`;
98
- return {
99
- ddl,
100
- type: "sequence",
101
- note: "Use pg_list_sequences to see all sequences.",
102
- ...(parsed.includeData === true && {
103
- warning: "includeData is ignored for sequences - sequences have no row data to export",
104
- }),
105
- };
73
+ WHERE n.nspname = $1 AND c.relname = $2
74
+ `, [schemaName, tableName]);
75
+ const seq = seqInfo.rows?.[0];
76
+ if (seq !== undefined) {
77
+ const startVal = typeof seq["start_value"] === "number" ||
78
+ typeof seq["start_value"] === "bigint"
79
+ ? String(seq["start_value"])
80
+ : null;
81
+ const incrVal = typeof seq["increment_by"] === "number" ||
82
+ typeof seq["increment_by"] === "bigint"
83
+ ? Number(seq["increment_by"])
84
+ : null;
85
+ const minVal = typeof seq["min_value"] === "number" ||
86
+ typeof seq["min_value"] === "bigint"
87
+ ? String(seq["min_value"])
88
+ : null;
89
+ const maxVal = typeof seq["max_value"] === "number" ||
90
+ typeof seq["max_value"] === "bigint"
91
+ ? String(seq["max_value"])
92
+ : null;
93
+ const startValue = startVal !== null ? ` START ${startVal}` : "";
94
+ const increment = incrVal !== null && incrVal !== 1
95
+ ? ` INCREMENT ${String(incrVal)}`
96
+ : "";
97
+ const minValue = minVal !== null ? ` MINVALUE ${minVal}` : "";
98
+ const maxValue = maxVal !== null ? ` MAXVALUE ${maxVal}` : "";
99
+ const cycle = seq["cycle"] === true ? " CYCLE" : "";
100
+ const ddl = `CREATE SEQUENCE ${sanitizeTableName(tableName, schemaName)}${startValue}${increment}${minValue}${maxValue}${cycle};`;
101
+ return {
102
+ ddl,
103
+ type: "sequence",
104
+ note: "Use pg_list_sequences to see all sequences.",
105
+ ...(parsed.includeData === true && {
106
+ warning: "includeData is ignored for sequences - sequences have no row data to export",
107
+ }),
108
+ };
109
+ }
106
110
  }
111
+ catch {
112
+ // Query failed, use basic DDL
113
+ }
114
+ // Fallback if pg_sequence query fails
115
+ return {
116
+ ddl: `CREATE SEQUENCE ${sanitizeTableName(tableName, schemaName)};`,
117
+ type: "sequence",
118
+ note: "Basic CREATE SEQUENCE. Use pg_list_sequences for details.",
119
+ ...(parsed.includeData === true && {
120
+ warning: "includeData is ignored for sequences - sequences have no row data to export",
121
+ }),
122
+ };
107
123
  }
108
- catch {
109
- // Query failed, use basic DDL
110
- }
111
- // Fallback if pg_sequence query fails
112
- return {
113
- ddl: `CREATE SEQUENCE "${schemaName}"."${tableName}";`,
114
- type: "sequence",
115
- note: "Basic CREATE SEQUENCE. Use pg_list_sequences for details.",
116
- ...(parsed.includeData === true && {
117
- warning: "includeData is ignored for sequences - sequences have no row data to export",
118
- }),
119
- };
120
- }
121
- // relkind 'v' = view, 'm' = materialized view
122
- if (relkind === "v" || relkind === "m") {
123
- try {
124
- const viewDefResult = await adapter.executeQuery(`
124
+ // relkind 'v' = view, 'm' = materialized view
125
+ if (relkind === "v" || relkind === "m") {
126
+ try {
127
+ const viewDefResult = await adapter.executeQuery(`
125
128
  SELECT definition FROM pg_views
126
- WHERE schemaname = '${schemaName}' AND viewname = '${tableName}'
127
- `);
128
- const definition = viewDefResult.rows?.[0]?.["definition"];
129
- if (typeof definition === "string") {
130
- const createType = relkind === "m" ? "MATERIALIZED VIEW" : "VIEW";
131
- const ddl = `CREATE ${createType} "${schemaName}"."${tableName}" AS\n${definition.trim()}`;
132
- return {
133
- ddl,
134
- type: relkind === "m" ? "materialized_view" : "view",
135
- note: `Use pg_list_views to see all views.`,
136
- };
129
+ WHERE schemaname = $1 AND viewname = $2
130
+ `, [schemaName, tableName]);
131
+ const definition = viewDefResult.rows?.[0]?.["definition"];
132
+ if (typeof definition === "string") {
133
+ const createType = relkind === "m" ? "MATERIALIZED VIEW" : "VIEW";
134
+ const ddl = `CREATE ${createType} ${sanitizeTableName(tableName, schemaName)} AS\n${definition.trim()}`;
135
+ return {
136
+ ddl,
137
+ type: relkind === "m" ? "materialized_view" : "view",
138
+ note: `Use pg_list_views to see all views.`,
139
+ };
140
+ }
137
141
  }
142
+ catch {
143
+ // Query failed, use basic DDL
144
+ }
145
+ // Fallback for views
146
+ const createType = relkind === "m" ? "MATERIALIZED VIEW" : "VIEW";
147
+ return {
148
+ ddl: `-- Unable to retrieve ${createType.toLowerCase()} definition\nCREATE ${createType} ${sanitizeTableName(tableName, schemaName)} AS SELECT ...;`,
149
+ type: relkind === "m" ? "materialized_view" : "view",
150
+ note: "View definition could not be retrieved. Use pg_list_views for details.",
151
+ };
138
152
  }
139
- catch {
140
- // Query failed, use basic DDL
141
- }
142
- // Fallback for views
143
- const createType = relkind === "m" ? "MATERIALIZED VIEW" : "VIEW";
144
- return {
145
- ddl: `-- Unable to retrieve ${createType.toLowerCase()} definition\nCREATE ${createType} "${schemaName}"."${tableName}" AS SELECT ...;`,
146
- type: relkind === "m" ? "materialized_view" : "view",
147
- note: "View definition could not be retrieved. Use pg_list_views for details.",
148
- };
149
- }
150
- // Check if it's a partitioned table (relkind 'p') and get partition info
151
- let partitionClause = "";
152
- const isPartitionedTable = relkind === "p";
153
- if (isPartitionedTable) {
154
- try {
155
- // Query pg_partitioned_table to get partition strategy and key columns
156
- const partInfo = await adapter.executeQuery(`
153
+ // Check if it's a partitioned table (relkind 'p') and get partition info
154
+ let partitionClause = "";
155
+ const isPartitionedTable = relkind === "p";
156
+ if (isPartitionedTable) {
157
+ try {
158
+ // Query pg_partitioned_table to get partition strategy and key columns
159
+ const partInfo = await adapter.executeQuery(`
157
160
  SELECT pt.partstrat,
158
161
  array_agg(a.attname ORDER BY partattrs.ord) as partition_columns
159
162
  FROM pg_partitioned_table pt
@@ -161,131 +164,144 @@ export function createDumpTableTool(adapter) {
161
164
  JOIN pg_namespace n ON c.relnamespace = n.oid
162
165
  CROSS JOIN LATERAL unnest(pt.partattrs) WITH ORDINALITY AS partattrs(attnum, ord)
163
166
  JOIN pg_attribute a ON a.attrelid = c.oid AND a.attnum = partattrs.attnum
164
- WHERE n.nspname = '${schemaName}' AND c.relname = '${tableName}'
167
+ WHERE n.nspname = $1 AND c.relname = $2
165
168
  GROUP BY pt.partstrat
166
- `);
167
- const partRow = partInfo.rows?.[0];
168
- if (partRow) {
169
- const strategy = partRow["partstrat"];
170
- const columns = partRow["partition_columns"];
171
- // Map strategy code to keyword
172
- const strategyMap = {
173
- r: "RANGE",
174
- l: "LIST",
175
- h: "HASH",
176
- };
177
- const strategyKeyword = typeof strategy === "string"
178
- ? (strategyMap[strategy] ?? "RANGE")
179
- : "RANGE";
180
- // Build column list - PostgreSQL returns array_agg as string like "{col1,col2}"
181
- let columnList = "";
182
- if (Array.isArray(columns)) {
183
- columnList = columns.map((c) => `"${String(c)}"`).join(", ");
184
- }
185
- else if (typeof columns === "string") {
186
- // Parse PostgreSQL array literal format: "{col1,col2}" -> ["col1", "col2"]
187
- const parsed = columns
188
- .replace(/^\{/, "")
189
- .replace(/\}$/, "")
190
- .split(",")
191
- .filter((c) => c.length > 0);
192
- columnList = parsed.map((c) => `"${c.trim()}"`).join(", ");
193
- }
194
- if (columnList) {
195
- partitionClause = ` PARTITION BY ${strategyKeyword} (${columnList})`;
169
+ `, [schemaName, tableName]);
170
+ const partRow = partInfo.rows?.[0];
171
+ if (partRow) {
172
+ const strategy = partRow["partstrat"];
173
+ const columns = partRow["partition_columns"];
174
+ // Map strategy code to keyword
175
+ const strategyMap = {
176
+ r: "RANGE",
177
+ l: "LIST",
178
+ h: "HASH",
179
+ };
180
+ const strategyKeyword = typeof strategy === "string"
181
+ ? (strategyMap[strategy] ?? "RANGE")
182
+ : "RANGE";
183
+ // Build column list - PostgreSQL returns array_agg as string like "{col1,col2}"
184
+ let columnList = "";
185
+ if (Array.isArray(columns)) {
186
+ columnList = columns
187
+ .map((c) => sanitizeIdentifier(String(c)))
188
+ .join(", ");
189
+ }
190
+ else if (typeof columns === "string") {
191
+ // Parse PostgreSQL array literal format: "{col1,col2}" -> ["col1", "col2"]
192
+ const parsed = columns
193
+ .replace(/^\{/, "")
194
+ .replace(/\}$/, "")
195
+ .split(",")
196
+ .filter((c) => c.length > 0);
197
+ columnList = parsed
198
+ .map((c) => sanitizeIdentifier(c.trim()))
199
+ .join(", ");
200
+ }
201
+ if (columnList) {
202
+ partitionClause = ` PARTITION BY ${strategyKeyword} (${columnList})`;
203
+ }
196
204
  }
197
205
  }
198
- }
199
- catch {
200
- // Partition info query failed, continue without partition clause
201
- }
202
- }
203
- const tableInfo = await adapter.describeTable(tableName, schemaName);
204
- const columns = tableInfo.columns
205
- ?.map((col) => {
206
- let def = ` "${col.name}" ${col.type}`;
207
- if (col.defaultValue !== undefined && col.defaultValue !== null) {
208
- let defaultStr;
209
- if (typeof col.defaultValue === "object") {
210
- defaultStr = JSON.stringify(col.defaultValue);
206
+ catch {
207
+ // Partition info query failed, continue without partition clause
211
208
  }
212
- else if (typeof col.defaultValue === "string" ||
213
- typeof col.defaultValue === "number" ||
214
- typeof col.defaultValue === "boolean") {
215
- defaultStr = String(col.defaultValue);
216
- }
217
- else {
218
- defaultStr = JSON.stringify(col.defaultValue);
219
- }
220
- def += ` DEFAULT ${defaultStr}`;
221
209
  }
222
- if (!col.nullable)
223
- def += " NOT NULL";
224
- return def;
225
- })
226
- .join(",\n") ?? "";
227
- const createTable = `CREATE TABLE "${schemaName}"."${tableName}" (\n${columns}\n)${partitionClause};`;
228
- const result = {
229
- ddl: createTable,
230
- type: isPartitionedTable ? "partitioned_table" : "table",
231
- note: isPartitionedTable
232
- ? "For partition children use pg_list_partitions, for indexes use pg_get_indexes, for constraints use pg_get_constraints."
233
- : "Basic CREATE TABLE only. For indexes use pg_get_indexes, for constraints use pg_get_constraints.",
234
- };
235
- if (parsed.includeData) {
236
- // Default limit is 500 to prevent large payloads, 0 means no limit
237
- const effectiveLimit = parsed.limit === 0 ? null : (parsed.limit ?? 500);
238
- const limitClause = effectiveLimit !== null ? ` LIMIT ${String(effectiveLimit)}` : "";
239
- const dataResult = await adapter.executeQuery(`SELECT * FROM "${schemaName}"."${tableName}"${limitClause}`);
240
- if (dataResult.rows !== undefined && dataResult.rows.length > 0) {
241
- const firstRow = dataResult.rows[0];
242
- if (firstRow === undefined)
243
- return result;
244
- const cols = Object.keys(firstRow)
245
- .map((c) => `"${c}"`)
246
- .join(", ");
247
- const inserts = dataResult.rows
248
- .map((row) => {
249
- const vals = Object.entries(row)
250
- .map(([, value]) => {
251
- if (value === null)
252
- return "NULL";
253
- // Handle Date objects - format as PostgreSQL timestamp
254
- if (value instanceof Date) {
255
- const iso = value.toISOString();
256
- // Convert ISO 8601 to PostgreSQL format: 'YYYY-MM-DD HH:MM:SS.mmm'
257
- const pgTimestamp = iso.replace("T", " ").replace("Z", "");
258
- return `'${pgTimestamp}'`;
259
- }
260
- if (typeof value === "string") {
261
- // Escape backslashes first, then single quotes (PostgreSQL string literal escaping)
262
- const escaped = value
263
- .replace(/\\/g, "\\\\")
264
- .replace(/'/g, "''");
265
- // Check if string looks like an ISO timestamp
266
- if (/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}/.test(value)) {
267
- // Convert ISO format to PostgreSQL format
268
- const pgTimestamp = value
210
+ const tableInfo = await adapter.describeTable(tableName, schemaName);
211
+ const columns = tableInfo.columns
212
+ ?.map((col) => {
213
+ let def = ` ${sanitizeIdentifier(col.name)} ${col.type}`;
214
+ if (col.defaultValue !== undefined && col.defaultValue !== null) {
215
+ let defaultStr;
216
+ if (typeof col.defaultValue === "object") {
217
+ defaultStr = JSON.stringify(col.defaultValue);
218
+ }
219
+ else if (typeof col.defaultValue === "string" ||
220
+ typeof col.defaultValue === "number" ||
221
+ typeof col.defaultValue === "boolean") {
222
+ defaultStr = String(col.defaultValue);
223
+ }
224
+ else {
225
+ defaultStr = JSON.stringify(col.defaultValue);
226
+ }
227
+ def += ` DEFAULT ${defaultStr}`;
228
+ }
229
+ if (!col.nullable)
230
+ def += " NOT NULL";
231
+ return def;
232
+ })
233
+ .join(",\n") ?? "";
234
+ const createTable = `CREATE TABLE ${sanitizeTableName(tableName, schemaName)} (\n${columns}\n)${partitionClause};`;
235
+ const result = {
236
+ ddl: createTable,
237
+ type: isPartitionedTable ? "partitioned_table" : "table",
238
+ note: isPartitionedTable
239
+ ? "For partition children use pg_list_partitions, for indexes use pg_get_indexes, for constraints use pg_get_constraints."
240
+ : "Basic CREATE TABLE only. For indexes use pg_get_indexes, for constraints use pg_get_constraints.",
241
+ };
242
+ if (parsed.includeData) {
243
+ // Default limit is 500 to prevent large payloads, 0 means no limit
244
+ const effectiveLimit = parsed.limit === 0 ? null : (parsed.limit ?? 500);
245
+ const limitClause = effectiveLimit !== null ? ` LIMIT ${String(effectiveLimit)}` : "";
246
+ const dataResult = await adapter.executeQuery(`SELECT * FROM ${sanitizeTableName(tableName, schemaName)}${limitClause}`);
247
+ if (dataResult.rows !== undefined && dataResult.rows.length > 0) {
248
+ const firstRow = dataResult.rows[0];
249
+ if (firstRow === undefined)
250
+ return result;
251
+ const cols = Object.keys(firstRow)
252
+ .map((c) => sanitizeIdentifier(c))
253
+ .join(", ");
254
+ const inserts = dataResult.rows
255
+ .map((row) => {
256
+ const vals = Object.entries(row)
257
+ .map(([, value]) => {
258
+ if (value === null)
259
+ return "NULL";
260
+ // Handle Date objects - format as PostgreSQL timestamp
261
+ if (value instanceof Date) {
262
+ const iso = value.toISOString();
263
+ // Convert ISO 8601 to PostgreSQL format: 'YYYY-MM-DD HH:MM:SS.mmm'
264
+ const pgTimestamp = iso
269
265
  .replace("T", " ")
270
- .replace("Z", "")
271
- .replace(/\.\d+$/, "");
272
- return `'${pgTimestamp.replace(/\\/g, "\\\\").replace(/'/g, "''")}'`;
266
+ .replace("Z", "");
267
+ return `'${pgTimestamp}'`;
273
268
  }
274
- return `'${escaped}'`;
275
- }
276
- if (typeof value === "number" || typeof value === "boolean")
277
- return String(value);
278
- // For objects (JSONB, arrays), use PostgreSQL JSONB literal
279
- return `'${JSON.stringify(value).replace(/\\/g, "\\\\").replace(/'/g, "''")}'::jsonb`;
269
+ if (typeof value === "string") {
270
+ // Escape backslashes first, then single quotes (PostgreSQL string literal escaping)
271
+ const escaped = value
272
+ .replace(/\\/g, "\\\\")
273
+ .replace(/'/g, "''");
274
+ // Check if string looks like an ISO timestamp
275
+ if (/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}/.test(value)) {
276
+ // Convert ISO format to PostgreSQL format
277
+ const pgTimestamp = value
278
+ .replace("T", " ")
279
+ .replace("Z", "")
280
+ .replace(/\.\d+$/, "");
281
+ return `'${pgTimestamp.replace(/\\/g, "\\\\").replace(/'/g, "''")}'`;
282
+ }
283
+ return `'${escaped}'`;
284
+ }
285
+ if (typeof value === "number" || typeof value === "boolean")
286
+ return String(value);
287
+ // For objects (JSONB, arrays), use PostgreSQL JSONB literal
288
+ return `'${JSON.stringify(value).replace(/\\/g, "\\\\").replace(/'/g, "''")}'::jsonb`;
289
+ })
290
+ .join(", ");
291
+ return `INSERT INTO ${sanitizeTableName(tableName, schemaName)} (${cols}) VALUES (${vals});`;
280
292
  })
281
- .join(", ");
282
- return `INSERT INTO "${schemaName}"."${tableName}" (${cols}) VALUES (${vals});`;
283
- })
284
- .join("\n");
285
- result.insertStatements = inserts;
293
+ .join("\n");
294
+ result.insertStatements = inserts;
295
+ }
286
296
  }
297
+ return result;
298
+ }
299
+ catch (error) {
300
+ return {
301
+ success: false,
302
+ error: formatPostgresError(error, { tool: "pg_dump_table" }),
303
+ };
287
304
  }
288
- return result;
289
305
  },
290
306
  };
291
307
  }
@@ -314,7 +330,7 @@ export function createDumpSchemaTool(_adapter) {
314
330
  const sqlExtWarning = outputFilename.endsWith(".sql")
315
331
  ? "Warning: Using .sql extension with --format=custom produces binary output. Use .dump extension or --format=plain for SQL text output."
316
332
  : undefined;
317
- command += ` --file=${outputFilename}`;
333
+ command += ` --file="${outputFilename}"`;
318
334
  command += " $POSTGRES_CONNECTION_STRING";
319
335
  return Promise.resolve({
320
336
  command,
@@ -343,144 +359,156 @@ export function createCopyExportTool(adapter) {
343
359
  annotations: readOnly("Copy Export"),
344
360
  icons: getToolIcons("backup", readOnly("Copy Export")),
345
361
  handler: async (params, context) => {
346
- const progress = buildProgressContext(context);
347
- await sendProgress(progress, 1, 3, "Preparing COPY export...");
348
- const { query, format, header, delimiter, conflictWarning, effectiveLimit, } = CopyExportSchema.parse(params); // Use transform for validation
349
- const options = [];
350
- options.push(`FORMAT ${format ?? "csv"}`);
351
- if (header !== false)
352
- options.push("HEADER");
353
- if (delimiter)
354
- options.push(`DELIMITER '${delimiter}'`);
355
- const copyCommand = `COPY (${query}) TO STDOUT WITH (${options.join(", ")})`;
356
- void copyCommand;
357
- await sendProgress(progress, 2, 3, "Executing query...");
358
- const result = await adapter.executeQuery(query);
359
- // Handle CSV format (default)
360
- if (format === "csv" || format === undefined) {
361
- if (result.rows === undefined || result.rows.length === 0) {
362
- return {
363
- data: "",
364
- rowCount: 0,
365
- note: "Query returned no rows. Headers omitted for empty results.",
366
- ...(conflictWarning !== undefined
367
- ? { warning: conflictWarning }
368
- : {}),
369
- };
370
- }
371
- const firstRowData = result.rows[0];
372
- if (firstRowData === undefined) {
362
+ try {
363
+ const progress = buildProgressContext(context);
364
+ await sendProgress(progress, 1, 3, "Preparing COPY export...");
365
+ const { query, format, header, delimiter, conflictWarning, effectiveLimit, } = CopyExportSchema.parse(params); // Use transform for validation
366
+ const options = [];
367
+ options.push(`FORMAT ${format ?? "csv"}`);
368
+ if (header !== false)
369
+ options.push("HEADER");
370
+ if (delimiter)
371
+ options.push(`DELIMITER '${delimiter}'`);
372
+ const copyCommand = `COPY (${query}) TO STDOUT WITH (${options.join(", ")})`;
373
+ void copyCommand;
374
+ await sendProgress(progress, 2, 3, "Executing query...");
375
+ const result = await adapter.executeQuery(query);
376
+ // Handle CSV format (default)
377
+ if (format === "csv" || format === undefined) {
378
+ if (result.rows === undefined || result.rows.length === 0) {
379
+ return {
380
+ data: "",
381
+ rowCount: 0,
382
+ note: "Query returned no rows. Headers omitted for empty results.",
383
+ ...(conflictWarning !== undefined
384
+ ? { warning: conflictWarning }
385
+ : {}),
386
+ };
387
+ }
388
+ const firstRowData = result.rows[0];
389
+ if (firstRowData === undefined) {
390
+ return {
391
+ data: "",
392
+ rowCount: 0,
393
+ note: "Query returned no rows. Headers omitted for empty results.",
394
+ ...(conflictWarning !== undefined
395
+ ? { warning: conflictWarning }
396
+ : {}),
397
+ };
398
+ }
399
+ const headers = Object.keys(firstRowData);
400
+ const delim = delimiter ?? ",";
401
+ const lines = [];
402
+ if (header !== false) {
403
+ lines.push(headers.join(delim));
404
+ }
405
+ for (const row of result.rows) {
406
+ lines.push(headers
407
+ .map((h) => {
408
+ const v = row[h];
409
+ if (v === null)
410
+ return "";
411
+ if (typeof v === "object")
412
+ return JSON.stringify(v);
413
+ if (typeof v !== "string" &&
414
+ typeof v !== "number" &&
415
+ typeof v !== "boolean") {
416
+ return JSON.stringify(v);
417
+ }
418
+ const s = String(v);
419
+ return s.includes(delim) ||
420
+ s.includes('"') ||
421
+ s.includes("\n")
422
+ ? `"${s.replace(/"/g, '""')}"`
423
+ : s;
424
+ })
425
+ .join(delim));
426
+ }
427
+ // Mark as truncated if any limit was applied AND rows returned equals that limit
428
+ // This indicates there are likely more rows available
429
+ const isTruncated = effectiveLimit !== undefined &&
430
+ result.rows.length === effectiveLimit;
431
+ await sendProgress(progress, 3, 3, "Export complete");
373
432
  return {
374
- data: "",
375
- rowCount: 0,
376
- note: "Query returned no rows. Headers omitted for empty results.",
433
+ data: lines.join("\n"),
434
+ rowCount: result.rows.length,
435
+ ...(isTruncated ? { truncated: true, limit: effectiveLimit } : {}),
377
436
  ...(conflictWarning !== undefined
378
437
  ? { warning: conflictWarning }
379
438
  : {}),
380
439
  };
381
440
  }
382
- const headers = Object.keys(firstRowData);
383
- const delim = delimiter ?? ",";
384
- const lines = [];
385
- if (header !== false) {
386
- lines.push(headers.join(delim));
387
- }
388
- for (const row of result.rows) {
389
- lines.push(headers
390
- .map((h) => {
391
- const v = row[h];
392
- if (v === null)
393
- return "";
394
- if (typeof v === "object")
395
- return JSON.stringify(v);
396
- if (typeof v !== "string" &&
397
- typeof v !== "number" &&
398
- typeof v !== "boolean") {
441
+ // Handle TEXT format - tab-delimited with \N for NULLs
442
+ if (format === "text") {
443
+ if (result.rows === undefined || result.rows.length === 0) {
444
+ return {
445
+ data: "",
446
+ rowCount: 0,
447
+ note: "Query returned no rows. Headers omitted for empty results.",
448
+ ...(conflictWarning !== undefined
449
+ ? { warning: conflictWarning }
450
+ : {}),
451
+ };
452
+ }
453
+ const firstRowData = result.rows[0];
454
+ if (firstRowData === undefined) {
455
+ return {
456
+ data: "",
457
+ rowCount: 0,
458
+ note: "Query returned no rows. Headers omitted for empty results.",
459
+ ...(conflictWarning !== undefined
460
+ ? { warning: conflictWarning }
461
+ : {}),
462
+ };
463
+ }
464
+ const headers = Object.keys(firstRowData);
465
+ const delim = delimiter ?? "\t";
466
+ const lines = [];
467
+ if (header !== false) {
468
+ lines.push(headers.join(delim));
469
+ }
470
+ for (const row of result.rows) {
471
+ lines.push(headers
472
+ .map((h) => {
473
+ const v = row[h];
474
+ if (v === null)
475
+ return "\\N"; // PostgreSQL NULL representation in text format
476
+ if (typeof v === "object")
477
+ return JSON.stringify(v);
478
+ if (typeof v === "string" ||
479
+ typeof v === "number" ||
480
+ typeof v === "boolean") {
481
+ return String(v);
482
+ }
483
+ // Fallback for any other type
399
484
  return JSON.stringify(v);
400
- }
401
- const s = String(v);
402
- return s.includes(delim) || s.includes('"') || s.includes("\n")
403
- ? `"${s.replace(/"/g, '""')}"`
404
- : s;
405
- })
406
- .join(delim));
407
- }
408
- // Mark as truncated if any limit was applied AND rows returned equals that limit
409
- // This indicates there are likely more rows available
410
- const isTruncated = effectiveLimit !== undefined && result.rows.length === effectiveLimit;
411
- await sendProgress(progress, 3, 3, "Export complete");
412
- return {
413
- data: lines.join("\n"),
414
- rowCount: result.rows.length,
415
- ...(isTruncated ? { truncated: true, limit: effectiveLimit } : {}),
416
- ...(conflictWarning !== undefined
417
- ? { warning: conflictWarning }
418
- : {}),
419
- };
420
- }
421
- // Handle TEXT format - tab-delimited with \N for NULLs
422
- if (format === "text") {
423
- if (result.rows === undefined || result.rows.length === 0) {
424
- return {
425
- data: "",
426
- rowCount: 0,
427
- note: "Query returned no rows. Headers omitted for empty results.",
428
- ...(conflictWarning !== undefined
429
- ? { warning: conflictWarning }
430
- : {}),
431
- };
432
- }
433
- const firstRowData = result.rows[0];
434
- if (firstRowData === undefined) {
485
+ })
486
+ .join(delim));
487
+ }
488
+ // Mark as truncated if any limit was applied AND rows returned equals that limit
489
+ // This indicates there are likely more rows available
490
+ const isTruncated = effectiveLimit !== undefined &&
491
+ result.rows.length === effectiveLimit;
492
+ await sendProgress(progress, 3, 3, "Export complete");
435
493
  return {
436
- data: "",
437
- rowCount: 0,
438
- note: "Query returned no rows. Headers omitted for empty results.",
494
+ data: lines.join("\n"),
495
+ rowCount: result.rows.length,
496
+ ...(isTruncated ? { truncated: true, limit: effectiveLimit } : {}),
439
497
  ...(conflictWarning !== undefined
440
498
  ? { warning: conflictWarning }
441
499
  : {}),
442
500
  };
443
501
  }
444
- const headers = Object.keys(firstRowData);
445
- const delim = delimiter ?? "\t";
446
- const lines = [];
447
- if (header !== false) {
448
- lines.push(headers.join(delim));
449
- }
450
- for (const row of result.rows) {
451
- lines.push(headers
452
- .map((h) => {
453
- const v = row[h];
454
- if (v === null)
455
- return "\\N"; // PostgreSQL NULL representation in text format
456
- if (typeof v === "object")
457
- return JSON.stringify(v);
458
- if (typeof v === "string" ||
459
- typeof v === "number" ||
460
- typeof v === "boolean") {
461
- return String(v);
462
- }
463
- // Fallback for any other type
464
- return JSON.stringify(v);
465
- })
466
- .join(delim));
467
- }
468
- // Mark as truncated if any limit was applied AND rows returned equals that limit
469
- // This indicates there are likely more rows available
470
- const isTruncated = effectiveLimit !== undefined && result.rows.length === effectiveLimit;
471
- await sendProgress(progress, 3, 3, "Export complete");
502
+ // Handle BINARY format - not supported via MCP protocol
503
+ // Binary data cannot be safely serialized to JSON without corruption
504
+ throw new Error('Binary format is not supported via MCP protocol. Use format: "csv" or "text" instead. For binary export, use pg_dump_schema to generate a pg_dump command.');
505
+ }
506
+ catch (error) {
472
507
  return {
473
- data: lines.join("\n"),
474
- rowCount: result.rows.length,
475
- ...(isTruncated ? { truncated: true, limit: effectiveLimit } : {}),
476
- ...(conflictWarning !== undefined
477
- ? { warning: conflictWarning }
478
- : {}),
508
+ success: false,
509
+ error: formatPostgresError(error, { tool: "pg_copy_export" }),
479
510
  };
480
511
  }
481
- // Handle BINARY format - not supported via MCP protocol
482
- // Binary data cannot be safely serialized to JSON without corruption
483
- throw new Error('Binary format is not supported via MCP protocol. Use format: "csv" or "text" instead. For binary export, use pg_dump_schema to generate a pg_dump command.');
484
512
  },
485
513
  };
486
514
  }
@@ -527,11 +555,9 @@ export function createCopyImportTool(_adapter) {
527
555
  tableNamePart = parts[1];
528
556
  }
529
557
  }
530
- const tableName = schemaNamePart
531
- ? `"${schemaNamePart}"."${tableNamePart}"`
532
- : `"${tableNamePart}"`;
558
+ const tableName = sanitizeTableName(tableNamePart, schemaNamePart);
533
559
  const columnClause = parsed.columns !== undefined && parsed.columns.length > 0
534
- ? ` (${parsed.columns.map((c) => `"${c}"`).join(", ")})`
560
+ ? ` (${sanitizeIdentifiers(parsed.columns).join(", ")})`
535
561
  : "";
536
562
  const options = [];
537
563
  options.push(`FORMAT ${parsed.format ?? "csv"}`);