@neverinfamous/postgres-mcp 1.1.0 → 1.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (243)
  1. package/README.md +95 -81
  2. package/dist/__tests__/mocks/adapter.d.ts.map +1 -1
  3. package/dist/__tests__/mocks/adapter.js +0 -1
  4. package/dist/__tests__/mocks/adapter.js.map +1 -1
  5. package/dist/__tests__/mocks/pool.d.ts.map +1 -1
  6. package/dist/__tests__/mocks/pool.js +0 -1
  7. package/dist/__tests__/mocks/pool.js.map +1 -1
  8. package/dist/adapters/DatabaseAdapter.js +1 -1
  9. package/dist/adapters/DatabaseAdapter.js.map +1 -1
  10. package/dist/adapters/postgresql/PostgresAdapter.d.ts.map +1 -1
  11. package/dist/adapters/postgresql/PostgresAdapter.js +78 -8
  12. package/dist/adapters/postgresql/PostgresAdapter.js.map +1 -1
  13. package/dist/adapters/postgresql/prompts/backup.d.ts.map +1 -1
  14. package/dist/adapters/postgresql/prompts/backup.js +2 -3
  15. package/dist/adapters/postgresql/prompts/backup.js.map +1 -1
  16. package/dist/adapters/postgresql/prompts/citext.d.ts.map +1 -1
  17. package/dist/adapters/postgresql/prompts/citext.js +3 -4
  18. package/dist/adapters/postgresql/prompts/citext.js.map +1 -1
  19. package/dist/adapters/postgresql/prompts/extensionSetup.d.ts.map +1 -1
  20. package/dist/adapters/postgresql/prompts/extensionSetup.js +2 -3
  21. package/dist/adapters/postgresql/prompts/extensionSetup.js.map +1 -1
  22. package/dist/adapters/postgresql/prompts/health.d.ts.map +1 -1
  23. package/dist/adapters/postgresql/prompts/health.js +2 -3
  24. package/dist/adapters/postgresql/prompts/health.js.map +1 -1
  25. package/dist/adapters/postgresql/prompts/index.js +20 -27
  26. package/dist/adapters/postgresql/prompts/index.js.map +1 -1
  27. package/dist/adapters/postgresql/prompts/indexTuning.d.ts.map +1 -1
  28. package/dist/adapters/postgresql/prompts/indexTuning.js +2 -3
  29. package/dist/adapters/postgresql/prompts/indexTuning.js.map +1 -1
  30. package/dist/adapters/postgresql/prompts/kcache.d.ts.map +1 -1
  31. package/dist/adapters/postgresql/prompts/kcache.js +3 -4
  32. package/dist/adapters/postgresql/prompts/kcache.js.map +1 -1
  33. package/dist/adapters/postgresql/prompts/ltree.d.ts.map +1 -1
  34. package/dist/adapters/postgresql/prompts/ltree.js +5 -6
  35. package/dist/adapters/postgresql/prompts/ltree.js.map +1 -1
  36. package/dist/adapters/postgresql/prompts/partman.d.ts.map +1 -1
  37. package/dist/adapters/postgresql/prompts/partman.js +2 -3
  38. package/dist/adapters/postgresql/prompts/partman.js.map +1 -1
  39. package/dist/adapters/postgresql/prompts/pgcron.d.ts.map +1 -1
  40. package/dist/adapters/postgresql/prompts/pgcron.js +2 -3
  41. package/dist/adapters/postgresql/prompts/pgcron.js.map +1 -1
  42. package/dist/adapters/postgresql/prompts/pgcrypto.d.ts.map +1 -1
  43. package/dist/adapters/postgresql/prompts/pgcrypto.js +3 -4
  44. package/dist/adapters/postgresql/prompts/pgcrypto.js.map +1 -1
  45. package/dist/adapters/postgresql/prompts/pgvector.d.ts.map +1 -1
  46. package/dist/adapters/postgresql/prompts/pgvector.js +3 -4
  47. package/dist/adapters/postgresql/prompts/pgvector.js.map +1 -1
  48. package/dist/adapters/postgresql/prompts/postgis.d.ts.map +1 -1
  49. package/dist/adapters/postgresql/prompts/postgis.js +2 -3
  50. package/dist/adapters/postgresql/prompts/postgis.js.map +1 -1
  51. package/dist/adapters/postgresql/schemas/admin.d.ts +10 -5
  52. package/dist/adapters/postgresql/schemas/admin.d.ts.map +1 -1
  53. package/dist/adapters/postgresql/schemas/admin.js +10 -5
  54. package/dist/adapters/postgresql/schemas/admin.js.map +1 -1
  55. package/dist/adapters/postgresql/schemas/backup.d.ts +8 -4
  56. package/dist/adapters/postgresql/schemas/backup.d.ts.map +1 -1
  57. package/dist/adapters/postgresql/schemas/backup.js +11 -4
  58. package/dist/adapters/postgresql/schemas/backup.js.map +1 -1
  59. package/dist/adapters/postgresql/schemas/core.d.ts +54 -19
  60. package/dist/adapters/postgresql/schemas/core.d.ts.map +1 -1
  61. package/dist/adapters/postgresql/schemas/core.js +65 -17
  62. package/dist/adapters/postgresql/schemas/core.js.map +1 -1
  63. package/dist/adapters/postgresql/schemas/cron.d.ts +51 -32
  64. package/dist/adapters/postgresql/schemas/cron.d.ts.map +1 -1
  65. package/dist/adapters/postgresql/schemas/cron.js +64 -44
  66. package/dist/adapters/postgresql/schemas/cron.js.map +1 -1
  67. package/dist/adapters/postgresql/schemas/extensions.d.ts +168 -73
  68. package/dist/adapters/postgresql/schemas/extensions.d.ts.map +1 -1
  69. package/dist/adapters/postgresql/schemas/extensions.js +179 -62
  70. package/dist/adapters/postgresql/schemas/extensions.js.map +1 -1
  71. package/dist/adapters/postgresql/schemas/index.d.ts +5 -5
  72. package/dist/adapters/postgresql/schemas/index.d.ts.map +1 -1
  73. package/dist/adapters/postgresql/schemas/index.js +9 -7
  74. package/dist/adapters/postgresql/schemas/index.js.map +1 -1
  75. package/dist/adapters/postgresql/schemas/jsonb.d.ts +94 -42
  76. package/dist/adapters/postgresql/schemas/jsonb.d.ts.map +1 -1
  77. package/dist/adapters/postgresql/schemas/jsonb.js +101 -30
  78. package/dist/adapters/postgresql/schemas/jsonb.js.map +1 -1
  79. package/dist/adapters/postgresql/schemas/monitoring.d.ts +28 -11
  80. package/dist/adapters/postgresql/schemas/monitoring.d.ts.map +1 -1
  81. package/dist/adapters/postgresql/schemas/monitoring.js +49 -24
  82. package/dist/adapters/postgresql/schemas/monitoring.js.map +1 -1
  83. package/dist/adapters/postgresql/schemas/partitioning.d.ts +15 -11
  84. package/dist/adapters/postgresql/schemas/partitioning.d.ts.map +1 -1
  85. package/dist/adapters/postgresql/schemas/partitioning.js +17 -13
  86. package/dist/adapters/postgresql/schemas/partitioning.js.map +1 -1
  87. package/dist/adapters/postgresql/schemas/performance.d.ts +62 -31
  88. package/dist/adapters/postgresql/schemas/performance.d.ts.map +1 -1
  89. package/dist/adapters/postgresql/schemas/performance.js +86 -24
  90. package/dist/adapters/postgresql/schemas/performance.js.map +1 -1
  91. package/dist/adapters/postgresql/schemas/postgis.d.ts +20 -0
  92. package/dist/adapters/postgresql/schemas/postgis.d.ts.map +1 -1
  93. package/dist/adapters/postgresql/schemas/postgis.js +20 -3
  94. package/dist/adapters/postgresql/schemas/postgis.js.map +1 -1
  95. package/dist/adapters/postgresql/schemas/schema-mgmt.d.ts +35 -23
  96. package/dist/adapters/postgresql/schemas/schema-mgmt.d.ts.map +1 -1
  97. package/dist/adapters/postgresql/schemas/schema-mgmt.js +69 -26
  98. package/dist/adapters/postgresql/schemas/schema-mgmt.js.map +1 -1
  99. package/dist/adapters/postgresql/schemas/stats.d.ts +33 -20
  100. package/dist/adapters/postgresql/schemas/stats.d.ts.map +1 -1
  101. package/dist/adapters/postgresql/schemas/stats.js +36 -20
  102. package/dist/adapters/postgresql/schemas/stats.js.map +1 -1
  103. package/dist/adapters/postgresql/schemas/text-search.d.ts +8 -5
  104. package/dist/adapters/postgresql/schemas/text-search.d.ts.map +1 -1
  105. package/dist/adapters/postgresql/schemas/text-search.js +15 -5
  106. package/dist/adapters/postgresql/schemas/text-search.js.map +1 -1
  107. package/dist/adapters/postgresql/tools/admin.d.ts.map +1 -1
  108. package/dist/adapters/postgresql/tools/admin.js +211 -140
  109. package/dist/adapters/postgresql/tools/admin.js.map +1 -1
  110. package/dist/adapters/postgresql/tools/backup/dump.d.ts.map +1 -1
  111. package/dist/adapters/postgresql/tools/backup/dump.js +410 -387
  112. package/dist/adapters/postgresql/tools/backup/dump.js.map +1 -1
  113. package/dist/adapters/postgresql/tools/backup/planning.d.ts.map +1 -1
  114. package/dist/adapters/postgresql/tools/backup/planning.js +175 -172
  115. package/dist/adapters/postgresql/tools/backup/planning.js.map +1 -1
  116. package/dist/adapters/postgresql/tools/citext.d.ts.map +1 -1
  117. package/dist/adapters/postgresql/tools/citext.js +221 -163
  118. package/dist/adapters/postgresql/tools/citext.js.map +1 -1
  119. package/dist/adapters/postgresql/tools/core/convenience.d.ts +9 -1
  120. package/dist/adapters/postgresql/tools/core/convenience.d.ts.map +1 -1
  121. package/dist/adapters/postgresql/tools/core/convenience.js +96 -9
  122. package/dist/adapters/postgresql/tools/core/convenience.js.map +1 -1
  123. package/dist/adapters/postgresql/tools/core/error-helpers.d.ts +48 -0
  124. package/dist/adapters/postgresql/tools/core/error-helpers.d.ts.map +1 -0
  125. package/dist/adapters/postgresql/tools/core/error-helpers.js +256 -0
  126. package/dist/adapters/postgresql/tools/core/error-helpers.js.map +1 -0
  127. package/dist/adapters/postgresql/tools/core/health.d.ts.map +1 -1
  128. package/dist/adapters/postgresql/tools/core/health.js +23 -6
  129. package/dist/adapters/postgresql/tools/core/health.js.map +1 -1
  130. package/dist/adapters/postgresql/tools/core/indexes.d.ts.map +1 -1
  131. package/dist/adapters/postgresql/tools/core/indexes.js +45 -4
  132. package/dist/adapters/postgresql/tools/core/indexes.js.map +1 -1
  133. package/dist/adapters/postgresql/tools/core/objects.d.ts.map +1 -1
  134. package/dist/adapters/postgresql/tools/core/objects.js +104 -85
  135. package/dist/adapters/postgresql/tools/core/objects.js.map +1 -1
  136. package/dist/adapters/postgresql/tools/core/query.d.ts.map +1 -1
  137. package/dist/adapters/postgresql/tools/core/query.js +100 -42
  138. package/dist/adapters/postgresql/tools/core/query.js.map +1 -1
  139. package/dist/adapters/postgresql/tools/core/schemas.d.ts +52 -25
  140. package/dist/adapters/postgresql/tools/core/schemas.d.ts.map +1 -1
  141. package/dist/adapters/postgresql/tools/core/schemas.js +55 -25
  142. package/dist/adapters/postgresql/tools/core/schemas.js.map +1 -1
  143. package/dist/adapters/postgresql/tools/core/tables.d.ts.map +1 -1
  144. package/dist/adapters/postgresql/tools/core/tables.js +74 -30
  145. package/dist/adapters/postgresql/tools/core/tables.js.map +1 -1
  146. package/dist/adapters/postgresql/tools/cron.d.ts.map +1 -1
  147. package/dist/adapters/postgresql/tools/cron.js +274 -179
  148. package/dist/adapters/postgresql/tools/cron.js.map +1 -1
  149. package/dist/adapters/postgresql/tools/jsonb/advanced.d.ts.map +1 -1
  150. package/dist/adapters/postgresql/tools/jsonb/advanced.js +372 -284
  151. package/dist/adapters/postgresql/tools/jsonb/advanced.js.map +1 -1
  152. package/dist/adapters/postgresql/tools/jsonb/basic.d.ts.map +1 -1
  153. package/dist/adapters/postgresql/tools/jsonb/basic.js +617 -398
  154. package/dist/adapters/postgresql/tools/jsonb/basic.js.map +1 -1
  155. package/dist/adapters/postgresql/tools/kcache.d.ts.map +1 -1
  156. package/dist/adapters/postgresql/tools/kcache.js +282 -220
  157. package/dist/adapters/postgresql/tools/kcache.js.map +1 -1
  158. package/dist/adapters/postgresql/tools/ltree.d.ts.map +1 -1
  159. package/dist/adapters/postgresql/tools/ltree.js +126 -35
  160. package/dist/adapters/postgresql/tools/ltree.js.map +1 -1
  161. package/dist/adapters/postgresql/tools/monitoring.d.ts.map +1 -1
  162. package/dist/adapters/postgresql/tools/monitoring.js +59 -40
  163. package/dist/adapters/postgresql/tools/monitoring.js.map +1 -1
  164. package/dist/adapters/postgresql/tools/partitioning.d.ts.map +1 -1
  165. package/dist/adapters/postgresql/tools/partitioning.js +150 -15
  166. package/dist/adapters/postgresql/tools/partitioning.js.map +1 -1
  167. package/dist/adapters/postgresql/tools/partman/management.d.ts.map +1 -1
  168. package/dist/adapters/postgresql/tools/partman/management.js +12 -5
  169. package/dist/adapters/postgresql/tools/partman/management.js.map +1 -1
  170. package/dist/adapters/postgresql/tools/partman/operations.d.ts.map +1 -1
  171. package/dist/adapters/postgresql/tools/partman/operations.js +135 -22
  172. package/dist/adapters/postgresql/tools/partman/operations.js.map +1 -1
  173. package/dist/adapters/postgresql/tools/performance/analysis.d.ts.map +1 -1
  174. package/dist/adapters/postgresql/tools/performance/analysis.js +264 -160
  175. package/dist/adapters/postgresql/tools/performance/analysis.js.map +1 -1
  176. package/dist/adapters/postgresql/tools/performance/explain.d.ts.map +1 -1
  177. package/dist/adapters/postgresql/tools/performance/explain.js +61 -21
  178. package/dist/adapters/postgresql/tools/performance/explain.js.map +1 -1
  179. package/dist/adapters/postgresql/tools/performance/monitoring.d.ts.map +1 -1
  180. package/dist/adapters/postgresql/tools/performance/monitoring.js +44 -7
  181. package/dist/adapters/postgresql/tools/performance/monitoring.js.map +1 -1
  182. package/dist/adapters/postgresql/tools/performance/optimization.d.ts.map +1 -1
  183. package/dist/adapters/postgresql/tools/performance/optimization.js +92 -81
  184. package/dist/adapters/postgresql/tools/performance/optimization.js.map +1 -1
  185. package/dist/adapters/postgresql/tools/performance/stats.d.ts.map +1 -1
  186. package/dist/adapters/postgresql/tools/performance/stats.js +128 -37
  187. package/dist/adapters/postgresql/tools/performance/stats.js.map +1 -1
  188. package/dist/adapters/postgresql/tools/pgcrypto.d.ts.map +1 -1
  189. package/dist/adapters/postgresql/tools/pgcrypto.js +242 -87
  190. package/dist/adapters/postgresql/tools/pgcrypto.js.map +1 -1
  191. package/dist/adapters/postgresql/tools/postgis/advanced.d.ts.map +1 -1
  192. package/dist/adapters/postgresql/tools/postgis/advanced.js +293 -201
  193. package/dist/adapters/postgresql/tools/postgis/advanced.js.map +1 -1
  194. package/dist/adapters/postgresql/tools/postgis/basic.d.ts.map +1 -1
  195. package/dist/adapters/postgresql/tools/postgis/basic.js +359 -249
  196. package/dist/adapters/postgresql/tools/postgis/basic.js.map +1 -1
  197. package/dist/adapters/postgresql/tools/postgis/standalone.d.ts.map +1 -1
  198. package/dist/adapters/postgresql/tools/postgis/standalone.js +135 -51
  199. package/dist/adapters/postgresql/tools/postgis/standalone.js.map +1 -1
  200. package/dist/adapters/postgresql/tools/schema.d.ts.map +1 -1
  201. package/dist/adapters/postgresql/tools/schema.js +515 -226
  202. package/dist/adapters/postgresql/tools/schema.js.map +1 -1
  203. package/dist/adapters/postgresql/tools/stats/advanced.d.ts.map +1 -1
  204. package/dist/adapters/postgresql/tools/stats/advanced.js +515 -476
  205. package/dist/adapters/postgresql/tools/stats/advanced.js.map +1 -1
  206. package/dist/adapters/postgresql/tools/stats/basic.d.ts.map +1 -1
  207. package/dist/adapters/postgresql/tools/stats/basic.js +302 -293
  208. package/dist/adapters/postgresql/tools/stats/basic.js.map +1 -1
  209. package/dist/adapters/postgresql/tools/text.d.ts.map +1 -1
  210. package/dist/adapters/postgresql/tools/text.js +398 -220
  211. package/dist/adapters/postgresql/tools/text.js.map +1 -1
  212. package/dist/adapters/postgresql/tools/transactions.d.ts.map +1 -1
  213. package/dist/adapters/postgresql/tools/transactions.js +157 -50
  214. package/dist/adapters/postgresql/tools/transactions.js.map +1 -1
  215. package/dist/adapters/postgresql/tools/vector/advanced.d.ts.map +1 -1
  216. package/dist/adapters/postgresql/tools/vector/advanced.js +70 -38
  217. package/dist/adapters/postgresql/tools/vector/advanced.js.map +1 -1
  218. package/dist/adapters/postgresql/tools/vector/basic.d.ts +8 -0
  219. package/dist/adapters/postgresql/tools/vector/basic.d.ts.map +1 -1
  220. package/dist/adapters/postgresql/tools/vector/basic.js +194 -82
  221. package/dist/adapters/postgresql/tools/vector/basic.js.map +1 -1
  222. package/dist/cli/args.d.ts +2 -0
  223. package/dist/cli/args.d.ts.map +1 -1
  224. package/dist/cli/args.js +15 -0
  225. package/dist/cli/args.js.map +1 -1
  226. package/dist/cli.js +7 -6
  227. package/dist/cli.js.map +1 -1
  228. package/dist/codemode/api.d.ts.map +1 -1
  229. package/dist/codemode/api.js +4 -3
  230. package/dist/codemode/api.js.map +1 -1
  231. package/dist/constants/ServerInstructions.d.ts +1 -1
  232. package/dist/constants/ServerInstructions.d.ts.map +1 -1
  233. package/dist/constants/ServerInstructions.js +76 -34
  234. package/dist/constants/ServerInstructions.js.map +1 -1
  235. package/dist/filtering/ToolConstants.d.ts +29 -13
  236. package/dist/filtering/ToolConstants.d.ts.map +1 -1
  237. package/dist/filtering/ToolConstants.js +44 -27
  238. package/dist/filtering/ToolConstants.js.map +1 -1
  239. package/dist/utils/logger.js +2 -2
  240. package/dist/utils/logger.js.map +1 -1
  241. package/dist/utils/progress-utils.js +1 -1
  242. package/dist/utils/progress-utils.js.map +1 -1
  243. package/package.json +13 -9
@@ -10,6 +10,7 @@ import { buildProgressContext, sendProgress, } from "../../../../utils/progress-
10
10
  import { CopyExportSchema, CopyExportSchemaBase, DumpSchemaSchema,
11
11
  // Output schemas
12
12
  DumpTableOutputSchema, DumpSchemaOutputSchema, CopyExportOutputSchema, CopyImportOutputSchema, } from "../../schemas/index.js";
13
+ import { formatPostgresError } from "../core/error-helpers.js";
13
14
  export function createDumpTableTool(adapter) {
14
15
  return {
15
16
  name: "pg_dump_table",
@@ -33,35 +34,36 @@ export function createDumpTableTool(adapter) {
33
34
  annotations: readOnly("Dump Table"),
34
35
  icons: getToolIcons("backup", readOnly("Dump Table")),
35
36
  handler: async (params, _context) => {
36
- const parsed = params;
37
- // Validate required table parameter
38
- if (!parsed.table || parsed.table.trim() === "") {
39
- throw new Error("table parameter is required");
40
- }
41
- // Parse schema.table format (e.g., 'public.users' -> schema='public', table='users')
42
- // If table contains a dot, always parse it as schema.table (embedded schema takes priority)
43
- let tableName = parsed.table;
44
- let schemaName = parsed.schema ?? "public";
45
- if (parsed.table.includes(".")) {
46
- const parts = parsed.table.split(".");
47
- if (parts.length === 2 && parts[0] && parts[1]) {
48
- schemaName = parts[0];
49
- tableName = parts[1];
37
+ try {
38
+ const parsed = params;
39
+ // Validate required table parameter
40
+ if (!parsed.table || parsed.table.trim() === "") {
41
+ throw new Error("table parameter is required");
50
42
  }
51
- }
52
- // Check if it's a sequence by querying pg_class
53
- const relkindResult = await adapter.executeQuery(`
43
+ // Parse schema.table format (e.g., 'public.users' -> schema='public', table='users')
44
+ // If table contains a dot, always parse it as schema.table (embedded schema takes priority)
45
+ let tableName = parsed.table;
46
+ let schemaName = parsed.schema ?? "public";
47
+ if (parsed.table.includes(".")) {
48
+ const parts = parsed.table.split(".");
49
+ if (parts.length === 2 && parts[0] && parts[1]) {
50
+ schemaName = parts[0];
51
+ tableName = parts[1];
52
+ }
53
+ }
54
+ // Check if it's a sequence by querying pg_class
55
+ const relkindResult = await adapter.executeQuery(`
54
56
  SELECT relkind FROM pg_class c
55
57
  JOIN pg_namespace n ON c.relnamespace = n.oid
56
58
  WHERE n.nspname = '${schemaName}' AND c.relname = '${tableName}'
57
59
  `);
58
- const relkind = relkindResult.rows?.[0]?.["relkind"];
59
- // relkind 'S' = sequence
60
- if (relkind === "S") {
61
- // Use pg_sequence system catalog (works in all PostgreSQL versions 10+)
62
- // Fallback to basic DDL if query fails
63
- try {
64
- const seqInfo = await adapter.executeQuery(`
60
+ const relkind = relkindResult.rows?.[0]?.["relkind"];
61
+ // relkind 'S' = sequence
62
+ if (relkind === "S") {
63
+ // Use pg_sequence system catalog (works in all PostgreSQL versions 10+)
64
+ // Fallback to basic DDL if query fails
65
+ try {
66
+ const seqInfo = await adapter.executeQuery(`
65
67
  SELECT s.seqstart as start_value, s.seqincrement as increment_by,
66
68
  s.seqmin as min_value, s.seqmax as max_value, s.seqcycle as cycle
67
69
  FROM pg_sequence s
@@ -69,91 +71,91 @@ export function createDumpTableTool(adapter) {
69
71
  JOIN pg_namespace n ON c.relnamespace = n.oid
70
72
  WHERE n.nspname = '${schemaName}' AND c.relname = '${tableName}'
71
73
  `);
72
- const seq = seqInfo.rows?.[0];
73
- if (seq !== undefined) {
74
- const startVal = typeof seq["start_value"] === "number" ||
75
- typeof seq["start_value"] === "bigint"
76
- ? String(seq["start_value"])
77
- : null;
78
- const incrVal = typeof seq["increment_by"] === "number" ||
79
- typeof seq["increment_by"] === "bigint"
80
- ? Number(seq["increment_by"])
81
- : null;
82
- const minVal = typeof seq["min_value"] === "number" ||
83
- typeof seq["min_value"] === "bigint"
84
- ? String(seq["min_value"])
85
- : null;
86
- const maxVal = typeof seq["max_value"] === "number" ||
87
- typeof seq["max_value"] === "bigint"
88
- ? String(seq["max_value"])
89
- : null;
90
- const startValue = startVal !== null ? ` START ${startVal}` : "";
91
- const increment = incrVal !== null && incrVal !== 1
92
- ? ` INCREMENT ${String(incrVal)}`
93
- : "";
94
- const minValue = minVal !== null ? ` MINVALUE ${minVal}` : "";
95
- const maxValue = maxVal !== null ? ` MAXVALUE ${maxVal}` : "";
96
- const cycle = seq["cycle"] === true ? " CYCLE" : "";
97
- const ddl = `CREATE SEQUENCE "${schemaName}"."${tableName}"${startValue}${increment}${minValue}${maxValue}${cycle};`;
98
- return {
99
- ddl,
100
- type: "sequence",
101
- note: "Use pg_list_sequences to see all sequences.",
102
- ...(parsed.includeData === true && {
103
- warning: "includeData is ignored for sequences - sequences have no row data to export",
104
- }),
105
- };
74
+ const seq = seqInfo.rows?.[0];
75
+ if (seq !== undefined) {
76
+ const startVal = typeof seq["start_value"] === "number" ||
77
+ typeof seq["start_value"] === "bigint"
78
+ ? String(seq["start_value"])
79
+ : null;
80
+ const incrVal = typeof seq["increment_by"] === "number" ||
81
+ typeof seq["increment_by"] === "bigint"
82
+ ? Number(seq["increment_by"])
83
+ : null;
84
+ const minVal = typeof seq["min_value"] === "number" ||
85
+ typeof seq["min_value"] === "bigint"
86
+ ? String(seq["min_value"])
87
+ : null;
88
+ const maxVal = typeof seq["max_value"] === "number" ||
89
+ typeof seq["max_value"] === "bigint"
90
+ ? String(seq["max_value"])
91
+ : null;
92
+ const startValue = startVal !== null ? ` START ${startVal}` : "";
93
+ const increment = incrVal !== null && incrVal !== 1
94
+ ? ` INCREMENT ${String(incrVal)}`
95
+ : "";
96
+ const minValue = minVal !== null ? ` MINVALUE ${minVal}` : "";
97
+ const maxValue = maxVal !== null ? ` MAXVALUE ${maxVal}` : "";
98
+ const cycle = seq["cycle"] === true ? " CYCLE" : "";
99
+ const ddl = `CREATE SEQUENCE "${schemaName}"."${tableName}"${startValue}${increment}${minValue}${maxValue}${cycle};`;
100
+ return {
101
+ ddl,
102
+ type: "sequence",
103
+ note: "Use pg_list_sequences to see all sequences.",
104
+ ...(parsed.includeData === true && {
105
+ warning: "includeData is ignored for sequences - sequences have no row data to export",
106
+ }),
107
+ };
108
+ }
106
109
  }
110
+ catch {
111
+ // Query failed, use basic DDL
112
+ }
113
+ // Fallback if pg_sequence query fails
114
+ return {
115
+ ddl: `CREATE SEQUENCE "${schemaName}"."${tableName}";`,
116
+ type: "sequence",
117
+ note: "Basic CREATE SEQUENCE. Use pg_list_sequences for details.",
118
+ ...(parsed.includeData === true && {
119
+ warning: "includeData is ignored for sequences - sequences have no row data to export",
120
+ }),
121
+ };
107
122
  }
108
- catch {
109
- // Query failed, use basic DDL
110
- }
111
- // Fallback if pg_sequence query fails
112
- return {
113
- ddl: `CREATE SEQUENCE "${schemaName}"."${tableName}";`,
114
- type: "sequence",
115
- note: "Basic CREATE SEQUENCE. Use pg_list_sequences for details.",
116
- ...(parsed.includeData === true && {
117
- warning: "includeData is ignored for sequences - sequences have no row data to export",
118
- }),
119
- };
120
- }
121
- // relkind 'v' = view, 'm' = materialized view
122
- if (relkind === "v" || relkind === "m") {
123
- try {
124
- const viewDefResult = await adapter.executeQuery(`
123
+ // relkind 'v' = view, 'm' = materialized view
124
+ if (relkind === "v" || relkind === "m") {
125
+ try {
126
+ const viewDefResult = await adapter.executeQuery(`
125
127
  SELECT definition FROM pg_views
126
128
  WHERE schemaname = '${schemaName}' AND viewname = '${tableName}'
127
129
  `);
128
- const definition = viewDefResult.rows?.[0]?.["definition"];
129
- if (typeof definition === "string") {
130
- const createType = relkind === "m" ? "MATERIALIZED VIEW" : "VIEW";
131
- const ddl = `CREATE ${createType} "${schemaName}"."${tableName}" AS\n${definition.trim()}`;
132
- return {
133
- ddl,
134
- type: relkind === "m" ? "materialized_view" : "view",
135
- note: `Use pg_list_views to see all views.`,
136
- };
130
+ const definition = viewDefResult.rows?.[0]?.["definition"];
131
+ if (typeof definition === "string") {
132
+ const createType = relkind === "m" ? "MATERIALIZED VIEW" : "VIEW";
133
+ const ddl = `CREATE ${createType} "${schemaName}"."${tableName}" AS\n${definition.trim()}`;
134
+ return {
135
+ ddl,
136
+ type: relkind === "m" ? "materialized_view" : "view",
137
+ note: `Use pg_list_views to see all views.`,
138
+ };
139
+ }
137
140
  }
141
+ catch {
142
+ // Query failed, use basic DDL
143
+ }
144
+ // Fallback for views
145
+ const createType = relkind === "m" ? "MATERIALIZED VIEW" : "VIEW";
146
+ return {
147
+ ddl: `-- Unable to retrieve ${createType.toLowerCase()} definition\nCREATE ${createType} "${schemaName}"."${tableName}" AS SELECT ...;`,
148
+ type: relkind === "m" ? "materialized_view" : "view",
149
+ note: "View definition could not be retrieved. Use pg_list_views for details.",
150
+ };
138
151
  }
139
- catch {
140
- // Query failed, use basic DDL
141
- }
142
- // Fallback for views
143
- const createType = relkind === "m" ? "MATERIALIZED VIEW" : "VIEW";
144
- return {
145
- ddl: `-- Unable to retrieve ${createType.toLowerCase()} definition\nCREATE ${createType} "${schemaName}"."${tableName}" AS SELECT ...;`,
146
- type: relkind === "m" ? "materialized_view" : "view",
147
- note: "View definition could not be retrieved. Use pg_list_views for details.",
148
- };
149
- }
150
- // Check if it's a partitioned table (relkind 'p') and get partition info
151
- let partitionClause = "";
152
- const isPartitionedTable = relkind === "p";
153
- if (isPartitionedTable) {
154
- try {
155
- // Query pg_partitioned_table to get partition strategy and key columns
156
- const partInfo = await adapter.executeQuery(`
152
+ // Check if it's a partitioned table (relkind 'p') and get partition info
153
+ let partitionClause = "";
154
+ const isPartitionedTable = relkind === "p";
155
+ if (isPartitionedTable) {
156
+ try {
157
+ // Query pg_partitioned_table to get partition strategy and key columns
158
+ const partInfo = await adapter.executeQuery(`
157
159
  SELECT pt.partstrat,
158
160
  array_agg(a.attname ORDER BY partattrs.ord) as partition_columns
159
161
  FROM pg_partitioned_table pt
@@ -164,128 +166,137 @@ export function createDumpTableTool(adapter) {
164
166
  WHERE n.nspname = '${schemaName}' AND c.relname = '${tableName}'
165
167
  GROUP BY pt.partstrat
166
168
  `);
167
- const partRow = partInfo.rows?.[0];
168
- if (partRow) {
169
- const strategy = partRow["partstrat"];
170
- const columns = partRow["partition_columns"];
171
- // Map strategy code to keyword
172
- const strategyMap = {
173
- r: "RANGE",
174
- l: "LIST",
175
- h: "HASH",
176
- };
177
- const strategyKeyword = typeof strategy === "string"
178
- ? (strategyMap[strategy] ?? "RANGE")
179
- : "RANGE";
180
- // Build column list - PostgreSQL returns array_agg as string like "{col1,col2}"
181
- let columnList = "";
182
- if (Array.isArray(columns)) {
183
- columnList = columns.map((c) => `"${String(c)}"`).join(", ");
184
- }
185
- else if (typeof columns === "string") {
186
- // Parse PostgreSQL array literal format: "{col1,col2}" -> ["col1", "col2"]
187
- const parsed = columns
188
- .replace(/^\{/, "")
189
- .replace(/\}$/, "")
190
- .split(",")
191
- .filter((c) => c.length > 0);
192
- columnList = parsed.map((c) => `"${c.trim()}"`).join(", ");
193
- }
194
- if (columnList) {
195
- partitionClause = ` PARTITION BY ${strategyKeyword} (${columnList})`;
169
+ const partRow = partInfo.rows?.[0];
170
+ if (partRow) {
171
+ const strategy = partRow["partstrat"];
172
+ const columns = partRow["partition_columns"];
173
+ // Map strategy code to keyword
174
+ const strategyMap = {
175
+ r: "RANGE",
176
+ l: "LIST",
177
+ h: "HASH",
178
+ };
179
+ const strategyKeyword = typeof strategy === "string"
180
+ ? (strategyMap[strategy] ?? "RANGE")
181
+ : "RANGE";
182
+ // Build column list - PostgreSQL returns array_agg as string like "{col1,col2}"
183
+ let columnList = "";
184
+ if (Array.isArray(columns)) {
185
+ columnList = columns.map((c) => `"${String(c)}"`).join(", ");
186
+ }
187
+ else if (typeof columns === "string") {
188
+ // Parse PostgreSQL array literal format: "{col1,col2}" -> ["col1", "col2"]
189
+ const parsed = columns
190
+ .replace(/^\{/, "")
191
+ .replace(/\}$/, "")
192
+ .split(",")
193
+ .filter((c) => c.length > 0);
194
+ columnList = parsed.map((c) => `"${c.trim()}"`).join(", ");
195
+ }
196
+ if (columnList) {
197
+ partitionClause = ` PARTITION BY ${strategyKeyword} (${columnList})`;
198
+ }
196
199
  }
197
200
  }
198
- }
199
- catch {
200
- // Partition info query failed, continue without partition clause
201
- }
202
- }
203
- const tableInfo = await adapter.describeTable(tableName, schemaName);
204
- const columns = tableInfo.columns
205
- ?.map((col) => {
206
- let def = ` "${col.name}" ${col.type}`;
207
- if (col.defaultValue !== undefined && col.defaultValue !== null) {
208
- let defaultStr;
209
- if (typeof col.defaultValue === "object") {
210
- defaultStr = JSON.stringify(col.defaultValue);
201
+ catch {
202
+ // Partition info query failed, continue without partition clause
211
203
  }
212
- else if (typeof col.defaultValue === "string" ||
213
- typeof col.defaultValue === "number" ||
214
- typeof col.defaultValue === "boolean") {
215
- defaultStr = String(col.defaultValue);
216
- }
217
- else {
218
- defaultStr = JSON.stringify(col.defaultValue);
219
- }
220
- def += ` DEFAULT ${defaultStr}`;
221
204
  }
222
- if (!col.nullable)
223
- def += " NOT NULL";
224
- return def;
225
- })
226
- .join(",\n") ?? "";
227
- const createTable = `CREATE TABLE "${schemaName}"."${tableName}" (\n${columns}\n)${partitionClause};`;
228
- const result = {
229
- ddl: createTable,
230
- type: isPartitionedTable ? "partitioned_table" : "table",
231
- note: isPartitionedTable
232
- ? "For partition children use pg_list_partitions, for indexes use pg_get_indexes, for constraints use pg_get_constraints."
233
- : "Basic CREATE TABLE only. For indexes use pg_get_indexes, for constraints use pg_get_constraints.",
234
- };
235
- if (parsed.includeData) {
236
- // Default limit is 500 to prevent large payloads, 0 means no limit
237
- const effectiveLimit = parsed.limit === 0 ? null : (parsed.limit ?? 500);
238
- const limitClause = effectiveLimit !== null ? ` LIMIT ${String(effectiveLimit)}` : "";
239
- const dataResult = await adapter.executeQuery(`SELECT * FROM "${schemaName}"."${tableName}"${limitClause}`);
240
- if (dataResult.rows !== undefined && dataResult.rows.length > 0) {
241
- const firstRow = dataResult.rows[0];
242
- if (firstRow === undefined)
243
- return result;
244
- const cols = Object.keys(firstRow)
245
- .map((c) => `"${c}"`)
246
- .join(", ");
247
- const inserts = dataResult.rows
248
- .map((row) => {
249
- const vals = Object.entries(row)
250
- .map(([, value]) => {
251
- if (value === null)
252
- return "NULL";
253
- // Handle Date objects - format as PostgreSQL timestamp
254
- if (value instanceof Date) {
255
- const iso = value.toISOString();
256
- // Convert ISO 8601 to PostgreSQL format: 'YYYY-MM-DD HH:MM:SS.mmm'
257
- const pgTimestamp = iso.replace("T", " ").replace("Z", "");
258
- return `'${pgTimestamp}'`;
259
- }
260
- if (typeof value === "string") {
261
- // Escape backslashes first, then single quotes (PostgreSQL string literal escaping)
262
- const escaped = value
263
- .replace(/\\/g, "\\\\")
264
- .replace(/'/g, "''");
265
- // Check if string looks like an ISO timestamp
266
- if (/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}/.test(value)) {
267
- // Convert ISO format to PostgreSQL format
268
- const pgTimestamp = value
205
+ const tableInfo = await adapter.describeTable(tableName, schemaName);
206
+ const columns = tableInfo.columns
207
+ ?.map((col) => {
208
+ let def = ` "${col.name}" ${col.type}`;
209
+ if (col.defaultValue !== undefined && col.defaultValue !== null) {
210
+ let defaultStr;
211
+ if (typeof col.defaultValue === "object") {
212
+ defaultStr = JSON.stringify(col.defaultValue);
213
+ }
214
+ else if (typeof col.defaultValue === "string" ||
215
+ typeof col.defaultValue === "number" ||
216
+ typeof col.defaultValue === "boolean") {
217
+ defaultStr = String(col.defaultValue);
218
+ }
219
+ else {
220
+ defaultStr = JSON.stringify(col.defaultValue);
221
+ }
222
+ def += ` DEFAULT ${defaultStr}`;
223
+ }
224
+ if (!col.nullable)
225
+ def += " NOT NULL";
226
+ return def;
227
+ })
228
+ .join(",\n") ?? "";
229
+ const createTable = `CREATE TABLE "${schemaName}"."${tableName}" (\n${columns}\n)${partitionClause};`;
230
+ const result = {
231
+ ddl: createTable,
232
+ type: isPartitionedTable ? "partitioned_table" : "table",
233
+ note: isPartitionedTable
234
+ ? "For partition children use pg_list_partitions, for indexes use pg_get_indexes, for constraints use pg_get_constraints."
235
+ : "Basic CREATE TABLE only. For indexes use pg_get_indexes, for constraints use pg_get_constraints.",
236
+ };
237
+ if (parsed.includeData) {
238
+ // Default limit is 500 to prevent large payloads, 0 means no limit
239
+ const effectiveLimit = parsed.limit === 0 ? null : (parsed.limit ?? 500);
240
+ const limitClause = effectiveLimit !== null ? ` LIMIT ${String(effectiveLimit)}` : "";
241
+ const dataResult = await adapter.executeQuery(`SELECT * FROM "${schemaName}"."${tableName}"${limitClause}`);
242
+ if (dataResult.rows !== undefined && dataResult.rows.length > 0) {
243
+ const firstRow = dataResult.rows[0];
244
+ if (firstRow === undefined)
245
+ return result;
246
+ const cols = Object.keys(firstRow)
247
+ .map((c) => `"${c}"`)
248
+ .join(", ");
249
+ const inserts = dataResult.rows
250
+ .map((row) => {
251
+ const vals = Object.entries(row)
252
+ .map(([, value]) => {
253
+ if (value === null)
254
+ return "NULL";
255
+ // Handle Date objects - format as PostgreSQL timestamp
256
+ if (value instanceof Date) {
257
+ const iso = value.toISOString();
258
+ // Convert ISO 8601 to PostgreSQL format: 'YYYY-MM-DD HH:MM:SS.mmm'
259
+ const pgTimestamp = iso
269
260
  .replace("T", " ")
270
- .replace("Z", "")
271
- .replace(/\.\d+$/, "");
272
- return `'${pgTimestamp.replace(/\\/g, "\\\\").replace(/'/g, "''")}'`;
261
+ .replace("Z", "");
262
+ return `'${pgTimestamp}'`;
273
263
  }
274
- return `'${escaped}'`;
275
- }
276
- if (typeof value === "number" || typeof value === "boolean")
277
- return String(value);
278
- // For objects (JSONB, arrays), use PostgreSQL JSONB literal
279
- return `'${JSON.stringify(value).replace(/\\/g, "\\\\").replace(/'/g, "''")}'::jsonb`;
264
+ if (typeof value === "string") {
265
+ // Escape backslashes first, then single quotes (PostgreSQL string literal escaping)
266
+ const escaped = value
267
+ .replace(/\\/g, "\\\\")
268
+ .replace(/'/g, "''");
269
+ // Check if string looks like an ISO timestamp
270
+ if (/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}/.test(value)) {
271
+ // Convert ISO format to PostgreSQL format
272
+ const pgTimestamp = value
273
+ .replace("T", " ")
274
+ .replace("Z", "")
275
+ .replace(/\.\d+$/, "");
276
+ return `'${pgTimestamp.replace(/\\/g, "\\\\").replace(/'/g, "''")}'`;
277
+ }
278
+ return `'${escaped}'`;
279
+ }
280
+ if (typeof value === "number" || typeof value === "boolean")
281
+ return String(value);
282
+ // For objects (JSONB, arrays), use PostgreSQL JSONB literal
283
+ return `'${JSON.stringify(value).replace(/\\/g, "\\\\").replace(/'/g, "''")}'::jsonb`;
284
+ })
285
+ .join(", ");
286
+ return `INSERT INTO "${schemaName}"."${tableName}" (${cols}) VALUES (${vals});`;
280
287
  })
281
- .join(", ");
282
- return `INSERT INTO "${schemaName}"."${tableName}" (${cols}) VALUES (${vals});`;
283
- })
284
- .join("\n");
285
- result.insertStatements = inserts;
288
+ .join("\n");
289
+ result.insertStatements = inserts;
290
+ }
286
291
  }
292
+ return result;
293
+ }
294
+ catch (error) {
295
+ return {
296
+ success: false,
297
+ error: formatPostgresError(error, { tool: "pg_dump_table" }),
298
+ };
287
299
  }
288
- return result;
289
300
  },
290
301
  };
291
302
  }
@@ -298,8 +309,7 @@ export function createDumpSchemaTool(_adapter) {
298
309
  outputSchema: DumpSchemaOutputSchema,
299
310
  annotations: readOnly("Dump Schema"),
300
311
  icons: getToolIcons("backup", readOnly("Dump Schema")),
301
- // eslint-disable-next-line @typescript-eslint/require-await
302
- handler: async (params, _context) => {
312
+ handler: (params, _context) => {
303
313
  const { table, schema, filename } = DumpSchemaSchema.parse(params);
304
314
  let command = "pg_dump";
305
315
  command += " --format=custom";
@@ -317,7 +327,7 @@ export function createDumpSchemaTool(_adapter) {
317
327
  : undefined;
318
328
  command += ` --file=${outputFilename}`;
319
329
  command += " $POSTGRES_CONNECTION_STRING";
320
- return {
330
+ return Promise.resolve({
321
331
  command,
322
332
  ...(schema !== undefined &&
323
333
  table !== undefined && {
@@ -330,7 +340,7 @@ export function createDumpSchemaTool(_adapter) {
330
340
  "Add --data-only to exclude schema",
331
341
  "Add --schema-only to exclude data",
332
342
  ],
333
- };
343
+ });
334
344
  },
335
345
  };
336
346
  }
@@ -344,144 +354,156 @@ export function createCopyExportTool(adapter) {
344
354
  annotations: readOnly("Copy Export"),
345
355
  icons: getToolIcons("backup", readOnly("Copy Export")),
346
356
  handler: async (params, context) => {
347
- const progress = buildProgressContext(context);
348
- await sendProgress(progress, 1, 3, "Preparing COPY export...");
349
- const { query, format, header, delimiter, conflictWarning, effectiveLimit, } = CopyExportSchema.parse(params); // Use transform for validation
350
- const options = [];
351
- options.push(`FORMAT ${format ?? "csv"}`);
352
- if (header !== false)
353
- options.push("HEADER");
354
- if (delimiter)
355
- options.push(`DELIMITER '${delimiter}'`);
356
- const copyCommand = `COPY (${query}) TO STDOUT WITH (${options.join(", ")})`;
357
- void copyCommand;
358
- await sendProgress(progress, 2, 3, "Executing query...");
359
- const result = await adapter.executeQuery(query);
360
- // Handle CSV format (default)
361
- if (format === "csv" || format === undefined) {
362
- if (result.rows === undefined || result.rows.length === 0) {
363
- return {
364
- data: "",
365
- rowCount: 0,
366
- note: "Query returned no rows. Headers omitted for empty results.",
367
- ...(conflictWarning !== undefined
368
- ? { warning: conflictWarning }
369
- : {}),
370
- };
371
- }
372
- const firstRowData = result.rows[0];
373
- if (firstRowData === undefined) {
357
+ try {
358
+ const progress = buildProgressContext(context);
359
+ await sendProgress(progress, 1, 3, "Preparing COPY export...");
360
+ const { query, format, header, delimiter, conflictWarning, effectiveLimit, } = CopyExportSchema.parse(params); // Use transform for validation
361
+ const options = [];
362
+ options.push(`FORMAT ${format ?? "csv"}`);
363
+ if (header !== false)
364
+ options.push("HEADER");
365
+ if (delimiter)
366
+ options.push(`DELIMITER '${delimiter}'`);
367
+ const copyCommand = `COPY (${query}) TO STDOUT WITH (${options.join(", ")})`;
368
+ void copyCommand;
369
+ await sendProgress(progress, 2, 3, "Executing query...");
370
+ const result = await adapter.executeQuery(query);
371
+ // Handle CSV format (default)
372
+ if (format === "csv" || format === undefined) {
373
+ if (result.rows === undefined || result.rows.length === 0) {
374
+ return {
375
+ data: "",
376
+ rowCount: 0,
377
+ note: "Query returned no rows. Headers omitted for empty results.",
378
+ ...(conflictWarning !== undefined
379
+ ? { warning: conflictWarning }
380
+ : {}),
381
+ };
382
+ }
383
+ const firstRowData = result.rows[0];
384
+ if (firstRowData === undefined) {
385
+ return {
386
+ data: "",
387
+ rowCount: 0,
388
+ note: "Query returned no rows. Headers omitted for empty results.",
389
+ ...(conflictWarning !== undefined
390
+ ? { warning: conflictWarning }
391
+ : {}),
392
+ };
393
+ }
394
+ const headers = Object.keys(firstRowData);
395
+ const delim = delimiter ?? ",";
396
+ const lines = [];
397
+ if (header !== false) {
398
+ lines.push(headers.join(delim));
399
+ }
400
+ for (const row of result.rows) {
401
+ lines.push(headers
402
+ .map((h) => {
403
+ const v = row[h];
404
+ if (v === null)
405
+ return "";
406
+ if (typeof v === "object")
407
+ return JSON.stringify(v);
408
+ if (typeof v !== "string" &&
409
+ typeof v !== "number" &&
410
+ typeof v !== "boolean") {
411
+ return JSON.stringify(v);
412
+ }
413
+ const s = String(v);
414
+ return s.includes(delim) ||
415
+ s.includes('"') ||
416
+ s.includes("\n")
417
+ ? `"${s.replace(/"/g, '""')}"`
418
+ : s;
419
+ })
420
+ .join(delim));
421
+ }
422
+ // Mark as truncated if any limit was applied AND rows returned equals that limit
423
+ // This indicates there are likely more rows available
424
+ const isTruncated = effectiveLimit !== undefined &&
425
+ result.rows.length === effectiveLimit;
426
+ await sendProgress(progress, 3, 3, "Export complete");
374
427
  return {
375
- data: "",
376
- rowCount: 0,
377
- note: "Query returned no rows. Headers omitted for empty results.",
428
+ data: lines.join("\n"),
429
+ rowCount: result.rows.length,
430
+ ...(isTruncated ? { truncated: true, limit: effectiveLimit } : {}),
378
431
  ...(conflictWarning !== undefined
379
432
  ? { warning: conflictWarning }
380
433
  : {}),
381
434
  };
382
435
  }
383
- const headers = Object.keys(firstRowData);
384
- const delim = delimiter ?? ",";
385
- const lines = [];
386
- if (header !== false) {
387
- lines.push(headers.join(delim));
388
- }
389
- for (const row of result.rows) {
390
- lines.push(headers
391
- .map((h) => {
392
- const v = row[h];
393
- if (v === null)
394
- return "";
395
- if (typeof v === "object")
396
- return JSON.stringify(v);
397
- if (typeof v !== "string" &&
398
- typeof v !== "number" &&
399
- typeof v !== "boolean") {
436
+ // Handle TEXT format - tab-delimited with \N for NULLs
437
+ if (format === "text") {
438
+ if (result.rows === undefined || result.rows.length === 0) {
439
+ return {
440
+ data: "",
441
+ rowCount: 0,
442
+ note: "Query returned no rows. Headers omitted for empty results.",
443
+ ...(conflictWarning !== undefined
444
+ ? { warning: conflictWarning }
445
+ : {}),
446
+ };
447
+ }
448
+ const firstRowData = result.rows[0];
449
+ if (firstRowData === undefined) {
450
+ return {
451
+ data: "",
452
+ rowCount: 0,
453
+ note: "Query returned no rows. Headers omitted for empty results.",
454
+ ...(conflictWarning !== undefined
455
+ ? { warning: conflictWarning }
456
+ : {}),
457
+ };
458
+ }
459
+ const headers = Object.keys(firstRowData);
460
+ const delim = delimiter ?? "\t";
461
+ const lines = [];
462
+ if (header !== false) {
463
+ lines.push(headers.join(delim));
464
+ }
465
+ for (const row of result.rows) {
466
+ lines.push(headers
467
+ .map((h) => {
468
+ const v = row[h];
469
+ if (v === null)
470
+ return "\\N"; // PostgreSQL NULL representation in text format
471
+ if (typeof v === "object")
472
+ return JSON.stringify(v);
473
+ if (typeof v === "string" ||
474
+ typeof v === "number" ||
475
+ typeof v === "boolean") {
476
+ return String(v);
477
+ }
478
+ // Fallback for any other type
400
479
  return JSON.stringify(v);
401
- }
402
- const s = String(v);
403
- return s.includes(delim) || s.includes('"') || s.includes("\n")
404
- ? `"${s.replace(/"/g, '""')}"`
405
- : s;
406
- })
407
- .join(delim));
408
- }
409
- // Mark as truncated if any limit was applied AND rows returned equals that limit
410
- // This indicates there are likely more rows available
411
- const isTruncated = effectiveLimit !== undefined && result.rows.length === effectiveLimit;
412
- await sendProgress(progress, 3, 3, "Export complete");
413
- return {
414
- data: lines.join("\n"),
415
- rowCount: result.rows.length,
416
- ...(isTruncated ? { truncated: true, limit: effectiveLimit } : {}),
417
- ...(conflictWarning !== undefined
418
- ? { warning: conflictWarning }
419
- : {}),
420
- };
421
- }
422
- // Handle TEXT format - tab-delimited with \N for NULLs
423
- if (format === "text") {
424
- if (result.rows === undefined || result.rows.length === 0) {
425
- return {
426
- data: "",
427
- rowCount: 0,
428
- note: "Query returned no rows. Headers omitted for empty results.",
429
- ...(conflictWarning !== undefined
430
- ? { warning: conflictWarning }
431
- : {}),
432
- };
433
- }
434
- const firstRowData = result.rows[0];
435
- if (firstRowData === undefined) {
480
+ })
481
+ .join(delim));
482
+ }
483
+ // Mark as truncated if any limit was applied AND rows returned equals that limit
484
+ // This indicates there are likely more rows available
485
+ const isTruncated = effectiveLimit !== undefined &&
486
+ result.rows.length === effectiveLimit;
487
+ await sendProgress(progress, 3, 3, "Export complete");
436
488
  return {
437
- data: "",
438
- rowCount: 0,
439
- note: "Query returned no rows. Headers omitted for empty results.",
489
+ data: lines.join("\n"),
490
+ rowCount: result.rows.length,
491
+ ...(isTruncated ? { truncated: true, limit: effectiveLimit } : {}),
440
492
  ...(conflictWarning !== undefined
441
493
  ? { warning: conflictWarning }
442
494
  : {}),
443
495
  };
444
496
  }
445
- const headers = Object.keys(firstRowData);
446
- const delim = delimiter ?? "\t";
447
- const lines = [];
448
- if (header !== false) {
449
- lines.push(headers.join(delim));
450
- }
451
- for (const row of result.rows) {
452
- lines.push(headers
453
- .map((h) => {
454
- const v = row[h];
455
- if (v === null)
456
- return "\\N"; // PostgreSQL NULL representation in text format
457
- if (typeof v === "object")
458
- return JSON.stringify(v);
459
- if (typeof v === "string" ||
460
- typeof v === "number" ||
461
- typeof v === "boolean") {
462
- return String(v);
463
- }
464
- // Fallback for any other type
465
- return JSON.stringify(v);
466
- })
467
- .join(delim));
468
- }
469
- // Mark as truncated if any limit was applied AND rows returned equals that limit
470
- // This indicates there are likely more rows available
471
- const isTruncated = effectiveLimit !== undefined && result.rows.length === effectiveLimit;
472
- await sendProgress(progress, 3, 3, "Export complete");
497
+ // Handle BINARY format - not supported via MCP protocol
498
+ // Binary data cannot be safely serialized to JSON without corruption
499
+ throw new Error('Binary format is not supported via MCP protocol. Use format: "csv" or "text" instead. For binary export, use pg_dump_schema to generate a pg_dump command.');
500
+ }
501
+ catch (error) {
473
502
  return {
474
- data: lines.join("\n"),
475
- rowCount: result.rows.length,
476
- ...(isTruncated ? { truncated: true, limit: effectiveLimit } : {}),
477
- ...(conflictWarning !== undefined
478
- ? { warning: conflictWarning }
479
- : {}),
503
+ success: false,
504
+ error: formatPostgresError(error, { tool: "pg_copy_export" }),
480
505
  };
481
506
  }
482
- // Handle BINARY format - not supported via MCP protocol
483
- // Binary data cannot be safely serialized to JSON without corruption
484
- throw new Error('Binary format is not supported via MCP protocol. Use format: "csv" or "text" instead. For binary export, use pg_dump_schema to generate a pg_dump command.');
485
507
  },
486
508
  };
487
509
  }
@@ -505,53 +527,54 @@ export function createCopyImportTool(_adapter) {
505
527
  outputSchema: CopyImportOutputSchema,
506
528
  annotations: write("Copy Import"),
507
529
  icons: getToolIcons("backup", write("Copy Import")),
508
- // eslint-disable-next-line @typescript-eslint/require-await
509
- handler: async (params, _context) => {
510
- const rawParams = params;
511
- // Resolve tableName alias to table
512
- const tableValue = rawParams.table ?? rawParams.tableName;
513
- if (!tableValue) {
514
- throw new Error("table parameter is required");
515
- }
516
- const parsed = {
517
- ...rawParams,
518
- table: tableValue,
519
- };
520
- // Parse schema.table format (e.g., 'public.users' -> schema='public', table='users')
521
- // If table contains a dot, always parse it as schema.table (embedded schema takes priority)
522
- let tableNamePart = parsed.table;
523
- let schemaNamePart = parsed.schema;
524
- if (parsed.table.includes(".")) {
525
- const parts = parsed.table.split(".");
526
- if (parts.length === 2 && parts[0] && parts[1]) {
527
- schemaNamePart = parts[0];
528
- tableNamePart = parts[1];
530
+ handler: (params, _context) => {
531
+ return Promise.resolve().then(() => {
532
+ const rawParams = params;
533
+ // Resolve tableName alias to table
534
+ const tableValue = rawParams.table ?? rawParams.tableName;
535
+ if (!tableValue) {
536
+ throw new Error("table parameter is required");
529
537
  }
530
- }
531
- const tableName = schemaNamePart
532
- ? `"${schemaNamePart}"."${tableNamePart}"`
533
- : `"${tableNamePart}"`;
534
- const columnClause = parsed.columns !== undefined && parsed.columns.length > 0
535
- ? ` (${parsed.columns.map((c) => `"${c}"`).join(", ")})`
536
- : "";
537
- const options = [];
538
- options.push(`FORMAT ${parsed.format ?? "csv"}`);
539
- if (parsed.header)
540
- options.push("HEADER");
541
- if (parsed.delimiter)
542
- options.push(`DELIMITER '${parsed.delimiter}'`);
543
- // Use provided filePath or generate placeholder with appropriate extension
544
- const ext = parsed.format === "text"
545
- ? "txt"
546
- : parsed.format === "binary"
547
- ? "bin"
548
- : "csv";
549
- const filePath = parsed.filePath ?? `/path/to/file.${ext}`;
550
- return {
551
- command: `COPY ${tableName}${columnClause} FROM '${filePath}' WITH (${options.join(", ")})`,
552
- stdinCommand: `COPY ${tableName}${columnClause} FROM STDIN WITH (${options.join(", ")})`,
553
- notes: "Use \\copy in psql for client-side files",
554
- };
538
+ const parsed = {
539
+ ...rawParams,
540
+ table: tableValue,
541
+ };
542
+ // Parse schema.table format (e.g., 'public.users' -> schema='public', table='users')
543
+ // If table contains a dot, always parse it as schema.table (embedded schema takes priority)
544
+ let tableNamePart = parsed.table;
545
+ let schemaNamePart = parsed.schema;
546
+ if (parsed.table.includes(".")) {
547
+ const parts = parsed.table.split(".");
548
+ if (parts.length === 2 && parts[0] && parts[1]) {
549
+ schemaNamePart = parts[0];
550
+ tableNamePart = parts[1];
551
+ }
552
+ }
553
+ const tableName = schemaNamePart
554
+ ? `"${schemaNamePart}"."${tableNamePart}"`
555
+ : `"${tableNamePart}"`;
556
+ const columnClause = parsed.columns !== undefined && parsed.columns.length > 0
557
+ ? ` (${parsed.columns.map((c) => `"${c}"`).join(", ")})`
558
+ : "";
559
+ const options = [];
560
+ options.push(`FORMAT ${parsed.format ?? "csv"}`);
561
+ if (parsed.header)
562
+ options.push("HEADER");
563
+ if (parsed.delimiter)
564
+ options.push(`DELIMITER '${parsed.delimiter}'`);
565
+ // Use provided filePath or generate placeholder with appropriate extension
566
+ const ext = parsed.format === "text"
567
+ ? "txt"
568
+ : parsed.format === "binary"
569
+ ? "bin"
570
+ : "csv";
571
+ const filePath = parsed.filePath ?? `/path/to/file.${ext}`;
572
+ return {
573
+ command: `COPY ${tableName}${columnClause} FROM '${filePath}' WITH (${options.join(", ")})`,
574
+ stdinCommand: `COPY ${tableName}${columnClause} FROM STDIN WITH (${options.join(", ")})`,
575
+ notes: "Use \\copy in psql for client-side files",
576
+ };
577
+ });
555
578
  },
556
579
  };
557
580
  }