@neverinfamous/postgres-mcp 2.0.0 → 2.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +1 -1
- package/README.md +119 -46
- package/dist/__tests__/benchmarks/codemode.bench.js +3 -3
- package/dist/__tests__/benchmarks/codemode.bench.js.map +1 -1
- package/dist/__tests__/benchmarks/connection-pool.bench.js +3 -3
- package/dist/__tests__/benchmarks/connection-pool.bench.js.map +1 -1
- package/dist/__tests__/benchmarks/introspection-migration.bench.d.ts +11 -0
- package/dist/__tests__/benchmarks/introspection-migration.bench.d.ts.map +1 -0
- package/dist/__tests__/benchmarks/introspection-migration.bench.js +143 -0
- package/dist/__tests__/benchmarks/introspection-migration.bench.js.map +1 -0
- package/dist/__tests__/benchmarks/resource-prompts.bench.js +0 -64
- package/dist/__tests__/benchmarks/resource-prompts.bench.js.map +1 -1
- package/dist/__tests__/benchmarks/schema-parsing.bench.js +5 -5
- package/dist/__tests__/benchmarks/schema-parsing.bench.js.map +1 -1
- package/dist/__tests__/benchmarks/tool-filtering.bench.js +17 -8
- package/dist/__tests__/benchmarks/tool-filtering.bench.js.map +1 -1
- package/dist/__tests__/mocks/adapter.d.ts.map +1 -1
- package/dist/__tests__/mocks/adapter.js +2 -1
- package/dist/__tests__/mocks/adapter.js.map +1 -1
- package/dist/adapters/DatabaseAdapter.d.ts +6 -5
- package/dist/adapters/DatabaseAdapter.d.ts.map +1 -1
- package/dist/adapters/DatabaseAdapter.js +11 -20
- package/dist/adapters/DatabaseAdapter.js.map +1 -1
- package/dist/adapters/postgresql/PostgresAdapter.d.ts +5 -26
- package/dist/adapters/postgresql/PostgresAdapter.d.ts.map +1 -1
- package/dist/adapters/postgresql/PostgresAdapter.js +31 -526
- package/dist/adapters/postgresql/PostgresAdapter.js.map +1 -1
- package/dist/adapters/postgresql/prompts/index.js +1 -1
- package/dist/adapters/postgresql/prompts/index.js.map +1 -1
- package/dist/adapters/postgresql/resources/index.d.ts +1 -1
- package/dist/adapters/postgresql/resources/index.js +3 -3
- package/dist/adapters/postgresql/resources/index.js.map +1 -1
- package/dist/adapters/postgresql/schema-operations.d.ts +71 -0
- package/dist/adapters/postgresql/schema-operations.d.ts.map +1 -0
- package/dist/adapters/postgresql/schema-operations.js +561 -0
- package/dist/adapters/postgresql/schema-operations.js.map +1 -0
- package/dist/adapters/postgresql/schemas/admin.d.ts +4 -4
- package/dist/adapters/postgresql/schemas/admin.js +4 -4
- package/dist/adapters/postgresql/schemas/admin.js.map +1 -1
- package/dist/adapters/postgresql/schemas/backup.d.ts +2 -2
- package/dist/adapters/postgresql/schemas/backup.d.ts.map +1 -1
- package/dist/adapters/postgresql/schemas/backup.js +1 -3
- package/dist/adapters/postgresql/schemas/backup.js.map +1 -1
- package/dist/adapters/postgresql/schemas/core/index.d.ts +6 -0
- package/dist/adapters/postgresql/schemas/core/index.d.ts.map +1 -0
- package/dist/adapters/postgresql/schemas/core/index.js +6 -0
- package/dist/adapters/postgresql/schemas/core/index.js.map +1 -0
- package/dist/adapters/postgresql/schemas/{core.d.ts → core/queries.d.ts} +16 -171
- package/dist/adapters/postgresql/schemas/core/queries.d.ts.map +1 -0
- package/dist/adapters/postgresql/schemas/{core.js → core/queries.js} +5 -213
- package/dist/adapters/postgresql/schemas/core/queries.js.map +1 -0
- package/dist/adapters/postgresql/schemas/core/transactions.d.ts +149 -0
- package/dist/adapters/postgresql/schemas/core/transactions.d.ts.map +1 -0
- package/dist/adapters/postgresql/schemas/core/transactions.js +239 -0
- package/dist/adapters/postgresql/schemas/core/transactions.js.map +1 -0
- package/dist/adapters/postgresql/schemas/cron.d.ts +12 -12
- package/dist/adapters/postgresql/schemas/cron.d.ts.map +1 -1
- package/dist/adapters/postgresql/schemas/cron.js +38 -10
- package/dist/adapters/postgresql/schemas/cron.js.map +1 -1
- package/dist/adapters/postgresql/schemas/extensions/citext.d.ts +222 -0
- package/dist/adapters/postgresql/schemas/extensions/citext.d.ts.map +1 -0
- package/dist/adapters/postgresql/schemas/extensions/citext.js +306 -0
- package/dist/adapters/postgresql/schemas/extensions/citext.js.map +1 -0
- package/dist/adapters/postgresql/schemas/extensions/index.d.ts +15 -0
- package/dist/adapters/postgresql/schemas/extensions/index.d.ts.map +1 -0
- package/dist/adapters/postgresql/schemas/extensions/index.js +20 -0
- package/dist/adapters/postgresql/schemas/extensions/index.js.map +1 -0
- package/dist/adapters/postgresql/schemas/extensions/kcache.d.ts +164 -0
- package/dist/adapters/postgresql/schemas/extensions/kcache.d.ts.map +1 -0
- package/dist/adapters/postgresql/schemas/extensions/kcache.js +225 -0
- package/dist/adapters/postgresql/schemas/extensions/kcache.js.map +1 -0
- package/dist/adapters/postgresql/schemas/extensions/ltree.d.ts +253 -0
- package/dist/adapters/postgresql/schemas/extensions/ltree.d.ts.map +1 -0
- package/dist/adapters/postgresql/schemas/extensions/ltree.js +430 -0
- package/dist/adapters/postgresql/schemas/extensions/ltree.js.map +1 -0
- package/dist/adapters/postgresql/schemas/extensions/pgcrypto.d.ts +251 -0
- package/dist/adapters/postgresql/schemas/extensions/pgcrypto.d.ts.map +1 -0
- package/dist/adapters/postgresql/schemas/extensions/pgcrypto.js +294 -0
- package/dist/adapters/postgresql/schemas/extensions/pgcrypto.js.map +1 -0
- package/dist/adapters/postgresql/schemas/extensions/shared.d.ts +10 -0
- package/dist/adapters/postgresql/schemas/extensions/shared.d.ts.map +1 -0
- package/dist/adapters/postgresql/schemas/extensions/shared.js +15 -0
- package/dist/adapters/postgresql/schemas/extensions/shared.js.map +1 -0
- package/dist/adapters/postgresql/schemas/index.d.ts +6 -6
- package/dist/adapters/postgresql/schemas/index.d.ts.map +1 -1
- package/dist/adapters/postgresql/schemas/index.js +8 -8
- package/dist/adapters/postgresql/schemas/index.js.map +1 -1
- package/dist/adapters/postgresql/schemas/introspection.d.ts +19 -42
- package/dist/adapters/postgresql/schemas/introspection.d.ts.map +1 -1
- package/dist/adapters/postgresql/schemas/introspection.js +72 -27
- package/dist/adapters/postgresql/schemas/introspection.js.map +1 -1
- package/dist/adapters/postgresql/schemas/jsonb/advanced.d.ts +270 -0
- package/dist/adapters/postgresql/schemas/jsonb/advanced.d.ts.map +1 -0
- package/dist/adapters/postgresql/schemas/jsonb/advanced.js +371 -0
- package/dist/adapters/postgresql/schemas/jsonb/advanced.js.map +1 -0
- package/dist/adapters/postgresql/schemas/jsonb/basic.d.ts +283 -0
- package/dist/adapters/postgresql/schemas/jsonb/basic.d.ts.map +1 -0
- package/dist/adapters/postgresql/schemas/jsonb/basic.js +456 -0
- package/dist/adapters/postgresql/schemas/jsonb/basic.js.map +1 -0
- package/dist/adapters/postgresql/schemas/jsonb/index.d.ts +6 -0
- package/dist/adapters/postgresql/schemas/jsonb/index.d.ts.map +1 -0
- package/dist/adapters/postgresql/schemas/jsonb/index.js +6 -0
- package/dist/adapters/postgresql/schemas/jsonb/index.js.map +1 -0
- package/dist/adapters/postgresql/schemas/monitoring.d.ts +4 -4
- package/dist/adapters/postgresql/schemas/monitoring.js +2 -2
- package/dist/adapters/postgresql/schemas/monitoring.js.map +1 -1
- package/dist/adapters/postgresql/schemas/partitioning.d.ts +14 -14
- package/dist/adapters/postgresql/schemas/partitioning.d.ts.map +1 -1
- package/dist/adapters/postgresql/schemas/partitioning.js +64 -46
- package/dist/adapters/postgresql/schemas/partitioning.js.map +1 -1
- package/dist/adapters/postgresql/schemas/partman.d.ts +16 -14
- package/dist/adapters/postgresql/schemas/partman.d.ts.map +1 -1
- package/dist/adapters/postgresql/schemas/partman.js +9 -9
- package/dist/adapters/postgresql/schemas/partman.js.map +1 -1
- package/dist/adapters/postgresql/schemas/postgis/advanced.d.ts +429 -0
- package/dist/adapters/postgresql/schemas/postgis/advanced.d.ts.map +1 -0
- package/dist/adapters/postgresql/schemas/postgis/advanced.js +495 -0
- package/dist/adapters/postgresql/schemas/postgis/advanced.js.map +1 -0
- package/dist/adapters/postgresql/schemas/{postgis.d.ts → postgis/basic.d.ts} +1 -423
- package/dist/adapters/postgresql/schemas/postgis/basic.d.ts.map +1 -0
- package/dist/adapters/postgresql/schemas/{postgis.js → postgis/basic.js} +1 -486
- package/dist/adapters/postgresql/schemas/postgis/basic.js.map +1 -0
- package/dist/adapters/postgresql/schemas/postgis/index.d.ts +6 -0
- package/dist/adapters/postgresql/schemas/postgis/index.d.ts.map +1 -0
- package/dist/adapters/postgresql/schemas/postgis/index.js +6 -0
- package/dist/adapters/postgresql/schemas/postgis/index.js.map +1 -0
- package/dist/adapters/postgresql/schemas/schema-mgmt.d.ts +35 -25
- package/dist/adapters/postgresql/schemas/schema-mgmt.d.ts.map +1 -1
- package/dist/adapters/postgresql/schemas/schema-mgmt.js +57 -19
- package/dist/adapters/postgresql/schemas/schema-mgmt.js.map +1 -1
- package/dist/adapters/postgresql/schemas/stats/index.d.ts +6 -0
- package/dist/adapters/postgresql/schemas/stats/index.d.ts.map +1 -0
- package/dist/adapters/postgresql/schemas/stats/index.js +6 -0
- package/dist/adapters/postgresql/schemas/stats/index.js.map +1 -0
- package/dist/adapters/postgresql/schemas/stats/input.d.ts +260 -0
- package/dist/adapters/postgresql/schemas/stats/input.d.ts.map +1 -0
- package/dist/adapters/postgresql/schemas/{stats.js → stats/input.js} +2 -331
- package/dist/adapters/postgresql/schemas/stats/input.js.map +1 -0
- package/dist/adapters/postgresql/schemas/{stats.d.ts → stats/output.d.ts} +3 -246
- package/dist/adapters/postgresql/schemas/stats/output.d.ts.map +1 -0
- package/dist/adapters/postgresql/schemas/stats/output.js +334 -0
- package/dist/adapters/postgresql/schemas/stats/output.js.map +1 -0
- package/dist/adapters/postgresql/schemas/text-search.d.ts +18 -18
- package/dist/adapters/postgresql/schemas/text-search.d.ts.map +1 -1
- package/dist/adapters/postgresql/schemas/text-search.js +12 -27
- package/dist/adapters/postgresql/schemas/text-search.js.map +1 -1
- package/dist/adapters/postgresql/schemas/vector.d.ts +10 -10
- package/dist/adapters/postgresql/schemas/vector.d.ts.map +1 -1
- package/dist/adapters/postgresql/schemas/vector.js +9 -15
- package/dist/adapters/postgresql/schemas/vector.js.map +1 -1
- package/dist/adapters/postgresql/tools/backup/dump.d.ts.map +1 -1
- package/dist/adapters/postgresql/tools/backup/dump.js +95 -76
- package/dist/adapters/postgresql/tools/backup/dump.js.map +1 -1
- package/dist/adapters/postgresql/tools/backup/planning.d.ts.map +1 -1
- package/dist/adapters/postgresql/tools/backup/planning.js +345 -287
- package/dist/adapters/postgresql/tools/backup/planning.js.map +1 -1
- package/dist/adapters/postgresql/tools/citext/analysis.d.ts +24 -0
- package/dist/adapters/postgresql/tools/citext/analysis.d.ts.map +1 -0
- package/dist/adapters/postgresql/tools/{citext.js → citext/analysis.js} +50 -232
- package/dist/adapters/postgresql/tools/citext/analysis.js.map +1 -0
- package/dist/adapters/postgresql/tools/citext/index.d.ts +15 -0
- package/dist/adapters/postgresql/tools/citext/index.d.ts.map +1 -0
- package/dist/adapters/postgresql/tools/citext/index.js +23 -0
- package/dist/adapters/postgresql/tools/citext/index.js.map +1 -0
- package/dist/adapters/postgresql/tools/citext/setup.d.ts +16 -0
- package/dist/adapters/postgresql/tools/citext/setup.d.ts.map +1 -0
- package/dist/adapters/postgresql/tools/citext/setup.js +193 -0
- package/dist/adapters/postgresql/tools/citext/setup.js.map +1 -0
- package/dist/adapters/postgresql/tools/codemode/index.js +1 -1
- package/dist/adapters/postgresql/tools/codemode/index.js.map +1 -1
- package/dist/adapters/postgresql/tools/core/convenience.d.ts +12 -22
- package/dist/adapters/postgresql/tools/core/convenience.d.ts.map +1 -1
- package/dist/adapters/postgresql/tools/core/convenience.js +100 -210
- package/dist/adapters/postgresql/tools/core/convenience.js.map +1 -1
- package/dist/adapters/postgresql/tools/core/error-helpers.d.ts +1 -0
- package/dist/adapters/postgresql/tools/core/error-helpers.d.ts.map +1 -1
- package/dist/adapters/postgresql/tools/core/error-helpers.js +8 -1
- package/dist/adapters/postgresql/tools/core/error-helpers.js.map +1 -1
- package/dist/adapters/postgresql/tools/core/health.d.ts.map +1 -1
- package/dist/adapters/postgresql/tools/core/health.js +124 -114
- package/dist/adapters/postgresql/tools/core/health.js.map +1 -1
- package/dist/adapters/postgresql/tools/core/index.d.ts +2 -1
- package/dist/adapters/postgresql/tools/core/index.d.ts.map +1 -1
- package/dist/adapters/postgresql/tools/core/index.js +3 -2
- package/dist/adapters/postgresql/tools/core/index.js.map +1 -1
- package/dist/adapters/postgresql/tools/core/indexes.d.ts.map +1 -1
- package/dist/adapters/postgresql/tools/core/indexes.js +151 -127
- package/dist/adapters/postgresql/tools/core/indexes.js.map +1 -1
- package/dist/adapters/postgresql/tools/core/objects.d.ts.map +1 -1
- package/dist/adapters/postgresql/tools/core/objects.js +186 -161
- package/dist/adapters/postgresql/tools/core/objects.js.map +1 -1
- package/dist/adapters/postgresql/tools/core/query.d.ts.map +1 -1
- package/dist/adapters/postgresql/tools/core/query.js +37 -25
- package/dist/adapters/postgresql/tools/core/query.js.map +1 -1
- package/dist/adapters/postgresql/tools/core/schemas.d.ts +6 -3
- package/dist/adapters/postgresql/tools/core/schemas.d.ts.map +1 -1
- package/dist/adapters/postgresql/tools/core/schemas.js +11 -2
- package/dist/adapters/postgresql/tools/core/schemas.js.map +1 -1
- package/dist/adapters/postgresql/tools/core/tables.d.ts.map +1 -1
- package/dist/adapters/postgresql/tools/core/tables.js +156 -129
- package/dist/adapters/postgresql/tools/core/tables.js.map +1 -1
- package/dist/adapters/postgresql/tools/core/utility.d.ts +26 -0
- package/dist/adapters/postgresql/tools/core/utility.d.ts.map +1 -0
- package/dist/adapters/postgresql/tools/core/utility.js +174 -0
- package/dist/adapters/postgresql/tools/core/utility.js.map +1 -0
- package/dist/adapters/postgresql/tools/cron.js +90 -43
- package/dist/adapters/postgresql/tools/cron.js.map +1 -1
- package/dist/adapters/postgresql/tools/introspection/analysis.d.ts +12 -0
- package/dist/adapters/postgresql/tools/introspection/analysis.d.ts.map +1 -0
- package/dist/adapters/postgresql/tools/introspection/analysis.js +605 -0
- package/dist/adapters/postgresql/tools/introspection/analysis.js.map +1 -0
- package/dist/adapters/postgresql/tools/introspection/graph.d.ts +55 -0
- package/dist/adapters/postgresql/tools/introspection/graph.d.ts.map +1 -0
- package/dist/adapters/postgresql/tools/introspection/graph.js +621 -0
- package/dist/adapters/postgresql/tools/introspection/graph.js.map +1 -0
- package/dist/adapters/postgresql/tools/introspection/index.d.ts +21 -0
- package/dist/adapters/postgresql/tools/introspection/index.d.ts.map +1 -0
- package/dist/adapters/postgresql/tools/introspection/index.js +31 -0
- package/dist/adapters/postgresql/tools/introspection/index.js.map +1 -0
- package/dist/adapters/postgresql/tools/introspection/migration.d.ts +15 -0
- package/dist/adapters/postgresql/tools/introspection/migration.d.ts.map +1 -0
- package/dist/adapters/postgresql/tools/introspection/migration.js +575 -0
- package/dist/adapters/postgresql/tools/introspection/migration.js.map +1 -0
- package/dist/adapters/postgresql/tools/jsonb/analytics.d.ts +20 -0
- package/dist/adapters/postgresql/tools/jsonb/analytics.d.ts.map +1 -0
- package/dist/adapters/postgresql/tools/jsonb/analytics.js +367 -0
- package/dist/adapters/postgresql/tools/jsonb/analytics.js.map +1 -0
- package/dist/adapters/postgresql/tools/jsonb/index.d.ts +4 -2
- package/dist/adapters/postgresql/tools/jsonb/index.d.ts.map +1 -1
- package/dist/adapters/postgresql/tools/jsonb/index.js +8 -4
- package/dist/adapters/postgresql/tools/jsonb/index.js.map +1 -1
- package/dist/adapters/postgresql/tools/jsonb/read.d.ts +38 -0
- package/dist/adapters/postgresql/tools/jsonb/read.d.ts.map +1 -0
- package/dist/adapters/postgresql/tools/jsonb/{basic.js → read.js} +41 -482
- package/dist/adapters/postgresql/tools/jsonb/read.js.map +1 -0
- package/dist/adapters/postgresql/tools/jsonb/{advanced.d.ts → transform.d.ts} +1 -13
- package/dist/adapters/postgresql/tools/jsonb/transform.d.ts.map +1 -0
- package/dist/adapters/postgresql/tools/jsonb/{advanced.js → transform.js} +26 -357
- package/dist/adapters/postgresql/tools/jsonb/transform.js.map +1 -0
- package/dist/adapters/postgresql/tools/jsonb/write.d.ts +14 -0
- package/dist/adapters/postgresql/tools/jsonb/write.d.ts.map +1 -0
- package/dist/adapters/postgresql/tools/jsonb/write.js +468 -0
- package/dist/adapters/postgresql/tools/jsonb/write.js.map +1 -0
- package/dist/adapters/postgresql/tools/kcache.d.ts.map +1 -1
- package/dist/adapters/postgresql/tools/kcache.js +116 -51
- package/dist/adapters/postgresql/tools/kcache.js.map +1 -1
- package/dist/adapters/postgresql/tools/ltree.js +346 -260
- package/dist/adapters/postgresql/tools/ltree.js.map +1 -1
- package/dist/adapters/postgresql/tools/migration/index.d.ts +15 -0
- package/dist/adapters/postgresql/tools/migration/index.d.ts.map +1 -0
- package/dist/adapters/postgresql/tools/migration/index.js +23 -0
- package/dist/adapters/postgresql/tools/migration/index.js.map +1 -0
- package/dist/adapters/postgresql/tools/monitoring/analysis.d.ts +15 -0
- package/dist/adapters/postgresql/tools/monitoring/analysis.d.ts.map +1 -0
- package/dist/adapters/postgresql/tools/{monitoring.js → monitoring/analysis.js} +24 -359
- package/dist/adapters/postgresql/tools/monitoring/analysis.js.map +1 -0
- package/dist/adapters/postgresql/tools/monitoring/basic.d.ts +17 -0
- package/dist/adapters/postgresql/tools/monitoring/basic.d.ts.map +1 -0
- package/dist/adapters/postgresql/tools/monitoring/basic.js +432 -0
- package/dist/adapters/postgresql/tools/monitoring/basic.js.map +1 -0
- package/dist/adapters/postgresql/tools/monitoring/index.d.ts +16 -0
- package/dist/adapters/postgresql/tools/monitoring/index.d.ts.map +1 -0
- package/dist/adapters/postgresql/tools/monitoring/index.js +31 -0
- package/dist/adapters/postgresql/tools/monitoring/index.js.map +1 -0
- package/dist/adapters/postgresql/tools/partitioning/index.d.ts +15 -0
- package/dist/adapters/postgresql/tools/partitioning/index.d.ts.map +1 -0
- package/dist/adapters/postgresql/tools/partitioning/index.js +23 -0
- package/dist/adapters/postgresql/tools/partitioning/index.js.map +1 -0
- package/dist/adapters/postgresql/tools/partitioning/info.d.ts +11 -0
- package/dist/adapters/postgresql/tools/partitioning/info.d.ts.map +1 -0
- package/dist/adapters/postgresql/tools/partitioning/info.js +302 -0
- package/dist/adapters/postgresql/tools/partitioning/info.js.map +1 -0
- package/dist/adapters/postgresql/tools/partitioning/management.d.ts +28 -0
- package/dist/adapters/postgresql/tools/partitioning/management.d.ts.map +1 -0
- package/dist/adapters/postgresql/tools/{partitioning.js → partitioning/management.js} +48 -307
- package/dist/adapters/postgresql/tools/partitioning/management.js.map +1 -0
- package/dist/adapters/postgresql/tools/partman/helpers.d.ts +29 -0
- package/dist/adapters/postgresql/tools/partman/helpers.d.ts.map +1 -0
- package/dist/adapters/postgresql/tools/partman/helpers.js +59 -0
- package/dist/adapters/postgresql/tools/partman/helpers.js.map +1 -0
- package/dist/adapters/postgresql/tools/partman/index.d.ts +2 -1
- package/dist/adapters/postgresql/tools/partman/index.d.ts.map +1 -1
- package/dist/adapters/postgresql/tools/partman/index.js +4 -2
- package/dist/adapters/postgresql/tools/partman/index.js.map +1 -1
- package/dist/adapters/postgresql/tools/partman/maintenance.d.ts +20 -0
- package/dist/adapters/postgresql/tools/partman/maintenance.d.ts.map +1 -0
- package/dist/adapters/postgresql/tools/partman/maintenance.js +496 -0
- package/dist/adapters/postgresql/tools/partman/maintenance.js.map +1 -0
- package/dist/adapters/postgresql/tools/partman/management.d.ts.map +1 -1
- package/dist/adapters/postgresql/tools/partman/management.js +438 -383
- package/dist/adapters/postgresql/tools/partman/management.js.map +1 -1
- package/dist/adapters/postgresql/tools/partman/operations.d.ts +1 -13
- package/dist/adapters/postgresql/tools/partman/operations.d.ts.map +1 -1
- package/dist/adapters/postgresql/tools/partman/operations.js +171 -652
- package/dist/adapters/postgresql/tools/partman/operations.js.map +1 -1
- package/dist/adapters/postgresql/tools/performance/analysis.d.ts.map +1 -1
- package/dist/adapters/postgresql/tools/performance/analysis.js +69 -42
- package/dist/adapters/postgresql/tools/performance/analysis.js.map +1 -1
- package/dist/adapters/postgresql/tools/performance/anomaly-detection.d.ts +18 -0
- package/dist/adapters/postgresql/tools/performance/anomaly-detection.d.ts.map +1 -0
- package/dist/adapters/postgresql/tools/performance/anomaly-detection.js +533 -0
- package/dist/adapters/postgresql/tools/performance/anomaly-detection.js.map +1 -0
- package/dist/adapters/postgresql/tools/performance/diagnostics.d.ts +11 -0
- package/dist/adapters/postgresql/tools/performance/diagnostics.d.ts.map +1 -0
- package/dist/adapters/postgresql/tools/performance/diagnostics.js +332 -0
- package/dist/adapters/postgresql/tools/performance/diagnostics.js.map +1 -0
- package/dist/adapters/postgresql/tools/performance/index.d.ts +1 -1
- package/dist/adapters/postgresql/tools/performance/index.d.ts.map +1 -1
- package/dist/adapters/postgresql/tools/performance/index.js +7 -1
- package/dist/adapters/postgresql/tools/performance/index.js.map +1 -1
- package/dist/adapters/postgresql/tools/performance/monitoring.d.ts.map +1 -1
- package/dist/adapters/postgresql/tools/performance/monitoring.js +80 -55
- package/dist/adapters/postgresql/tools/performance/monitoring.js.map +1 -1
- package/dist/adapters/postgresql/tools/performance/optimization.d.ts.map +1 -1
- package/dist/adapters/postgresql/tools/performance/optimization.js +18 -11
- package/dist/adapters/postgresql/tools/performance/optimization.js.map +1 -1
- package/dist/adapters/postgresql/tools/performance/stats.d.ts.map +1 -1
- package/dist/adapters/postgresql/tools/performance/stats.js +439 -318
- package/dist/adapters/postgresql/tools/performance/stats.js.map +1 -1
- package/dist/adapters/postgresql/tools/pgcrypto.d.ts.map +1 -1
- package/dist/adapters/postgresql/tools/pgcrypto.js +45 -77
- package/dist/adapters/postgresql/tools/pgcrypto.js.map +1 -1
- package/dist/adapters/postgresql/tools/postgis/basic.d.ts.map +1 -1
- package/dist/adapters/postgresql/tools/postgis/basic.js +121 -93
- package/dist/adapters/postgresql/tools/postgis/basic.js.map +1 -1
- package/dist/adapters/postgresql/tools/schema/index.d.ts +16 -0
- package/dist/adapters/postgresql/tools/schema/index.d.ts.map +1 -0
- package/dist/adapters/postgresql/tools/schema/index.js +32 -0
- package/dist/adapters/postgresql/tools/schema/index.js.map +1 -0
- package/dist/adapters/postgresql/tools/schema/objects.d.ts +15 -0
- package/dist/adapters/postgresql/tools/schema/objects.d.ts.map +1 -0
- package/dist/adapters/postgresql/tools/schema/objects.js +378 -0
- package/dist/adapters/postgresql/tools/schema/objects.js.map +1 -0
- package/dist/adapters/postgresql/tools/schema/views.d.ts +15 -0
- package/dist/adapters/postgresql/tools/schema/views.d.ts.map +1 -0
- package/dist/adapters/postgresql/tools/{schema.js → schema/views.js} +64 -386
- package/dist/adapters/postgresql/tools/schema/views.js.map +1 -0
- package/dist/adapters/postgresql/tools/stats/advanced.d.ts.map +1 -1
- package/dist/adapters/postgresql/tools/stats/advanced.js +1 -218
- package/dist/adapters/postgresql/tools/stats/advanced.js.map +1 -1
- package/dist/adapters/postgresql/tools/stats/math-utils.d.ts +33 -0
- package/dist/adapters/postgresql/tools/stats/math-utils.d.ts.map +1 -0
- package/dist/adapters/postgresql/tools/stats/math-utils.js +225 -0
- package/dist/adapters/postgresql/tools/stats/math-utils.js.map +1 -0
- package/dist/adapters/postgresql/tools/text/index.d.ts +16 -0
- package/dist/adapters/postgresql/tools/text/index.d.ts.map +1 -0
- package/dist/adapters/postgresql/tools/text/index.js +33 -0
- package/dist/adapters/postgresql/tools/text/index.js.map +1 -0
- package/dist/adapters/postgresql/tools/text/matching.d.ts +17 -0
- package/dist/adapters/postgresql/tools/text/matching.d.ts.map +1 -0
- package/dist/adapters/postgresql/tools/text/matching.js +565 -0
- package/dist/adapters/postgresql/tools/text/matching.js.map +1 -0
- package/dist/adapters/postgresql/tools/text/search.d.ts +17 -0
- package/dist/adapters/postgresql/tools/text/search.d.ts.map +1 -0
- package/dist/adapters/postgresql/tools/text/search.js +653 -0
- package/dist/adapters/postgresql/tools/text/search.js.map +1 -0
- package/dist/adapters/postgresql/tools/transactions.d.ts.map +1 -1
- package/dist/adapters/postgresql/tools/transactions.js +11 -27
- package/dist/adapters/postgresql/tools/transactions.js.map +1 -1
- package/dist/adapters/postgresql/tools/vector/{basic.d.ts → data.d.ts} +10 -8
- package/dist/adapters/postgresql/tools/vector/data.d.ts.map +1 -0
- package/dist/adapters/postgresql/tools/vector/data.js +540 -0
- package/dist/adapters/postgresql/tools/vector/data.js.map +1 -0
- package/dist/adapters/postgresql/tools/vector/index.d.ts.map +1 -1
- package/dist/adapters/postgresql/tools/vector/index.js +6 -2
- package/dist/adapters/postgresql/tools/vector/index.js.map +1 -1
- package/dist/adapters/postgresql/tools/vector/management.d.ts +11 -0
- package/dist/adapters/postgresql/tools/vector/management.d.ts.map +1 -0
- package/dist/adapters/postgresql/tools/vector/management.js +425 -0
- package/dist/adapters/postgresql/tools/vector/management.js.map +1 -0
- package/dist/adapters/postgresql/tools/vector/query.d.ts +14 -0
- package/dist/adapters/postgresql/tools/vector/query.d.ts.map +1 -0
- package/dist/adapters/postgresql/tools/vector/query.js +767 -0
- package/dist/adapters/postgresql/tools/vector/query.js.map +1 -0
- package/dist/adapters/postgresql/tools/vector/{advanced.d.ts → search-advanced.d.ts} +4 -5
- package/dist/adapters/postgresql/tools/vector/search-advanced.d.ts.map +1 -0
- package/dist/adapters/postgresql/tools/vector/search-advanced.js +626 -0
- package/dist/adapters/postgresql/tools/vector/search-advanced.js.map +1 -0
- package/dist/auth/scopes.d.ts.map +1 -1
- package/dist/auth/scopes.js +3 -1
- package/dist/auth/scopes.js.map +1 -1
- package/dist/cli/args.d.ts +3 -2
- package/dist/cli/args.d.ts.map +1 -1
- package/dist/cli/args.js +4 -3
- package/dist/cli/args.js.map +1 -1
- package/dist/cli.js +16 -4
- package/dist/cli.js.map +1 -1
- package/dist/codemode/api/aliases.d.ts +14 -0
- package/dist/codemode/api/aliases.d.ts.map +1 -0
- package/dist/codemode/api/aliases.js +503 -0
- package/dist/codemode/api/aliases.js.map +1 -0
- package/dist/codemode/api/group-api.d.ts +23 -0
- package/dist/codemode/api/group-api.d.ts.map +1 -0
- package/dist/codemode/api/group-api.js +179 -0
- package/dist/codemode/api/group-api.js.map +1 -0
- package/dist/codemode/{api.d.ts → api/index.d.ts} +5 -4
- package/dist/codemode/api/index.d.ts.map +1 -0
- package/dist/codemode/api/index.js +195 -0
- package/dist/codemode/api/index.js.map +1 -0
- package/dist/codemode/api/maps.d.ts +47 -0
- package/dist/codemode/api/maps.d.ts.map +1 -0
- package/dist/codemode/api/maps.js +529 -0
- package/dist/codemode/api/maps.js.map +1 -0
- package/dist/codemode/api/normalize.d.ts +13 -0
- package/dist/codemode/api/normalize.d.ts.map +1 -0
- package/dist/codemode/api/normalize.js +120 -0
- package/dist/codemode/api/normalize.js.map +1 -0
- package/dist/codemode/index.d.ts +1 -1
- package/dist/codemode/index.d.ts.map +1 -1
- package/dist/codemode/index.js +1 -1
- package/dist/codemode/index.js.map +1 -1
- package/dist/codemode/sandbox.d.ts.map +1 -1
- package/dist/codemode/sandbox.js +8 -25
- package/dist/codemode/sandbox.js.map +1 -1
- package/dist/filtering/ToolConstants.d.ts +11 -11
- package/dist/filtering/ToolConstants.d.ts.map +1 -1
- package/dist/filtering/ToolConstants.js +28 -15
- package/dist/filtering/ToolConstants.js.map +1 -1
- package/dist/filtering/ToolFilter.d.ts +0 -32
- package/dist/filtering/ToolFilter.d.ts.map +1 -1
- package/dist/filtering/ToolFilter.js +0 -43
- package/dist/filtering/ToolFilter.js.map +1 -1
- package/dist/server/McpServer.d.ts +1 -1
- package/dist/server/McpServer.d.ts.map +1 -1
- package/dist/server/McpServer.js +1 -2
- package/dist/server/McpServer.js.map +1 -1
- package/dist/transports/http.d.ts +55 -10
- package/dist/transports/http.d.ts.map +1 -1
- package/dist/transports/http.js +301 -50
- package/dist/transports/http.js.map +1 -1
- package/dist/types/filtering.d.ts +1 -1
- package/dist/types/filtering.d.ts.map +1 -1
- package/dist/types/index.d.ts +2 -2
- package/dist/types/index.d.ts.map +1 -1
- package/dist/types/index.js.map +1 -1
- package/dist/types/mcp.d.ts +0 -21
- package/dist/types/mcp.d.ts.map +1 -1
- package/dist/types/schema.d.ts +0 -79
- package/dist/types/schema.d.ts.map +1 -1
- package/dist/utils/fts-config.d.ts +0 -6
- package/dist/utils/fts-config.d.ts.map +1 -1
- package/dist/utils/fts-config.js +1 -1
- package/dist/utils/fts-config.js.map +1 -1
- package/dist/utils/icons.d.ts.map +1 -1
- package/dist/utils/icons.js +5 -0
- package/dist/utils/icons.js.map +1 -1
- package/dist/utils/identifiers.d.ts.map +1 -1
- package/dist/utils/identifiers.js +6 -6
- package/dist/utils/identifiers.js.map +1 -1
- package/dist/utils/logger.d.ts +6 -6
- package/dist/utils/logger.d.ts.map +1 -1
- package/dist/utils/logger.js +18 -15
- package/dist/utils/logger.js.map +1 -1
- package/dist/utils/progress-utils.d.ts +3 -14
- package/dist/utils/progress-utils.d.ts.map +1 -1
- package/dist/utils/progress-utils.js +2 -21
- package/dist/utils/progress-utils.js.map +1 -1
- package/dist/utils/version.d.ts +9 -0
- package/dist/utils/version.d.ts.map +1 -0
- package/dist/utils/version.js +12 -0
- package/dist/utils/version.js.map +1 -0
- package/dist/utils/where-clause.d.ts +4 -0
- package/dist/utils/where-clause.d.ts.map +1 -1
- package/dist/utils/where-clause.js +16 -0
- package/dist/utils/where-clause.js.map +1 -1
- package/package.json +6 -4
- package/dist/adapters/postgresql/schemas/core.d.ts.map +0 -1
- package/dist/adapters/postgresql/schemas/core.js.map +0 -1
- package/dist/adapters/postgresql/schemas/extensions.d.ts +0 -852
- package/dist/adapters/postgresql/schemas/extensions.d.ts.map +0 -1
- package/dist/adapters/postgresql/schemas/extensions.js +0 -1202
- package/dist/adapters/postgresql/schemas/extensions.js.map +0 -1
- package/dist/adapters/postgresql/schemas/jsonb.d.ts +0 -541
- package/dist/adapters/postgresql/schemas/jsonb.d.ts.map +0 -1
- package/dist/adapters/postgresql/schemas/jsonb.js +0 -814
- package/dist/adapters/postgresql/schemas/jsonb.js.map +0 -1
- package/dist/adapters/postgresql/schemas/postgis.d.ts.map +0 -1
- package/dist/adapters/postgresql/schemas/postgis.js.map +0 -1
- package/dist/adapters/postgresql/schemas/stats.d.ts.map +0 -1
- package/dist/adapters/postgresql/schemas/stats.js.map +0 -1
- package/dist/adapters/postgresql/tools/citext.d.ts +0 -18
- package/dist/adapters/postgresql/tools/citext.d.ts.map +0 -1
- package/dist/adapters/postgresql/tools/citext.js.map +0 -1
- package/dist/adapters/postgresql/tools/introspection.d.ts +0 -15
- package/dist/adapters/postgresql/tools/introspection.d.ts.map +0 -1
- package/dist/adapters/postgresql/tools/introspection.js +0 -1682
- package/dist/adapters/postgresql/tools/introspection.js.map +0 -1
- package/dist/adapters/postgresql/tools/jsonb/advanced.d.ts.map +0 -1
- package/dist/adapters/postgresql/tools/jsonb/advanced.js.map +0 -1
- package/dist/adapters/postgresql/tools/jsonb/basic.d.ts +0 -20
- package/dist/adapters/postgresql/tools/jsonb/basic.d.ts.map +0 -1
- package/dist/adapters/postgresql/tools/jsonb/basic.js.map +0 -1
- package/dist/adapters/postgresql/tools/monitoring.d.ts +0 -13
- package/dist/adapters/postgresql/tools/monitoring.d.ts.map +0 -1
- package/dist/adapters/postgresql/tools/monitoring.js.map +0 -1
- package/dist/adapters/postgresql/tools/partitioning.d.ts +0 -13
- package/dist/adapters/postgresql/tools/partitioning.d.ts.map +0 -1
- package/dist/adapters/postgresql/tools/partitioning.js.map +0 -1
- package/dist/adapters/postgresql/tools/schema.d.ts +0 -13
- package/dist/adapters/postgresql/tools/schema.d.ts.map +0 -1
- package/dist/adapters/postgresql/tools/schema.js.map +0 -1
- package/dist/adapters/postgresql/tools/text.d.ts +0 -13
- package/dist/adapters/postgresql/tools/text.d.ts.map +0 -1
- package/dist/adapters/postgresql/tools/text.js +0 -1082
- package/dist/adapters/postgresql/tools/text.js.map +0 -1
- package/dist/adapters/postgresql/tools/vector/advanced.d.ts.map +0 -1
- package/dist/adapters/postgresql/tools/vector/advanced.js +0 -958
- package/dist/adapters/postgresql/tools/vector/advanced.js.map +0 -1
- package/dist/adapters/postgresql/tools/vector/basic.d.ts.map +0 -1
- package/dist/adapters/postgresql/tools/vector/basic.js +0 -1165
- package/dist/adapters/postgresql/tools/vector/basic.js.map +0 -1
- package/dist/codemode/api.d.ts.map +0 -1
- package/dist/codemode/api.js +0 -1544
- package/dist/codemode/api.js.map +0 -1
- package/dist/utils/promptGenerator.d.ts +0 -20
- package/dist/utils/promptGenerator.d.ts.map +0 -1
- package/dist/utils/promptGenerator.js +0 -81
- package/dist/utils/promptGenerator.js.map +0 -1
|
@@ -1,1165 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* PostgreSQL pgvector - Basic Operations
|
|
3
|
-
*/
|
|
4
|
-
import { z } from "zod";
|
|
5
|
-
import { readOnly, write } from "../../../../utils/annotations.js";
|
|
6
|
-
import { getToolIcons } from "../../../../utils/icons.js";
|
|
7
|
-
import { sanitizeIdentifier, sanitizeTableName, } from "../../../../utils/identifiers.js";
|
|
8
|
-
import { sanitizeWhereClause } from "../../../../utils/where-clause.js";
|
|
9
|
-
import {
|
|
10
|
-
// Base schemas for MCP visibility (Split Schema pattern)
|
|
11
|
-
VectorSearchSchemaBase, VectorCreateIndexSchemaBase,
|
|
12
|
-
// Transformed schemas for handler validation
|
|
13
|
-
VectorSearchSchema, VectorCreateIndexSchema,
|
|
14
|
-
// Output schemas
|
|
15
|
-
VectorCreateExtensionOutputSchema, VectorAddColumnOutputSchema, VectorInsertOutputSchema, VectorSearchOutputSchema, VectorCreateIndexOutputSchema, VectorDistanceOutputSchema, VectorNormalizeOutputSchema, VectorAggregateOutputSchema, VectorValidateOutputSchema, } from "../../schemas/index.js";
|
|
16
|
-
/**
|
|
17
|
-
* Parse a PostgreSQL vector string to a number array.
|
|
18
|
-
* Handles formats like "[0.1,0.2,0.3]" or "(0.1,0.2,0.3)"
|
|
19
|
-
*/
|
|
20
|
-
function parseVector(vecStr) {
|
|
21
|
-
if (typeof vecStr !== "string")
|
|
22
|
-
return null;
|
|
23
|
-
try {
|
|
24
|
-
const cleaned = vecStr.replace(/[[\]()]/g, "");
|
|
25
|
-
return cleaned.split(",").map(Number);
|
|
26
|
-
}
|
|
27
|
-
catch {
|
|
28
|
-
return null;
|
|
29
|
-
}
|
|
30
|
-
}
|
|
31
|
-
/**
|
|
32
|
-
* Truncate a vector for display, showing first/last N values.
|
|
33
|
-
* For vectors <= maxDisplay, returns the full vector.
|
|
34
|
-
*/
|
|
35
|
-
export function truncateVector(vec, maxDisplay = 10) {
|
|
36
|
-
if (vec === null || vec === undefined) {
|
|
37
|
-
return { preview: null, dimensions: 0, truncated: false };
|
|
38
|
-
}
|
|
39
|
-
if (vec.length <= maxDisplay) {
|
|
40
|
-
return { preview: vec, dimensions: vec.length, truncated: false };
|
|
41
|
-
}
|
|
42
|
-
// Show first 5 and last 5
|
|
43
|
-
const half = Math.floor(maxDisplay / 2);
|
|
44
|
-
const preview = [...vec.slice(0, half), ...vec.slice(-half)];
|
|
45
|
-
return { preview, dimensions: vec.length, truncated: true };
|
|
46
|
-
}
|
|
47
|
-
/**
|
|
48
|
-
* Two-step existence check: table first, then column.
|
|
49
|
-
* Returns null if both exist, or {error, suggestion} if either is missing.
|
|
50
|
-
*/
|
|
51
|
-
export async function checkTableAndColumn(adapter, table, column, schema) {
|
|
52
|
-
// Step 1: check column existence (fast path — covers the common success case)
|
|
53
|
-
const colSql = `
|
|
54
|
-
SELECT 1 FROM information_schema.columns
|
|
55
|
-
WHERE table_schema = $1 AND table_name = $2 AND column_name = $3
|
|
56
|
-
`;
|
|
57
|
-
const colResult = await adapter.executeQuery(colSql, [schema, table, column]);
|
|
58
|
-
if ((colResult.rows?.length ?? 0) > 0)
|
|
59
|
-
return null; // both exist
|
|
60
|
-
// Step 2: disambiguate — is it the table or the column?
|
|
61
|
-
const tblSql = `
|
|
62
|
-
SELECT 1 FROM information_schema.tables
|
|
63
|
-
WHERE table_schema = $1 AND table_name = $2
|
|
64
|
-
`;
|
|
65
|
-
const tblResult = await adapter.executeQuery(tblSql, [schema, table]);
|
|
66
|
-
if ((tblResult.rows?.length ?? 0) === 0) {
|
|
67
|
-
return {
|
|
68
|
-
error: `Table '${table}' does not exist in schema '${schema}'`,
|
|
69
|
-
suggestion: "Use pg_list_tables to find available tables",
|
|
70
|
-
};
|
|
71
|
-
}
|
|
72
|
-
return {
|
|
73
|
-
error: `Column '${column}' does not exist in table '${table}'`,
|
|
74
|
-
suggestion: "Use pg_describe_table to find available columns",
|
|
75
|
-
};
|
|
76
|
-
}
|
|
77
|
-
export function createVectorExtensionTool(adapter) {
|
|
78
|
-
return {
|
|
79
|
-
name: "pg_vector_create_extension",
|
|
80
|
-
description: "Enable the pgvector extension for vector similarity search.",
|
|
81
|
-
group: "vector",
|
|
82
|
-
inputSchema: z.object({}),
|
|
83
|
-
outputSchema: VectorCreateExtensionOutputSchema,
|
|
84
|
-
annotations: write("Create Vector Extension"),
|
|
85
|
-
icons: getToolIcons("vector", write("Create Vector Extension")),
|
|
86
|
-
handler: async (_params, _context) => {
|
|
87
|
-
await adapter.executeQuery("CREATE EXTENSION IF NOT EXISTS vector");
|
|
88
|
-
return { success: true, message: "pgvector extension enabled" };
|
|
89
|
-
},
|
|
90
|
-
};
|
|
91
|
-
}
|
|
92
|
-
export function createVectorAddColumnTool(adapter) {
|
|
93
|
-
// Base schema for MCP visibility (Split Schema pattern)
|
|
94
|
-
const AddColumnSchemaBase = z.object({
|
|
95
|
-
table: z.string().optional().describe("Table name"),
|
|
96
|
-
tableName: z.string().optional().describe("Alias for table"),
|
|
97
|
-
column: z.string().optional().describe("Column name"),
|
|
98
|
-
col: z.string().optional().describe("Alias for column"),
|
|
99
|
-
dimensions: z
|
|
100
|
-
.number()
|
|
101
|
-
.describe("Vector dimensions (e.g., 1536 for OpenAI)"),
|
|
102
|
-
schema: z.string().optional().describe("Database schema (default: public)"),
|
|
103
|
-
ifNotExists: z
|
|
104
|
-
.boolean()
|
|
105
|
-
.optional()
|
|
106
|
-
.describe("Skip if column already exists (default: false)"),
|
|
107
|
-
});
|
|
108
|
-
// Transformed schema with alias resolution for handler
|
|
109
|
-
const AddColumnSchema = AddColumnSchemaBase.transform((data) => ({
|
|
110
|
-
table: data.table ?? data.tableName ?? "",
|
|
111
|
-
column: data.column ?? data.col ?? "",
|
|
112
|
-
dimensions: data.dimensions,
|
|
113
|
-
schema: data.schema,
|
|
114
|
-
ifNotExists: data.ifNotExists ?? false,
|
|
115
|
-
}));
|
|
116
|
-
return {
|
|
117
|
-
name: "pg_vector_add_column",
|
|
118
|
-
description: "Add a vector column to a table. Requires: table, column, dimensions.",
|
|
119
|
-
group: "vector",
|
|
120
|
-
// Use base schema for MCP visibility
|
|
121
|
-
inputSchema: AddColumnSchemaBase,
|
|
122
|
-
outputSchema: VectorAddColumnOutputSchema,
|
|
123
|
-
annotations: write("Add Vector Column"),
|
|
124
|
-
icons: getToolIcons("vector", write("Add Vector Column")),
|
|
125
|
-
handler: async (params, _context) => {
|
|
126
|
-
const parsed = AddColumnSchema.parse(params);
|
|
127
|
-
// Validate required params with clear errors
|
|
128
|
-
if (parsed.table === "") {
|
|
129
|
-
return {
|
|
130
|
-
success: false,
|
|
131
|
-
error: "table (or tableName) parameter is required",
|
|
132
|
-
requiredParams: ["table", "column", "dimensions"],
|
|
133
|
-
};
|
|
134
|
-
}
|
|
135
|
-
if (parsed.column === "") {
|
|
136
|
-
return {
|
|
137
|
-
success: false,
|
|
138
|
-
error: "column (or col) parameter is required",
|
|
139
|
-
requiredParams: ["table", "column", "dimensions"],
|
|
140
|
-
};
|
|
141
|
-
}
|
|
142
|
-
const schemaName = parsed.schema ?? "public";
|
|
143
|
-
const tableName = sanitizeTableName(parsed.table, parsed.schema);
|
|
144
|
-
const columnName = sanitizeIdentifier(parsed.column);
|
|
145
|
-
// Verify table exists before ALTER TABLE
|
|
146
|
-
const tblCheckSql = `
|
|
147
|
-
SELECT 1 FROM information_schema.tables
|
|
148
|
-
WHERE table_schema = $1 AND table_name = $2
|
|
149
|
-
`;
|
|
150
|
-
const tblCheckResult = await adapter.executeQuery(tblCheckSql, [
|
|
151
|
-
schemaName,
|
|
152
|
-
parsed.table,
|
|
153
|
-
]);
|
|
154
|
-
if ((tblCheckResult.rows?.length ?? 0) === 0) {
|
|
155
|
-
return {
|
|
156
|
-
success: false,
|
|
157
|
-
error: `Table '${parsed.table}' does not exist in schema '${schemaName}'`,
|
|
158
|
-
suggestion: "Use pg_list_tables to find available tables",
|
|
159
|
-
};
|
|
160
|
-
}
|
|
161
|
-
// Check if column exists when ifNotExists is true
|
|
162
|
-
if (parsed.ifNotExists) {
|
|
163
|
-
const checkSql = `
|
|
164
|
-
SELECT 1 FROM information_schema.columns
|
|
165
|
-
WHERE table_schema = $1 AND table_name = $2 AND column_name = $3
|
|
166
|
-
`;
|
|
167
|
-
const checkResult = await adapter.executeQuery(checkSql, [
|
|
168
|
-
schemaName,
|
|
169
|
-
parsed.table,
|
|
170
|
-
parsed.column,
|
|
171
|
-
]);
|
|
172
|
-
if (checkResult.rows && checkResult.rows.length > 0) {
|
|
173
|
-
return {
|
|
174
|
-
success: true,
|
|
175
|
-
table: parsed.table,
|
|
176
|
-
column: parsed.column,
|
|
177
|
-
dimensions: parsed.dimensions,
|
|
178
|
-
ifNotExists: true,
|
|
179
|
-
alreadyExists: true,
|
|
180
|
-
message: `Column ${parsed.column} already exists on table ${parsed.table}`,
|
|
181
|
-
};
|
|
182
|
-
}
|
|
183
|
-
}
|
|
184
|
-
const sql = `ALTER TABLE ${tableName} ADD COLUMN ${columnName} vector(${String(parsed.dimensions)})`;
|
|
185
|
-
try {
|
|
186
|
-
await adapter.executeQuery(sql);
|
|
187
|
-
return {
|
|
188
|
-
success: true,
|
|
189
|
-
table: parsed.table,
|
|
190
|
-
column: parsed.column,
|
|
191
|
-
dimensions: parsed.dimensions,
|
|
192
|
-
ifNotExists: parsed.ifNotExists,
|
|
193
|
-
};
|
|
194
|
-
}
|
|
195
|
-
catch (err) {
|
|
196
|
-
const msg = err instanceof Error ? err.message : String(err);
|
|
197
|
-
// Duplicate column: PG code 42701
|
|
198
|
-
if (msg.includes("already exists")) {
|
|
199
|
-
return {
|
|
200
|
-
success: false,
|
|
201
|
-
error: `Column '${parsed.column}' already exists on table '${parsed.table}'`,
|
|
202
|
-
suggestion: "Use ifNotExists: true to skip if column already exists",
|
|
203
|
-
};
|
|
204
|
-
}
|
|
205
|
-
throw err;
|
|
206
|
-
}
|
|
207
|
-
},
|
|
208
|
-
};
|
|
209
|
-
}
|
|
210
|
-
export function createVectorInsertTool(adapter) {
|
|
211
|
-
// Base schema for MCP visibility (Split Schema pattern)
|
|
212
|
-
const VectorInsertSchemaBase = z.object({
|
|
213
|
-
table: z.string().optional().describe("Table name"),
|
|
214
|
-
tableName: z.string().optional().describe("Alias for table"),
|
|
215
|
-
column: z.string().optional().describe("Column name"),
|
|
216
|
-
col: z.string().optional().describe("Alias for column"),
|
|
217
|
-
vector: z.array(z.number()),
|
|
218
|
-
additionalColumns: z.record(z.string(), z.unknown()).optional(),
|
|
219
|
-
schema: z.string().optional(),
|
|
220
|
-
updateExisting: z
|
|
221
|
-
.boolean()
|
|
222
|
-
.optional()
|
|
223
|
-
.describe("Update vector on existing row (requires conflictColumn and conflictValue)"),
|
|
224
|
-
conflictColumn: z
|
|
225
|
-
.string()
|
|
226
|
-
.optional()
|
|
227
|
-
.describe("Column to match for updates (e.g., id)"),
|
|
228
|
-
conflictValue: z
|
|
229
|
-
.union([z.string(), z.number()])
|
|
230
|
-
.optional()
|
|
231
|
-
.describe("Value of conflictColumn to match (e.g., 123)"),
|
|
232
|
-
});
|
|
233
|
-
// Transformed schema with alias resolution for handler
|
|
234
|
-
const VectorInsertSchema = VectorInsertSchemaBase.transform((data) => ({
|
|
235
|
-
table: data.table ?? data.tableName ?? "",
|
|
236
|
-
column: data.column ?? data.col ?? "",
|
|
237
|
-
vector: data.vector,
|
|
238
|
-
additionalColumns: data.additionalColumns,
|
|
239
|
-
schema: data.schema,
|
|
240
|
-
updateExisting: data.updateExisting,
|
|
241
|
-
conflictColumn: data.conflictColumn,
|
|
242
|
-
conflictValue: data.conflictValue,
|
|
243
|
-
}));
|
|
244
|
-
return {
|
|
245
|
-
name: "pg_vector_insert",
|
|
246
|
-
description: "Insert a vector into a table, or update an existing row's vector. For upsert: use updateExisting + conflictColumn + conflictValue to UPDATE existing rows (avoids NOT NULL issues).",
|
|
247
|
-
group: "vector",
|
|
248
|
-
// Use base schema for MCP visibility
|
|
249
|
-
inputSchema: VectorInsertSchemaBase,
|
|
250
|
-
outputSchema: VectorInsertOutputSchema,
|
|
251
|
-
annotations: write("Insert Vector"),
|
|
252
|
-
icons: getToolIcons("vector", write("Insert Vector")),
|
|
253
|
-
handler: async (params, _context) => {
|
|
254
|
-
// Use transformed schema for alias resolution
|
|
255
|
-
const parsed = VectorInsertSchema.parse(params);
|
|
256
|
-
// Validate required params with clear errors
|
|
257
|
-
if (parsed.table === "") {
|
|
258
|
-
return {
|
|
259
|
-
success: false,
|
|
260
|
-
error: "table (or tableName) parameter is required",
|
|
261
|
-
requiredParams: ["table", "column", "vector"],
|
|
262
|
-
};
|
|
263
|
-
}
|
|
264
|
-
if (parsed.column === "") {
|
|
265
|
-
return {
|
|
266
|
-
success: false,
|
|
267
|
-
error: "column (or col) parameter is required",
|
|
268
|
-
requiredParams: ["table", "column", "vector"],
|
|
269
|
-
};
|
|
270
|
-
}
|
|
271
|
-
if (parsed.vector === undefined ||
|
|
272
|
-
!Array.isArray(parsed.vector) ||
|
|
273
|
-
parsed.vector.length === 0) {
|
|
274
|
-
return {
|
|
275
|
-
success: false,
|
|
276
|
-
error: "vector parameter is required and must be a non-empty array of numbers",
|
|
277
|
-
requiredParams: ["table", "column", "vector"],
|
|
278
|
-
};
|
|
279
|
-
}
|
|
280
|
-
// Validate upsert mode parameters
|
|
281
|
-
if (parsed.updateExisting === true) {
|
|
282
|
-
if (parsed.conflictColumn === undefined ||
|
|
283
|
-
parsed.conflictValue === undefined) {
|
|
284
|
-
return {
|
|
285
|
-
success: false,
|
|
286
|
-
error: "updateExisting requires both conflictColumn and conflictValue parameters",
|
|
287
|
-
suggestion: 'Specify conflictColumn (e.g., "id") and conflictValue (e.g., 123) to identify the row to update',
|
|
288
|
-
example: '{ updateExisting: true, conflictColumn: "id", conflictValue: 42, vector: [...] }',
|
|
289
|
-
};
|
|
290
|
-
}
|
|
291
|
-
}
|
|
292
|
-
// Parse schema.table format (embedded schema takes priority over explicit schema param)
|
|
293
|
-
let resolvedTable = parsed.table;
|
|
294
|
-
let resolvedSchema = parsed.schema;
|
|
295
|
-
if (parsed.table.includes(".")) {
|
|
296
|
-
const parts = parsed.table.split(".");
|
|
297
|
-
resolvedSchema = parts[0] ?? parsed.schema ?? "public";
|
|
298
|
-
resolvedTable = parts[1] ?? parsed.table;
|
|
299
|
-
}
|
|
300
|
-
const insertSchemaName = resolvedSchema ?? "public";
|
|
301
|
-
const tableName = sanitizeTableName(resolvedTable, resolvedSchema);
|
|
302
|
-
const columnName = sanitizeIdentifier(parsed.column);
|
|
303
|
-
const vectorStr = `[${parsed.vector.join(",")}]`;
|
|
304
|
-
// Pre-validate table and column exist
|
|
305
|
-
const missing = await checkTableAndColumn(adapter, resolvedTable, parsed.column, insertSchemaName);
|
|
306
|
-
if (missing) {
|
|
307
|
-
return { success: false, ...missing };
|
|
308
|
-
}
|
|
309
|
-
// Use direct UPDATE for updateExisting mode (avoids NOT NULL constraint issues)
|
|
310
|
-
if (parsed.updateExisting === true &&
|
|
311
|
-
parsed.conflictColumn !== undefined &&
|
|
312
|
-
parsed.conflictValue !== undefined) {
|
|
313
|
-
const conflictCol = sanitizeIdentifier(parsed.conflictColumn);
|
|
314
|
-
// Build SET clause including vector and additionalColumns
|
|
315
|
-
const setClauses = [`${columnName} = $1::vector`];
|
|
316
|
-
const queryParams = [vectorStr, parsed.conflictValue];
|
|
317
|
-
let paramIndex = 3; // $1 = vector, $2 = conflictValue
|
|
318
|
-
if (parsed.additionalColumns !== undefined) {
|
|
319
|
-
for (const [col, val] of Object.entries(parsed.additionalColumns)) {
|
|
320
|
-
setClauses.push(`${sanitizeIdentifier(col)} = $${String(paramIndex)}`);
|
|
321
|
-
queryParams.push(val);
|
|
322
|
-
paramIndex++;
|
|
323
|
-
}
|
|
324
|
-
}
|
|
325
|
-
const sql = `UPDATE ${tableName} SET ${setClauses.join(", ")} WHERE ${conflictCol} = $2`;
|
|
326
|
-
const result = await adapter.executeQuery(sql, queryParams);
|
|
327
|
-
if (result.rowsAffected === 0) {
|
|
328
|
-
return {
|
|
329
|
-
success: false,
|
|
330
|
-
error: `No row found with ${parsed.conflictColumn} = ${String(parsed.conflictValue)}`,
|
|
331
|
-
suggestion: "Use insert mode (without updateExisting) to create new rows, or verify the conflictValue exists",
|
|
332
|
-
};
|
|
333
|
-
}
|
|
334
|
-
return {
|
|
335
|
-
success: true,
|
|
336
|
-
rowsAffected: result.rowsAffected,
|
|
337
|
-
mode: "update",
|
|
338
|
-
columnsUpdated: setClauses.length,
|
|
339
|
-
};
|
|
340
|
-
}
|
|
341
|
-
// Standard INSERT mode
|
|
342
|
-
const columns = [columnName];
|
|
343
|
-
const values = [vectorStr];
|
|
344
|
-
const params_ = [];
|
|
345
|
-
let paramIndex = 1;
|
|
346
|
-
if (parsed.additionalColumns !== undefined) {
|
|
347
|
-
for (const [col, val] of Object.entries(parsed.additionalColumns)) {
|
|
348
|
-
columns.push(sanitizeIdentifier(col));
|
|
349
|
-
values.push(`$${String(paramIndex++)}`);
|
|
350
|
-
params_.push(val);
|
|
351
|
-
}
|
|
352
|
-
}
|
|
353
|
-
const sql = `INSERT INTO ${tableName} (${columns.join(", ")}) VALUES ('${vectorStr}'${params_.length > 0 ? ", " + values.slice(1).join(", ") : ""})`;
|
|
354
|
-
try {
|
|
355
|
-
const result = await adapter.executeQuery(sql, params_);
|
|
356
|
-
return { success: true, rowsAffected: result.rowsAffected };
|
|
357
|
-
}
|
|
358
|
-
catch (error) {
|
|
359
|
-
// Parse dimension mismatch errors for user-friendly message
|
|
360
|
-
if (error instanceof Error) {
|
|
361
|
-
const dimMatch = /expected (\d+) dimensions?, not (\d+)/.exec(error.message);
|
|
362
|
-
if (dimMatch) {
|
|
363
|
-
const expectedDim = dimMatch[1] ?? "0";
|
|
364
|
-
const providedDim = dimMatch[2] ?? "0";
|
|
365
|
-
return {
|
|
366
|
-
success: false,
|
|
367
|
-
error: "Vector dimension mismatch",
|
|
368
|
-
expectedDimensions: parseInt(expectedDim, 10),
|
|
369
|
-
providedDimensions: parseInt(providedDim, 10),
|
|
370
|
-
suggestion: `Column expects ${expectedDim} dimensions but vector has ${providedDim}. Resize vector or check embedding model.`,
|
|
371
|
-
};
|
|
372
|
-
}
|
|
373
|
-
// Check for NOT NULL constraint violation
|
|
374
|
-
if (error.message.includes("NOT NULL") ||
|
|
375
|
-
error.message.includes("null value in column")) {
|
|
376
|
-
return {
|
|
377
|
-
success: false,
|
|
378
|
-
error: "NOT NULL constraint violation",
|
|
379
|
-
rawError: error.message,
|
|
380
|
-
suggestion: "Table has NOT NULL columns that require values. Use additionalColumns param or updateExisting mode to update existing rows.",
|
|
381
|
-
};
|
|
382
|
-
}
|
|
383
|
-
// Catch relation/column not found from UPDATE path
|
|
384
|
-
if (error.message.includes("does not exist")) {
|
|
385
|
-
return {
|
|
386
|
-
success: false,
|
|
387
|
-
error: error.message,
|
|
388
|
-
suggestion: "Verify the table and column names using pg_list_tables and pg_describe_table",
|
|
389
|
-
};
|
|
390
|
-
}
|
|
391
|
-
}
|
|
392
|
-
throw error;
|
|
393
|
-
}
|
|
394
|
-
},
|
|
395
|
-
};
|
|
396
|
-
}
|
|
397
|
-
export function createVectorSearchTool(adapter) {
|
|
398
|
-
return {
|
|
399
|
-
name: "pg_vector_search",
|
|
400
|
-
description: 'Search for similar vectors. Requires: table, column, vector. Use select param to include identifying columns (e.g., select: ["id", "name"]).',
|
|
401
|
-
group: "vector",
|
|
402
|
-
// Use base schema for MCP visibility (Split Schema pattern)
|
|
403
|
-
inputSchema: VectorSearchSchemaBase,
|
|
404
|
-
outputSchema: VectorSearchOutputSchema,
|
|
405
|
-
annotations: readOnly("Vector Search"),
|
|
406
|
-
icons: getToolIcons("vector", readOnly("Vector Search")),
|
|
407
|
-
handler: async (params, _context) => {
|
|
408
|
-
// Use transformed schema for alias resolution
|
|
409
|
-
const { table, column, vector, metric, limit, select, where, schema } = VectorSearchSchema.parse(params);
|
|
410
|
-
// Validate required params with clear errors
|
|
411
|
-
if (table === "") {
|
|
412
|
-
return {
|
|
413
|
-
success: false,
|
|
414
|
-
error: "table (or tableName) parameter is required",
|
|
415
|
-
requiredParams: ["table", "column", "vector"],
|
|
416
|
-
};
|
|
417
|
-
}
|
|
418
|
-
if (column === "") {
|
|
419
|
-
return {
|
|
420
|
-
success: false,
|
|
421
|
-
error: "column (or col) parameter is required for the vector column name",
|
|
422
|
-
requiredParams: ["table", "column", "vector"],
|
|
423
|
-
};
|
|
424
|
-
}
|
|
425
|
-
const tableName = sanitizeTableName(table, schema);
|
|
426
|
-
const columnName = sanitizeIdentifier(column);
|
|
427
|
-
const schemaName = schema ?? "public";
|
|
428
|
-
// Two-step existence check: table first, then column
|
|
429
|
-
const existenceCheck = await checkTableAndColumn(adapter, table, column, schemaName);
|
|
430
|
-
if (existenceCheck) {
|
|
431
|
-
return { success: false, ...existenceCheck };
|
|
432
|
-
}
|
|
433
|
-
// Validate column is actually a vector type
|
|
434
|
-
const typeCheckSql = `
|
|
435
|
-
SELECT udt_name FROM information_schema.columns
|
|
436
|
-
WHERE table_schema = $1 AND table_name = $2 AND column_name = $3
|
|
437
|
-
`;
|
|
438
|
-
const typeResult = await adapter.executeQuery(typeCheckSql, [
|
|
439
|
-
schemaName,
|
|
440
|
-
table,
|
|
441
|
-
column,
|
|
442
|
-
]);
|
|
443
|
-
const udtName = typeResult.rows?.[0]?.["udt_name"];
|
|
444
|
-
if (udtName !== "vector") {
|
|
445
|
-
return {
|
|
446
|
-
success: false,
|
|
447
|
-
error: `Column '${column}' is not a vector column (type: ${udtName ?? "unknown"})`,
|
|
448
|
-
suggestion: "Use a column with vector type, or use pg_vector_add_column to create one",
|
|
449
|
-
};
|
|
450
|
-
}
|
|
451
|
-
const vectorStr = `[${vector.join(",")}]`;
|
|
452
|
-
const limitVal = limit !== undefined && limit > 0 ? limit : 10;
|
|
453
|
-
const selectCols = select !== undefined && select.length > 0
|
|
454
|
-
? select.map((c) => sanitizeIdentifier(c)).join(", ") + ", "
|
|
455
|
-
: "";
|
|
456
|
-
const whereClause = where ? ` AND ${sanitizeWhereClause(where)}` : "";
|
|
457
|
-
const { excludeNull } = VectorSearchSchema.parse(params);
|
|
458
|
-
const nullFilter = excludeNull === true ? ` AND ${columnName} IS NOT NULL` : "";
|
|
459
|
-
let distanceExpr;
|
|
460
|
-
switch (metric) {
|
|
461
|
-
case "cosine":
|
|
462
|
-
distanceExpr = `${columnName} <=> '${vectorStr}'`;
|
|
463
|
-
break;
|
|
464
|
-
case "inner_product":
|
|
465
|
-
distanceExpr = `${columnName} <#>'${vectorStr}'`;
|
|
466
|
-
break;
|
|
467
|
-
default: // l2
|
|
468
|
-
distanceExpr = `${columnName} <-> '${vectorStr}'`;
|
|
469
|
-
}
|
|
470
|
-
const sql = `SELECT ${selectCols}${distanceExpr} as distance
|
|
471
|
-
FROM ${tableName}
|
|
472
|
-
WHERE TRUE${nullFilter}${whereClause}
|
|
473
|
-
ORDER BY ${distanceExpr}
|
|
474
|
-
LIMIT ${String(limitVal)} `;
|
|
475
|
-
try {
|
|
476
|
-
const result = await adapter.executeQuery(sql);
|
|
477
|
-
// Check for NULL distance values (from NULL vectors)
|
|
478
|
-
const nullCount = (result.rows ?? []).filter((r) => r["distance"] === null).length;
|
|
479
|
-
const response = {
|
|
480
|
-
results: result.rows,
|
|
481
|
-
count: result.rows?.length ?? 0,
|
|
482
|
-
metric: metric ?? "l2",
|
|
483
|
-
};
|
|
484
|
-
// Add hint when no select columns specified
|
|
485
|
-
if (select === undefined || select.length === 0) {
|
|
486
|
-
response["hint"] =
|
|
487
|
-
'Results only contain distance. Use select param (e.g., select: ["id", "name"]) to include identifying columns.';
|
|
488
|
-
}
|
|
489
|
-
// Note about NULL vectors
|
|
490
|
-
if (nullCount > 0) {
|
|
491
|
-
response["note"] =
|
|
492
|
-
`${String(nullCount)} result(s) have NULL distance (rows with NULL vectors). Filter with WHERE ${column} IS NOT NULL.`;
|
|
493
|
-
}
|
|
494
|
-
return response;
|
|
495
|
-
}
|
|
496
|
-
catch (error) {
|
|
497
|
-
// Parse dimension mismatch errors for user-friendly message
|
|
498
|
-
if (error instanceof Error) {
|
|
499
|
-
const dimMatch = /different vector dimensions (\d+) and (\d+)/.exec(error.message);
|
|
500
|
-
if (dimMatch) {
|
|
501
|
-
const expectedDim = dimMatch[1] ?? "0";
|
|
502
|
-
const providedDim = dimMatch[2] ?? "0";
|
|
503
|
-
return {
|
|
504
|
-
success: false,
|
|
505
|
-
error: `Vector dimension mismatch: column '${column}' expects ${expectedDim} dimensions, but you provided ${providedDim} dimensions.`,
|
|
506
|
-
expectedDimensions: parseInt(expectedDim, 10),
|
|
507
|
-
providedDimensions: parseInt(providedDim, 10),
|
|
508
|
-
suggestion: "Ensure your query vector has the same dimensions as the column.",
|
|
509
|
-
};
|
|
510
|
-
}
|
|
511
|
-
}
|
|
512
|
-
throw error;
|
|
513
|
-
}
|
|
514
|
-
},
|
|
515
|
-
};
|
|
516
|
-
}
|
|
517
|
-
export function createVectorCreateIndexTool(adapter) {
|
|
518
|
-
return {
|
|
519
|
-
name: "pg_vector_create_index",
|
|
520
|
-
description: "Create vector index. Requires: table, column, type (ivfflat or hnsw).",
|
|
521
|
-
group: "vector",
|
|
522
|
-
// Use base schema for MCP visibility (Split Schema pattern)
|
|
523
|
-
inputSchema: VectorCreateIndexSchemaBase,
|
|
524
|
-
outputSchema: VectorCreateIndexOutputSchema,
|
|
525
|
-
annotations: write("Create Vector Index"),
|
|
526
|
-
icons: getToolIcons("vector", write("Create Vector Index")),
|
|
527
|
-
handler: async (params, _context) => {
|
|
528
|
-
// Use transformed schema for alias resolution
|
|
529
|
-
const { table, column, type, metric, ifNotExists, lists, m, efConstruction, schema, } = VectorCreateIndexSchema.parse(params);
|
|
530
|
-
// Validate required params with clear errors
|
|
531
|
-
if (table === "") {
|
|
532
|
-
return {
|
|
533
|
-
success: false,
|
|
534
|
-
error: "table (or tableName) parameter is required",
|
|
535
|
-
requiredParams: ["table", "column", "type"],
|
|
536
|
-
};
|
|
537
|
-
}
|
|
538
|
-
if (column === "") {
|
|
539
|
-
return {
|
|
540
|
-
success: false,
|
|
541
|
-
error: "column (or col) parameter is required for the vector column name",
|
|
542
|
-
requiredParams: ["table", "column", "type"],
|
|
543
|
-
};
|
|
544
|
-
}
|
|
545
|
-
// P154: Verify table and column exist before attempting index creation
|
|
546
|
-
const existenceError = await checkTableAndColumn(adapter, table, column, schema ?? "public");
|
|
547
|
-
if (existenceError !== null) {
|
|
548
|
-
return { success: false, ...existenceError };
|
|
549
|
-
}
|
|
550
|
-
const tableName = sanitizeTableName(table, schema);
|
|
551
|
-
const columnName = sanitizeIdentifier(column);
|
|
552
|
-
// Include metric in index name to allow multiple indexes with different metrics
|
|
553
|
-
const metricSuffix = metric !== "l2" ? `_${metric}` : "";
|
|
554
|
-
const indexNameRaw = `idx_${table}_${column}_${type}${metricSuffix}`;
|
|
555
|
-
const indexName = sanitizeIdentifier(indexNameRaw);
|
|
556
|
-
// Map metric to PostgreSQL operator class
|
|
557
|
-
const opsMap = {
|
|
558
|
-
l2: "vector_l2_ops",
|
|
559
|
-
cosine: "vector_cosine_ops",
|
|
560
|
-
inner_product: "vector_ip_ops",
|
|
561
|
-
};
|
|
562
|
-
const opsClass = opsMap[metric] ?? "vector_l2_ops";
|
|
563
|
-
// If ifNotExists is true, check if index already exists BEFORE creating
|
|
564
|
-
if (ifNotExists === true) {
|
|
565
|
-
const checkSql = `
|
|
566
|
-
SELECT 1 FROM pg_indexes
|
|
567
|
-
WHERE indexname = $1
|
|
568
|
-
`;
|
|
569
|
-
const checkResult = await adapter.executeQuery(checkSql, [
|
|
570
|
-
indexNameRaw,
|
|
571
|
-
]);
|
|
572
|
-
if (checkResult.rows && checkResult.rows.length > 0) {
|
|
573
|
-
return {
|
|
574
|
-
success: true,
|
|
575
|
-
index: indexNameRaw,
|
|
576
|
-
type,
|
|
577
|
-
metric,
|
|
578
|
-
table,
|
|
579
|
-
column,
|
|
580
|
-
ifNotExists: true,
|
|
581
|
-
alreadyExists: true,
|
|
582
|
-
message: `Index ${indexNameRaw} already exists`,
|
|
583
|
-
};
|
|
584
|
-
}
|
|
585
|
-
}
|
|
586
|
-
let withClause;
|
|
587
|
-
let appliedParams;
|
|
588
|
-
if (type === "ivfflat") {
|
|
589
|
-
const numLists = lists ?? 100;
|
|
590
|
-
withClause = `WITH(lists = ${String(numLists)})`;
|
|
591
|
-
appliedParams = { lists: numLists };
|
|
592
|
-
}
|
|
593
|
-
else {
|
|
594
|
-
// hnsw
|
|
595
|
-
const mVal = m ?? 16;
|
|
596
|
-
const efVal = efConstruction ?? 64;
|
|
597
|
-
withClause = `WITH(m = ${String(mVal)}, ef_construction = ${String(efVal)})`;
|
|
598
|
-
appliedParams = { m: mVal, efConstruction: efVal };
|
|
599
|
-
}
|
|
600
|
-
const sql = `CREATE INDEX ${indexName} ON ${tableName} USING ${type} (${columnName} ${opsClass}) ${withClause} `;
|
|
601
|
-
try {
|
|
602
|
-
await adapter.executeQuery(sql);
|
|
603
|
-
return {
|
|
604
|
-
success: true,
|
|
605
|
-
index: indexNameRaw,
|
|
606
|
-
type,
|
|
607
|
-
metric,
|
|
608
|
-
table,
|
|
609
|
-
column,
|
|
610
|
-
appliedParams,
|
|
611
|
-
ifNotExists: ifNotExists ?? false,
|
|
612
|
-
};
|
|
613
|
-
}
|
|
614
|
-
catch (error) {
|
|
615
|
-
if (error instanceof Error) {
|
|
616
|
-
// If ifNotExists is true and the error is "already exists", return success with alreadyExists flag
|
|
617
|
-
// (This handles race conditions where index is created between check and create)
|
|
618
|
-
if (ifNotExists === true) {
|
|
619
|
-
const msg = error.message.toLowerCase();
|
|
620
|
-
if (msg.includes("already exists") || msg.includes("duplicate")) {
|
|
621
|
-
return {
|
|
622
|
-
success: true,
|
|
623
|
-
index: indexNameRaw,
|
|
624
|
-
type,
|
|
625
|
-
table,
|
|
626
|
-
column,
|
|
627
|
-
ifNotExists: true,
|
|
628
|
-
alreadyExists: true,
|
|
629
|
-
message: `Index ${indexNameRaw} already exists`,
|
|
630
|
-
};
|
|
631
|
-
}
|
|
632
|
-
}
|
|
633
|
-
// Handle non-vector column errors (operator class does not accept data type)
|
|
634
|
-
const opClassMatch = /does not accept data type (\w+)/.exec(error.message);
|
|
635
|
-
if (opClassMatch) {
|
|
636
|
-
return {
|
|
637
|
-
success: false,
|
|
638
|
-
error: `Column '${column}' is not a vector column (type: ${opClassMatch[1] ?? "unknown"}). Vector indexes can only be created on vector columns.`,
|
|
639
|
-
suggestion: "Use a column with vector type, or use pg_vector_add_column to create one",
|
|
640
|
-
};
|
|
641
|
-
}
|
|
642
|
-
}
|
|
643
|
-
// Re-throw other errors
|
|
644
|
-
throw error;
|
|
645
|
-
}
|
|
646
|
-
},
|
|
647
|
-
};
|
|
648
|
-
}
|
|
649
|
-
export function createVectorDistanceTool(adapter) {
|
|
650
|
-
const DistanceSchema = z.object({
|
|
651
|
-
vector1: z.array(z.number()),
|
|
652
|
-
vector2: z.array(z.number()),
|
|
653
|
-
metric: z.enum(["l2", "cosine", "inner_product"]).optional(),
|
|
654
|
-
});
|
|
655
|
-
return {
|
|
656
|
-
name: "pg_vector_distance",
|
|
657
|
-
description: "Calculate distance between two vectors. Valid metrics: l2 (default), cosine, inner_product.",
|
|
658
|
-
group: "vector",
|
|
659
|
-
inputSchema: DistanceSchema,
|
|
660
|
-
outputSchema: VectorDistanceOutputSchema,
|
|
661
|
-
annotations: readOnly("Vector Distance"),
|
|
662
|
-
icons: getToolIcons("vector", readOnly("Vector Distance")),
|
|
663
|
-
handler: async (params, _context) => {
|
|
664
|
-
const parsed = DistanceSchema.parse(params);
|
|
665
|
-
// Validate dimension match before query
|
|
666
|
-
if (parsed.vector1.length !== parsed.vector2.length) {
|
|
667
|
-
return {
|
|
668
|
-
success: false,
|
|
669
|
-
error: `Vector dimensions must match: vector1 has ${String(parsed.vector1.length)} dimensions, vector2 has ${String(parsed.vector2.length)} dimensions`,
|
|
670
|
-
suggestion: "Ensure both vectors have the same number of dimensions",
|
|
671
|
-
};
|
|
672
|
-
}
|
|
673
|
-
const v1 = `[${parsed.vector1.join(",")}]`;
|
|
674
|
-
const v2 = `[${parsed.vector2.join(",")}]`;
|
|
675
|
-
const metric = parsed.metric ?? "l2";
|
|
676
|
-
let op;
|
|
677
|
-
switch (metric) {
|
|
678
|
-
case "cosine":
|
|
679
|
-
op = "<=>";
|
|
680
|
-
break;
|
|
681
|
-
case "inner_product":
|
|
682
|
-
op = "<#>";
|
|
683
|
-
break;
|
|
684
|
-
default:
|
|
685
|
-
op = "<->"; // l2
|
|
686
|
-
}
|
|
687
|
-
const sql = `SELECT '${v1}'::vector ${op} '${v2}':: vector as distance`;
|
|
688
|
-
const result = await adapter.executeQuery(sql);
|
|
689
|
-
return { distance: result.rows?.[0]?.["distance"], metric };
|
|
690
|
-
},
|
|
691
|
-
};
|
|
692
|
-
}
|
|
693
|
-
/**
 * Create the pg_vector_normalize tool.
 *
 * Pure computation — no database access: scales the supplied vector to unit
 * length (Euclidean norm 1) and reports the original magnitude.
 *
 * @returns {object} MCP tool definition whose handler resolves to either
 *   `{normalized, magnitude}` or a `{success: false, ...}` error object.
 */
export function createVectorNormalizeTool() {
    // Input contract: a single numeric array to normalize.
    const NormalizeSchema = z.object({
        vector: z.array(z.number()).describe("Vector to normalize to unit length"),
    });
    return {
        name: "pg_vector_normalize",
        description: "Normalize a vector to unit length.",
        group: "vector",
        inputSchema: NormalizeSchema,
        outputSchema: VectorNormalizeOutputSchema,
        annotations: readOnly("Normalize Vector"),
        icons: getToolIcons("vector", readOnly("Normalize Vector")),
        handler: (params, _context) => {
            const { vector } = NormalizeSchema.parse(params ?? {});
            // Euclidean norm: sqrt of the sum of squared components
            // (accumulated in input order, matching a left fold).
            let sumOfSquares = 0;
            for (const component of vector) {
                sumOfSquares += component * component;
            }
            const magnitude = Math.sqrt(sumOfSquares);
            // A zero vector (including the empty vector) has no direction,
            // so normalization is undefined — return a structured error.
            if (magnitude === 0) {
                return Promise.resolve({
                    success: false,
                    error: "Cannot normalize a zero vector (all values are 0)",
                    suggestion: "Provide a vector with at least one non-zero value",
                    magnitude: 0,
                });
            }
            return Promise.resolve({
                normalized: vector.map((component) => component / magnitude),
                magnitude,
            });
        },
    };
}
|
|
722
|
-
/**
 * Create the pg_vector_aggregate tool.
 *
 * Computes the average vector of a column, optionally grouped by another
 * column and filtered by a WHERE clause. Validates that the target table,
 * column, and vector type exist before querying, and normalizes bigint
 * counts (which PostgreSQL drivers may return as strings) to numbers.
 *
 * @param {object} adapter - Database adapter exposing executeQuery(sql, params).
 * @returns {object} MCP tool definition with a read-only async handler.
 */
export function createVectorAggregateTool(adapter) {
    // Base schema exposes all properties to MCP without transform
    // (transforms would hide the raw property list from clients).
    const AggregateSchemaBase = z.object({
        table: z.string().optional().describe("Table name"),
        tableName: z.string().optional().describe("Alias for table"),
        column: z.string().optional().describe("Vector column"),
        col: z.string().optional().describe("Alias for column"),
        where: z.string().optional(),
        groupBy: z.string().optional().describe("Column to group results by"),
        schema: z.string().optional().describe("Database schema (default: public)"),
        excludeNullGroups: z
            .boolean()
            .optional()
            .describe("Filter out groups with NULL average vectors"),
        summarizeVector: z
            .boolean()
            .optional()
            .describe("Truncate large vectors to preview (default: true)"),
    });
    // Transformed schema applies alias resolution
    // (table/tableName and column/col collapse to one canonical field).
    const AggregateSchema = AggregateSchemaBase.transform((data) => ({
        table: data.table ?? data.tableName ?? "",
        column: data.column ?? data.col ?? "",
        where: data.where,
        groupBy: data.groupBy,
        schema: data.schema,
        excludeNullGroups: data.excludeNullGroups,
        summarizeVector: data.summarizeVector ?? true,
    }));
    return {
        name: "pg_vector_aggregate",
        description: "Calculate average vector. Requires: table, column. Optional: groupBy, where.",
        group: "vector",
        inputSchema: AggregateSchemaBase,
        outputSchema: VectorAggregateOutputSchema,
        annotations: readOnly("Vector Aggregate"),
        icons: getToolIcons("vector", readOnly("Vector Aggregate")),
        handler: async (params, _context) => {
            const parsed = AggregateSchema.parse(params);
            // Validate required params with clear errors
            // (the transform maps missing table/column to "", checked here).
            if (parsed.table === "") {
                return {
                    success: false,
                    error: "table (or tableName) parameter is required",
                    requiredParams: ["table", "column"],
                };
            }
            if (parsed.column === "") {
                return {
                    success: false,
                    error: "column (or col) parameter is required for the vector column name",
                    requiredParams: ["table", "column"],
                };
            }
            // Parse schema.table format (embedded schema takes priority over explicit schema param)
            // NOTE(review): split(".") keeps only the first two segments, so
            // names with more than one dot are truncated — confirm acceptable.
            let resolvedTable = parsed.table;
            let resolvedSchema = parsed.schema;
            if (parsed.table.includes(".")) {
                const parts = parsed.table.split(".");
                resolvedSchema = parts[0] ?? parsed.schema ?? "public";
                resolvedTable = parts[1] ?? parsed.table;
            }
            const schemaName = resolvedSchema ?? "public";
            // Two-step existence check: table first, then column
            const existenceCheck = await checkTableAndColumn(adapter, resolvedTable, parsed.column, schemaName);
            if (existenceCheck) {
                return { success: false, ...existenceCheck };
            }
            // Validate column is actually a vector type
            // (udt_name from information_schema; parameterized query).
            const typeCheckSql = `
      SELECT udt_name FROM information_schema.columns
      WHERE table_schema = $1 AND table_name = $2 AND column_name = $3
    `;
            const typeResult = await adapter.executeQuery(typeCheckSql, [
                schemaName,
                resolvedTable,
                parsed.column,
            ]);
            const udtName = typeResult.rows?.[0]?.["udt_name"];
            if (udtName !== "vector") {
                return {
                    success: false,
                    error: `Column '${parsed.column}' is not a vector column (type: ${udtName ?? "unknown"})`,
                    suggestion: "Use a column with vector type, or use pg_vector_add_column to create one",
                };
            }
            // WHERE text is sanitized, not parameterized — it is a free-form clause.
            const whereClause = parsed.where !== undefined
                ? ` WHERE ${sanitizeWhereClause(parsed.where)} `
                : "";
            const tableName = sanitizeTableName(resolvedTable, resolvedSchema);
            const columnName = sanitizeIdentifier(parsed.column);
            // Handle groupBy mode
            if (parsed.groupBy !== undefined) {
                // Validate groupBy is a simple column name, not an expression
                let groupByCol;
                try {
                    groupByCol = sanitizeIdentifier(parsed.groupBy);
                }
                catch {
                    return {
                        success: false,
                        error: `Invalid groupBy value: '${parsed.groupBy}' is not a valid column name`,
                        suggestion: "groupBy only supports simple column names (not expressions like LOWER(column)). Use a direct column reference.",
                    };
                }
                // avg() casts to text so the driver returns the pgvector text form;
                // count(*) casts to integer to avoid bigint-as-string results.
                // (Whitespace after "::" is accepted by PostgreSQL.)
                const sql = `SELECT ${groupByCol} as group_key, avg(${columnName})::text as average_vector, count(*):: integer as count
        FROM ${tableName}${whereClause}
        GROUP BY ${groupByCol}
        ORDER BY ${groupByCol} `;
                const result = await adapter.executeQuery(sql);
                let groups = result.rows?.map((row) => {
                    const vec = parseVector(row["average_vector"]);
                    return {
                        group_key: row["group_key"],
                        // summarizeVector trims large vectors to a preview object;
                        // unparseable values fall back to the raw text.
                        average_vector: parsed.summarizeVector && vec !== null
                            ? truncateVector(vec)
                            : (vec ?? row["average_vector"]),
                        count: typeof row["count"] === "string"
                            ? parseInt(row["count"], 10)
                            : (row["count"] ?? 0),
                    };
                }) ?? [];
                // Check for groups with NULL average vector
                // (either raw null or a truncated preview whose payload is null).
                const nullGroups = groups.filter((g) => g.average_vector === null ||
                    (typeof g.average_vector === "object" &&
                        g.average_vector !== null &&
                        "preview" in g.average_vector &&
                        g.average_vector.preview === null));
                // Filter out null groups if requested
                if (parsed.excludeNullGroups === true) {
                    groups = groups.filter((g) => !(g.average_vector === null ||
                        (typeof g.average_vector === "object" &&
                            g.average_vector !== null &&
                            "preview" in g.average_vector &&
                            g.average_vector.preview === null)));
                }
                const response = {
                    groups,
                    count: groups.length,
                };
                // Surface the NULL groups as an advisory note when they are kept.
                if (nullGroups.length > 0 && parsed.excludeNullGroups !== true) {
                    response["note"] =
                        `${String(nullGroups.length)} group(s) have NULL average_vector. Use excludeNullGroups: true to filter them.`;
                }
                return response;
            }
            // Non-grouped overall average
            const sql = `SELECT avg(${columnName})::text as average_vector, count(*):: integer as count
      FROM ${tableName}${whereClause} `;
            const result = await adapter.executeQuery(sql);
            const row = result.rows?.[0] ?? {};
            // Ensure count is a number (PostgreSQL returns bigint as string)
            const countVal = row["count"];
            const count = typeof countVal === "string"
                ? parseInt(countVal, 10)
                : typeof countVal === "number"
                    ? countVal
                    : 0;
            const vec = parseVector(row["average_vector"]);
            const response = {
                average_vector: parsed.summarizeVector && vec !== null
                    ? truncateVector(vec)
                    : (vec ?? row["average_vector"]),
                count,
            };
            // Add message for empty/null result
            if (vec === null && count === 0) {
                response["note"] =
                    "No vectors found to aggregate (table empty or all vectors are NULL)";
            }
            else if (vec === null) {
                response["note"] = `All ${String(count)} rows have NULL vectors`;
            }
            return response;
        },
    };
}
|
|
899
|
-
/**
 * Create the pg_vector_batch_insert tool.
 *
 * Inserts multiple vectors (each with optional per-row column data) into a
 * table using a single multi-row INSERT ... VALUES statement.
 *
 * Fix: vectors are now bound as query parameters ($n::vector, passing the
 * pgvector text form '[x, y, ...]') instead of being string-interpolated
 * into the SQL, so the statement is fully parameterized like every other
 * value it sends.
 *
 * @param {object} adapter - Database adapter exposing executeQuery(sql, params).
 * @returns {object} MCP tool definition with a write-mode async handler.
 */
export function createVectorBatchInsertTool(adapter) {
    // Base schema for MCP visibility (Split Schema pattern)
    const BatchInsertSchemaBase = z.object({
        table: z.string().optional().describe("Table name"),
        tableName: z.string().optional().describe("Alias for table"),
        column: z.string().optional().describe("Vector column"),
        col: z.string().optional().describe("Alias for column"),
        vectors: z
            .array(z.object({
            vector: z.array(z.number()),
            data: z
                .record(z.string(), z.unknown())
                .optional()
                .describe("Additional column values"),
        }))
            .describe("Array of vectors with optional additional data"),
        schema: z.string().optional().describe("Database schema (default: public)"),
    });
    // Transformed schema with alias resolution for handler
    const BatchInsertSchema = BatchInsertSchemaBase.transform((data) => ({
        table: data.table ?? data.tableName ?? "",
        column: data.column ?? data.col ?? "",
        vectors: data.vectors,
        schema: data.schema,
    }));
    return {
        name: "pg_vector_batch_insert",
        description: 'Efficiently insert multiple vectors. vectors param expects array of {vector: [...], data?: {...}} objects, NOT raw arrays. Example: vectors: [{vector: [0.1, 0.2], data: {name: "a"}}]',
        group: "vector",
        // Use base schema for MCP visibility
        inputSchema: BatchInsertSchemaBase,
        annotations: write("Batch Insert Vectors"),
        icons: getToolIcons("vector", write("Batch Insert Vectors")),
        handler: async (params, _context) => {
            const parsed = BatchInsertSchema.parse(params);
            // Parse schema.table format (embedded schema takes priority over explicit schema param)
            let resolvedTable = parsed.table;
            let resolvedSchema = parsed.schema;
            if (parsed.table.includes(".")) {
                const parts = parsed.table.split(".");
                resolvedSchema = parts[0] ?? parsed.schema ?? "public";
                resolvedTable = parts[1] ?? parsed.table;
            }
            const tableName = sanitizeTableName(resolvedTable, resolvedSchema);
            const columnName = sanitizeIdentifier(parsed.column);
            // P154: Pre-validate table and column exist
            const existenceError = await checkTableAndColumn(adapter, resolvedTable, parsed.column, resolvedSchema ?? "public");
            if (existenceError !== null) {
                return { success: false, ...existenceError };
            }
            // Nothing to do for an empty batch — succeed without touching the DB.
            if (parsed.vectors.length === 0) {
                return {
                    success: true,
                    rowsInserted: 0,
                    message: "No vectors to insert",
                };
            }
            // Build batch INSERT with VALUES clause.
            // Collect the union of all extra data keys so every row shares one
            // column list; rows missing a key get NULL for that column.
            const allDataKeys = new Set();
            for (const v of parsed.vectors) {
                if (v.data !== undefined) {
                    for (const k of Object.keys(v.data)) {
                        allDataKeys.add(k);
                    }
                }
            }
            const dataColumns = Array.from(allDataKeys);
            const columns = [
                columnName,
                ...dataColumns.map((c) => sanitizeIdentifier(c)),
            ];
            const valueRows = [];
            const allParams = [];
            let paramIndex = 1;
            for (const v of parsed.vectors) {
                // FIX: bind the vector through a placeholder instead of inlining
                // its literal into the SQL text. pgvector casts the text form
                // '[x, y, ...]' to vector via $n::vector.
                const rowValues = [`$${String(paramIndex++)}::vector`];
                allParams.push(`[${v.vector.join(", ")}]`);
                for (const col of dataColumns) {
                    rowValues.push(`$${String(paramIndex++)} `);
                    allParams.push(v.data?.[col] ?? null);
                }
                valueRows.push(`(${rowValues.join(", ")})`);
            }
            const sql = `INSERT INTO ${tableName} (${columns.join(", ")}) VALUES ${valueRows.join(", ")} `;
            try {
                const result = await adapter.executeQuery(sql, allParams);
                return {
                    success: true,
                    rowsInserted: parsed.vectors.length,
                    rowsAffected: result.rowsAffected,
                };
            }
            catch (error) {
                // Translate pgvector's dimension-mismatch error text into a
                // structured response; rethrow anything we don't recognize.
                if (error instanceof Error) {
                    const dimMatch = /expected (\d+) dimensions?, not (\d+)/.exec(error.message);
                    if (dimMatch) {
                        const expectedDim = dimMatch[1] ?? "0";
                        const providedDim = dimMatch[2] ?? "0";
                        return {
                            success: false,
                            error: "Vector dimension mismatch",
                            expectedDimensions: parseInt(expectedDim, 10),
                            providedDimensions: parseInt(providedDim, 10),
                            suggestion: `Column expects ${expectedDim} dimensions but vectors have ${providedDim}. Resize vectors or check embedding model.`,
                        };
                    }
                }
                throw error;
            }
        },
    };
}
|
|
1011
|
-
/**
 * Create the pg_vector_validate tool.
 *
 * Validates a vector's dimensions against an expected count and/or a
 * table column. When table+column are given, confirms they exist, that
 * the column is a pgvector column, and samples one row for its dimensions.
 *
 * Fix: the vector_dims sample query now quotes identifiers through the
 * shared sanitizeIdentifier/sanitizeTableName helpers (consistent with the
 * other vector tools) instead of naive `"${...}"` string interpolation.
 *
 * @param {object} adapter - Database adapter exposing executeQuery(sql, params).
 * @returns {object} MCP tool definition with a read-only async handler.
 */
export function createVectorValidateTool(adapter) {
    // Base schema exposes all properties to MCP without transform
    const ValidateSchemaBase = z.object({
        table: z.string().optional().describe("Table name"),
        tableName: z.string().optional().describe("Alias for table"),
        column: z.string().optional().describe("Vector column"),
        col: z.string().optional().describe("Alias for column"),
        vector: z
            .array(z.number())
            .optional()
            .describe("Vector to validate dimensions"),
        dimensions: z.number().optional().describe("Expected dimensions to check"),
        schema: z.string().optional().describe("Database schema (default: public)"),
    });
    // Transformed schema applies alias resolution
    const ValidateSchema = ValidateSchemaBase.transform((data) => ({
        table: data.table ?? data.tableName ?? "",
        column: data.column ?? data.col ?? "",
        vector: data.vector,
        dimensions: data.dimensions,
        schema: data.schema,
    }));
    return {
        name: "pg_vector_validate",
        description: "Returns `{valid: bool, vectorDimensions}`. Validate vector dimensions against a column or check a vector before operations. Empty vector `[]` returns `{valid: true, vectorDimensions: 0}`.",
        group: "vector",
        inputSchema: ValidateSchemaBase,
        outputSchema: VectorValidateOutputSchema,
        annotations: readOnly("Validate Vector"),
        icons: getToolIcons("vector", readOnly("Validate Vector")),
        handler: async (params, _context) => {
            // Wrap validation in try-catch for user-friendly errors
            let parsed;
            try {
                parsed = ValidateSchema.parse(params);
            }
            catch (error) {
                // Return user-friendly error for invalid input types
                if (error instanceof z.ZodError) {
                    const firstIssue = error.issues[0];
                    if (firstIssue) {
                        const path = firstIssue.path.join(".");
                        const message = firstIssue.message;
                        return {
                            valid: false,
                            error: `Invalid ${path || "input"}: ${message}`,
                            suggestion: path === "vector"
                                ? "Ensure vector is an array of numbers, e.g., [0.1, 0.2, 0.3]"
                                : "Check the parameter types and try again",
                        };
                    }
                }
                throw error;
            }
            // Get column dimensions if table/column specified
            let columnDimensions;
            if (parsed.table !== "" && parsed.column !== "") {
                const schemaName = parsed.schema ?? "public";
                // First check if table and column exist (parameterized lookup).
                const existsSql = `
        SELECT 1 FROM information_schema.columns
        WHERE table_schema = $1 AND table_name = $2 AND column_name = $3
      `;
                const existsResult = await adapter.executeQuery(existsSql, [
                    schemaName,
                    parsed.table,
                    parsed.column,
                ]);
                if ((existsResult.rows?.length ?? 0) === 0) {
                    // Check if table exists at all, to pick the more helpful error.
                    const tableCheckSql = `
        SELECT 1 FROM information_schema.tables
        WHERE table_schema = $1 AND table_name = $2
      `;
                    const tableCheckResult = await adapter.executeQuery(tableCheckSql, [
                        schemaName,
                        parsed.table,
                    ]);
                    if ((tableCheckResult.rows?.length ?? 0) === 0) {
                        return {
                            valid: false,
                            error: `Table '${parsed.table}' does not exist in schema '${schemaName}'`,
                            suggestion: "Use pg_list_tables to find available tables",
                        };
                    }
                    return {
                        valid: false,
                        error: `Column '${parsed.column}' does not exist in table '${parsed.table}'`,
                        suggestion: "Use pg_describe_table to find available columns",
                    };
                }
                // Check column type before calling vector_dims() to avoid raw PG errors
                const typeCheckSql = `
        SELECT udt_name FROM information_schema.columns
        WHERE table_schema = $1 AND table_name = $2 AND column_name = $3
      `;
                const typeResult = await adapter.executeQuery(typeCheckSql, [
                    schemaName,
                    parsed.table,
                    parsed.column,
                ]);
                const udtName = typeResult.rows?.[0]?.["udt_name"];
                if (udtName !== "vector") {
                    return {
                        valid: false,
                        error: `Column '${parsed.column}' is not a vector column (type: ${udtName ?? "unknown"})`,
                        suggestion: "Use a column with vector type, or use pg_vector_add_column to create one",
                    };
                }
                // Try to get actual dimensions from a sample row.
                // FIX: identifiers go through the shared sanitize helpers rather
                // than hand-rolled double-quote interpolation. The helpers may
                // throw on invalid names; that lands in the same best-effort
                // catch as an empty table, leaving columnDimensions undefined.
                try {
                    const safeColumn = sanitizeIdentifier(parsed.column);
                    const safeTable = sanitizeTableName(parsed.table, schemaName);
                    const sampleSql = `
        SELECT vector_dims(${safeColumn}) as dimensions
        FROM ${safeTable}
        WHERE ${safeColumn} IS NOT NULL
        LIMIT 1
      `;
                    const sampleResult = await adapter.executeQuery(sampleSql);
                    const dims = sampleResult.rows?.[0]?.["dimensions"];
                    if (dims !== undefined && dims !== null) {
                        columnDimensions =
                            typeof dims === "string" ? parseInt(dims, 10) : Number(dims);
                    }
                }
                catch {
                    // Table might be empty — columnDimensions remains undefined
                }
            }
            // Explicit `dimensions` wins over what the column sample reported.
            const expectedDimensions = parsed.dimensions ?? columnDimensions;
            const vectorDimensions = parsed.vector?.length;
            // Validation results: only a concrete mismatch is invalid; if either
            // side is unknown, the vector is considered valid.
            const valid = vectorDimensions !== undefined && expectedDimensions !== undefined
                ? vectorDimensions === expectedDimensions
                : true;
            return {
                valid,
                vectorDimensions,
                columnDimensions,
                expectedDimensions,
                ...(parsed.vector !== undefined &&
                    expectedDimensions !== undefined &&
                    vectorDimensions !== undefined &&
                    vectorDimensions !== expectedDimensions
                    ? {
                        error: `Vector has ${String(vectorDimensions)} dimensions but column expects ${String(expectedDimensions)} `,
                        suggestion: vectorDimensions > expectedDimensions
                            ? "Use pg_vector_dimension_reduce to reduce dimensions"
                            : "Ensure your embedding model outputs the correct dimensions",
                    }
                    : {}),
            };
        },
    };
}
|
|
1165
|
-
//# sourceMappingURL=basic.js.map
|