@payloadcms-vectorize/pg 0.6.0-beta

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,254 @@
1
+ # @payloadcms-vectorize/pg
2
+
3
+ PostgreSQL adapter for [payloadcms-vectorize](https://github.com/your-repo/payloadcms-vectorize). Enables vector search capabilities using PostgreSQL's pgvector extension.
4
+
5
+ ## Prerequisites
6
+
7
+ - PostgreSQL with pgvector extension
8
+ - Payload CMS 3.x with `@payloadcms/db-postgres`
9
+ - Node.js 18+
10
+
11
+ ## Installation
12
+
13
+ ```bash
14
+ pnpm add @payloadcms-vectorize/pg payloadcms-vectorize
15
+ ```
16
+
17
+ ## Quick Start
18
+
19
+ ### 1. Ensure pgvector permissions
20
+
21
+ The plugin expects the `vector` extension to be configured when Payload initializes. Your PostgreSQL database user must have permission to create extensions. If your user doesn't have these permissions, someone with permissions may need to manually create the extension once:
22
+
23
+ ```sql
24
+ CREATE EXTENSION IF NOT EXISTS vector;
25
+ ```
26
+
27
+ **Note:** Most managed PostgreSQL services (like AWS RDS, Supabase, etc.) require superuser privileges or specific extension permissions. If you encounter permission errors, contact your database administrator or check your service's documentation.
28
+
29
+ ### 2. Configure the Plugin
30
+
31
+ ```typescript
32
+ import { buildConfig } from 'payload'
33
+ import { postgresAdapter } from '@payloadcms/db-postgres'
34
+ import { createPostgresVectorIntegration } from '@payloadcms-vectorize/pg'
35
+ import payloadcmsVectorize from 'payloadcms-vectorize'
36
+
37
+ // Create the integration with static configs (dims, ivfflatLists)
38
+ const integration = createPostgresVectorIntegration({
39
+ // Note: Changing dims requires a migration with TRUNCATE.
40
+ // Changing ivfflatLists rebuilds the index (non-destructive).
41
+ default: {
42
+ dims: 1536, // Vector dimensions (must match your embedding model)
43
+ ivfflatLists: 100, // IVFFLAT index parameter
44
+ },
45
+ })
46
+
47
+ export default buildConfig({
48
+ // ... your existing config
49
+ db: postgresAdapter({
50
+ // Configure the 'vector' extension
51
+ extensions: ['vector'],
52
+ // afterSchemaInitHook adds vector columns and IVFFLAT indexes to your schema
53
+ afterSchemaInit: [integration.afterSchemaInitHook],
54
+ pool: {
55
+ connectionString: process.env.DATABASE_URL,
56
+ },
57
+ }),
58
+ plugins: [
59
+ payloadcmsVectorize({
60
+ dbAdapter: integration.adapter,
61
+ knowledgePools: {
62
+ default: {
63
+ collections: {
64
+ posts: {
65
+ toKnowledgePool: async (doc) => [{ chunk: doc.title || '' }],
66
+ },
67
+ },
68
+ embeddingConfig: {
69
+ version: 'v1.0.0',
70
+ queryFn: embedQuery,
71
+ realTimeIngestionFn: embedDocs,
72
+ },
73
+ },
74
+ },
75
+ }),
76
+ ],
77
+ })
78
+ ```
79
+
80
+ ## Static Configuration
81
+
82
+ The `createPostgresVectorIntegration` function accepts a configuration object where each key is a knowledge pool name:
83
+
84
+ ```typescript
85
+ const integration = createPostgresVectorIntegration({
86
+ poolName: {
87
+ dims: number, // Required: Vector dimensions
88
+ ivfflatLists: number // Required: IVFFLAT index lists parameter
89
+ },
90
+ // ... additional pools
91
+ })
92
+ ```
93
+
94
+ ### Configuration Options
95
+
96
+ | Option | Type | Required | Description |
97
+ |--------|------|----------|-------------|
98
+ | `dims` | `number` | Yes | Vector dimensions for the pgvector column. Must match your embedding model's output dimensions. |
99
+ | `ivfflatLists` | `number` | Yes | Number of lists for the IVFFLAT index. Higher values = faster queries but slower index builds. Recommended: `num_rows / 1000` for tables up to ~1M rows, or `sqrt(num_rows)` for larger tables. |
100
+
101
+ ## Integration Return Value
102
+
103
+ `createPostgresVectorIntegration` returns an object with:
104
+
105
+ | Property | Type | Description |
106
+ |----------|------|-------------|
107
+ | `afterSchemaInitHook` | Function | Hook for `postgresAdapter.afterSchemaInit` that adds vector columns and IVFFLAT indexes |
108
+ | `adapter` | `DbAdapter` | The database adapter to pass to `payloadcmsVectorize({ dbAdapter: ... })` |
109
+
110
+ ## Migrations
111
+
112
+ ### Initial Setup
113
+
114
+ After configuring the plugin, create and apply your initial migration. The IVFFLAT indexes are created automatically via the `afterSchemaInitHook` using Drizzle's `extraConfig`.
115
+
116
+ ```bash
117
+ # Create migration (includes embedding columns and IVFFLAT indexes)
118
+ pnpm payload migrate:create --name initial
119
+
120
+ # Review the migration file in src/migrations/
121
+
122
+ # Apply the migration
123
+ pnpm payload migrate
124
+ ```
125
+
126
+ ### Changing `ivfflatLists`
127
+
128
+ Changing `ivfflatLists` is **non-destructive**. Simply update the config and create a new migration:
129
+
130
+ ```bash
131
+ pnpm payload migrate:create --name update_ivfflat_lists
132
+ pnpm payload migrate
133
+ ```
134
+
135
+ Drizzle will automatically generate SQL to rebuild the index with the new lists parameter.
136
+
137
+ ### Changing `dims` (Destructive)
138
+
139
+ **Warning:** Changing `dims` is **DESTRUCTIVE** - it requires truncating the embeddings table and re-embedding all your data.
140
+
141
+ 1. Update your static config with the new `dims` value
142
+
143
+ 2. Create a migration:
144
+ ```bash
145
+ pnpm payload migrate:create --name change_dims
146
+ ```
147
+
148
+ 3. Run the vectorize:migrate CLI to add the TRUNCATE statement:
149
+ ```bash
150
+ pnpm payload vectorize:migrate
151
+ ```
152
+
153
+ The CLI will:
154
+ - Detect the dims change
155
+ - Patch the migration with `TRUNCATE TABLE ... CASCADE`
156
+ - Add appropriate down migration to restore the old column type
157
+
158
+ 4. Review the migration file
159
+
160
+ 5. Apply the migration:
161
+ ```bash
162
+ pnpm payload migrate
163
+ ```
164
+
165
+ 6. Re-embed all documents using the bulk embed feature
166
+
167
+ ### Schema Name Qualification
168
+
169
+ The CLI automatically uses the `schemaName` from your Postgres adapter configuration. If you use a custom schema (e.g., `postgresAdapter({ schemaName: 'custom' })`), all SQL in the migration will be properly qualified with that schema name.
170
+
171
+ ### Idempotency
172
+
173
+ Running `pnpm payload vectorize:migrate` multiple times with no config changes will not create duplicate migrations. The CLI detects when no changes are needed and exits early.
174
+
175
+ ## PostgreSQL Custom Schema Support
176
+
177
+ The adapter reads the `schemaName` configuration from your Postgres adapter.
178
+
179
+ When you configure a custom schema via `postgresAdapter({ schemaName: 'custom' })`, all plugin SQL queries (for vector columns, indexes, and embeddings) are qualified with that schema name. This is useful for multi-tenant setups or when content tables live in a dedicated schema.
180
+
181
+ When `schemaName` is not specified, the adapter falls back to `public`, matching the default behaviour of the Postgres adapter.
182
+
183
+ ## Multiple Knowledge Pools
184
+
185
+ You can configure multiple knowledge pools with different dimensions and index parameters:
186
+
187
+ ```typescript
188
+ const integration = createPostgresVectorIntegration({
189
+ documents: {
190
+ dims: 1536,
191
+ ivfflatLists: 100,
192
+ },
193
+ images: {
194
+ dims: 512,
195
+ ivfflatLists: 50,
196
+ },
197
+ })
198
+
199
+ export default buildConfig({
200
+ db: postgresAdapter({
201
+ extensions: ['vector'],
202
+ afterSchemaInit: [integration.afterSchemaInitHook],
203
+ // ...
204
+ }),
205
+ plugins: [
206
+ payloadcmsVectorize({
207
+ dbAdapter: integration.adapter,
208
+ knowledgePools: {
209
+ documents: {
210
+ collections: { /* ... */ },
211
+ embeddingConfig: { /* ... */ },
212
+ },
213
+ images: {
214
+ collections: { /* ... */ },
215
+ embeddingConfig: { /* ... */ },
216
+ },
217
+ },
218
+ }),
219
+ ],
220
+ })
221
+ ```
222
+
223
+ ## Using with Voyage AI
224
+
225
+ ```typescript
226
+ import { embed, embedMany } from 'ai'
227
+ import { voyage } from 'voyage-ai-provider'
228
+
229
+ export const embedDocs = async (texts: string[]): Promise<number[][]> => {
230
+ const embedResult = await embedMany({
231
+ model: voyage.textEmbeddingModel('voyage-3.5-lite'),
232
+ values: texts,
233
+ providerOptions: {
234
+ voyage: { inputType: 'document' },
235
+ },
236
+ })
237
+ return embedResult.embeddings
238
+ }
239
+
240
+ export const embedQuery = async (text: string): Promise<number[]> => {
241
+ const embedResult = await embed({
242
+ model: voyage.textEmbeddingModel('voyage-3.5-lite'),
243
+ value: text,
244
+ providerOptions: {
245
+ voyage: { inputType: 'query' },
246
+ },
247
+ })
248
+ return embedResult.embedding
249
+ }
250
+ ```
251
+
252
+ ## License
253
+
254
+ MIT
@@ -0,0 +1,223 @@
1
+ import { getPayload } from 'payload';
2
+ import { readFileSync, writeFileSync, readdirSync, statSync, existsSync } from 'fs';
3
+ import { join, resolve } from 'path';
4
+ import toSnakeCase from 'to-snake-case';
5
+ import { getVectorizedPayload } from 'payloadcms-vectorize';
6
/**
 * Scan existing migration files and recover the last-known vector dims for
 * each knowledge pool, by matching `vector(N)` column definitions inside the
 * `up` function of each migration.
 *
 * Files are examined newest-first by mtime; the single most recent migration
 * is skipped because it may already contain the pending dims change this
 * script is trying to detect.
 *
 * @param migrationsDir directory containing Payload migration files
 * @param poolNames     knowledge pool names to resolve
 * @returns Map of pool name -> dims, or null when no prior value was found
 */ function getPriorDimsFromMigrations(migrationsDir, poolNames) {
    const result = new Map();
    // Start every pool at null ("unknown"); only overwrite on a concrete match.
    for (const pool of poolNames) {
        result.set(pool, null);
    }
    if (!existsSync(migrationsDir)) {
        return result;
    }
    // index.ts / index.js are barrel files, not migrations — exclude them.
    const sortedMigrations = readdirSync(migrationsDir)
        .filter((name) => (name.endsWith('.ts') || name.endsWith('.js')) && name !== 'index.ts' && name !== 'index.js')
        .map((name) => ({
            name,
            path: join(migrationsDir, name),
            mtime: statSync(join(migrationsDir, name)).mtime
        }))
        .sort((a, b) => b.mtime.getTime() - a.mtime.getTime());
    // Skip the newest migration: it may hold the dims change we want to detect.
    for (const entry of sortedMigrations.slice(1)) {
        try {
            const source = readFileSync(entry.path, 'utf-8');
            // Restrict matching to the `up` function body so values that only
            // appear in the `down` function are ignored.
            const upMatch = source.match(/export\s+async\s+function\s+up\s*\([^)]*\)[^{]*\{([\s\S]*?)(?=\}\s*(?:export\s+async\s+function\s+down|$))/i);
            const upBody = upMatch ? upMatch[1] : source;
            for (const pool of poolNames) {
                // Newest match wins: once a pool has a value, later (older) files are ignored.
                if (result.get(pool)) {
                    continue;
                }
                const table = toSnakeCase(pool);
                // Three shapes the dims can appear in: ALTER TABLE, CREATE TABLE,
                // or an inline column list for the pool's table.
                const patterns = [
                    new RegExp(`ALTER\\s+TABLE[^;]*?"${table}"[^;]*?vector\\((\\d+)\\)`, 'is'),
                    new RegExp(`CREATE\\s+TABLE[^;]*?"${table}"[^;]*?embedding[^;]*?vector\\((\\d+)\\)`, 'is'),
                    new RegExp(`"${table}"\\s*\\([^)]*embedding[^)]*vector\\((\\d+)\\)`, 'is')
                ];
                for (const pattern of patterns) {
                    const hit = upBody.match(pattern);
                    if (hit) {
                        result.set(pool, parseInt(hit[1], 10));
                        break;
                    }
                }
            }
        } catch (err) {
            // Unreadable file — skip it and keep scanning older migrations.
            continue;
        }
    }
    return result;
}
55
/**
 * Generate the code injected into a migration's `up` function for a
 * destructive dims change: truncates the pool's embeddings table.
 *
 * @param tableName  snake_cased embeddings table name
 * @param schemaName Postgres schema the table lives in
 * @param oldDims    previous vector dimensions (for the warning comment)
 * @param newDims    new vector dimensions (for the warning comment)
 * @returns JavaScript source text to splice into the migration
 */ function generateDimsChangeTruncateCode(tableName, schemaName, oldDims, newDims) {
    return `  // payloadcms-vectorize: WARNING - Changing dims from ${oldDims} to ${newDims} is DESTRUCTIVE
  // All existing embeddings will be deleted. You must re-embed all documents after this migration.
  // Truncate table (destructive - all embeddings are lost)
  // Use CASCADE to handle foreign key constraints
  await db.execute(sql.raw(\`TRUNCATE TABLE "${schemaName}"."${tableName}" CASCADE\`));`;
}
/**
 * Generate the code injected into a migration's `down` function: restores the
 * embedding column to its previous vector dimensions. Truncated data itself
 * cannot be restored.
 *
 * @param tableName  snake_cased embeddings table name
 * @param schemaName Postgres schema the table lives in
 * @param oldDims    vector dimensions to revert the column type to
 * @returns JavaScript source text to splice into the migration
 */ function generateDimsChangeDownCode(tableName, schemaName, oldDims) {
    return `  // payloadcms-vectorize: Revert column type to old dimensions
  // WARNING: Data was truncated during up migration and cannot be restored.
  // You will need to re-embed all documents after rolling back.
  await db.execute(sql.raw(\`ALTER TABLE "${schemaName}"."${tableName}" ALTER COLUMN embedding TYPE vector(${oldDims})\`));`;
}
/**
 * Patch a migration file in place for a dims change: ensures `sql` is
 * imported, appends a TRUNCATE to the end of the `up` function, and appends a
 * column-type revert to the end of the `down` function (when present).
 *
 * Fix vs. previous version: the `down` function is re-located AFTER the `up`
 * insertion mutates the file content. The old code reused a match index taken
 * from the pre-insertion string, which only worked by accident because the
 * injected code happened to contain no '{' characters.
 *
 * @param migrationPath absolute path of the migration file to patch
 * @param tableName     snake_cased embeddings table name
 * @param schemaName    Postgres schema the table lives in
 * @param oldDims       previous vector dimensions
 * @param newDims       new vector dimensions
 * @throws Error when the `up` function cannot be located in the file
 */ function patchMigrationFileForDimsChange(migrationPath, tableName, schemaName, oldDims, newDims) {
    let content = readFileSync(migrationPath, 'utf-8');
    // Ensure a `sql` import exists for the injected sql.raw(...) calls.
    const sqlImportRegex = /import\s+\{([^}]+)\}\s+from\s+['"]@payloadcms\/db-postgres['"]/;
    const importMatch = content.match(sqlImportRegex);
    if (importMatch) {
        const imports = importMatch[1].split(',').map((part) => part.trim()).filter(Boolean);
        if (!imports.includes('sql')) {
            imports.push('sql');
            const updatedImport = `import { ${imports.join(', ')} } from '@payloadcms/db-postgres'`;
            content = content.replace(importMatch[0], updatedImport);
        }
    } else {
        content = `import { sql } from '@payloadcms/db-postgres'\n${content}`;
    }
    const truncateCode = generateDimsChangeTruncateCode(tableName, schemaName, oldDims, newDims);
    const downCode = generateDimsChangeDownCode(tableName, schemaName, oldDims);
    // Locate the `up` function body.
    const upFunctionMatch = content.match(/export\s+async\s+function\s+up\s*\([^)]*\)\s*:\s*Promise<void>\s*\{/i);
    if (!upFunctionMatch) {
        throw new Error(`Could not find 'up' function in migration file: ${migrationPath}`);
    }
    const upFunctionStart = upFunctionMatch.index + upFunctionMatch[0].length;
    // Search for up's closing brace only up to where `down` starts (if present).
    const downBefore = content.match(/export\s+async\s+function\s+down\s*\([^)]*\)/i);
    const searchEnd = downBefore ? downBefore.index : content.length;
    const upFunctionBody = content.substring(upFunctionStart, searchEnd);
    const lastBraceIndex = upFunctionBody.lastIndexOf('}');
    if (lastBraceIndex === -1) {
        throw new Error(`Could not find closing brace for 'up' function in migration file: ${migrationPath}`);
    }
    // Insert the truncate code just before up's closing brace.
    const insertAt = upFunctionStart + lastBraceIndex;
    let newContent = content.substring(0, insertAt) + '\n' + truncateCode + '\n' + content.substring(insertAt);
    // Re-locate `down` in the MUTATED string — indices from `content` are stale
    // after the insertion above.
    const downMatch = newContent.match(/export\s+async\s+function\s+down\s*\([^)]*\)/i);
    if (downMatch) {
        const afterSignature = downMatch.index + downMatch[0].length;
        const braceOffset = newContent.substring(afterSignature).indexOf('{');
        if (braceOffset !== -1) {
            const downBodyStart = afterSignature + braceOffset + 1;
            const downLastBraceIndex = newContent.substring(downBodyStart).lastIndexOf('}');
            if (downLastBraceIndex !== -1) {
                const downInsertAt = downBodyStart + downLastBraceIndex;
                newContent = newContent.substring(0, downInsertAt) + '\n' + downCode + '\n' + newContent.substring(downInsertAt);
            }
        }
    }
    writeFileSync(migrationPath, newContent, 'utf-8');
}
129
/**
 * Bin script entry point for patching vector migrations with truncate for dims changes.
 *
 * NOTE: As of v0.5.3, the IVFFLAT index is created automatically via afterSchemaInitHook
 * using Drizzle's extraConfig. This script is only needed when changing dims, which
 * requires truncating the embeddings table (destructive operation).
 */ export const script = async (config)=>{
    // In tests, a unique key prevents getPayload from reusing a cached instance.
    const payloadOptions = process.env.TEST_ENV ? {
        config,
        key: `vectorize-migrate-${Date.now()}`
    } : {
        config
    };
    const payload = await getPayload(payloadOptions);
    // Static configs (dims, ivfflatLists per pool) live on the vectorize adapter.
    const vectorizedPayload = getVectorizedPayload(payload);
    if (!vectorizedPayload) {
        throw new Error('[payloadcms-vectorize] Vectorize plugin not found. Ensure payloadcmsVectorize is configured in your Payload config.');
    }
    const staticConfigs = vectorizedPayload.getDbAdapterCustom()._staticConfigs;
    if (!staticConfigs || Object.keys(staticConfigs).length === 0) {
        throw new Error('[payloadcms-vectorize] No static configs found');
    }
    const poolNames = Object.keys(staticConfigs);
    const schemaName = payload.db.schemaName || 'public';
    const migrationsDir = payload.db.migrationDir || resolve(process.cwd(), 'src/migrations');
    // Compare dims recorded in prior migrations with the current static config.
    const priorDims = getPriorDimsFromMigrations(migrationsDir, poolNames);
    const dimsChanges = poolNames.flatMap((poolName)=>{
        const previous = priorDims.get(poolName);
        const current = staticConfigs[poolName].dims;
        // No prior value, or unchanged value — nothing to do for this pool.
        if (previous === null || previous === undefined || previous === current) {
            return [];
        }
        return [
            {
                poolName,
                tableName: toSnakeCase(poolName),
                oldDims: previous,
                newDims: current
            }
        ];
    });
    if (dimsChanges.length === 0) {
        console.log('\n[payloadcms-vectorize] No dims changes detected. This script is only needed when changing dims (which requires truncating the embeddings table). ');
        return;
    }
    // Dims changed — the most recent migration must be patched with TRUNCATE.
    console.log('\n[payloadcms-vectorize] Detected dims changes:');
    for (const change of dimsChanges){
        console.log(` - ${change.poolName}: ${change.oldDims} → ${change.newDims}`);
    }
    console.log('');
    if (!existsSync(migrationsDir)) {
        throw new Error(`[payloadcms-vectorize] Migrations directory not found: ${migrationsDir}\nPlease run 'payload migrate:create' first to create a migration for the dims change.`);
    }
    // Candidate migrations sorted newest-first by mtime; barrel files excluded.
    const candidates = readdirSync(migrationsDir)
        .filter((f)=>(f.endsWith('.ts') || f.endsWith('.js')) && f !== 'index.ts' && f !== 'index.js')
        .map((f)=>({
            name: f,
            path: join(migrationsDir, f),
            mtime: statSync(join(migrationsDir, f)).mtime
        }))
        .sort((a, b)=>b.mtime.getTime() - a.mtime.getTime());
    if (candidates.length === 0) {
        throw new Error(`[payloadcms-vectorize] No migration files found in ${migrationsDir}\nPlease run 'payload migrate:create' first to create a migration for the dims change.`);
    }
    const latestMigration = candidates[0];
    // Idempotency: a previously patched migration already carries our marker.
    const existingContent = readFileSync(latestMigration.path, 'utf-8');
    if (existingContent.includes('TRUNCATE TABLE') && existingContent.includes('payloadcms-vectorize')) {
        console.log('[payloadcms-vectorize] Migration already patched with TRUNCATE. No changes needed.');
        return;
    }
    // Apply the patch once per changed pool (each adds its own TRUNCATE/revert).
    for (const change of dimsChanges){
        patchMigrationFileForDimsChange(latestMigration.path, change.tableName, schemaName, change.oldDims, change.newDims);
    }
    console.log(`[payloadcms-vectorize] Migration patched successfully: ${latestMigration.name}`);
    console.log('');
    console.log('⚠️ WARNING: This migration will TRUNCATE your embeddings table(s).');
    console.log(' All existing embeddings will be deleted.');
    console.log(' After running the migration, you must re-embed all documents.');
    console.log('');
    // Exit explicitly outside of test runners so the CLI terminates cleanly.
    if (process.env.NODE_ENV !== 'test' && !process.env.VITEST) {
        process.exit(0);
    }
};
222
+
223
+ //# sourceMappingURL=bin-vectorize-migrate.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/bin-vectorize-migrate.ts"],"sourcesContent":["import type { SanitizedConfig } from 'payload'\nimport { getPayload } from 'payload'\nimport { readFileSync, writeFileSync, readdirSync, statSync, existsSync } from 'fs'\nimport { join, resolve } from 'path'\nimport toSnakeCase from 'to-snake-case'\n\nimport { getVectorizedPayload } from 'payloadcms-vectorize'\nimport { KnowledgePoolsConfig } from './types'\n\n/**\n * Get prior dims state from existing migrations\n */\nfunction getPriorDimsFromMigrations(\n migrationsDir: string,\n poolNames: string[],\n): Map<string, number | null> {\n const state = new Map<string, number | null>()\n\n // Initialize with null (unknown state)\n for (const poolName of poolNames) {\n state.set(poolName, null)\n }\n\n if (!existsSync(migrationsDir)) {\n return state\n }\n\n // Find all migration files and read them in reverse order (newest first)\n // Exclude index.ts/index.js as those are not migration files\n const migrationFiles = readdirSync(migrationsDir)\n .filter((f) => (f.endsWith('.ts') || f.endsWith('.js')) && f !== 'index.ts' && f !== 'index.js')\n .map((f) => ({\n name: f,\n path: join(migrationsDir, f),\n mtime: statSync(join(migrationsDir, f)).mtime,\n }))\n .sort((a, b) => b.mtime.getTime() - a.mtime.getTime())\n\n // Skip the most recent migration when determining prior dims, since it may contain\n // the pending dims change that we're trying to detect\n const filesToCheck = migrationFiles.slice(1)\n\n // Read migration files to find vector dims\n for (const file of filesToCheck) {\n try {\n const content = readFileSync(file.path, 'utf-8')\n\n // Extract only the UP function content to avoid matching values in DOWN function\n const upFunctionMatch = content.match(\n /export\\s+async\\s+function\\s+up\\s*\\([^)]*\\)[^{]*\\{([\\s\\S]*?)(?=\\}\\s*(?:export\\s+async\\s+function\\s+down|$))/i,\n )\n const upContent = upFunctionMatch ? 
upFunctionMatch[1] : content\n\n // Look for dims in vector column definition (pool-specific patterns)\n for (const poolName of poolNames) {\n const tableName = toSnakeCase(poolName)\n\n const pattern1 = new RegExp(\n `ALTER\\\\s+TABLE[^;]*?\"${tableName}\"[^;]*?vector\\\\((\\\\d+)\\\\)`,\n 'is',\n )\n const pattern2 = new RegExp(\n `CREATE\\\\s+TABLE[^;]*?\"${tableName}\"[^;]*?embedding[^;]*?vector\\\\((\\\\d+)\\\\)`,\n 'is',\n )\n const pattern3 = new RegExp(\n `\"${tableName}\"\\\\s*\\\\([^)]*embedding[^)]*vector\\\\((\\\\d+)\\\\)`,\n 'is',\n )\n\n const match1 = upContent.match(pattern1)\n const match2 = upContent.match(pattern2)\n const match3 = upContent.match(pattern3)\n\n const dimsMatch = match1 || match2 || match3\n\n if (dimsMatch && !state.get(poolName)) {\n const dims = parseInt(dimsMatch[1], 10)\n state.set(poolName, dims)\n }\n }\n } catch (err) {\n // Skip files that can't be read\n continue\n }\n }\n\n return state\n}\n\n/**\n * Generate SQL code for destructive dims change (truncate table)\n */\nfunction generateDimsChangeTruncateCode(\n tableName: string,\n schemaName: string,\n oldDims: number,\n newDims: number,\n): string {\n return ` // payloadcms-vectorize: WARNING - Changing dims from ${oldDims} to ${newDims} is DESTRUCTIVE\n // All existing embeddings will be deleted. 
You must re-embed all documents after this migration.\n // Truncate table (destructive - all embeddings are lost)\n // Use CASCADE to handle foreign key constraints\n await db.execute(sql.raw(\\`TRUNCATE TABLE \"${schemaName}\".\"${tableName}\" CASCADE\\`));`\n}\n\n/**\n * Generate SQL code for down migration (restore old dims column type)\n */\nfunction generateDimsChangeDownCode(\n tableName: string,\n schemaName: string,\n oldDims: number,\n): string {\n return ` // payloadcms-vectorize: Revert column type to old dimensions\n // WARNING: Data was truncated during up migration and cannot be restored.\n // You will need to re-embed all documents after rolling back.\n await db.execute(sql.raw(\\`ALTER TABLE \"${schemaName}\".\"${tableName}\" ALTER COLUMN embedding TYPE vector(${oldDims})\\`));`\n}\n\n/**\n * Patch a migration file with truncate SQL for dims changes\n */\nfunction patchMigrationFileForDimsChange(\n migrationPath: string,\n tableName: string,\n schemaName: string,\n oldDims: number,\n newDims: number,\n): void {\n let content = readFileSync(migrationPath, 'utf-8')\n\n // Ensure sql import exists for injected sql.raw usage\n const sqlImportRegex = /import\\s+\\{([^}]+)\\}\\s+from\\s+['\"]@payloadcms\\/db-postgres['\"]/\n const importMatch = content.match(sqlImportRegex)\n if (importMatch) {\n const imports = importMatch[1]\n .split(',')\n .map((part) => part.trim())\n .filter(Boolean)\n if (!imports.includes('sql')) {\n imports.push('sql')\n const updatedImport = `import { ${imports.join(', ')} } from '@payloadcms/db-postgres'`\n content = content.replace(importMatch[0], updatedImport)\n }\n } else {\n content = `import { sql } from '@payloadcms/db-postgres'\\n${content}`\n }\n\n // Generate SQL code\n const truncateCode = generateDimsChangeTruncateCode(tableName, schemaName, oldDims, newDims)\n const downCode = generateDimsChangeDownCode(tableName, schemaName, oldDims)\n\n // Find the up function and insert code before the closing brace\n const 
upFunctionMatch = content.match(\n /export\\s+async\\s+function\\s+up\\s*\\([^)]*\\)\\s*:\\s*Promise<void>\\s*\\{/i,\n )\n if (!upFunctionMatch) {\n throw new Error(`Could not find 'up' function in migration file: ${migrationPath}`)\n }\n\n const upFunctionStart = upFunctionMatch.index! + upFunctionMatch[0].length\n const downFunctionMatch = content.match(/export\\s+async\\s+function\\s+down\\s*\\([^)]*\\)/i)\n const searchEnd = downFunctionMatch ? downFunctionMatch.index! : content.length\n\n // Find the last closing brace before down function or end\n const upFunctionBody = content.substring(upFunctionStart, searchEnd)\n const lastBraceIndex = upFunctionBody.lastIndexOf('}')\n if (lastBraceIndex === -1) {\n throw new Error(\n `Could not find closing brace for 'up' function in migration file: ${migrationPath}`,\n )\n }\n\n // Insert our code before the closing brace\n const beforeBrace = content.substring(0, upFunctionStart + lastBraceIndex)\n const afterBrace = content.substring(upFunctionStart + lastBraceIndex)\n\n const codeToInsert = '\\n' + truncateCode + '\\n'\n let newContent = beforeBrace + codeToInsert + afterBrace\n\n // Handle down function\n if (downFunctionMatch) {\n const downFunctionStart = downFunctionMatch.index! + downFunctionMatch[0].length\n const downBraceMatch = newContent.substring(downFunctionStart).match(/\\{/)\n if (downBraceMatch) {\n const downBodyStart = downFunctionStart + downBraceMatch.index! 
+ 1\n const downBody = newContent.substring(downBodyStart)\n const downLastBraceIndex = downBody.lastIndexOf('}')\n if (downLastBraceIndex !== -1) {\n const beforeDownBrace = newContent.substring(0, downBodyStart + downLastBraceIndex)\n const afterDownBrace = newContent.substring(downBodyStart + downLastBraceIndex)\n const downCodeToInsert = '\\n' + downCode + '\\n'\n newContent = beforeDownBrace + downCodeToInsert + afterDownBrace\n }\n }\n }\n\n writeFileSync(migrationPath, newContent, 'utf-8')\n}\n\n/**\n * Bin script entry point for patching vector migrations with truncate for dims changes\n *\n * NOTE: As of v0.5.3, the IVFFLAT index is created automatically via afterSchemaInitHook\n * using Drizzle's extraConfig. This script is only needed when changing dims, which\n * requires truncating the embeddings table (destructive operation).\n */\nexport const script = async (config: SanitizedConfig): Promise<void> => {\n // Get Payload instance to access static configs via VectorizedPayload\n const getPayloadOptions = {\n config,\n // In test environment, use unique key\n ...(process.env.TEST_ENV ? { key: `vectorize-migrate-${Date.now()}` } : {}),\n }\n\n const payload = await getPayload(getPayloadOptions)\n\n // Get static configs from VectorizedPayload\n const vectorizedPayload = getVectorizedPayload(payload)\n if (!vectorizedPayload) {\n throw new Error(\n '[payloadcms-vectorize] Vectorize plugin not found. 
Ensure payloadcmsVectorize is configured in your Payload config.',\n )\n }\n\n const staticConfigs = (\n vectorizedPayload.getDbAdapterCustom() as { _staticConfigs: KnowledgePoolsConfig }\n )._staticConfigs\n if (!staticConfigs || Object.keys(staticConfigs).length === 0) {\n throw new Error('[payloadcms-vectorize] No static configs found')\n }\n\n const poolNames = Object.keys(staticConfigs)\n const schemaName = (payload.db as any).schemaName || 'public'\n\n // Get migrations directory\n const dbMigrationDir = (payload.db as any).migrationDir\n const migrationsDir = dbMigrationDir || resolve(process.cwd(), 'src/migrations')\n\n // Get prior dims state from migrations\n const priorDims = getPriorDimsFromMigrations(migrationsDir, poolNames)\n\n // Check if any dims have changed\n const dimsChanges: Array<{\n poolName: string\n tableName: string\n oldDims: number\n newDims: number\n }> = []\n\n for (const poolName of poolNames) {\n const currentConfig = staticConfigs[poolName]\n const priorDimsValue = priorDims.get(poolName)\n const currentDims = currentConfig.dims\n\n // Only flag as change if we have a prior value AND it's different\n if (priorDimsValue !== null && priorDimsValue !== undefined && priorDimsValue !== currentDims) {\n dimsChanges.push({\n poolName,\n tableName: toSnakeCase(poolName),\n oldDims: priorDimsValue as number,\n newDims: currentDims,\n })\n }\n }\n\n // If no dims changes detected, show deprecation message\n if (dimsChanges.length === 0) {\n console.log(\n '\\n[payloadcms-vectorize] No dims changes detected. ' +\n 'This script is only needed when changing dims (which requires truncating the embeddings table). 
',\n )\n return\n }\n\n // Dims changed - we need to patch the most recent migration with TRUNCATE\n console.log('\\n[payloadcms-vectorize] Detected dims changes:')\n for (const change of dimsChanges) {\n console.log(` - ${change.poolName}: ${change.oldDims} → ${change.newDims}`)\n }\n console.log('')\n\n // Find the most recent migration file\n if (!existsSync(migrationsDir)) {\n throw new Error(\n `[payloadcms-vectorize] Migrations directory not found: ${migrationsDir}\\n` +\n `Please run 'payload migrate:create' first to create a migration for the dims change.`,\n )\n }\n\n const migrationFiles = readdirSync(migrationsDir)\n .filter((f) => (f.endsWith('.ts') || f.endsWith('.js')) && f !== 'index.ts' && f !== 'index.js')\n .map((f) => ({\n name: f,\n path: join(migrationsDir, f),\n mtime: statSync(join(migrationsDir, f)).mtime,\n }))\n .sort((a, b) => b.mtime.getTime() - a.mtime.getTime())\n\n if (migrationFiles.length === 0) {\n throw new Error(\n `[payloadcms-vectorize] No migration files found in ${migrationsDir}\\n` +\n `Please run 'payload migrate:create' first to create a migration for the dims change.`,\n )\n }\n\n const latestMigration = migrationFiles[0]\n\n // Check if migration already has truncate code\n const migrationContent = readFileSync(latestMigration.path, 'utf-8')\n if (\n migrationContent.includes('TRUNCATE TABLE') &&\n migrationContent.includes('payloadcms-vectorize')\n ) {\n console.log(\n '[payloadcms-vectorize] Migration already patched with TRUNCATE. 
No changes needed.',\n )\n return\n }\n\n // Patch the migration for each dims change\n for (const change of dimsChanges) {\n patchMigrationFileForDimsChange(\n latestMigration.path,\n change.tableName,\n schemaName,\n change.oldDims,\n change.newDims,\n )\n }\n\n console.log(`[payloadcms-vectorize] Migration patched successfully: ${latestMigration.name}`)\n console.log('')\n console.log('⚠️ WARNING: This migration will TRUNCATE your embeddings table(s).')\n console.log(' All existing embeddings will be deleted.')\n console.log(' After running the migration, you must re-embed all documents.')\n console.log('')\n\n // Only exit if not in test environment\n if (process.env.NODE_ENV !== 'test' && !process.env.VITEST) {\n process.exit(0)\n }\n}\n"],"names":["getPayload","readFileSync","writeFileSync","readdirSync","statSync","existsSync","join","resolve","toSnakeCase","getVectorizedPayload","getPriorDimsFromMigrations","migrationsDir","poolNames","state","Map","poolName","set","migrationFiles","filter","f","endsWith","map","name","path","mtime","sort","a","b","getTime","filesToCheck","slice","file","content","upFunctionMatch","match","upContent","tableName","pattern1","RegExp","pattern2","pattern3","match1","match2","match3","dimsMatch","get","dims","parseInt","err","generateDimsChangeTruncateCode","schemaName","oldDims","newDims","generateDimsChangeDownCode","patchMigrationFileForDimsChange","migrationPath","sqlImportRegex","importMatch","imports","split","part","trim","Boolean","includes","push","updatedImport","replace","truncateCode","downCode","Error","upFunctionStart","index","length","downFunctionMatch","searchEnd","upFunctionBody","substring","lastBraceIndex","lastIndexOf","beforeBrace","afterBrace","codeToInsert","newContent","downFunctionStart","downBraceMatch","downBodyStart","downBody","downLastBraceIndex","beforeDownBrace","afterDownBrace","downCodeToInsert","script","config","getPayloadOptions","process","env","TEST_ENV","key","Date","now","payload","vecto
rizedPayload","staticConfigs","getDbAdapterCustom","_staticConfigs","Object","keys","db","dbMigrationDir","migrationDir","cwd","priorDims","dimsChanges","currentConfig","priorDimsValue","currentDims","undefined","console","log","change","latestMigration","migrationContent","NODE_ENV","VITEST","exit"],"mappings":"AACA,SAASA,UAAU,QAAQ,UAAS;AACpC,SAASC,YAAY,EAAEC,aAAa,EAAEC,WAAW,EAAEC,QAAQ,EAAEC,UAAU,QAAQ,KAAI;AACnF,SAASC,IAAI,EAAEC,OAAO,QAAQ,OAAM;AACpC,OAAOC,iBAAiB,gBAAe;AAEvC,SAASC,oBAAoB,QAAQ,uBAAsB;AAG3D;;CAEC,GACD,SAASC,2BACPC,aAAqB,EACrBC,SAAmB;IAEnB,MAAMC,QAAQ,IAAIC;IAElB,uCAAuC;IACvC,KAAK,MAAMC,YAAYH,UAAW;QAChCC,MAAMG,GAAG,CAACD,UAAU;IACtB;IAEA,IAAI,CAACV,WAAWM,gBAAgB;QAC9B,OAAOE;IACT;IAEA,yEAAyE;IACzE,6DAA6D;IAC7D,MAAMI,iBAAiBd,YAAYQ,eAChCO,MAAM,CAAC,CAACC,IAAM,AAACA,CAAAA,EAAEC,QAAQ,CAAC,UAAUD,EAAEC,QAAQ,CAAC,MAAK,KAAMD,MAAM,cAAcA,MAAM,YACpFE,GAAG,CAAC,CAACF,IAAO,CAAA;YACXG,MAAMH;YACNI,MAAMjB,KAAKK,eAAeQ;YAC1BK,OAAOpB,SAASE,KAAKK,eAAeQ,IAAIK,KAAK;QAC/C,CAAA,GACCC,IAAI,CAAC,CAACC,GAAGC,IAAMA,EAAEH,KAAK,CAACI,OAAO,KAAKF,EAAEF,KAAK,CAACI,OAAO;IAErD,mFAAmF;IACnF,sDAAsD;IACtD,MAAMC,eAAeZ,eAAea,KAAK,CAAC;IAE1C,2CAA2C;IAC3C,KAAK,MAAMC,QAAQF,aAAc;QAC/B,IAAI;YACF,MAAMG,UAAU/B,aAAa8B,KAAKR,IAAI,EAAE;YAExC,iFAAiF;YACjF,MAAMU,kBAAkBD,QAAQE,KAAK,CACnC;YAEF,MAAMC,YAAYF,kBAAkBA,eAAe,CAAC,EAAE,GAAGD;YAEzD,qEAAqE;YACrE,KAAK,MAAMjB,YAAYH,UAAW;gBAChC,MAAMwB,YAAY5B,YAAYO;gBAE9B,MAAMsB,WAAW,IAAIC,OACnB,CAAC,qBAAqB,EAAEF,UAAU,yBAAyB,CAAC,EAC5D;gBAEF,MAAMG,WAAW,IAAID,OACnB,CAAC,sBAAsB,EAAEF,UAAU,wCAAwC,CAAC,EAC5E;gBAEF,MAAMI,WAAW,IAAIF,OACnB,CAAC,CAAC,EAAEF,UAAU,6CAA6C,CAAC,EAC5D;gBAGF,MAAMK,SAASN,UAAUD,KAAK,CAACG;gBAC/B,MAAMK,SAASP,UAAUD,KAAK,CAACK;gBAC/B,MAAMI,SAASR,UAAUD,KAAK,CAACM;gBAE/B,MAAMI,YAAYH,UAAUC,UAAUC;gBAEtC,IAAIC,aAAa,CAAC/B,MAAMgC,GAAG,CAAC9B,WAAW;oBACrC,MAAM+B,OAAOC,SAASH,SAAS,CAAC,EAAE,EAAE;oBACpC/B,MAAMG,GAAG,CAACD,UAAU+B;gBACtB;YACF;QACF,EAAE,OAAOE,KAAK;YAEZ;QACF;IACF;IAEA,OAAOnC;AACT;AAEA;;CAEC,GACD,SAASoC,+BACPb,SAAiB,EACjBc,UAAkB,EAClBC,OAAe,EACfC,OAAe;IAEf,O
AAO,CAAC,wDAAwD,EAAED,QAAQ,IAAI,EAAEC,QAAQ;;;;4CAI9C,EAAEF,WAAW,GAAG,EAAEd,UAAU,cAAc,CAAC;AACvF;AAEA;;CAEC,GACD,SAASiB,2BACPjB,SAAiB,EACjBc,UAAkB,EAClBC,OAAe;IAEf,OAAO,CAAC;;;yCAG+B,EAAED,WAAW,GAAG,EAAEd,UAAU,qCAAqC,EAAEe,QAAQ,MAAM,CAAC;AAC3H;AAEA;;CAEC,GACD,SAASG,gCACPC,aAAqB,EACrBnB,SAAiB,EACjBc,UAAkB,EAClBC,OAAe,EACfC,OAAe;IAEf,IAAIpB,UAAU/B,aAAasD,eAAe;IAE1C,sDAAsD;IACtD,MAAMC,iBAAiB;IACvB,MAAMC,cAAczB,QAAQE,KAAK,CAACsB;IAClC,IAAIC,aAAa;QACf,MAAMC,UAAUD,WAAW,CAAC,EAAE,CAC3BE,KAAK,CAAC,KACNtC,GAAG,CAAC,CAACuC,OAASA,KAAKC,IAAI,IACvB3C,MAAM,CAAC4C;QACV,IAAI,CAACJ,QAAQK,QAAQ,CAAC,QAAQ;YAC5BL,QAAQM,IAAI,CAAC;YACb,MAAMC,gBAAgB,CAAC,SAAS,EAAEP,QAAQpD,IAAI,CAAC,MAAM,iCAAiC,CAAC;YACvF0B,UAAUA,QAAQkC,OAAO,CAACT,WAAW,CAAC,EAAE,EAAEQ;QAC5C;IACF,OAAO;QACLjC,UAAU,CAAC,+CAA+C,EAAEA,SAAS;IACvE;IAEA,oBAAoB;IACpB,MAAMmC,eAAelB,+BAA+Bb,WAAWc,YAAYC,SAASC;IACpF,MAAMgB,WAAWf,2BAA2BjB,WAAWc,YAAYC;IAEnE,gEAAgE;IAChE,MAAMlB,kBAAkBD,QAAQE,KAAK,CACnC;IAEF,IAAI,CAACD,iBAAiB;QACpB,MAAM,IAAIoC,MAAM,CAAC,gDAAgD,EAAEd,eAAe;IACpF;IAEA,MAAMe,kBAAkBrC,gBAAgBsC,KAAK,GAAItC,eAAe,CAAC,EAAE,CAACuC,MAAM;IAC1E,MAAMC,oBAAoBzC,QAAQE,KAAK,CAAC;IACxC,MAAMwC,YAAYD,oBAAoBA,kBAAkBF,KAAK,GAAIvC,QAAQwC,MAAM;IAE/E,0DAA0D;IAC1D,MAAMG,iBAAiB3C,QAAQ4C,SAAS,CAACN,iBAAiBI;IAC1D,MAAMG,iBAAiBF,eAAeG,WAAW,CAAC;IAClD,IAAID,mBAAmB,CAAC,GAAG;QACzB,MAAM,IAAIR,MACR,CAAC,kEAAkE,EAAEd,eAAe;IAExF;IAEA,2CAA2C;IAC3C,MAAMwB,cAAc/C,QAAQ4C,SAAS,CAAC,GAAGN,kBAAkBO;IAC3D,MAAMG,aAAahD,QAAQ4C,SAAS,CAACN,kBAAkBO;IAEvD,MAAMI,eAAe,OAAOd,eAAe;IAC3C,IAAIe,aAAaH,cAAcE,eAAeD;IAE9C,uBAAuB;IACvB,IAAIP,mBAAmB;QACrB,MAAMU,oBAAoBV,kBAAkBF,KAAK,GAAIE,iBAAiB,CAAC,EAAE,CAACD,MAAM;QAChF,MAAMY,iBAAiBF,WAAWN,SAAS,CAACO,mBAAmBjD,KAAK,CAAC;QACrE,IAAIkD,gBAAgB;YAClB,MAAMC,gBAAgBF,oBAAoBC,eAAeb,KAAK,GAAI;YAClE,MAAMe,WAAWJ,WAAWN,SAAS,CAACS;YACtC,MAAME,qBAAqBD,SAASR,WAAW,CAAC;YAChD,IAAIS,uBAAuB,CAAC,GAAG;gBAC7B,MAAMC,kBAAkBN,WAAWN,SAAS,CAAC,GAAGS,gBAAgBE;gBAChE,MAAME,iBAAiBP,WAAWN,SAAS,CAACS,gBAAgBE;gBAC5D,MAAMG,mBAAmB,OAAOtB,WAAW;gBAC3Cc,aAAaM,kBAAkBE,mBAAmBD
;YACpD;QACF;IACF;IAEAvF,cAAcqD,eAAe2B,YAAY;AAC3C;AAEA;;;;;;CAMC,GACD,OAAO,MAAMS,SAAS,OAAOC;IAC3B,sEAAsE;IACtE,MAAMC,oBAAoB;QACxBD;QACA,sCAAsC;QACtC,GAAIE,QAAQC,GAAG,CAACC,QAAQ,GAAG;YAAEC,KAAK,CAAC,kBAAkB,EAAEC,KAAKC,GAAG,IAAI;QAAC,IAAI,CAAC,CAAC;IAC5E;IAEA,MAAMC,UAAU,MAAMpG,WAAW6F;IAEjC,4CAA4C;IAC5C,MAAMQ,oBAAoB5F,qBAAqB2F;IAC/C,IAAI,CAACC,mBAAmB;QACtB,MAAM,IAAIhC,MACR;IAEJ;IAEA,MAAMiC,gBAAgB,AACpBD,kBAAkBE,kBAAkB,GACpCC,cAAc;IAChB,IAAI,CAACF,iBAAiBG,OAAOC,IAAI,CAACJ,eAAe9B,MAAM,KAAK,GAAG;QAC7D,MAAM,IAAIH,MAAM;IAClB;IAEA,MAAMzD,YAAY6F,OAAOC,IAAI,CAACJ;IAC9B,MAAMpD,aAAa,AAACkD,QAAQO,EAAE,CAASzD,UAAU,IAAI;IAErD,2BAA2B;IAC3B,MAAM0D,iBAAiB,AAACR,QAAQO,EAAE,CAASE,YAAY;IACvD,MAAMlG,gBAAgBiG,kBAAkBrG,QAAQuF,QAAQgB,GAAG,IAAI;IAE/D,uCAAuC;IACvC,MAAMC,YAAYrG,2BAA2BC,eAAeC;IAE5D,iCAAiC;IACjC,MAAMoG,cAKD,EAAE;IAEP,KAAK,MAAMjG,YAAYH,UAAW;QAChC,MAAMqG,gBAAgBX,aAAa,CAACvF,SAAS;QAC7C,MAAMmG,iBAAiBH,UAAUlE,GAAG,CAAC9B;QACrC,MAAMoG,cAAcF,cAAcnE,IAAI;QAEtC,kEAAkE;QAClE,IAAIoE,mBAAmB,QAAQA,mBAAmBE,aAAaF,mBAAmBC,aAAa;YAC7FH,YAAYhD,IAAI,CAAC;gBACfjD;gBACAqB,WAAW5B,YAAYO;gBACvBoC,SAAS+D;gBACT9D,SAAS+D;YACX;QACF;IACF;IAEA,wDAAwD;IACxD,IAAIH,YAAYxC,MAAM,KAAK,GAAG;QAC5B6C,QAAQC,GAAG,CACT,wDACE;QAEJ;IACF;IAEA,0EAA0E;IAC1ED,QAAQC,GAAG,CAAC;IACZ,KAAK,MAAMC,UAAUP,YAAa;QAChCK,QAAQC,GAAG,CAAC,CAAC,IAAI,EAAEC,OAAOxG,QAAQ,CAAC,EAAE,EAAEwG,OAAOpE,OAAO,CAAC,GAAG,EAAEoE,OAAOnE,OAAO,EAAE;IAC7E;IACAiE,QAAQC,GAAG,CAAC;IAEZ,sCAAsC;IACtC,IAAI,CAACjH,WAAWM,gBAAgB;QAC9B,MAAM,IAAI0D,MACR,CAAC,uDAAuD,EAAE1D,cAAc,EAAE,CAAC,GACzE,CAAC,oFAAoF,CAAC;IAE5F;IAEA,MAAMM,iBAAiBd,YAAYQ,eAChCO,MAAM,CAAC,CAACC,IAAM,AAACA,CAAAA,EAAEC,QAAQ,CAAC,UAAUD,EAAEC,QAAQ,CAAC,MAAK,KAAMD,MAAM,cAAcA,MAAM,YACpFE,GAAG,CAAC,CAACF,IAAO,CAAA;YACXG,MAAMH;YACNI,MAAMjB,KAAKK,eAAeQ;YAC1BK,OAAOpB,SAASE,KAAKK,eAAeQ,IAAIK,KAAK;QAC/C,CAAA,GACCC,IAAI,CAAC,CAACC,GAAGC,IAAMA,EAAEH,KAAK,CAACI,OAAO,KAAKF,EAAEF,KAAK,CAACI,OAAO;IAErD,IAAIX,eAAeuD,MAAM,KAAK,GAAG;QAC/B,MAAM,IAAIH,MACR,CAAC,mDAAmD,EAAE1D,cAAc,EAAE,CAAC,GACrE,CAAC,oFAAoF,CAAC;IAE5F;IAEA,MAAM6G,
kBAAkBvG,cAAc,CAAC,EAAE;IAEzC,+CAA+C;IAC/C,MAAMwG,mBAAmBxH,aAAauH,gBAAgBjG,IAAI,EAAE;IAC5D,IACEkG,iBAAiB1D,QAAQ,CAAC,qBAC1B0D,iBAAiB1D,QAAQ,CAAC,yBAC1B;QACAsD,QAAQC,GAAG,CACT;QAEF;IACF;IAEA,2CAA2C;IAC3C,KAAK,MAAMC,UAAUP,YAAa;QAChC1D,gCACEkE,gBAAgBjG,IAAI,EACpBgG,OAAOnF,SAAS,EAChBc,YACAqE,OAAOpE,OAAO,EACdoE,OAAOnE,OAAO;IAElB;IAEAiE,QAAQC,GAAG,CAAC,CAAC,uDAAuD,EAAEE,gBAAgBlG,IAAI,EAAE;IAC5F+F,QAAQC,GAAG,CAAC;IACZD,QAAQC,GAAG,CAAC;IACZD,QAAQC,GAAG,CAAC;IACZD,QAAQC,GAAG,CAAC;IACZD,QAAQC,GAAG,CAAC;IAEZ,uCAAuC;IACvC,IAAIxB,QAAQC,GAAG,CAAC2B,QAAQ,KAAK,UAAU,CAAC5B,QAAQC,GAAG,CAAC4B,MAAM,EAAE;QAC1D7B,QAAQ8B,IAAI,CAAC;IACf;AACF,EAAC"}
@@ -0,0 +1,12 @@
1
+ const embeddingsTables = new Map();
2
+ export function registerEmbeddingsTable(poolName, table) {
3
+ embeddingsTables.set(poolName, table);
4
+ }
5
+ export function getEmbeddingsTable(poolName) {
6
+ return embeddingsTables.get(poolName);
7
+ }
8
+ export function clearEmbeddingsTables() {
9
+ embeddingsTables.clear();
10
+ }
11
+
12
+ //# sourceMappingURL=drizzle.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/drizzle.ts"],"sourcesContent":["import type { KnowledgePoolName } from 'payloadcms-vectorize'\nimport type { Table } from '@payloadcms/db-postgres/drizzle'\n\n// Extend Table to allow dynamic column access (for extension fields)\ntype DrizzleTable = Table & Record<string, any>\n\nconst embeddingsTables = new Map<KnowledgePoolName, DrizzleTable>()\n\nexport function registerEmbeddingsTable(poolName: KnowledgePoolName, table: DrizzleTable): void {\n embeddingsTables.set(poolName, table)\n}\n\nexport function getEmbeddingsTable(poolName: KnowledgePoolName): DrizzleTable | undefined {\n return embeddingsTables.get(poolName)\n}\n\nexport function clearEmbeddingsTables(): void {\n embeddingsTables.clear()\n}\n"],"names":["embeddingsTables","Map","registerEmbeddingsTable","poolName","table","set","getEmbeddingsTable","get","clearEmbeddingsTables","clear"],"mappings":"AAMA,MAAMA,mBAAmB,IAAIC;AAE7B,OAAO,SAASC,wBAAwBC,QAA2B,EAAEC,KAAmB;IACtFJ,iBAAiBK,GAAG,CAACF,UAAUC;AACjC;AAEA,OAAO,SAASE,mBAAmBH,QAA2B;IAC5D,OAAOH,iBAAiBO,GAAG,CAACJ;AAC9B;AAEA,OAAO,SAASK;IACdR,iBAAiBS,KAAK;AACxB"}
package/dist/embed.js ADDED
@@ -0,0 +1,30 @@
1
+ import { isPostgresPayload } from './types';
2
+ import toSnakeCase from 'to-snake-case';
3
// Persists an embedding vector into the `embedding` column of a knowledge
// pool's embeddings table via a parameterized UPDATE.
//
// payload   - Payload instance; must be backed by the Postgres adapter.
// poolName  - knowledge pool (collection) slug; Drizzle stores it under the
//             snake_case table name, hence the toSnakeCase() below.
// id        - row id of the embeddings document to update.
// embedding - number[] or Float32Array; serialized to pgvector's
//             `[v1,v2,...]` text literal.
//
// Throws if the adapter is not Postgres, if no query mechanism is found,
// or if the UPDATE fails (the failure is also logged with context).
export default (async (payload, poolName, id, embedding)=>{
    if (!isPostgresPayload(payload)) {
        throw new Error('[@payloadcms-vectorize/pg] Only works with Postgres');
    }
    // Declared before runSQL so the closure never reads an uninitialized
    // binding. (The original declared this constant *after* runSQL and only
    // avoided the temporal dead zone because runSQL happened to be invoked
    // later.)
    const postgresPayload = payload;
    const runSQL = async (sql, params)=>{
        if (postgresPayload.db.pool?.query) return postgresPayload.db.pool.query(sql, params);
        // NOTE(review): drizzle's execute() normally takes a SQL template
        // object rather than (text, params) — confirm this fallback actually
        // binds $1/$2 on the installed drizzle version.
        if (postgresPayload.db.drizzle?.execute) return postgresPayload.db.drizzle.execute(sql, params);
        throw new Error('[@payloadcms-vectorize/pg] Failed to persist vector column');
    };
    // pgvector accepts a bracketed comma-separated literal as vector input.
    const literal = `[${Array.from(embedding).join(',')}]`;
    const schemaName = postgresPayload.db.schemaName || 'public';
    // Drizzle converts camelCase collection slugs to snake_case table names
    const sql = `UPDATE "${schemaName}"."${toSnakeCase(poolName)}" SET embedding = $1 WHERE id = $2`;
    try {
        await runSQL(sql, [
            literal,
            id
        ]);
    } catch (e) {
        const errorMessage = e.message || e.toString();
        payload.logger.error(`[@payloadcms-vectorize/pg] Failed to persist vector column: ${errorMessage}`);
        throw new Error(`[@payloadcms-vectorize/pg] Failed to persist vector column: ${e}`);
    }
});
29
+
30
+ //# sourceMappingURL=embed.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/embed.ts"],"sourcesContent":["import { Payload } from 'payload'\nimport { isPostgresPayload, PostgresPayload } from './types'\nimport toSnakeCase from 'to-snake-case'\n\nexport default async (\n payload: Payload,\n poolName: string,\n id: string,\n embedding: number[] | Float32Array,\n) => {\n const isPostgres = isPostgresPayload(payload)\n if (!isPostgres) {\n throw new Error('[@payloadcms-vectorize/pg] Only works with Postgres')\n }\n const runSQL = async (sql: string, params?: any[]) => {\n if (postgresPayload.db.pool?.query) return postgresPayload.db.pool.query(sql, params)\n if (postgresPayload.db.drizzle?.execute) return postgresPayload.db.drizzle.execute(sql, params)\n throw new Error('[@payloadcms-vectorize/pg] Failed to persist vector column')\n }\n const literal = `[${Array.from(embedding).join(',')}]`\n const postgresPayload = payload as PostgresPayload\n const schemaName = postgresPayload.db.schemaName || 'public'\n // Drizzle converts camelCase collection slugs to snake_case table names\n const sql =\n `UPDATE \"${schemaName}\".\"${toSnakeCase(poolName)}\" SET embedding = $1 WHERE id = $2` as string\n try {\n await runSQL(sql, [literal, id])\n } catch (e) {\n const errorMessage = (e as Error).message || (e as any).toString()\n payload.logger.error(\n `[@payloadcms-vectorize/pg] Failed to persist vector column: ${errorMessage}`,\n )\n throw new Error(`[@payloadcms-vectorize/pg] Failed to persist vector column: ${e}`)\n 
}\n}\n"],"names":["isPostgresPayload","toSnakeCase","payload","poolName","id","embedding","isPostgres","Error","runSQL","sql","params","postgresPayload","db","pool","query","drizzle","execute","literal","Array","from","join","schemaName","e","errorMessage","message","toString","logger","error"],"mappings":"AACA,SAASA,iBAAiB,QAAyB,UAAS;AAC5D,OAAOC,iBAAiB,gBAAe;AAEvC,eAAe,CAAA,OACbC,SACAC,UACAC,IACAC;IAEA,MAAMC,aAAaN,kBAAkBE;IACrC,IAAI,CAACI,YAAY;QACf,MAAM,IAAIC,MAAM;IAClB;IACA,MAAMC,SAAS,OAAOC,KAAaC;QACjC,IAAIC,gBAAgBC,EAAE,CAACC,IAAI,EAAEC,OAAO,OAAOH,gBAAgBC,EAAE,CAACC,IAAI,CAACC,KAAK,CAACL,KAAKC;QAC9E,IAAIC,gBAAgBC,EAAE,CAACG,OAAO,EAAEC,SAAS,OAAOL,gBAAgBC,EAAE,CAACG,OAAO,CAACC,OAAO,CAACP,KAAKC;QACxF,MAAM,IAAIH,MAAM;IAClB;IACA,MAAMU,UAAU,CAAC,CAAC,EAAEC,MAAMC,IAAI,CAACd,WAAWe,IAAI,CAAC,KAAK,CAAC,CAAC;IACtD,MAAMT,kBAAkBT;IACxB,MAAMmB,aAAaV,gBAAgBC,EAAE,CAACS,UAAU,IAAI;IACpD,wEAAwE;IACxE,MAAMZ,MACJ,CAAC,QAAQ,EAAEY,WAAW,GAAG,EAAEpB,YAAYE,UAAU,kCAAkC,CAAC;IACtF,IAAI;QACF,MAAMK,OAAOC,KAAK;YAACQ;YAASb;SAAG;IACjC,EAAE,OAAOkB,GAAG;QACV,MAAMC,eAAe,AAACD,EAAYE,OAAO,IAAI,AAACF,EAAUG,QAAQ;QAChEvB,QAAQwB,MAAM,CAACC,KAAK,CAClB,CAAC,4DAA4D,EAAEJ,cAAc;QAE/E,MAAM,IAAIhB,MAAM,CAAC,4DAA4D,EAAEe,GAAG;IACpF;AACF,CAAA,EAAC"}
package/dist/index.js ADDED
@@ -0,0 +1,73 @@
1
+ import { clearEmbeddingsTables, registerEmbeddingsTable } from './drizzle.js';
2
+ import { customType, index } from '@payloadcms/db-postgres/drizzle/pg-core';
3
+ import toSnakeCase from 'to-snake-case';
4
+ import { fileURLToPath } from 'url';
5
+ import { dirname, resolve } from 'path';
6
+ import embed from './embed';
7
+ import search from './search';
8
// Builds the Postgres/pgvector integration for payloadcms-vectorize.
//
// config maps each knowledge-pool name to its static settings (dims,
// ivfflatLists). Returns:
//  - afterSchemaInitHook: extends each pool's generated Drizzle table with a
//    `vector(dims)` column + IVFFlat cosine index, and caches the table so
//    search/embed can resolve it later.
//  - adapter: the DbAdapter consumed by the core plugin (search, embed, and
//    a config extension registering the migration-helper bin script).
export const createPostgresVectorIntegration = (config)=>{
    // Augment the generated schema so push/migrations are aware of our custom columns
    const afterSchemaInitHook = async ({ schema, extendTable })=>{
        // Ensure registry reflects the latest schema
        clearEmbeddingsTables();
        // Extend schema for each knowledge pool. Object.entries (rather than
        // the original for..in) guarantees inherited/prototype keys can never
        // be mistaken for pool names.
        for (const [poolName, staticConfig] of Object.entries(config)){
            const dims = staticConfig.dims;
            // pgvector column type fixed to this pool's configured dimensions.
            const vectorType = customType({
                dataType () {
                    return `vector(${dims})`;
                }
            });
            // Drizzle converts camelCase collection slugs to snake_case table names
            const tableName = toSnakeCase(poolName);
            const table = schema?.tables?.[tableName];
            if (!table) {
                throw new Error(`[@payloadcms-vectorize/pg] Embeddings table "${poolName}" (table: "${tableName}") not found during schema initialization. Ensure the collection has been registered.`);
            }
            if (typeof extendTable === 'function') {
                extendTable({
                    table,
                    columns: {
                        embedding: vectorType('embedding')
                    },
                    // IVFFlat index over cosine distance; `lists` tunes the
                    // recall/speed trade-off and comes from the pool config.
                    extraConfig: (cols)=>({
                            embeddingIvfflatIndex: index(`${tableName}_embedding_ivfflat`).using('ivfflat', cols.embedding.op('vector_cosine_ops')).with({
                                lists: staticConfig.ivfflatLists
                            })
                        })
                });
            }
            // Cache the (possibly extended) table for search/embed lookups.
            registerEmbeddingsTable(poolName, table);
        }
        return schema;
    };
    const adapter = {
        getConfigExtension: ()=>{
            // Resolve the migration-helper bin script relative to this module.
            const __filename = fileURLToPath(import.meta.url);
            const __dirname = dirname(__filename);
            const binScriptPath = resolve(__dirname, 'bin-vectorize-migrate.js');
            return {
                bins: [
                    {
                        // Exposed to the Payload CLI as `vectorize:migrate`.
                        key: 'vectorize:migrate',
                        scriptPath: binScriptPath
                    }
                ],
                custom: {
                    // Static configs (dims, ivfflatLists) read back by the
                    // migration helper via getDbAdapterCustom().
                    _staticConfigs: config
                }
            };
        },
        search,
        embed
    };
    return {
        afterSchemaInitHook,
        adapter
    };
};
72
+
73
+ //# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/index.ts"],"sourcesContent":["import { KnowledgePoolsConfig } from './types'\nimport type { PostgresAdapterArgs } from '@payloadcms/db-postgres'\nimport { clearEmbeddingsTables, registerEmbeddingsTable } from './drizzle.js'\nimport { customType, index } from '@payloadcms/db-postgres/drizzle/pg-core'\nimport toSnakeCase from 'to-snake-case'\nimport type { DbAdapter } from 'payloadcms-vectorize'\nimport { fileURLToPath } from 'url'\nimport { dirname, resolve } from 'path'\nimport embed from './embed'\nimport search from './search'\n\nexport type { KnowledgePoolsConfig as KnowledgePoolConfig }\n\nexport const createPostgresVectorIntegration = (\n config: KnowledgePoolsConfig,\n): {\n afterSchemaInitHook: Required<PostgresAdapterArgs>['afterSchemaInit'][number]\n adapter: DbAdapter\n} => {\n // Augment the generated schema so push/migrations are aware of our custom columns\n const afterSchemaInitHook: Required<PostgresAdapterArgs>['afterSchemaInit'][number] = async ({\n schema,\n extendTable,\n }) => {\n // Ensure registry reflects the latest schema\n clearEmbeddingsTables()\n\n // Extend schema for each knowledge pool\n for (const poolName in config) {\n const staticConfig = config[poolName]\n const dims = staticConfig.dims\n\n const vectorType = customType({\n dataType() {\n return `vector(${dims})`\n },\n })\n\n // Drizzle converts camelCase collection slugs to snake_case table names\n const tableName = toSnakeCase(poolName)\n const table = schema?.tables?.[tableName]\n if (!table) {\n throw new Error(\n `[@payloadcms-vectorize/pg] Embeddings table \"${poolName}\" (table: \"${tableName}\") not found during schema initialization. 
Ensure the collection has been registered.`,\n )\n }\n\n if (typeof extendTable === 'function') {\n extendTable({\n table,\n columns: {\n embedding: vectorType('embedding'),\n },\n extraConfig: (cols) => ({\n embeddingIvfflatIndex: index(`${tableName}_embedding_ivfflat`)\n .using('ivfflat', cols.embedding.op('vector_cosine_ops'))\n .with({ lists: staticConfig.ivfflatLists }),\n }),\n })\n }\n\n registerEmbeddingsTable(poolName, table)\n }\n\n return schema\n }\n\n const adapter: DbAdapter = {\n getConfigExtension: () => {\n // Register bin script for migration helper\n const __filename = fileURLToPath(import.meta.url)\n const __dirname = dirname(__filename)\n const binScriptPath = resolve(__dirname, 'bin-vectorize-migrate.js')\n\n return {\n bins: [\n {\n // Register bin script for migration helper\n key: 'vectorize:migrate',\n scriptPath: binScriptPath,\n },\n ],\n custom: {\n _staticConfigs: config,\n },\n }\n },\n search,\n embed,\n }\n\n return { afterSchemaInitHook, adapter }\n}\n"],"names":["clearEmbeddingsTables","registerEmbeddingsTable","customType","index","toSnakeCase","fileURLToPath","dirname","resolve","embed","search","createPostgresVectorIntegration","config","afterSchemaInitHook","schema","extendTable","poolName","staticConfig","dims","vectorType","dataType","tableName","table","tables","Error","columns","embedding","extraConfig","cols","embeddingIvfflatIndex","using","op","with","lists","ivfflatLists","adapter","getConfigExtension","__filename","url","__dirname","binScriptPath","bins","key","scriptPath","custom","_staticConfigs"],"mappings":"AAEA,SAASA,qBAAqB,EAAEC,uBAAuB,QAAQ,eAAc;AAC7E,SAASC,UAAU,EAAEC,KAAK,QAAQ,0CAAyC;AAC3E,OAAOC,iBAAiB,gBAAe;AAEvC,SAASC,aAAa,QAAQ,MAAK;AACnC,SAASC,OAAO,EAAEC,OAAO,QAAQ,OAAM;AACvC,OAAOC,WAAW,UAAS;AAC3B,OAAOC,YAAY,WAAU;AAI7B,OAAO,MAAMC,kCAAkC,CAC7CC;IAKA,kFAAkF;IAClF,MAAMC,sBAAgF,OAAO,EAC3FC,MAAM,EACNC,WAAW,EACZ;QACC,6CAA6C;QAC7Cd;QAEA,wCAAwC;QACxC,IAAK,MAAMe,YAAYJ,OAAQ;YAC7B,MAAMK,eAAeL,MAAM,CAACI,SAAS;YACrC,MAAME,
OAAOD,aAAaC,IAAI;YAE9B,MAAMC,aAAahB,WAAW;gBAC5BiB;oBACE,OAAO,CAAC,OAAO,EAAEF,KAAK,CAAC,CAAC;gBAC1B;YACF;YAEA,wEAAwE;YACxE,MAAMG,YAAYhB,YAAYW;YAC9B,MAAMM,QAAQR,QAAQS,QAAQ,CAACF,UAAU;YACzC,IAAI,CAACC,OAAO;gBACV,MAAM,IAAIE,MACR,CAAC,6CAA6C,EAAER,SAAS,WAAW,EAAEK,UAAU,qFAAqF,CAAC;YAE1K;YAEA,IAAI,OAAON,gBAAgB,YAAY;gBACrCA,YAAY;oBACVO;oBACAG,SAAS;wBACPC,WAAWP,WAAW;oBACxB;oBACAQ,aAAa,CAACC,OAAU,CAAA;4BACtBC,uBAAuBzB,MAAM,GAAGiB,UAAU,kBAAkB,CAAC,EAC1DS,KAAK,CAAC,WAAWF,KAAKF,SAAS,CAACK,EAAE,CAAC,sBACnCC,IAAI,CAAC;gCAAEC,OAAOhB,aAAaiB,YAAY;4BAAC;wBAC7C,CAAA;gBACF;YACF;YAEAhC,wBAAwBc,UAAUM;QACpC;QAEA,OAAOR;IACT;IAEA,MAAMqB,UAAqB;QACzBC,oBAAoB;YAClB,2CAA2C;YAC3C,MAAMC,aAAa/B,cAAc,YAAYgC,GAAG;YAChD,MAAMC,YAAYhC,QAAQ8B;YAC1B,MAAMG,gBAAgBhC,QAAQ+B,WAAW;YAEzC,OAAO;gBACLE,MAAM;oBACJ;wBACE,2CAA2C;wBAC3CC,KAAK;wBACLC,YAAYH;oBACd;iBACD;gBACDI,QAAQ;oBACNC,gBAAgBjC;gBAClB;YACF;QACF;QACAF;QACAD;IACF;IAEA,OAAO;QAAEI;QAAqBsB;IAAQ;AACxC,EAAC"}
package/dist/search.js ADDED
@@ -0,0 +1,238 @@
1
+ import { sql, cosineDistance, inArray, eq, and, or, not, like, gt, gte, lt, lte, ne, isNull, isNotNull } from '@payloadcms/db-postgres/drizzle';
2
+ import toSnakeCase from 'to-snake-case';
3
+ import { getEmbeddingsTable } from './drizzle';
4
// Runs a cosine-similarity search against a knowledge pool's embeddings
// table and returns rows ordered by similarity (best match first).
//
// payload        - Payload instance backed by the Postgres adapter.
// queryEmbedding - query vector, compared via pgvector cosine distance.
// poolName       - knowledge pool (collection) slug.
// limit          - maximum number of rows returned (default 10).
// where          - optional Payload WHERE clause; converted to Drizzle
//                  conditions, throws when it cannot be converted.
export default (async (payload, queryEmbedding, poolName, limit = 10, where)=>{
    // Cheap duck-type check that the configured adapter is Postgres.
    const isPostgres = payload.db?.pool?.query || payload.db?.drizzle;
    if (!isPostgres) {
        throw new Error('Only works with Postgres');
    }
    // In PayloadCMS, payload.db IS the adapter, and drizzle is at payload.db.drizzle
    const adapter = payload.db;
    if (!adapter) {
        throw new Error('Drizzle adapter not found');
    }
    // Get drizzle instance
    const drizzle = adapter.drizzle;
    if (!drizzle) {
        throw new Error('Drizzle instance not found in adapter');
    }
    // Get collection config and table name
    const collectionConfig = payload.collections[poolName]?.config;
    if (!collectionConfig) {
        throw new Error(`Collection ${poolName} not found`);
    }
    // Table object cached by the afterSchemaInit hook (see drizzle.js).
    const table = getEmbeddingsTable(poolName);
    if (!table) {
        throw new Error(`[payloadcms-vectorize] Embeddings table for knowledge pool "${poolName}" not registered. Ensure the plugin's afterSchemaInit hook ran and the pool exists.`);
    }
    // Use Drizzle's query builder with cosineDistance function.
    // cosineDistance returns a distance, so score is computed as 1 - distance.
    // The table from fullSchema should have columns as direct properties.
    const embeddingColumn = table.embedding;
    if (!embeddingColumn) {
        throw new Error(`Embedding column not found in table for pool "${poolName}". Available properties: ${Object.keys(table).join(', ')}`);
    }
    // Convert WHERE clause to Drizzle conditions.
    // convertWhereToDrizzle distinguishes null ("matches nothing") from
    // undefined ("could not convert"); both are surfaced as errors here.
    let drizzleWhere = undefined;
    if (where) {
        drizzleWhere = convertWhereToDrizzle(where, table, collectionConfig.flattenedFields);
        if (drizzleWhere === null) {
            // WHERE clause resulted in an empty condition (e.g., empty 'and'
            // or 'or' array). That semantically means "match nothing", so we
            // throw rather than silently returning unfiltered results.
            throw new Error(`[payloadcms-vectorize] WHERE clause resulted in no valid conditions. This typically occurs when using empty 'and' or 'or' arrays, or when all field conditions reference non-existent columns.`);
        }
        if (drizzleWhere === undefined) {
            // WHERE clause could not be converted (invalid structure or unsupported operators)
            throw new Error(`[payloadcms-vectorize] WHERE clause could not be converted to Drizzle conditions. Please check that all field names exist and operators are supported.`);
        }
    }
    // Build query using Drizzle's query builder.
    // Column names in the table are camelCase (docId, chunkText, etc.)
    // but their database names are snake_case (doc_id, chunk_text, etc.).
    const distanceExpr = cosineDistance(embeddingColumn, queryEmbedding);
    // Build select object with score
    const selectObj = {
        id: table.id,
        // score = 1 - cosine distance, so 1.0 means identical direction.
        score: sql`1 - (${distanceExpr})`
    };
    // Add reserved + extension fields from the collection config so each
    // result row carries the pool's declared fields alongside id/score.
    for (const field of collectionConfig.fields ?? []){
        if (typeof field === 'object' && 'name' in field) {
            const name = field.name;
            if (name in table) {
                selectObj[name] = table[name];
            } else if (toSnakeCase(name) in table) {
                selectObj[name] = table[toSnakeCase(name)];
            }
        }
    }
    let query = drizzle.select(selectObj).from(table);
    // Add WHERE clause if provided
    if (drizzleWhere) {
        query = query.where(drizzleWhere);
    }
    // Order by cosine distance (ascending = most similar first) and limit.
    // Reuse the same distance expression for ordering.
    query = query.orderBy(distanceExpr).limit(limit);
    // Execute the query
    const result = await query;
    // Normalize driver output (stringified numbers, snake_case keys) into
    // VectorSearchResult-shaped objects.
    return mapRowsToResults(result, collectionConfig);
});
84
/**
 * Convert a Payload WHERE clause into a Drizzle condition tree.
 * Simplified version inspired by Payload's buildQuery.
 *
 * Return contract (the caller treats the last two as errors):
 *  - a Drizzle condition when at least one clause converted;
 *  - null when the clause semantically matches nothing (an 'and'/'or'
 *    whose children all dropped out);
 *  - undefined when the input is not a usable object or no field
 *    condition could be converted (unknown columns / operators).
 *
 * NOTE(review): `fields` (flattenedFields from the collection config) is
 * currently unused — columns are resolved purely by name against `table`.
 */
function convertWhereToDrizzle(where, table, fields) {
    if (!where || typeof where !== 'object') {
        return undefined;
    }
    // Handle 'and' operator: convert children recursively, dropping any that
    // failed to convert.
    if ('and' in where && Array.isArray(where.and)) {
        const conditions = where.and.map((condition)=>convertWhereToDrizzle(condition, table, fields)).filter((c)=>c !== undefined && c !== null);
        if (conditions.length === 0) return null;
        if (conditions.length === 1) return conditions[0];
        return and(...conditions);
    }
    // Handle 'or' operator (same shape as 'and' above).
    if ('or' in where && Array.isArray(where.or)) {
        const conditions = where.or.map((condition)=>convertWhereToDrizzle(condition, table, fields)).filter((c)=>c !== undefined && c !== null);
        if (conditions.length === 0) return null;
        if (conditions.length === 1) return conditions[0];
        return or(...conditions);
    }
    // Handle field conditions - collect all field conditions and combine with AND
    const fieldConditions = [];
    for (const [fieldName, condition] of Object.entries(where)){
        if (fieldName === 'and' || fieldName === 'or') continue;
        // Resolve the Drizzle column for this field.
        // Drizzle tables have columns as direct properties; try the name
        // as-is (camelCase) first, then its snake_case form, then the
        // table.columns bag as a last resort.
        let column = undefined;
        if (fieldName in table) {
            column = table[fieldName];
        } else if (toSnakeCase(fieldName) in table) {
            column = table[toSnakeCase(fieldName)];
        } else if (table.columns) {
            // Fallback to table.columns if it exists
            if (fieldName in table.columns) {
                column = table.columns[fieldName];
            } else if (toSnakeCase(fieldName) in table.columns) {
                column = table.columns[toSnakeCase(fieldName)];
            }
        }
        if (!column) {
            // Unknown column: skipped silently. If every field is skipped,
            // the function falls through to `return undefined` below.
            continue;
        }
        if (typeof condition !== 'object' || condition === null || Array.isArray(condition)) {
            // Operators must be supplied as an object, e.g. { equals: ... }.
            continue;
        }
        const cond = condition;
        // Handle equals
        if ('equals' in cond) {
            fieldConditions.push(eq(column, cond.equals));
            continue;
        }
        // Handle not_equals / notEquals
        if ('not_equals' in cond || 'notEquals' in cond) {
            fieldConditions.push(ne(column, cond.not_equals ?? cond.notEquals));
            continue;
        }
        // Handle in
        // NOTE(review): an empty array reaches inArray() unfiltered —
        // confirm the installed drizzle version tolerates inArray(col, []).
        if ('in' in cond && Array.isArray(cond.in)) {
            fieldConditions.push(inArray(column, cond.in));
            continue;
        }
        // Handle not_in / notIn
        if ('not_in' in cond || 'notIn' in cond) {
            const values = cond.not_in ?? cond.notIn;
            if (Array.isArray(values)) {
                fieldConditions.push(not(inArray(column, values)));
            }
            continue;
        }
        // Handle like (pattern is passed through verbatim)
        if ('like' in cond && typeof cond.like === 'string') {
            fieldConditions.push(like(column, cond.like));
            continue;
        }
        // Handle contains (substring match; % and _ in the value are NOT
        // escaped, so they still act as SQL wildcards)
        if ('contains' in cond && typeof cond.contains === 'string') {
            fieldConditions.push(like(column, `%${cond.contains}%`));
            continue;
        }
        // Handle greater_than / greaterThan
        if ('greater_than' in cond || 'greaterThan' in cond) {
            fieldConditions.push(gt(column, cond.greater_than ?? cond.greaterThan));
            continue;
        }
        // Handle greater_than_equal / greaterThanEqual
        if ('greater_than_equal' in cond || 'greaterThanEqual' in cond) {
            fieldConditions.push(gte(column, cond.greater_than_equal ?? cond.greaterThanEqual));
            continue;
        }
        // Handle less_than / lessThan
        if ('less_than' in cond || 'lessThan' in cond) {
            fieldConditions.push(lt(column, cond.less_than ?? cond.lessThan));
            continue;
        }
        // Handle less_than_equal / lessThanEqual
        if ('less_than_equal' in cond || 'lessThanEqual' in cond) {
            fieldConditions.push(lte(column, cond.less_than_equal ?? cond.lessThanEqual));
            continue;
        }
        // Handle exists (null check)
        if ('exists' in cond && typeof cond.exists === 'boolean') {
            fieldConditions.push(cond.exists ? isNotNull(column) : isNull(column));
            continue;
        }
    }
    // Combine all field conditions with AND
    if (fieldConditions.length === 0) {
        return undefined;
    }
    if (fieldConditions.length === 1) {
        return fieldConditions[0];
    }
    return and(...fieldConditions);
}
201
// Normalizes raw Drizzle result rows into VectorSearchResult-shaped objects:
// stringifies id/docId, numifies score/chunkIndex, and coerces any
// collection fields declared as `number` (the driver may return strings).
function mapRowsToResults(rows, collectionConfig) {
    // Names of collection fields typed as numbers on the collection config.
    const numericFieldNames = new Set();
    for (const field of collectionConfig?.fields ?? []){
        if (typeof field === 'object' && 'name' in field && field.type === 'number') {
            numericFieldNames.add(field.name);
        }
    }
    // Parse a value with `parser` unless it is already a number.
    const toNumber = (value, parser)=>typeof value === 'number' ? value : parser(String(value));
    return rows.map((row)=>{
        // Drizzle returns the names we selected (camelCase), but handle
        // snake_case too for robustness.
        const result = {
            ...row,
            id: String(row.id),
            docId: String(row.docId ?? row.doc_id),
            score: toNumber(row.score, parseFloat),
            chunkIndex: toNumber(row.chunkIndex ?? row.chunk_index, (s)=>parseInt(s, 10))
        };
        // Ensure schema-declared number fields come back as numbers.
        for (const name of numericFieldNames){
            const raw = result[name];
            if (raw != null && typeof raw !== 'number') {
                const parsed = parseFloat(String(raw));
                if (!Number.isNaN(parsed)) {
                    result[name] = parsed;
                }
            }
        }
        return result;
    });
}
237
+
238
+ //# sourceMappingURL=search.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/search.ts"],"sourcesContent":["import {\n sql,\n cosineDistance,\n inArray,\n eq,\n and,\n or,\n not,\n like,\n gt,\n gte,\n lt,\n lte,\n ne,\n isNull,\n isNotNull,\n} from '@payloadcms/db-postgres/drizzle'\nimport { BasePayload, Where } from 'payload'\nimport { KnowledgePoolName, VectorSearchResult } from 'payloadcms-vectorize'\nimport toSnakeCase from 'to-snake-case'\nimport { getEmbeddingsTable } from './drizzle'\n\nexport default async (\n payload: BasePayload,\n queryEmbedding: number[],\n poolName: KnowledgePoolName,\n limit: number = 10,\n where?: Where,\n): Promise<Array<VectorSearchResult>> => {\n const isPostgres = payload.db?.pool?.query || payload.db?.drizzle\n\n if (!isPostgres) {\n throw new Error('Only works with Postgres')\n }\n\n // In PayloadCMS, payload.db IS the adapter, and drizzle is at payload.db.drizzle\n const adapter = payload.db\n if (!adapter) {\n throw new Error('Drizzle adapter not found')\n }\n\n // Get drizzle instance\n const drizzle = adapter.drizzle\n if (!drizzle) {\n throw new Error('Drizzle instance not found in adapter')\n }\n\n // Get collection config and table name\n const collectionConfig = payload.collections[poolName]?.config\n if (!collectionConfig) {\n throw new Error(`Collection ${poolName} not found`)\n }\n\n const table = getEmbeddingsTable(poolName)\n if (!table) {\n throw new Error(\n `[payloadcms-vectorize] Embeddings table for knowledge pool \"${poolName}\" not registered. Ensure the plugin's afterSchemaInit hook ran and the pool exists.`,\n )\n }\n\n // Use Drizzle's query builder with cosineDistance function\n // cosineDistance returns distance, so we calculate score as 1 - distance\n // The table from fullSchema should have columns as direct properties\n const embeddingColumn = table.embedding\n if (!embeddingColumn) {\n throw new Error(\n `Embedding column not found in table for pool \"${poolName}\". 
Available properties: ${Object.keys(table).join(', ')}`,\n )\n }\n\n // Convert WHERE clause to Drizzle conditions\n let drizzleWhere: any = undefined\n if (where) {\n drizzleWhere = convertWhereToDrizzle(where, table, collectionConfig.flattenedFields)\n if (drizzleWhere === null) {\n // WHERE clause resulted in an empty condition (e.g., empty 'and' or 'or' array)\n // This semantically means \"match nothing\", so return empty results\n throw new Error(\n `[payloadcms-vectorize] WHERE clause resulted in no valid conditions. This typically occurs when using empty 'and' or 'or' arrays, or when all field conditions reference non-existent columns.`,\n )\n }\n if (drizzleWhere === undefined) {\n // WHERE clause could not be converted (invalid structure or unsupported operators)\n throw new Error(\n `[payloadcms-vectorize] WHERE clause could not be converted to Drizzle conditions. Please check that all field names exist and operators are supported.`,\n )\n }\n }\n\n // Build query using Drizzle's query builder\n // Column names in the table are camelCase (docId, chunkText, etc.)\n // but their database names are snake_case (doc_id, chunk_text, etc.)\n // The table from fullSchema should have columns as direct properties\n // Calculate score: 1 - cosineDistance (distance)\n // Need to cast 1 to numeric to avoid \"integer - vector\" error\n const distanceExpr = cosineDistance(embeddingColumn, queryEmbedding)\n\n // Build select object with score\n const selectObj: Record<string, any> = {\n id: table.id, // ensure we select id explicitly\n score: sql<number>`1 - (${distanceExpr})`,\n }\n\n // Add reserved + extension fields from collection config\n for (const field of collectionConfig.fields ?? 
[]) {\n if (typeof field === 'object' && 'name' in field) {\n const name = field.name as string\n if (name in table) {\n selectObj[name] = table[name]\n } else if (toSnakeCase(name) in table) {\n selectObj[name] = table[toSnakeCase(name)]\n }\n }\n }\n\n let query: any = drizzle.select(selectObj).from(table)\n\n // Add WHERE clause if provided\n if (drizzleWhere) {\n query = query.where(drizzleWhere)\n }\n\n // Order by cosine distance (ascending = most similar first) and limit\n // Reuse the same distance expression for ordering\n query = query.orderBy(distanceExpr).limit(limit)\n\n // Execute the query\n const result = await query\n\n return mapRowsToResults(result, collectionConfig)\n}\n\n/**\n * Convert Payload WHERE clause to Drizzle conditions\n * Simplified version inspired by Payload's buildQuery\n */\nfunction convertWhereToDrizzle(where: Where, table: any, fields: any[]): any {\n if (!where || typeof where !== 'object') {\n return undefined\n }\n\n // Handle 'and' operator\n if ('and' in where && Array.isArray(where.and)) {\n const conditions = where.and\n .map((condition) => convertWhereToDrizzle(condition, table, fields))\n .filter((c) => c !== undefined && c !== null)\n if (conditions.length === 0) return null\n if (conditions.length === 1) return conditions[0]\n return and(...conditions)\n }\n\n // Handle 'or' operator\n if ('or' in where && Array.isArray(where.or)) {\n const conditions = where.or\n .map((condition) => convertWhereToDrizzle(condition, table, fields))\n .filter((c) => c !== undefined && c !== null)\n if (conditions.length === 0) return null\n if (conditions.length === 1) return conditions[0]\n return or(...conditions)\n }\n\n // Handle field conditions - collect all field conditions and combine with AND\n const fieldConditions: any[] = []\n for (const [fieldName, condition] of Object.entries(where)) {\n if (fieldName === 'and' || fieldName === 'or') continue\n\n // Get the column from the table\n // Drizzle tables have columns as 
direct properties\n // Try camelCase first, then snake_case as fallback\n // Use 'in' operator to check existence, then access the property\n let column: any = undefined\n if (fieldName in table) {\n column = table[fieldName]\n } else if (toSnakeCase(fieldName) in table) {\n column = table[toSnakeCase(fieldName)]\n } else if (table.columns) {\n // Fallback to table.columns if it exists\n if (fieldName in table.columns) {\n column = table.columns[fieldName]\n } else if (toSnakeCase(fieldName) in table.columns) {\n column = table.columns[toSnakeCase(fieldName)]\n }\n }\n\n if (!column) {\n // Field not found, skip (could be a nested field we don't support)\n continue\n }\n\n if (typeof condition !== 'object' || condition === null || Array.isArray(condition)) {\n continue\n }\n\n const cond = condition as Record<string, any>\n\n // Handle equals\n if ('equals' in cond) {\n fieldConditions.push(eq(column, cond.equals))\n continue\n }\n\n // Handle not_equals / notEquals\n if ('not_equals' in cond || 'notEquals' in cond) {\n fieldConditions.push(ne(column, cond.not_equals ?? cond.notEquals))\n continue\n }\n\n // Handle in\n if ('in' in cond && Array.isArray(cond.in)) {\n fieldConditions.push(inArray(column, cond.in))\n continue\n }\n\n // Handle not_in / notIn\n if ('not_in' in cond || 'notIn' in cond) {\n const values = cond.not_in ?? cond.notIn\n if (Array.isArray(values)) {\n fieldConditions.push(not(inArray(column, values)))\n }\n continue\n }\n\n // Handle like\n if ('like' in cond && typeof cond.like === 'string') {\n fieldConditions.push(like(column, cond.like))\n continue\n }\n\n // Handle contains\n if ('contains' in cond && typeof cond.contains === 'string') {\n fieldConditions.push(like(column, `%${cond.contains}%`))\n continue\n }\n\n // Handle greater_than / greaterThan\n if ('greater_than' in cond || 'greaterThan' in cond) {\n fieldConditions.push(gt(column, cond.greater_than ?? 
cond.greaterThan))\n continue\n }\n\n // Handle greater_than_equal / greaterThanEqual\n if ('greater_than_equal' in cond || 'greaterThanEqual' in cond) {\n fieldConditions.push(gte(column, cond.greater_than_equal ?? cond.greaterThanEqual))\n continue\n }\n\n // Handle less_than / lessThan\n if ('less_than' in cond || 'lessThan' in cond) {\n fieldConditions.push(lt(column, cond.less_than ?? cond.lessThan))\n continue\n }\n\n // Handle less_than_equal / lessThanEqual\n if ('less_than_equal' in cond || 'lessThanEqual' in cond) {\n fieldConditions.push(lte(column, cond.less_than_equal ?? cond.lessThanEqual))\n continue\n }\n\n // Handle exists (null check)\n if ('exists' in cond && typeof cond.exists === 'boolean') {\n fieldConditions.push(cond.exists ? isNotNull(column) : isNull(column))\n continue\n }\n }\n\n // Combine all field conditions with AND\n if (fieldConditions.length === 0) {\n return undefined\n }\n if (fieldConditions.length === 1) {\n return fieldConditions[0]\n }\n return and(...fieldConditions)\n}\n\nfunction mapRowsToResults(rows: any[], collectionConfig: any): Array<VectorSearchResult> {\n // Collect names of fields that are typed as number on the collection\n const numberFields = new Set<string>()\n if (collectionConfig?.fields) {\n for (const field of collectionConfig.fields) {\n if (typeof field === 'object' && 'name' in field && field.type === 'number') {\n numberFields.add(field.name as string)\n }\n }\n }\n\n return rows.map((row: any) => {\n // Drizzle returns columns with the names we selected (camelCase)\n // Handle both camelCase and snake_case for robustness\n const rawDocId = row.docId ?? row.doc_id\n const rawChunkIndex = row.chunkIndex ?? row.chunk_index\n const rawScore = row.score\n\n const result: any = {\n ...row,\n id: String(row.id),\n docId: String(rawDocId),\n score: typeof rawScore === 'number' ? rawScore : parseFloat(String(rawScore)),\n chunkIndex:\n typeof rawChunkIndex === 'number' ? 
rawChunkIndex : parseInt(String(rawChunkIndex), 10),\n }\n\n // Ensure any number fields from the schema are numbers in the result\n for (const fieldName of numberFields) {\n const value = result[fieldName]\n if (value != null && typeof value !== 'number') {\n const parsed = parseFloat(String(value))\n if (!Number.isNaN(parsed)) {\n result[fieldName] = parsed\n }\n }\n }\n\n return result\n })\n}\n"],"names":["sql","cosineDistance","inArray","eq","and","or","not","like","gt","gte","lt","lte","ne","isNull","isNotNull","toSnakeCase","getEmbeddingsTable","payload","queryEmbedding","poolName","limit","where","isPostgres","db","pool","query","drizzle","Error","adapter","collectionConfig","collections","config","table","embeddingColumn","embedding","Object","keys","join","drizzleWhere","undefined","convertWhereToDrizzle","flattenedFields","distanceExpr","selectObj","id","score","field","fields","name","select","from","orderBy","result","mapRowsToResults","Array","isArray","conditions","map","condition","filter","c","length","fieldConditions","fieldName","entries","column","columns","cond","push","equals","not_equals","notEquals","in","values","not_in","notIn","contains","greater_than","greaterThan","greater_than_equal","greaterThanEqual","less_than","lessThan","less_than_equal","lessThanEqual","exists","rows","numberFields","Set","type","add","row","rawDocId","docId","doc_id","rawChunkIndex","chunkIndex","chunk_index","rawScore","String","parseFloat","parseInt","value","parsed","Number","isNaN"],"mappings":"AAAA,SACEA,GAAG,EACHC,cAAc,EACdC,OAAO,EACPC,EAAE,EACFC,GAAG,EACHC,EAAE,EACFC,GAAG,EACHC,IAAI,EACJC,EAAE,EACFC,GAAG,EACHC,EAAE,EACFC,GAAG,EACHC,EAAE,EACFC,MAAM,EACNC,SAAS,QACJ,kCAAiC;AAGxC,OAAOC,iBAAiB,gBAAe;AACvC,SAASC,kBAAkB,QAAQ,YAAW;AAE9C,eAAe,CAAA,OACbC,SACAC,gBACAC,UACAC,QAAgB,EAAE,EAClBC;IAEA,MAAMC,aAAaL,QAAQM,EAAE,EAAEC,MAAMC,SAASR,QAAQM,EAAE,EAAEG;IAE1D,IAAI,CAACJ,YAAY;QACf,MAAM,IAAIK,MAAM;IAClB;IAEA,iFAAiF;IACjF,MAAMC,UAAUX,QAAQM,EAAE;IAC1B,IAAI,CAACK,SAAS;QAC
Z,MAAM,IAAID,MAAM;IAClB;IAEA,uBAAuB;IACvB,MAAMD,UAAUE,QAAQF,OAAO;IAC/B,IAAI,CAACA,SAAS;QACZ,MAAM,IAAIC,MAAM;IAClB;IAEA,uCAAuC;IACvC,MAAME,mBAAmBZ,QAAQa,WAAW,CAACX,SAAS,EAAEY;IACxD,IAAI,CAACF,kBAAkB;QACrB,MAAM,IAAIF,MAAM,CAAC,WAAW,EAAER,SAAS,UAAU,CAAC;IACpD;IAEA,MAAMa,QAAQhB,mBAAmBG;IACjC,IAAI,CAACa,OAAO;QACV,MAAM,IAAIL,MACR,CAAC,4DAA4D,EAAER,SAAS,mFAAmF,CAAC;IAEhK;IAEA,2DAA2D;IAC3D,yEAAyE;IACzE,qEAAqE;IACrE,MAAMc,kBAAkBD,MAAME,SAAS;IACvC,IAAI,CAACD,iBAAiB;QACpB,MAAM,IAAIN,MACR,CAAC,8CAA8C,EAAER,SAAS,yBAAyB,EAAEgB,OAAOC,IAAI,CAACJ,OAAOK,IAAI,CAAC,OAAO;IAExH;IAEA,6CAA6C;IAC7C,IAAIC,eAAoBC;IACxB,IAAIlB,OAAO;QACTiB,eAAeE,sBAAsBnB,OAAOW,OAAOH,iBAAiBY,eAAe;QACnF,IAAIH,iBAAiB,MAAM;YACzB,gFAAgF;YAChF,mEAAmE;YACnE,MAAM,IAAIX,MACR,CAAC,8LAA8L,CAAC;QAEpM;QACA,IAAIW,iBAAiBC,WAAW;YAC9B,mFAAmF;YACnF,MAAM,IAAIZ,MACR,CAAC,sJAAsJ,CAAC;QAE5J;IACF;IAEA,4CAA4C;IAC5C,mEAAmE;IACnE,qEAAqE;IACrE,qEAAqE;IACrE,iDAAiD;IACjD,8DAA8D;IAC9D,MAAMe,eAAezC,eAAegC,iBAAiBf;IAErD,iCAAiC;IACjC,MAAMyB,YAAiC;QACrCC,IAAIZ,MAAMY,EAAE;QACZC,OAAO7C,GAAW,CAAC,KAAK,EAAE0C,aAAa,CAAC,CAAC;IAC3C;IAEA,yDAAyD;IACzD,KAAK,MAAMI,SAASjB,iBAAiBkB,MAAM,IAAI,EAAE,CAAE;QACjD,IAAI,OAAOD,UAAU,YAAY,UAAUA,OAAO;YAChD,MAAME,OAAOF,MAAME,IAAI;YACvB,IAAIA,QAAQhB,OAAO;gBACjBW,SAAS,CAACK,KAAK,GAAGhB,KAAK,CAACgB,KAAK;YAC/B,OAAO,IAAIjC,YAAYiC,SAAShB,OAAO;gBACrCW,SAAS,CAACK,KAAK,GAAGhB,KAAK,CAACjB,YAAYiC,MAAM;YAC5C;QACF;IACF;IAEA,IAAIvB,QAAaC,QAAQuB,MAAM,CAACN,WAAWO,IAAI,CAAClB;IAEhD,+BAA+B;IAC/B,IAAIM,cAAc;QAChBb,QAAQA,MAAMJ,KAAK,CAACiB;IACtB;IAEA,sEAAsE;IACtE,kDAAkD;IAClDb,QAAQA,MAAM0B,OAAO,CAACT,cAActB,KAAK,CAACA;IAE1C,oBAAoB;IACpB,MAAMgC,SAAS,MAAM3B;IAErB,OAAO4B,iBAAiBD,QAAQvB;AAClC,CAAA,EAAC;AAED;;;CAGC,GACD,SAASW,sBAAsBnB,KAAY,EAAEW,KAAU,EAAEe,MAAa;IACpE,IAAI,CAAC1B,SAAS,OAAOA,UAAU,UAAU;QACvC,OAAOkB;IACT;IAEA,wBAAwB;IACxB,IAAI,SAASlB,SAASiC,MAAMC,OAAO,CAAClC,MAAMjB,GAAG,GAAG;QAC9C,MAAMoD,aAAanC,MAAMjB,GAAG,CACzBqD,GAAG,CAAC,CAACC,YAAclB,sBAAsBkB,WAAW1B,OAAOe,SAC3DY,MAAM,CAAC,CAACC,IAAMA,MAAMrB,aAAaqB,MAAM;QAC1C,IAAIJ,WAAWK,MAAM,KAA
K,GAAG,OAAO;QACpC,IAAIL,WAAWK,MAAM,KAAK,GAAG,OAAOL,UAAU,CAAC,EAAE;QACjD,OAAOpD,OAAOoD;IAChB;IAEA,uBAAuB;IACvB,IAAI,QAAQnC,SAASiC,MAAMC,OAAO,CAAClC,MAAMhB,EAAE,GAAG;QAC5C,MAAMmD,aAAanC,MAAMhB,EAAE,CACxBoD,GAAG,CAAC,CAACC,YAAclB,sBAAsBkB,WAAW1B,OAAOe,SAC3DY,MAAM,CAAC,CAACC,IAAMA,MAAMrB,aAAaqB,MAAM;QAC1C,IAAIJ,WAAWK,MAAM,KAAK,GAAG,OAAO;QACpC,IAAIL,WAAWK,MAAM,KAAK,GAAG,OAAOL,UAAU,CAAC,EAAE;QACjD,OAAOnD,MAAMmD;IACf;IAEA,8EAA8E;IAC9E,MAAMM,kBAAyB,EAAE;IACjC,KAAK,MAAM,CAACC,WAAWL,UAAU,IAAIvB,OAAO6B,OAAO,CAAC3C,OAAQ;QAC1D,IAAI0C,cAAc,SAASA,cAAc,MAAM;QAE/C,gCAAgC;QAChC,mDAAmD;QACnD,mDAAmD;QACnD,iEAAiE;QACjE,IAAIE,SAAc1B;QAClB,IAAIwB,aAAa/B,OAAO;YACtBiC,SAASjC,KAAK,CAAC+B,UAAU;QAC3B,OAAO,IAAIhD,YAAYgD,cAAc/B,OAAO;YAC1CiC,SAASjC,KAAK,CAACjB,YAAYgD,WAAW;QACxC,OAAO,IAAI/B,MAAMkC,OAAO,EAAE;YACxB,yCAAyC;YACzC,IAAIH,aAAa/B,MAAMkC,OAAO,EAAE;gBAC9BD,SAASjC,MAAMkC,OAAO,CAACH,UAAU;YACnC,OAAO,IAAIhD,YAAYgD,cAAc/B,MAAMkC,OAAO,EAAE;gBAClDD,SAASjC,MAAMkC,OAAO,CAACnD,YAAYgD,WAAW;YAChD;QACF;QAEA,IAAI,CAACE,QAAQ;YAEX;QACF;QAEA,IAAI,OAAOP,cAAc,YAAYA,cAAc,QAAQJ,MAAMC,OAAO,CAACG,YAAY;YACnF;QACF;QAEA,MAAMS,OAAOT;QAEb,gBAAgB;QAChB,IAAI,YAAYS,MAAM;YACpBL,gBAAgBM,IAAI,CAACjE,GAAG8D,QAAQE,KAAKE,MAAM;YAC3C;QACF;QAEA,gCAAgC;QAChC,IAAI,gBAAgBF,QAAQ,eAAeA,MAAM;YAC/CL,gBAAgBM,IAAI,CAACxD,GAAGqD,QAAQE,KAAKG,UAAU,IAAIH,KAAKI,SAAS;YACjE;QACF;QAEA,YAAY;QACZ,IAAI,QAAQJ,QAAQb,MAAMC,OAAO,CAACY,KAAKK,EAAE,GAAG;YAC1CV,gBAAgBM,IAAI,CAAClE,QAAQ+D,QAAQE,KAAKK,EAAE;YAC5C;QACF;QAEA,wBAAwB;QACxB,IAAI,YAAYL,QAAQ,WAAWA,MAAM;YACvC,MAAMM,SAASN,KAAKO,MAAM,IAAIP,KAAKQ,KAAK;YACxC,IAAIrB,MAAMC,OAAO,CAACkB,SAAS;gBACzBX,gBAAgBM,IAAI,CAAC9D,IAAIJ,QAAQ+D,QAAQQ;YAC3C;YACA;QACF;QAEA,cAAc;QACd,IAAI,UAAUN,QAAQ,OAAOA,KAAK5D,IAAI,KAAK,UAAU;YACnDuD,gBAAgBM,IAAI,CAAC7D,KAAK0D,QAAQE,KAAK5D,IAAI;YAC3C;QACF;QAEA,kBAAkB;QAClB,IAAI,cAAc4D,QAAQ,OAAOA,KAAKS,QAAQ,KAAK,UAAU;YAC3Dd,gBAAgBM,IAAI,CAAC7D,KAAK0D,QAAQ,CAAC,CAAC,EAAEE,KAAKS,QAAQ,CAAC,CAAC,CAAC;YACtD;QACF;QAEA,oCAAoC;QACpC,IAAI,kBAAkBT,QAAQ,iBAAiBA,MAAM;YACnDL,gBAAgBM,IAAI,CAAC5D,GAAGyD,QA
AQE,KAAKU,YAAY,IAAIV,KAAKW,WAAW;YACrE;QACF;QAEA,+CAA+C;QAC/C,IAAI,wBAAwBX,QAAQ,sBAAsBA,MAAM;YAC9DL,gBAAgBM,IAAI,CAAC3D,IAAIwD,QAAQE,KAAKY,kBAAkB,IAAIZ,KAAKa,gBAAgB;YACjF;QACF;QAEA,8BAA8B;QAC9B,IAAI,eAAeb,QAAQ,cAAcA,MAAM;YAC7CL,gBAAgBM,IAAI,CAAC1D,GAAGuD,QAAQE,KAAKc,SAAS,IAAId,KAAKe,QAAQ;YAC/D;QACF;QAEA,yCAAyC;QACzC,IAAI,qBAAqBf,QAAQ,mBAAmBA,MAAM;YACxDL,gBAAgBM,IAAI,CAACzD,IAAIsD,QAAQE,KAAKgB,eAAe,IAAIhB,KAAKiB,aAAa;YAC3E;QACF;QAEA,6BAA6B;QAC7B,IAAI,YAAYjB,QAAQ,OAAOA,KAAKkB,MAAM,KAAK,WAAW;YACxDvB,gBAAgBM,IAAI,CAACD,KAAKkB,MAAM,GAAGvE,UAAUmD,UAAUpD,OAAOoD;YAC9D;QACF;IACF;IAEA,wCAAwC;IACxC,IAAIH,gBAAgBD,MAAM,KAAK,GAAG;QAChC,OAAOtB;IACT;IACA,IAAIuB,gBAAgBD,MAAM,KAAK,GAAG;QAChC,OAAOC,eAAe,CAAC,EAAE;IAC3B;IACA,OAAO1D,OAAO0D;AAChB;AAEA,SAAST,iBAAiBiC,IAAW,EAAEzD,gBAAqB;IAC1D,qEAAqE;IACrE,MAAM0D,eAAe,IAAIC;IACzB,IAAI3D,kBAAkBkB,QAAQ;QAC5B,KAAK,MAAMD,SAASjB,iBAAiBkB,MAAM,CAAE;YAC3C,IAAI,OAAOD,UAAU,YAAY,UAAUA,SAASA,MAAM2C,IAAI,KAAK,UAAU;gBAC3EF,aAAaG,GAAG,CAAC5C,MAAME,IAAI;YAC7B;QACF;IACF;IAEA,OAAOsC,KAAK7B,GAAG,CAAC,CAACkC;QACf,iEAAiE;QACjE,sDAAsD;QACtD,MAAMC,WAAWD,IAAIE,KAAK,IAAIF,IAAIG,MAAM;QACxC,MAAMC,gBAAgBJ,IAAIK,UAAU,IAAIL,IAAIM,WAAW;QACvD,MAAMC,WAAWP,IAAI9C,KAAK;QAE1B,MAAMO,SAAc;YAClB,GAAGuC,GAAG;YACN/C,IAAIuD,OAAOR,IAAI/C,EAAE;YACjBiD,OAAOM,OAAOP;YACd/C,OAAO,OAAOqD,aAAa,WAAWA,WAAWE,WAAWD,OAAOD;YACnEF,YACE,OAAOD,kBAAkB,WAAWA,gBAAgBM,SAASF,OAAOJ,gBAAgB;QACxF;QAEA,qEAAqE;QACrE,KAAK,MAAMhC,aAAawB,aAAc;YACpC,MAAMe,QAAQlD,MAAM,CAACW,UAAU;YAC/B,IAAIuC,SAAS,QAAQ,OAAOA,UAAU,UAAU;gBAC9C,MAAMC,SAASH,WAAWD,OAAOG;gBACjC,IAAI,CAACE,OAAOC,KAAK,CAACF,SAAS;oBACzBnD,MAAM,CAACW,UAAU,GAAGwC;gBACtB;YACF;QACF;QAEA,OAAOnD;IACT;AACF"}
package/dist/types.js ADDED
@@ -0,0 +1,6 @@
1
/** Configuration for a knowledge pool */ // (type-only declarations erased at compile time)
// Type guard: true when the Payload instance is backed by the Postgres adapter,
// detected via a pg pool (`db.pool.query`) or a Drizzle instance (`db.drizzle.execute`).
export function isPostgresPayload(payload) {
    const db = payload?.db;
    const canQueryPool = typeof db?.pool?.query === 'function';
    const canExecuteDrizzle = typeof db?.drizzle?.execute === 'function';
    return canQueryPool || canExecuteDrizzle;
}
5
+
6
+ //# sourceMappingURL=types.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/types.ts"],"sourcesContent":["/** Configuration for a knowledge pool */\n\nimport { KnowledgePoolName } from 'payloadcms-vectorize'\n\n/** Note current limitation: needs a migration in order to change */\nexport type KnowledgePoolsConfig = Record<\n KnowledgePoolName,\n {\n /** Vector dimensions for pgvector column */\n dims: number\n /** IVFFLAT lists parameter used when creating the index */\n ivfflatLists: number\n }\n>\n\n// Type guard to check if Payload is using Postgres adapter\nexport function isPostgresPayload(payload: any): payload is any & {\n db: {\n pool?: { query: (sql: string, params?: any[]) => Promise<any> }\n drizzle?: { execute: (sql: string) => Promise<any> }\n }\n} {\n return (\n typeof payload?.db?.pool?.query === 'function' ||\n typeof payload?.db?.drizzle?.execute === 'function'\n )\n}\n\n// Type for Payload with Postgres database\nexport type PostgresPayload = any & {\n db: {\n pool?: { query: (sql: string, params?: any[]) => Promise<any> }\n drizzle?: { execute: (sql: string) => Promise<any> }\n }\n}\n"],"names":["isPostgresPayload","payload","db","pool","query","drizzle","execute"],"mappings":"AAAA,uCAAuC,GAevC,2DAA2D;AAC3D,OAAO,SAASA,kBAAkBC,OAAY;IAM5C,OACE,OAAOA,SAASC,IAAIC,MAAMC,UAAU,cACpC,OAAOH,SAASC,IAAIG,SAASC,YAAY;AAE7C"}
package/package.json ADDED
@@ -0,0 +1,27 @@
1
+ {
2
+ "name": "@payloadcms-vectorize/pg",
3
+ "version": "0.6.0-beta",
4
+ "description": "PostgreSQL adapter for payloadcms-vectorize",
5
+ "license": "MIT",
6
+ "type": "module",
7
+ "files": [
8
+ "dist"
9
+ ],
10
+ "main": "./dist/index.js",
11
+ "types": "./dist/index.d.ts",
12
+ "peerDependencies": {
13
+ "payload": ">=3.0.0 <4.0.0",
14
+ "payloadcms-vectorize": ">=0.6.0-beta <1.0.0",
15
+ "@payloadcms/db-postgres": ">=3.0.0 <4.0.0"
16
+ },
17
+ "devDependencies": {
18
+ "payloadcms-vectorize": "0.6.0-beta"
19
+ },
20
+ "dependencies": {
21
+ "to-snake-case": "1.0.0"
22
+ },
23
+ "engines": {
24
+ "node": "^18.20.2 || >=20.9.0",
25
+ "pnpm": "^9 || ^10"
26
+ }
27
+ }