@payloadcms/db-postgres 3.0.0-canary.6d9b462 → 3.0.0-canary.c13f9ba
- package/dist/createMigration.js +2 -2
- package/dist/createMigration.js.map +1 -1
- package/dist/predefinedMigrations/v2-v3/index.d.ts +1 -1
- package/dist/predefinedMigrations/v2-v3/index.d.ts.map +1 -1
- package/dist/predefinedMigrations/v2-v3/index.js +4 -4
- package/dist/predefinedMigrations/v2-v3/index.js.map +1 -1
- package/dist/predefinedMigrations/v2-v3/migrateRelationships.d.ts +1 -1
- package/dist/predefinedMigrations/v2-v3/migrateRelationships.d.ts.map +1 -1
- package/dist/predefinedMigrations/v2-v3/migrateRelationships.js +1 -1
- package/dist/predefinedMigrations/v2-v3/migrateRelationships.js.map +1 -1
- package/dist/utilities/pushDevSchema.d.ts.map +1 -1
- package/dist/utilities/pushDevSchema.js +1 -1
- package/dist/utilities/pushDevSchema.js.map +1 -1
- package/package.json +5 -5
package/dist/createMigration.js
CHANGED
@@ -85,9 +85,9 @@ export const createMigration = async function createMigration({ file, forceAccep
                 process.exit(0);
             }
         }
+        // write schema
+        fs.writeFileSync(`${filePath}.json`, JSON.stringify(drizzleJsonAfter, null, 2));
     }
-    // write schema
-    fs.writeFileSync(`${filePath}.json`, JSON.stringify(drizzleJsonAfter, null, 2));
     // write migration
     fs.writeFileSync(`${filePath}.ts`, migrationTemplate({
         downSQL: downSQL || ` // Migration code`,
package/dist/createMigration.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/createMigration.ts"],"sourcesContent":["/* eslint-disable no-restricted-syntax, no-await-in-loop */\nimport type { DrizzleSnapshotJSON } from 'drizzle-kit/payload'\nimport type { CreateMigration, MigrationTemplateArgs } from 'payload/database'\n\nimport fs from 'fs'\nimport { createRequire } from 'module'\nimport path from 'path'\nimport { getPredefinedMigration } from 'payload/database'\nimport prompts from 'prompts'\nimport { fileURLToPath } from 'url'\n\nimport type { PostgresAdapter } from './types.js'\n\nconst require = createRequire(import.meta.url)\n\nconst migrationTemplate = ({\n downSQL,\n imports,\n upSQL,\n}: MigrationTemplateArgs): string => `import { MigrateUpArgs, MigrateDownArgs, sql } from '@payloadcms/db-postgres'\n${imports ? `${imports}\\n` : ''}\nexport async function up({ payload, req }: MigrateUpArgs): Promise<void> {\n${upSQL}\n};\n\nexport async function down({ payload, req }: MigrateDownArgs): Promise<void> {\n${downSQL}\n};\n`\n\nconst getDefaultDrizzleSnapshot = (): DrizzleSnapshotJSON => ({\n id: '00000000-0000-0000-0000-000000000000',\n _meta: {\n columns: {},\n schemas: {},\n tables: {},\n },\n dialect: 'pg',\n enums: {},\n prevId: '00000000-0000-0000-0000-00000000000',\n schemas: {},\n tables: {},\n version: '5',\n})\n\nexport const createMigration: CreateMigration = async function createMigration(\n this: PostgresAdapter,\n { file, forceAcceptWarning, migrationName, payload },\n) {\n const filename = fileURLToPath(import.meta.url)\n const dirname = path.dirname(filename)\n const dir = payload.db.migrationDir\n if (!fs.existsSync(dir)) {\n fs.mkdirSync(dir)\n }\n const { generateDrizzleJson, generateMigration } = require('drizzle-kit/payload')\n const drizzleJsonAfter = generateDrizzleJson(this.schema)\n const [yyymmdd, hhmmss] = new Date().toISOString().split('T')\n const formattedDate = yyymmdd.replace(/\\D/g, '')\n const formattedTime = hhmmss.split('.')[0].replace(/\\D/g, '')\n let imports: string = ''\n let downSQL: string\n let upSQL: string\n ;({ downSQL, imports, upSQL } = await getPredefinedMigration({\n dirname,\n file,\n migrationName,\n payload,\n }))\n\n const timestamp = `${formattedDate}_${formattedTime}`\n\n const name = migrationName || file?.split('/').slice(2).join('/')\n const fileName = `${timestamp}${name ? `_${name.replace(/\\W/g, '_')}` : ''}`\n\n const filePath = `${dir}/${fileName}`\n\n let drizzleJsonBefore = getDefaultDrizzleSnapshot()\n\n if (!upSQL) {\n // Get latest migration snapshot\n const latestSnapshot = fs\n .readdirSync(dir)\n .filter((file) => file.endsWith('.json'))\n .sort()\n .reverse()?.[0]\n\n if (latestSnapshot) {\n drizzleJsonBefore = JSON.parse(\n fs.readFileSync(`${dir}/${latestSnapshot}`, 'utf8'),\n ) as DrizzleSnapshotJSON\n }\n\n const sqlStatementsUp = await generateMigration(drizzleJsonBefore, drizzleJsonAfter)\n const sqlStatementsDown = await generateMigration(drizzleJsonAfter, drizzleJsonBefore)\n const sqlExecute = 'await payload.db.drizzle.execute(sql`'\n\n if (sqlStatementsUp?.length) {\n upSQL = `${sqlExecute}\\n ${sqlStatementsUp?.join('\\n')}\\`)`\n }\n if (sqlStatementsDown?.length) {\n downSQL = `${sqlExecute}\\n ${sqlStatementsDown?.join('\\n')}\\`)`\n }\n\n if (!upSQL?.length && !downSQL?.length && !forceAcceptWarning) {\n const { confirm: shouldCreateBlankMigration } = await prompts(\n {\n name: 'confirm',\n type: 'confirm',\n initial: false,\n message: 'No schema changes detected. 
Would you like to create a blank migration file?',\n },\n {\n onCancel: () => {\n process.exit(0)\n },\n },\n )\n\n if (!shouldCreateBlankMigration) {\n process.exit(0)\n }\n }\n
+
{"version":3,"sources":["../src/createMigration.ts"],"sourcesContent":["/* eslint-disable no-restricted-syntax, no-await-in-loop */\nimport type { DrizzleSnapshotJSON } from 'drizzle-kit/payload'\nimport type { CreateMigration, MigrationTemplateArgs } from 'payload/database'\n\nimport fs from 'fs'\nimport { createRequire } from 'module'\nimport path from 'path'\nimport { getPredefinedMigration } from 'payload/database'\nimport prompts from 'prompts'\nimport { fileURLToPath } from 'url'\n\nimport type { PostgresAdapter } from './types.js'\n\nconst require = createRequire(import.meta.url)\n\nconst migrationTemplate = ({\n downSQL,\n imports,\n upSQL,\n}: MigrationTemplateArgs): string => `import { MigrateUpArgs, MigrateDownArgs, sql } from '@payloadcms/db-postgres'\n${imports ? `${imports}\\n` : ''}\nexport async function up({ payload, req }: MigrateUpArgs): Promise<void> {\n${upSQL}\n};\n\nexport async function down({ payload, req }: MigrateDownArgs): Promise<void> {\n${downSQL}\n};\n`\n\nconst getDefaultDrizzleSnapshot = (): DrizzleSnapshotJSON => ({\n id: '00000000-0000-0000-0000-000000000000',\n _meta: {\n columns: {},\n schemas: {},\n tables: {},\n },\n dialect: 'pg',\n enums: {},\n prevId: '00000000-0000-0000-0000-00000000000',\n schemas: {},\n tables: {},\n version: '5',\n})\n\nexport const createMigration: CreateMigration = async function createMigration(\n this: PostgresAdapter,\n { file, forceAcceptWarning, migrationName, payload },\n) {\n const filename = fileURLToPath(import.meta.url)\n const dirname = path.dirname(filename)\n const dir = payload.db.migrationDir\n if (!fs.existsSync(dir)) {\n fs.mkdirSync(dir)\n }\n const { generateDrizzleJson, generateMigration } = require('drizzle-kit/payload')\n const drizzleJsonAfter = generateDrizzleJson(this.schema)\n const [yyymmdd, hhmmss] = new Date().toISOString().split('T')\n const formattedDate = yyymmdd.replace(/\\D/g, '')\n const formattedTime = hhmmss.split('.')[0].replace(/\\D/g, '')\n let imports: string = ''\n let downSQL: string\n let upSQL: string\n ;({ downSQL, imports, upSQL } = await getPredefinedMigration({\n dirname,\n file,\n migrationName,\n payload,\n }))\n\n const timestamp = `${formattedDate}_${formattedTime}`\n\n const name = migrationName || file?.split('/').slice(2).join('/')\n const fileName = `${timestamp}${name ? `_${name.replace(/\\W/g, '_')}` : ''}`\n\n const filePath = `${dir}/${fileName}`\n\n let drizzleJsonBefore = getDefaultDrizzleSnapshot()\n\n if (!upSQL) {\n // Get latest migration snapshot\n const latestSnapshot = fs\n .readdirSync(dir)\n .filter((file) => file.endsWith('.json'))\n .sort()\n .reverse()?.[0]\n\n if (latestSnapshot) {\n drizzleJsonBefore = JSON.parse(\n fs.readFileSync(`${dir}/${latestSnapshot}`, 'utf8'),\n ) as DrizzleSnapshotJSON\n }\n\n const sqlStatementsUp = await generateMigration(drizzleJsonBefore, drizzleJsonAfter)\n const sqlStatementsDown = await generateMigration(drizzleJsonAfter, drizzleJsonBefore)\n const sqlExecute = 'await payload.db.drizzle.execute(sql`'\n\n if (sqlStatementsUp?.length) {\n upSQL = `${sqlExecute}\\n ${sqlStatementsUp?.join('\\n')}\\`)`\n }\n if (sqlStatementsDown?.length) {\n downSQL = `${sqlExecute}\\n ${sqlStatementsDown?.join('\\n')}\\`)`\n }\n\n if (!upSQL?.length && !downSQL?.length && !forceAcceptWarning) {\n const { confirm: shouldCreateBlankMigration } = await prompts(\n {\n name: 'confirm',\n type: 'confirm',\n initial: false,\n message: 'No schema changes detected. 
Would you like to create a blank migration file?',\n },\n {\n onCancel: () => {\n process.exit(0)\n },\n },\n )\n\n if (!shouldCreateBlankMigration) {\n process.exit(0)\n }\n }\n\n // write schema\n fs.writeFileSync(`${filePath}.json`, JSON.stringify(drizzleJsonAfter, null, 2))\n }\n\n // write migration\n fs.writeFileSync(\n `${filePath}.ts`,\n migrationTemplate({\n downSQL: downSQL || ` // Migration code`,\n imports,\n upSQL: upSQL || ` // Migration code`,\n }),\n )\n payload.logger.info({ msg: `Migration created at ${filePath}.ts` })\n}\n"],"names":["fs","createRequire","path","getPredefinedMigration","prompts","fileURLToPath","require","url","migrationTemplate","downSQL","imports","upSQL","getDefaultDrizzleSnapshot","id","_meta","columns","schemas","tables","dialect","enums","prevId","version","createMigration","file","forceAcceptWarning","migrationName","payload","filename","dirname","dir","db","migrationDir","existsSync","mkdirSync","generateDrizzleJson","generateMigration","drizzleJsonAfter","schema","yyymmdd","hhmmss","Date","toISOString","split","formattedDate","replace","formattedTime","timestamp","name","slice","join","fileName","filePath","drizzleJsonBefore","latestSnapshot","readdirSync","filter","endsWith","sort","reverse","JSON","parse","readFileSync","sqlStatementsUp","sqlStatementsDown","sqlExecute","length","confirm","shouldCreateBlankMigration","type","initial","message","onCancel","process","exit","writeFileSync","stringify","logger","info","msg"],"rangeMappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;","mappings":"AAAA,yDAAyD,GAIzD,OAAOA,QAAQ,KAAI;AACnB,SAASC,aAAa,QAAQ,SAAQ;AACtC,OAAOC,UAAU,OAAM;AACvB,SAASC,sBAAsB,QAAQ,mBAAkB;AACzD,OAAOC,aAAa,UAAS;AAC7B,SAASC,aAAa,QAAQ,MAAK;AAInC,MAAMC,UAAUL,cAAc,YAAYM,GAAG;AAE7C,MAAMC,oBAAoB,CAAC,EACzBC,OAAO,EACPC,OAAO,EACPC,KAAK,EACiB,GAAa,CAAC;AACtC,EAAED,UAAU,CAAC,EAAEA,QAAQ,EAAE,CAAC,GAAG,GAAG;;AAEhC,EAAEC,MAAM;;;;AAIR,EAAEF,QAAQ;;AAEV,CAAC;AAED,MAAMG,4BAA4B,IAA4B,CAAA;QAC5DC,IAAI;QACJC,OAAO;YACLC,SAAS,CAAC;YACVC,SAAS,CAAC;YACVC,QAAQ,CAAC;QACX;QACAC,SAAS;QACTC,OAAO,CAAC;QACRC,QAAQ;QACRJ,SAAS,CAAC;QACVC,QAAQ,CAAC;QACTI,SAAS;IACX,CAAA;AAEA,OAAO,MAAMC,kBAAmC,eAAeA,gBAE7D,EAAEC,IAAI,EAAEC,kBAAkB,EAAEC,aAAa,EAAEC,OAAO,EAAE;IAEpD,MAAMC,WAAWtB,cAAc,YAAYE,GAAG;IAC9C,MAAMqB,UAAU1B,KAAK0B,OAAO,CAACD;IAC7B,MAAME,MAAMH,QAAQI,EAAE,CAACC,YAAY;IACnC,IAAI,CAAC/B,GAAGgC,UAAU,CAACH,MAAM;QACvB7B,GAAGiC,SAAS,CAACJ;IACf;IACA,MAAM,EAAEK,mBAAmB,EAAEC,iBAAiB,EAAE,GAAG7B,QAAQ;IAC3D,MAAM8B,mBAAmBF,oBAAoB,IAAI,CAACG,MAAM;IACxD,MAAM,CAACC,SAASC,OAAO,GAAG,IAAIC,OAAOC,WAAW,GAAGC,KAAK,CAAC;IACzD,MAAMC,gBAAgBL,QAAQM,OAAO,CAAC,OAAO;IAC7C,MAAMC,gBAAgBN,OAAOG,KAAK,CAAC,IAAI,CAAC,EAAE,CAACE,OAAO,CAAC,OAAO;IAC1D,IAAIlC,UAAkB;IACtB,IAAID;IACJ,IAAIE;IACF,CAAA,EAAEF,OAAO,EAAEC,OAAO,EAAEC,KAAK,EAAE,GAAG,MAAMR,uBAAuB;QAC3DyB;QACAL;QACAE;QACAC;IACF,EAAC;IAED,MAAMoB,YAAY,CAAC,EAAEH,cAAc,CAAC,EAAEE,cAAc,CAAC;IAErD,MAAME,OAAOtB,iBAAiBF,MAAMmB,MAAM,KAAKM,MAAM,GAAGC,KAAK;IAC7D,MAAMC,WAAW,CAAC,EAAEJ,UAAU,EAAEC,OAAO,CAAC,CAAC,EAAEA,KAAKH,OAAO,CAAC,OAAO,KAAK,CAAC,GAAG,GAAG,CAAC;IAE5E,MAAMO,WAAW,CAAC,EAAEtB,IAAI,CAAC,EAAEqB,SAAS,CAAC;IAErC,IAAIE,oBAAoBxC;IAExB,IAAI,CAACD,OAAO;QACV,gCAAgC;QAChC,MAAM0C,iBAAiBrD,GACpBsD,WAAW,CAACzB,KACZ0B,MAAM,CAAC,CAAChC,OAASA,KAAKiC,QAAQ,CAAC,UAC/BC,IAAI,GACJC,OAAO,IAAI,CAAC,EAAE;QAEjB,IAAIL,gBAAgB;YAClBD,oBAAoBO,KAAKC,KAAK,CAC5B5D,GAAG6D,YAAY,CAAC,CAAC,EAAEhC,IAAI,CAAC,EAAEwB,eAAe,CAAC,EAAE;QAEhD;QAEA,MAAMS,kBAAkB,MAAM3B,kBAAkBiB,mBAAmBhB;QACnE,MAAM2B,oBAAoB,MAAM5B,kBAAkBC,kBAAkBgB;QACpE,M
AAMY,aAAa;QAEnB,IAAIF,iBAAiBG,QAAQ;YAC3BtD,QAAQ,CAAC,EAAEqD,WAAW,GAAG,EAAEF,iBAAiBb,KAAK,MAAM,GAAG,CAAC;QAC7D;QACA,IAAIc,mBAAmBE,QAAQ;YAC7BxD,UAAU,CAAC,EAAEuD,WAAW,GAAG,EAAED,mBAAmBd,KAAK,MAAM,GAAG,CAAC;QACjE;QAEA,IAAI,CAACtC,OAAOsD,UAAU,CAACxD,SAASwD,UAAU,CAACzC,oBAAoB;YAC7D,MAAM,EAAE0C,SAASC,0BAA0B,EAAE,GAAG,MAAM/D,QACpD;gBACE2C,MAAM;gBACNqB,MAAM;gBACNC,SAAS;gBACTC,SAAS;YACX,GACA;gBACEC,UAAU;oBACRC,QAAQC,IAAI,CAAC;gBACf;YACF;YAGF,IAAI,CAACN,4BAA4B;gBAC/BK,QAAQC,IAAI,CAAC;YACf;QACF;QAEA,eAAe;QACfzE,GAAG0E,aAAa,CAAC,CAAC,EAAEvB,SAAS,KAAK,CAAC,EAAEQ,KAAKgB,SAAS,CAACvC,kBAAkB,MAAM;IAC9E;IAEA,kBAAkB;IAClBpC,GAAG0E,aAAa,CACd,CAAC,EAAEvB,SAAS,GAAG,CAAC,EAChB3C,kBAAkB;QAChBC,SAASA,WAAW,CAAC,mBAAmB,CAAC;QACzCC;QACAC,OAAOA,SAAS,CAAC,mBAAmB,CAAC;IACvC;IAEFe,QAAQkD,MAAM,CAACC,IAAI,CAAC;QAAEC,KAAK,CAAC,qBAAqB,EAAE3B,SAAS,GAAG,CAAC;IAAC;AACnE,EAAC"}
package/dist/predefinedMigrations/v2-v3/index.d.ts
CHANGED
@@ -3,7 +3,7 @@ import type { PayloadRequestWithData } from 'payload/types';
 type Args = {
     debug?: boolean;
     payload: Payload;
-    req?: Partial<PayloadRequestWithData>;
+    req: PayloadRequestWithData;
 };
 /**
  * Moves upload and relationship columns from the join table and into the tables while moving data
package/dist/predefinedMigrations/v2-v3/index.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/predefinedMigrations/v2-v3/index.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,SAAS,CAAA;AACtC,OAAO,KAAK,EAAE,sBAAsB,EAAE,MAAM,eAAe,CAAA;AAiB3D,KAAK,IAAI,GAAG;IACV,KAAK,CAAC,EAAE,OAAO,CAAA;IACf,OAAO,EAAE,OAAO,CAAA;IAChB,GAAG,
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/predefinedMigrations/v2-v3/index.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,SAAS,CAAA;AACtC,OAAO,KAAK,EAAE,sBAAsB,EAAE,MAAM,eAAe,CAAA;AAiB3D,KAAK,IAAI,GAAG;IACV,KAAK,CAAC,EAAE,OAAO,CAAA;IACf,OAAO,EAAE,OAAO,CAAA;IAChB,GAAG,EAAE,sBAAsB,CAAA;CAC5B,CAAA;AAED;;;;;;;;;;;;;GAaG;AACH,eAAO,MAAM,qBAAqB,4BAAmC,IAAI,kBA8OxE,CAAA"}
package/dist/predefinedMigrations/v2-v3/index.js
CHANGED
@@ -27,12 +27,12 @@ const require = createRequire(import.meta.url);
     // get the drizzle migrateUpSQL from drizzle using the last schema
     const { generateDrizzleJson, generateMigration } = require('drizzle-kit/payload');
     const drizzleJsonAfter = generateDrizzleJson(adapter.schema);
-    // Get the previous migration snapshot
-    const previousSnapshot = fs.readdirSync(dir).filter((file)=>file.endsWith('.json') && !file.endsWith('relationships_v2_v3.json')).sort().reverse()?.[0];
-    if (!previousSnapshot) {
+    // Get latest migration snapshot
+    const latestSnapshot = fs.readdirSync(dir).filter((file)=>file.endsWith('.json')).sort().reverse()?.[0];
+    if (!latestSnapshot) {
         throw new Error(`No previous migration schema file found! A prior migration from v2 is required to migrate to v3.`);
     }
-    const drizzleJsonBefore = JSON.parse(fs.readFileSync(`${dir}/${previousSnapshot}`, 'utf8'));
+    const drizzleJsonBefore = JSON.parse(fs.readFileSync(`${dir}/${latestSnapshot}`, 'utf8'));
     const generatedSQL = await generateMigration(drizzleJsonBefore, drizzleJsonAfter);
     if (!generatedSQL.length) {
         payload.logger.info(`No schema changes needed.`);
package/dist/predefinedMigrations/v2-v3/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../../../src/predefinedMigrations/v2-v3/index.ts"],"sourcesContent":["import type { DrizzleSnapshotJSON } from 'drizzle-kit/payload'\nimport type { Payload } from 'payload'\nimport type { PayloadRequestWithData } from 'payload/types'\n\nimport { sql } from 'drizzle-orm'\nimport fs from 'fs'\nimport { createRequire } from 'module'\nimport { buildVersionCollectionFields, buildVersionGlobalFields } from 'payload/versions'\nimport toSnakeCase from 'to-snake-case'\n\nimport type { PostgresAdapter } from '../../types.js'\nimport type { PathsToQuery } from './types.js'\n\nimport { groupUpSQLStatements } from './groupUpSQLStatements.js'\nimport { migrateRelationships } from './migrateRelationships.js'\nimport { traverseFields } from './traverseFields.js'\n\nconst require = createRequire(import.meta.url)\n\ntype Args = {\n debug?: boolean\n payload: Payload\n req?: Partial<PayloadRequestWithData>\n}\n\n/**\n * Moves upload and relationship columns from the join table and into the tables while moving data\n * This is done in the following order:\n * ADD COLUMNs\n * -- manipulate data to move relationships to new columns\n * ADD CONSTRAINTs\n * NOT NULLs\n * DROP TABLEs\n * DROP CONSTRAINTs\n * DROP COLUMNs\n * @param debug\n * @param payload\n * @param req\n */\nexport const migratePostgresV2toV3 = async ({ debug, payload, req }: Args) => {\n const adapter = payload.db as PostgresAdapter\n const db = adapter.sessions[req.transactionID]?.db\n const dir = payload.db.migrationDir\n\n // get the drizzle migrateUpSQL from drizzle using the last schema\n const { generateDrizzleJson, generateMigration } = require('drizzle-kit/payload')\n const drizzleJsonAfter = generateDrizzleJson(adapter.schema)\n\n // Get the previous migration snapshot\n const previousSnapshot = fs\n .readdirSync(dir)\n .filter((file) => file.endsWith('.json') && !file.endsWith('relationships_v2_v3.json'))\n .sort()\n .reverse()?.[0]\n\n if (!previousSnapshot) {\n throw new Error(\n `No previous migration schema file found! 
A prior migration from v2 is required to migrate to v3.`,\n )\n }\n\n const drizzleJsonBefore = JSON.parse(\n fs.readFileSync(`${dir}/${previousSnapshot}`, 'utf8'),\n ) as DrizzleSnapshotJSON\n\n const generatedSQL = await generateMigration(drizzleJsonBefore, drizzleJsonAfter)\n\n if (!generatedSQL.length) {\n payload.logger.info(`No schema changes needed.`)\n process.exit(0)\n }\n\n const sqlUpStatements = groupUpSQLStatements(generatedSQL)\n\n const addColumnsStatement = sqlUpStatements.addColumn.join('\\n')\n\n if (debug) {\n payload.logger.info('CREATING NEW RELATIONSHIP COLUMNS')\n payload.logger.info(addColumnsStatement)\n }\n\n await db.execute(sql.raw(addColumnsStatement))\n\n for (const collection of payload.config.collections) {\n const tableName = adapter.tableNameMap.get(toSnakeCase(collection.slug))\n const pathsToQuery: PathsToQuery = new Set()\n\n traverseFields({\n adapter,\n collectionSlug: collection.slug,\n columnPrefix: '',\n db,\n disableNotNull: false,\n fields: collection.fields,\n isVersions: false,\n newTableName: tableName,\n parentTableName: tableName,\n path: '',\n pathsToQuery,\n payload,\n rootTableName: tableName,\n })\n\n await migrateRelationships({\n adapter,\n collectionSlug: collection.slug,\n db,\n debug,\n fields: collection.fields,\n isVersions: false,\n pathsToQuery,\n payload,\n req,\n tableName,\n })\n\n if (collection.versions) {\n const versionsTableName = adapter.tableNameMap.get(\n `_${toSnakeCase(collection.slug)}${adapter.versionsSuffix}`,\n )\n const versionFields = buildVersionCollectionFields(collection)\n const versionPathsToQuery: PathsToQuery = new Set()\n\n traverseFields({\n adapter,\n collectionSlug: collection.slug,\n columnPrefix: '',\n db,\n disableNotNull: true,\n fields: versionFields,\n isVersions: true,\n newTableName: versionsTableName,\n parentTableName: versionsTableName,\n path: '',\n pathsToQuery: versionPathsToQuery,\n payload,\n rootTableName: versionsTableName,\n })\n\n await migrateRelationships({\n adapter,\n collectionSlug: collection.slug,\n db,\n debug,\n fields: versionFields,\n isVersions: true,\n pathsToQuery: versionPathsToQuery,\n payload,\n req,\n tableName: versionsTableName,\n })\n }\n }\n\n for (const global of payload.config.globals) {\n const tableName = adapter.tableNameMap.get(toSnakeCase(global.slug))\n\n const pathsToQuery: PathsToQuery = new Set()\n\n traverseFields({\n adapter,\n columnPrefix: '',\n db,\n disableNotNull: false,\n fields: global.fields,\n globalSlug: global.slug,\n isVersions: false,\n newTableName: tableName,\n parentTableName: tableName,\n path: '',\n pathsToQuery,\n payload,\n rootTableName: tableName,\n })\n\n await migrateRelationships({\n adapter,\n db,\n debug,\n fields: global.fields,\n globalSlug: global.slug,\n isVersions: false,\n pathsToQuery,\n payload,\n req,\n tableName,\n })\n\n if (global.versions) {\n const versionsTableName = adapter.tableNameMap.get(\n `_${toSnakeCase(global.slug)}${adapter.versionsSuffix}`,\n )\n\n const versionFields = buildVersionGlobalFields(global)\n\n const versionPathsToQuery: PathsToQuery = new Set()\n\n traverseFields({\n adapter,\n columnPrefix: '',\n db,\n disableNotNull: true,\n fields: versionFields,\n globalSlug: global.slug,\n isVersions: true,\n newTableName: versionsTableName,\n parentTableName: versionsTableName,\n path: '',\n pathsToQuery: versionPathsToQuery,\n payload,\n rootTableName: versionsTableName,\n })\n\n await migrateRelationships({\n adapter,\n db,\n debug,\n fields: versionFields,\n globalSlug: global.slug,\n 
isVersions: true,\n pathsToQuery: versionPathsToQuery,\n payload,\n req,\n tableName: versionsTableName,\n })\n }\n }\n\n // ADD CONSTRAINT\n const addConstraintsStatement = sqlUpStatements.addConstraint.join('\\n')\n\n if (debug) {\n payload.logger.info('ADDING CONSTRAINTS')\n payload.logger.info(addConstraintsStatement)\n }\n\n await db.execute(sql.raw(addConstraintsStatement))\n\n // NOT NULL\n const notNullStatements = sqlUpStatements.notNull.join('\\n')\n\n if (debug) {\n payload.logger.info('NOT NULL CONSTRAINTS')\n payload.logger.info(notNullStatements)\n }\n\n await db.execute(sql.raw(notNullStatements))\n\n // DROP TABLE\n const dropTablesStatement = sqlUpStatements.dropTable.join('\\n')\n\n if (debug) {\n payload.logger.info('DROPPING TABLES')\n payload.logger.info(dropTablesStatement)\n }\n\n await db.execute(sql.raw(dropTablesStatement))\n\n // DROP CONSTRAINT\n const dropConstraintsStatement = sqlUpStatements.dropConstraint.join('\\n')\n\n if (debug) {\n payload.logger.info('DROPPING CONSTRAINTS')\n payload.logger.info(dropConstraintsStatement)\n }\n\n await db.execute(sql.raw(dropConstraintsStatement))\n\n // DROP COLUMN\n const dropColumnsStatement = sqlUpStatements.dropColumn.join('\\n')\n\n if (debug) {\n payload.logger.info('DROPPING COLUMNS')\n payload.logger.info(dropColumnsStatement)\n }\n\n await db.execute(sql.raw(dropColumnsStatement))\n}\n"],"names":["sql","fs","createRequire","buildVersionCollectionFields","buildVersionGlobalFields","toSnakeCase","groupUpSQLStatements","migrateRelationships","traverseFields","require","url","migratePostgresV2toV3","debug","payload","req","adapter","db","sessions","transactionID","dir","migrationDir","generateDrizzleJson","generateMigration","drizzleJsonAfter","schema","previousSnapshot","readdirSync","filter","file","endsWith","sort","reverse","Error","drizzleJsonBefore","JSON","parse","readFileSync","generatedSQL","length","logger","info","process","exit","sqlUpStatements","addColumnsStatement","addColumn","join","execute","raw","collection","config","collections","tableName","tableNameMap","get","slug","pathsToQuery","Set","collectionSlug","columnPrefix","disableNotNull","fields","isVersions","newTableName","parentTableName","path","rootTableName","versions","versionsTableName","versionsSuffix","versionFields","versionPathsToQuery","global","globals","globalSlug","addConstraintsStatement","addConstraint","notNullStatements","notNull","dropTablesStatement","dropTable","dropConstraintsStatement","dropConstraint","dropColumnsStatement","dropColumn"],"rangeMappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;","mappings":"AAIA,SAASA,GAAG,QAAQ,cAAa;AACjC,OAAOC,QAAQ,KAAI;AACnB,SAASC,aAAa,QAAQ,SAAQ;AACtC,SAASC,4BAA4B,EAAEC,wBAAwB,QAAQ,mBAAkB;AACzF,OAAOC,iBAAiB,gBAAe;AAKvC,SAASC,oBAAoB,QAAQ,4BAA2B;AAChE,SAASC,oBAAoB,QAAQ,4BAA2B;AAChE,SAASC,cAAc,QAAQ,sBAAqB;AAEpD,MAAMC,UAAUP,cAAc,YAAYQ,GAAG;AAQ7C;;;;;;;;;;;;;CAaC,GACD,OAAO,MAAMC,wBAAwB,OAAO,EAAEC,KAAK,EAAEC,OAAO,EAAEC,GAAG,EAAQ;IACvE,MAAMC,UAAUF,QAAQG,EAAE;IAC1B,MAAMA,KAAKD,QAAQE,QAAQ,CAACH,IAAII,aAAa,CAAC,EAAEF;IAChD,MAAMG,MAAMN,QAAQG,EAAE,CAACI,YAAY;IAEnC,kEAAkE;IAClE,MAAM,EAAEC,mBAAmB,EAAEC,iBAAiB,EAAE,GAAGb,QAAQ;IAC3D,MAAMc,mBAAmBF,oBAAoBN,QAAQS,MAAM;IAE3D,sCAAsC;IACtC,MAAMC,mBAAmBxB,GACtByB,WAAW,CAACP,KACZQ,MAAM,CAAC,CAACC,OAASA,KAAKC,QAAQ,CAAC,YAAY,CAACD,KAAKC,QAAQ,CAAC,6BAC1DC,IAAI,GACJC,OAAO,IAAI,CAAC,EAAE;IAEjB,IAAI,
CAACN,kBAAkB;QACrB,MAAM,IAAIO,MACR,CAAC,gGAAgG,CAAC;IAEtG;IAEA,MAAMC,oBAAoBC,KAAKC,KAAK,CAClClC,GAAGmC,YAAY,CAAC,CAAC,EAAEjB,IAAI,CAAC,EAAEM,iBAAiB,CAAC,EAAE;IAGhD,MAAMY,eAAe,MAAMf,kBAAkBW,mBAAmBV;IAEhE,IAAI,CAACc,aAAaC,MAAM,EAAE;QACxBzB,QAAQ0B,MAAM,CAACC,IAAI,CAAC,CAAC,yBAAyB,CAAC;QAC/CC,QAAQC,IAAI,CAAC;IACf;IAEA,MAAMC,kBAAkBrC,qBAAqB+B;IAE7C,MAAMO,sBAAsBD,gBAAgBE,SAAS,CAACC,IAAI,CAAC;IAE3D,IAAIlC,OAAO;QACTC,QAAQ0B,MAAM,CAACC,IAAI,CAAC;QACpB3B,QAAQ0B,MAAM,CAACC,IAAI,CAACI;IACtB;IAEA,MAAM5B,GAAG+B,OAAO,CAAC/C,IAAIgD,GAAG,CAACJ;IAEzB,KAAK,MAAMK,cAAcpC,QAAQqC,MAAM,CAACC,WAAW,CAAE;QACnD,MAAMC,YAAYrC,QAAQsC,YAAY,CAACC,GAAG,CAACjD,YAAY4C,WAAWM,IAAI;QACtE,MAAMC,eAA6B,IAAIC;QAEvCjD,eAAe;YACbO;YACA2C,gBAAgBT,WAAWM,IAAI;YAC/BI,cAAc;YACd3C;YACA4C,gBAAgB;YAChBC,QAAQZ,WAAWY,MAAM;YACzBC,YAAY;YACZC,cAAcX;YACdY,iBAAiBZ;YACjBa,MAAM;YACNT;YACA3C;YACAqD,eAAed;QACjB;QAEA,MAAM7C,qBAAqB;YACzBQ;YACA2C,gBAAgBT,WAAWM,IAAI;YAC/BvC;YACAJ;YACAiD,QAAQZ,WAAWY,MAAM;YACzBC,YAAY;YACZN;YACA3C;YACAC;YACAsC;QACF;QAEA,IAAIH,WAAWkB,QAAQ,EAAE;YACvB,MAAMC,oBAAoBrD,QAAQsC,YAAY,CAACC,GAAG,CAChD,CAAC,CAAC,EAAEjD,YAAY4C,WAAWM,IAAI,EAAE,EAAExC,QAAQsD,cAAc,CAAC,CAAC;YAE7D,MAAMC,gBAAgBnE,6BAA6B8C;YACnD,MAAMsB,sBAAoC,IAAId;YAE9CjD,eAAe;gBACbO;gBACA2C,gBAAgBT,WAAWM,IAAI;gBAC/BI,cAAc;gBACd3C;gBACA4C,gBAAgB;gBAChBC,QAAQS;gBACRR,YAAY;gBACZC,cAAcK;gBACdJ,iBAAiBI;gBACjBH,MAAM;gBACNT,cAAce;gBACd1D;gBACAqD,eAAeE;YACjB;YAEA,MAAM7D,qBAAqB;gBACzBQ;gBACA2C,gBAAgBT,WAAWM,IAAI;gBAC/BvC;gBACAJ;gBACAiD,QAAQS;gBACRR,YAAY;gBACZN,cAAce;gBACd1D;gBACAC;gBACAsC,WAAWgB;YACb;QACF;IACF;IAEA,KAAK,MAAMI,UAAU3D,QAAQqC,MAAM,CAACuB,OAAO,CAAE;QAC3C,MAAMrB,YAAYrC,QAAQsC,YAAY,CAACC,GAAG,CAACjD,YAAYmE,OAAOjB,IAAI;QAElE,MAAMC,eAA6B,IAAIC;QAEvCjD,eAAe;YACbO;YACA4C,cAAc;YACd3C;YACA4C,gBAAgB;YAChBC,QAAQW,OAAOX,MAAM;YACrBa,YAAYF,OAAOjB,IAAI;YACvBO,YAAY;YACZC,cAAcX;YACdY,iBAAiBZ;YACjBa,MAAM;YACNT;YACA3C;YACAqD,eAAed;QACjB;QAEA,MAAM7C,qBAAqB;YACzBQ;YACAC;YACAJ;YACAiD,QAAQW,OAAOX,MAAM;YACrBa,YAAYF,OAAOjB,IAAI;YACvBO,YAAY;YACZN;YACA3C;YACAC;YACAsC;QACF;QAEA,IAAIoB,OAAOL,QAAQ,EAAE;YACnB,MAAMC,oBAAoBrD,QAAQsC,YAAY,CAACC,GAAG,CAChD,CAAC,CAAC,EAAEjD,YAAYmE,OAAOjB,IAAI,EAAE,EAAExC,QAAQsD,cAAc,CAAC,CAAC;YAGzD,MAAMC,gBAAgBlE,yBAAyBoE;YAE/C,MAAMD,sBAAoC,IAAId;YAE9CjD,eAAe;gBACbO;gBACA4C,cAAc;gBACd3C;gBACA4C,gBAAgB;gBAChBC,QAAQS;gBACRI,YAAYF,OAAOjB,IAAI;gBACvBO,YAAY;gBACZC,cAAcK;gBACdJ,iBAAiBI;gBACjBH,MAAM;gBACNT,cAAce;gBACd1D;gBACAqD,eAAeE;YACjB;YAEA,MAAM7D,qBAAqB;gBACzBQ;gBACAC;gBACAJ;gBACAiD,QAAQS;gBACRI,YAAYF,OAAOjB,IAAI;gBACvBO,YAAY;gBACZN,cAAce;gBACd1D;gBACAC;gBACAsC,WAAWgB;YACb;QACF;IACF;IAEA,iBAAiB;IACjB,MAAMO,0BAA0BhC,gBAAgBiC,aAAa,CAAC9B,IAAI,CAAC;IAEnE,IAAIlC,OAAO;QACTC,QAAQ0B,MAAM,CAACC,IAAI,CAAC;QACpB3B,QAAQ0B,MAAM,CAACC,IAAI,CAACmC;IACtB;IAEA,MAAM3D,GAAG+B,OAAO,CAAC/C,IAAIgD,GAAG,CAAC2B;IAEzB,WAAW;IACX,MAAME,oBAAoBlC,gBAAgBmC,OAAO,CAAChC,IAAI,CAAC;IAEvD,IAAIlC,OAAO;QACTC,QAAQ0B,MAAM,CAACC,IAAI,CAAC;QACpB3B,QAAQ0B,MAAM,CAACC,IAAI,CAACqC;IACtB;IAEA,MAAM7D,GAAG+B,OAAO,CAAC/C,IAAIgD,GAAG,CAAC6B;IAEzB,aAAa;IACb,MAAME,sBAAsBpC,gBAAgBqC,SAAS,CAAClC,IAAI,CAAC;IAE3D,IAAIlC,OAAO;QACTC,QAAQ0B,MAAM,CAACC,IAAI,CAAC;QACpB3B,QAAQ0B,MAAM,CAACC,IAAI,CAACuC;IACtB;IAEA,MAAM/D,GAAG+B,OAAO,CAAC/C,IAAIgD,GAAG,CAAC+B;IAEzB,kBAAkB;IAClB,MAAME,2BAA2BtC,gBAAgBuC,cAAc,CAACpC,IAAI,CAAC;IAErE,IAAIlC,OAAO;QACTC,QAAQ0B,MAAM,CAACC,IAAI,CAAC;QACpB3B,QAAQ0B,MAAM,CAACC,IAAI,CAACyC;IACtB;IAEA,MAAMjE,GAAG+B,OAAO,CAAC/C,IAAIgD,GAAG,CAACiC;IAEzB,cAAc;IACd,MAAME,uBAAuBxC,gBAAgByC,UAAU,CAACtC,IAAI,CAAC;IAE7D,IAAIlC,OAAO;QACTC,QAAQ0B,MAAM,CAACC,IAAI,CAAC;QACpB3B,QAAQ0B,MAAM,CAACC,IAAI,CAAC2C;IACtB;IAEA,MAAMnE,GAAG+
B,OAAO,CAAC/C,IAAIgD,GAAG,CAACmC;AAC3B,EAAC"}
+
{"version":3,"sources":["../../../src/predefinedMigrations/v2-v3/index.ts"],"sourcesContent":["import type { DrizzleSnapshotJSON } from 'drizzle-kit/payload'\nimport type { Payload } from 'payload'\nimport type { PayloadRequestWithData } from 'payload/types'\n\nimport { sql } from 'drizzle-orm'\nimport fs from 'fs'\nimport { createRequire } from 'module'\nimport { buildVersionCollectionFields, buildVersionGlobalFields } from 'payload/versions'\nimport toSnakeCase from 'to-snake-case'\n\nimport type { PostgresAdapter } from '../../types.js'\nimport type { PathsToQuery } from './types.js'\n\nimport { groupUpSQLStatements } from './groupUpSQLStatements.js'\nimport { migrateRelationships } from './migrateRelationships.js'\nimport { traverseFields } from './traverseFields.js'\n\nconst require = createRequire(import.meta.url)\n\ntype Args = {\n debug?: boolean\n payload: Payload\n req: PayloadRequestWithData\n}\n\n/**\n * Moves upload and relationship columns from the join table and into the tables while moving data\n * This is done in the following order:\n * ADD COLUMNs\n * -- manipulate data to move relationships to new columns\n * ADD CONSTRAINTs\n * NOT NULLs\n * DROP TABLEs\n * DROP CONSTRAINTs\n * DROP COLUMNs\n * @param debug\n * @param payload\n * @param req\n */\nexport const migratePostgresV2toV3 = async ({ debug, payload, req }: Args) => {\n const adapter = payload.db as PostgresAdapter\n const db = adapter.sessions[req.transactionID]?.db\n const dir = payload.db.migrationDir\n\n // get the drizzle migrateUpSQL from drizzle using the last schema\n const { generateDrizzleJson, generateMigration } = require('drizzle-kit/payload')\n const drizzleJsonAfter = generateDrizzleJson(adapter.schema)\n\n // Get latest migration snapshot\n const latestSnapshot = fs\n .readdirSync(dir)\n .filter((file) => file.endsWith('.json'))\n .sort()\n .reverse()?.[0]\n\n if (!latestSnapshot) {\n throw new Error(\n `No previous migration schema file found! 
A prior migration from v2 is required to migrate to v3.`,\n )\n }\n\n const drizzleJsonBefore = JSON.parse(\n fs.readFileSync(`${dir}/${latestSnapshot}`, 'utf8'),\n ) as DrizzleSnapshotJSON\n\n const generatedSQL = await generateMigration(drizzleJsonBefore, drizzleJsonAfter)\n\n if (!generatedSQL.length) {\n payload.logger.info(`No schema changes needed.`)\n process.exit(0)\n }\n\n const sqlUpStatements = groupUpSQLStatements(generatedSQL)\n\n const addColumnsStatement = sqlUpStatements.addColumn.join('\\n')\n\n if (debug) {\n payload.logger.info('CREATING NEW RELATIONSHIP COLUMNS')\n payload.logger.info(addColumnsStatement)\n }\n\n await db.execute(sql.raw(addColumnsStatement))\n\n for (const collection of payload.config.collections) {\n const tableName = adapter.tableNameMap.get(toSnakeCase(collection.slug))\n const pathsToQuery: PathsToQuery = new Set()\n\n traverseFields({\n adapter,\n collectionSlug: collection.slug,\n columnPrefix: '',\n db,\n disableNotNull: false,\n fields: collection.fields,\n isVersions: false,\n newTableName: tableName,\n parentTableName: tableName,\n path: '',\n pathsToQuery,\n payload,\n rootTableName: tableName,\n })\n\n await migrateRelationships({\n adapter,\n collectionSlug: collection.slug,\n db,\n debug,\n fields: collection.fields,\n isVersions: false,\n pathsToQuery,\n payload,\n req,\n tableName,\n })\n\n if (collection.versions) {\n const versionsTableName = adapter.tableNameMap.get(\n `_${toSnakeCase(collection.slug)}${adapter.versionsSuffix}`,\n )\n const versionFields = buildVersionCollectionFields(collection)\n const versionPathsToQuery: PathsToQuery = new Set()\n\n traverseFields({\n adapter,\n collectionSlug: collection.slug,\n columnPrefix: '',\n db,\n disableNotNull: true,\n fields: versionFields,\n isVersions: true,\n newTableName: versionsTableName,\n parentTableName: versionsTableName,\n path: '',\n pathsToQuery: versionPathsToQuery,\n payload,\n rootTableName: versionsTableName,\n })\n\n await migrateRelationships({\n adapter,\n collectionSlug: collection.slug,\n db,\n debug,\n fields: versionFields,\n isVersions: true,\n pathsToQuery: versionPathsToQuery,\n payload,\n req,\n tableName: versionsTableName,\n })\n }\n }\n\n for (const global of payload.config.globals) {\n const tableName = adapter.tableNameMap.get(toSnakeCase(global.slug))\n\n const pathsToQuery: PathsToQuery = new Set()\n\n traverseFields({\n adapter,\n columnPrefix: '',\n db,\n disableNotNull: false,\n fields: global.fields,\n globalSlug: global.slug,\n isVersions: false,\n newTableName: tableName,\n parentTableName: tableName,\n path: '',\n pathsToQuery,\n payload,\n rootTableName: tableName,\n })\n\n await migrateRelationships({\n adapter,\n db,\n debug,\n fields: global.fields,\n globalSlug: global.slug,\n isVersions: false,\n pathsToQuery,\n payload,\n req,\n tableName,\n })\n\n if (global.versions) {\n const versionsTableName = adapter.tableNameMap.get(\n `_${toSnakeCase(global.slug)}${adapter.versionsSuffix}`,\n )\n\n const versionFields = buildVersionGlobalFields(global)\n\n const versionPathsToQuery: PathsToQuery = new Set()\n\n traverseFields({\n adapter,\n columnPrefix: '',\n db,\n disableNotNull: true,\n fields: versionFields,\n globalSlug: global.slug,\n isVersions: true,\n newTableName: versionsTableName,\n parentTableName: versionsTableName,\n path: '',\n pathsToQuery: versionPathsToQuery,\n payload,\n rootTableName: versionsTableName,\n })\n\n await migrateRelationships({\n adapter,\n db,\n debug,\n fields: versionFields,\n globalSlug: global.slug,\n 
isVersions: true,\n pathsToQuery: versionPathsToQuery,\n payload,\n req,\n tableName: versionsTableName,\n })\n }\n }\n\n // ADD CONSTRAINT\n const addConstraintsStatement = sqlUpStatements.addConstraint.join('\\n')\n\n if (debug) {\n payload.logger.info('ADDING CONSTRAINTS')\n payload.logger.info(addConstraintsStatement)\n }\n\n await db.execute(sql.raw(addConstraintsStatement))\n\n // NOT NULL\n const notNullStatements = sqlUpStatements.notNull.join('\\n')\n\n if (debug) {\n payload.logger.info('NOT NULL CONSTRAINTS')\n payload.logger.info(notNullStatements)\n }\n\n await db.execute(sql.raw(notNullStatements))\n\n // DROP TABLE\n const dropTablesStatement = sqlUpStatements.dropTable.join('\\n')\n\n if (debug) {\n payload.logger.info('DROPPING TABLES')\n payload.logger.info(dropTablesStatement)\n }\n\n await db.execute(sql.raw(dropTablesStatement))\n\n // DROP CONSTRAINT\n const dropConstraintsStatement = sqlUpStatements.dropConstraint.join('\\n')\n\n if (debug) {\n payload.logger.info('DROPPING CONSTRAINTS')\n payload.logger.info(dropConstraintsStatement)\n }\n\n await db.execute(sql.raw(dropConstraintsStatement))\n\n // DROP COLUMN\n const dropColumnsStatement = sqlUpStatements.dropColumn.join('\\n')\n\n if (debug) {\n payload.logger.info('DROPPING COLUMNS')\n payload.logger.info(dropColumnsStatement)\n }\n\n await db.execute(sql.raw(dropColumnsStatement))\n}\n"],"names":["sql","fs","createRequire","buildVersionCollectionFields","buildVersionGlobalFields","toSnakeCase","groupUpSQLStatements","migrateRelationships","traverseFields","require","url","migratePostgresV2toV3","debug","payload","req","adapter","db","sessions","transactionID","dir","migrationDir","generateDrizzleJson","generateMigration","drizzleJsonAfter","schema","latestSnapshot","readdirSync","filter","file","endsWith","sort","reverse","Error","drizzleJsonBefore","JSON","parse","readFileSync","generatedSQL","length","logger","info","process","exit","sqlUpStatements","addColumnsStatement","addColumn","join","execute","raw","collection","config","collections","tableName","tableNameMap","get","slug","pathsToQuery","Set","collectionSlug","columnPrefix","disableNotNull","fields","isVersions","newTableName","parentTableName","path","rootTableName","versions","versionsTableName","versionsSuffix","versionFields","versionPathsToQuery","global","globals","globalSlug","addConstraintsStatement","addConstraint","notNullStatements","notNull","dropTablesStatement","dropTable","dropConstraintsStatement","dropConstraint","dropColumnsStatement","dropColumn"],"rangeMappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;","mappings":"AAIA,SAASA,GAAG,QAAQ,cAAa;AACjC,OAAOC,QAAQ,KAAI;AACnB,SAASC,aAAa,QAAQ,SAAQ;AACtC,SAASC,4BAA4B,EAAEC,wBAAwB,QAAQ,mBAAkB;AACzF,OAAOC,iBAAiB,gBAAe;AAKvC,SAASC,oBAAoB,QAAQ,4BAA2B;AAChE,SAASC,oBAAoB,QAAQ,4BAA2B;AAChE,SAASC,cAAc,QAAQ,sBAAqB;AAEpD,MAAMC,UAAUP,cAAc,YAAYQ,GAAG;AAQ7C;;;;;;;;;;;;;CAaC,GACD,OAAO,MAAMC,wBAAwB,OAAO,EAAEC,KAAK,EAAEC,OAAO,EAAEC,GAAG,EAAQ;IACvE,MAAMC,UAAUF,QAAQG,EAAE;IAC1B,MAAMA,KAAKD,QAAQE,QAAQ,CAACH,IAAII,aAAa,CAAC,EAAEF;IAChD,MAAMG,MAAMN,QAAQG,EAAE,CAACI,YAAY;IAEnC,kEAAkE;IAClE,MAAM,EAAEC,mBAAmB,EAAEC,iBAAiB,EAAE,GAAGb,QAAQ;IAC3D,MAAMc,mBAAmBF,oBAAoBN,QAAQS,MAAM;IAE3D,gCAAgC;IAChC,MAAMC,iBAAiBxB,GACpByB,WAAW,CAACP,KACZQ,MAAM,CAAC,CAACC,OAASA,KAAKC,QAAQ,CAAC,UAC/BC,IAAI,GACJC,OAAO,IAAI,CAAC,EAAE;IAEjB,IAAI,CAACN,gBAAgB;QACnB,MAAM,IAAIO,
MACR,CAAC,gGAAgG,CAAC;IAEtG;IAEA,MAAMC,oBAAoBC,KAAKC,KAAK,CAClClC,GAAGmC,YAAY,CAAC,CAAC,EAAEjB,IAAI,CAAC,EAAEM,eAAe,CAAC,EAAE;IAG9C,MAAMY,eAAe,MAAMf,kBAAkBW,mBAAmBV;IAEhE,IAAI,CAACc,aAAaC,MAAM,EAAE;QACxBzB,QAAQ0B,MAAM,CAACC,IAAI,CAAC,CAAC,yBAAyB,CAAC;QAC/CC,QAAQC,IAAI,CAAC;IACf;IAEA,MAAMC,kBAAkBrC,qBAAqB+B;IAE7C,MAAMO,sBAAsBD,gBAAgBE,SAAS,CAACC,IAAI,CAAC;IAE3D,IAAIlC,OAAO;QACTC,QAAQ0B,MAAM,CAACC,IAAI,CAAC;QACpB3B,QAAQ0B,MAAM,CAACC,IAAI,CAACI;IACtB;IAEA,MAAM5B,GAAG+B,OAAO,CAAC/C,IAAIgD,GAAG,CAACJ;IAEzB,KAAK,MAAMK,cAAcpC,QAAQqC,MAAM,CAACC,WAAW,CAAE;QACnD,MAAMC,YAAYrC,QAAQsC,YAAY,CAACC,GAAG,CAACjD,YAAY4C,WAAWM,IAAI;QACtE,MAAMC,eAA6B,IAAIC;QAEvCjD,eAAe;YACbO;YACA2C,gBAAgBT,WAAWM,IAAI;YAC/BI,cAAc;YACd3C;YACA4C,gBAAgB;YAChBC,QAAQZ,WAAWY,MAAM;YACzBC,YAAY;YACZC,cAAcX;YACdY,iBAAiBZ;YACjBa,MAAM;YACNT;YACA3C;YACAqD,eAAed;QACjB;QAEA,MAAM7C,qBAAqB;YACzBQ;YACA2C,gBAAgBT,WAAWM,IAAI;YAC/BvC;YACAJ;YACAiD,QAAQZ,WAAWY,MAAM;YACzBC,YAAY;YACZN;YACA3C;YACAC;YACAsC;QACF;QAEA,IAAIH,WAAWkB,QAAQ,EAAE;YACvB,MAAMC,oBAAoBrD,QAAQsC,YAAY,CAACC,GAAG,CAChD,CAAC,CAAC,EAAEjD,YAAY4C,WAAWM,IAAI,EAAE,EAAExC,QAAQsD,cAAc,CAAC,CAAC;YAE7D,MAAMC,gBAAgBnE,6BAA6B8C;YACnD,MAAMsB,sBAAoC,IAAId;YAE9CjD,eAAe;gBACbO;gBACA2C,gBAAgBT,WAAWM,IAAI;gBAC/BI,cAAc;gBACd3C;gBACA4C,gBAAgB;gBAChBC,QAAQS;gBACRR,YAAY;gBACZC,cAAcK;gBACdJ,iBAAiBI;gBACjBH,MAAM;gBACNT,cAAce;gBACd1D;gBACAqD,eAAeE;YACjB;YAEA,MAAM7D,qBAAqB;gBACzBQ;gBACA2C,gBAAgBT,WAAWM,IAAI;gBAC/BvC;gBACAJ;gBACAiD,QAAQS;gBACRR,YAAY;gBACZN,cAAce;gBACd1D;gBACAC;gBACAsC,WAAWgB;YACb;QACF;IACF;IAEA,KAAK,MAAMI,UAAU3D,QAAQqC,MAAM,CAACuB,OAAO,CAAE;QAC3C,MAAMrB,YAAYrC,QAAQsC,YAAY,CAACC,GAAG,CAACjD,YAAYmE,OAAOjB,IAAI;QAElE,MAAMC,eAA6B,IAAIC;QAEvCjD,eAAe;YACbO;YACA4C,cAAc;YACd3C;YACA4C,gBAAgB;YAChBC,QAAQW,OAAOX,MAAM;YACrBa,YAAYF,OAAOjB,IAAI;YACvBO,YAAY;YACZC,cAAcX;YACdY,iBAAiBZ;YACjBa,MAAM;YACNT;YACA3C;YACAqD,eAAed;QACjB;QAEA,MAAM7C,qBAAqB;YACzBQ;YACAC;YACAJ;YACAiD,QAAQW,OAAOX,MAAM;YACrBa,YAAYF,OAAOjB,IAAI;YACvBO,YAAY;YACZN;YACA3C;YACAC;YACAsC;QACF;QAEA,IAAIoB,OAAOL,QAAQ,EAAE;YACnB,MAAMC,oBAAoBrD,QAAQsC,YAAY,CAACC,GAAG,CAChD,CAAC,CAAC,EAAEjD,YAAYmE,OAAOjB,IAAI,EAAE,EAAExC,QAAQsD,cAAc,CAAC,CAAC;YAGzD,MAAMC,gBAAgBlE,yBAAyBoE;YAE/C,MAAMD,sBAAoC,IAAId;YAE9CjD,eAAe;gBACbO;gBACA4C,cAAc;gBACd3C;gBACA4C,gBAAgB;gBAChBC,QAAQS;gBACRI,YAAYF,OAAOjB,IAAI;gBACvBO,YAAY;gBACZC,cAAcK;gBACdJ,iBAAiBI;gBACjBH,MAAM;gBACNT,cAAce;gBACd1D;gBACAqD,eAAeE;YACjB;YAEA,MAAM7D,qBAAqB;gBACzBQ;gBACAC;gBACAJ;gBACAiD,QAAQS;gBACRI,YAAYF,OAAOjB,IAAI;gBACvBO,YAAY;gBACZN,cAAce;gBACd1D;gBACAC;gBACAsC,WAAWgB;YACb;QACF;IACF;IAEA,iBAAiB;IACjB,MAAMO,0BAA0BhC,gBAAgBiC,aAAa,CAAC9B,IAAI,CAAC;IAEnE,IAAIlC,OAAO;QACTC,QAAQ0B,MAAM,CAACC,IAAI,CAAC;QACpB3B,QAAQ0B,MAAM,CAACC,IAAI,CAACmC;IACtB;IAEA,MAAM3D,GAAG+B,OAAO,CAAC/C,IAAIgD,GAAG,CAAC2B;IAEzB,WAAW;IACX,MAAME,oBAAoBlC,gBAAgBmC,OAAO,CAAChC,IAAI,CAAC;IAEvD,IAAIlC,OAAO;QACTC,QAAQ0B,MAAM,CAACC,IAAI,CAAC;QACpB3B,QAAQ0B,MAAM,CAACC,IAAI,CAACqC;IACtB;IAEA,MAAM7D,GAAG+B,OAAO,CAAC/C,IAAIgD,GAAG,CAAC6B;IAEzB,aAAa;IACb,MAAME,sBAAsBpC,gBAAgBqC,SAAS,CAAClC,IAAI,CAAC;IAE3D,IAAIlC,OAAO;QACTC,QAAQ0B,MAAM,CAACC,IAAI,CAAC;QACpB3B,QAAQ0B,MAAM,CAACC,IAAI,CAACuC;IACtB;IAEA,MAAM/D,GAAG+B,OAAO,CAAC/C,IAAIgD,GAAG,CAAC+B;IAEzB,kBAAkB;IAClB,MAAME,2BAA2BtC,gBAAgBuC,cAAc,CAACpC,IAAI,CAAC;IAErE,IAAIlC,OAAO;QACTC,QAAQ0B,MAAM,CAACC,IAAI,CAAC;QACpB3B,QAAQ0B,MAAM,CAACC,IAAI,CAACyC;IACtB;IAEA,MAAMjE,GAAG+B,OAAO,CAAC/C,IAAIgD,GAAG,CAACiC;IAEzB,cAAc;IACd,MAAME,uBAAuBxC,gBAAgByC,UAAU,CAACtC,IAAI,CAAC;IAE7D,IAAIlC,OAAO;QACTC,QAAQ0B,MAAM,CAACC,IAAI,CAAC;QACpB3B,QAAQ0B,MAAM,CAACC,IAAI,CAAC2C;IACtB;IAEA,MAAMnE,GAAG+B,OAAO,CAAC/C,IAAIgD,GAAG,CAACmC
;AAC3B,EAAC"}
package/dist/predefinedMigrations/v2-v3/migrateRelationships.d.ts
CHANGED
@@ -11,7 +11,7 @@ type Args = {
     isVersions: boolean;
     pathsToQuery: PathsToQuery;
     payload: Payload;
-    req
+    req: PayloadRequestWithData;
     tableName: string;
 };
 export declare const migrateRelationships: ({ adapter, collectionSlug, db, debug, fields, globalSlug, isVersions, pathsToQuery, payload, req, tableName, }: Args) => Promise<void>;
package/dist/predefinedMigrations/v2-v3/migrateRelationships.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"migrateRelationships.d.ts","sourceRoot":"","sources":["../../../src/predefinedMigrations/v2-v3/migrateRelationships.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,KAAK,EAAE,OAAO,EAAE,sBAAsB,EAAE,MAAM,eAAe,CAAA;AAI3E,OAAO,KAAK,EAAE,kBAAkB,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAA;AACzE,OAAO,KAAK,EAAgB,YAAY,EAAE,MAAM,YAAY,CAAA;AAI5D,KAAK,IAAI,GAAG;IACV,OAAO,EAAE,eAAe,CAAA;IACxB,cAAc,CAAC,EAAE,MAAM,CAAA;IACvB,EAAE,EAAE,kBAAkB,CAAA;IACtB,KAAK,EAAE,OAAO,CAAA;IACd,MAAM,EAAE,KAAK,EAAE,CAAA;IACf,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,UAAU,EAAE,OAAO,CAAA;IACnB,YAAY,EAAE,YAAY,CAAA;IAC1B,OAAO,EAAE,OAAO,CAAA;IAChB,GAAG,
+
{"version":3,"file":"migrateRelationships.d.ts","sourceRoot":"","sources":["../../../src/predefinedMigrations/v2-v3/migrateRelationships.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,KAAK,EAAE,OAAO,EAAE,sBAAsB,EAAE,MAAM,eAAe,CAAA;AAI3E,OAAO,KAAK,EAAE,kBAAkB,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAA;AACzE,OAAO,KAAK,EAAgB,YAAY,EAAE,MAAM,YAAY,CAAA;AAI5D,KAAK,IAAI,GAAG;IACV,OAAO,EAAE,eAAe,CAAA;IACxB,cAAc,CAAC,EAAE,MAAM,CAAA;IACvB,EAAE,EAAE,kBAAkB,CAAA;IACtB,KAAK,EAAE,OAAO,CAAA;IACd,MAAM,EAAE,KAAK,EAAE,CAAA;IACf,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,UAAU,EAAE,OAAO,CAAA;IACnB,YAAY,EAAE,YAAY,CAAA;IAC1B,OAAO,EAAE,OAAO,CAAA;IAChB,GAAG,EAAE,sBAAsB,CAAA;IAC3B,SAAS,EAAE,MAAM,CAAA;CAClB,CAAA;AAED,eAAO,MAAM,oBAAoB,mHAY9B,IAAI,kBAkEN,CAAA"}
package/dist/predefinedMigrations/v2-v3/migrateRelationships.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../../../src/predefinedMigrations/v2-v3/migrateRelationships.ts"],"sourcesContent":["import type { Field, Payload, PayloadRequestWithData } from 'payload/types'\n\nimport { sql } from 'drizzle-orm'\n\nimport type { DrizzleTransaction, PostgresAdapter } from '../../types.js'\nimport type { DocsToResave, PathsToQuery } from './types.js'\n\nimport { fetchAndResave } from './fetchAndResave/index.js'\n\ntype Args = {\n adapter: PostgresAdapter\n collectionSlug?: string\n db: DrizzleTransaction\n debug: boolean\n fields: Field[]\n globalSlug?: string\n isVersions: boolean\n pathsToQuery: PathsToQuery\n payload: Payload\n req
+
{"version":3,"sources":["../../../src/predefinedMigrations/v2-v3/migrateRelationships.ts"],"sourcesContent":["import type { Field, Payload, PayloadRequestWithData } from 'payload/types'\n\nimport { sql } from 'drizzle-orm'\n\nimport type { DrizzleTransaction, PostgresAdapter } from '../../types.js'\nimport type { DocsToResave, PathsToQuery } from './types.js'\n\nimport { fetchAndResave } from './fetchAndResave/index.js'\n\ntype Args = {\n adapter: PostgresAdapter\n collectionSlug?: string\n db: DrizzleTransaction\n debug: boolean\n fields: Field[]\n globalSlug?: string\n isVersions: boolean\n pathsToQuery: PathsToQuery\n payload: Payload\n req: PayloadRequestWithData\n tableName: string\n}\n\nexport const migrateRelationships = async ({\n adapter,\n collectionSlug,\n db,\n debug,\n fields,\n globalSlug,\n isVersions,\n pathsToQuery,\n payload,\n req,\n tableName,\n}: Args) => {\n if (pathsToQuery.size === 0) return\n\n let offset = 0\n\n let paginationResult\n\n const where = Array.from(pathsToQuery).reduce((statement, path, i) => {\n return (statement += `\n\"${tableName}${adapter.relationshipsSuffix}\".\"path\" LIKE '${path}'${pathsToQuery.size !== i + 1 ? ' OR' : ''}\n`)\n }, '')\n\n while (typeof paginationResult === 'undefined' || paginationResult.rows.length > 0) {\n const paginationStatement = `SELECT DISTINCT parent_id FROM ${tableName}${adapter.relationshipsSuffix} WHERE\n ${where} ORDER BY parent_id LIMIT 500 OFFSET ${offset * 500};\n `\n\n paginationResult = await adapter.drizzle.execute(sql.raw(`${paginationStatement}`))\n\n if (paginationResult.rows.length === 0) return\n\n offset += 1\n\n const statement = `SELECT * FROM ${tableName}${adapter.relationshipsSuffix} WHERE\n (${where}) AND parent_id IN (${paginationResult.rows.map((row) => row.parent_id).join(', ')});\n`\n if (debug) {\n payload.logger.info('FINDING ROWS TO MIGRATE')\n payload.logger.info(statement)\n }\n\n const result = await adapter.drizzle.execute(sql.raw(`${statement}`))\n\n const docsToResave: DocsToResave = {}\n\n result.rows.forEach((row) => {\n const parentID = row.parent_id\n\n if (typeof parentID === 'string' || typeof parentID === 'number') {\n if (!docsToResave[parentID]) docsToResave[parentID] = []\n docsToResave[parentID].push(row)\n }\n })\n\n await fetchAndResave({\n adapter,\n collectionSlug,\n db,\n debug,\n docsToResave,\n fields,\n globalSlug,\n isVersions,\n payload,\n req,\n tableName,\n })\n }\n\n const deleteStatement = `DELETE FROM ${tableName}${adapter.relationshipsSuffix} WHERE ${where}`\n if (debug) {\n payload.logger.info('DELETING ROWS')\n payload.logger.info(deleteStatement)\n }\n await 
db.execute(sql.raw(`${deleteStatement}`))\n}\n"],"names":["sql","fetchAndResave","migrateRelationships","adapter","collectionSlug","db","debug","fields","globalSlug","isVersions","pathsToQuery","payload","req","tableName","size","offset","paginationResult","where","Array","from","reduce","statement","path","i","relationshipsSuffix","rows","length","paginationStatement","drizzle","execute","raw","map","row","parent_id","join","logger","info","result","docsToResave","forEach","parentID","push","deleteStatement"],"rangeMappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;","mappings":"AAEA,SAASA,GAAG,QAAQ,cAAa;AAKjC,SAASC,cAAc,QAAQ,4BAA2B;AAgB1D,OAAO,MAAMC,uBAAuB,OAAO,EACzCC,OAAO,EACPC,cAAc,EACdC,EAAE,EACFC,KAAK,EACLC,MAAM,EACNC,UAAU,EACVC,UAAU,EACVC,YAAY,EACZC,OAAO,EACPC,GAAG,EACHC,SAAS,EACJ;IACL,IAAIH,aAAaI,IAAI,KAAK,GAAG;IAE7B,IAAIC,SAAS;IAEb,IAAIC;IAEJ,MAAMC,QAAQC,MAAMC,IAAI,CAACT,cAAcU,MAAM,CAAC,CAACC,WAAWC,MAAMC;QAC9D,OAAQF,aAAa,CAAC;CACzB,EAAER,UAAU,EAAEV,QAAQqB,mBAAmB,CAAC,eAAe,EAAEF,KAAK,CAAC,EAAEZ,aAAaI,IAAI,KAAKS,IAAI,IAAI,QAAQ,GAAG;AAC7G,CAAC;IACC,GAAG;IAEH,MAAO,OAAOP,qBAAqB,eAAeA,iBAAiBS,IAAI,CAACC,MAAM,GAAG,EAAG;QAClF,MAAMC,sBAAsB,CAAC,+BAA+B,EAAEd,UAAU,EAAEV,QAAQqB,mBAAmB,CAAC;IACtG,EAAEP,MAAM,qCAAqC,EAAEF,SAAS,IAAI;EAC9D,CAAC;QAECC,mBAAmB,MAAMb,QAAQyB,OAAO,CAACC,OAAO,CAAC7B,IAAI8B,GAAG,CAAC,CAAC,EAAEH,oBAAoB,CAAC;QAEjF,IAAIX,iBAAiBS,IAAI,CAACC,MAAM,KAAK,GAAG;QAExCX,UAAU;QAEV,MAAMM,YAAY,CAAC,cAAc,EAAER,UAAU,EAAEV,QAAQqB,mBAAmB,CAAC;KAC1E,EAAEP,MAAM,oBAAoB,EAAED,iBAAiBS,IAAI,CAACM,GAAG,CAAC,CAACC,MAAQA,IAAIC,SAAS,EAAEC,IAAI,CAAC,MAAM;AAChG,CAAC;QACG,IAAI5B,OAAO;YACTK,QAAQwB,MAAM,CAACC,IAAI,CAAC;YACpBzB,QAAQwB,MAAM,CAACC,IAAI,CAACf;QACtB;QAEA,MAAMgB,SAAS,MAAMlC,QAAQyB,OAAO,CAACC,OAAO,CAAC7B,IAAI8B,GAAG,CAAC,CAAC,EAAET,UAAU,CAAC;QAEnE,MAAMiB,eAA6B,CAAC;QAEpCD,OAAOZ,IAAI,CAACc,OAAO,CAAC,CAACP;YACnB,MAAMQ,WAAWR,IAAIC,SAAS;YAE9B,IAAI,OAAOO,aAAa,YAAY,OAAOA,aAAa,UAAU;gBAChE,IAAI,CAACF,YAAY,CAACE,SAAS,EAAEF,YAAY,CAACE,SAAS,GAAG,EAAE;gBACxDF,YAAY,CAACE,SAAS,CAACC,IAAI,CAACT;YAC9B;QACF;QAEA,MAAM/B,eAAe;YACnBE;YACAC;YACAC;YACAC;YACAgC;YACA/B;YACAC;YACAC;YACAE;YACAC;YACAC;QACF;IACF;IAEA,MAAM6B,kBAAkB,CAAC,YAAY,EAAE7B,UAAU,EAAEV,QAAQqB,mBAAmB,CAAC,OAAO,EAAEP,MAAM,CAAC;IAC/F,IAAIX,OAAO;QACTK,QAAQwB,MAAM,CAACC,IAAI,CAAC;QACpBzB,QAAQwB,MAAM,CAACC,IAAI,CAACM;IACtB;IACA,MAAMrC,GAAGwB,OAAO,CAAC7B,IAAI8B,GAAG,CAAC,CAAC,EAAEY,gBAAgB,CAAC;AAC/C,EAAC"}
package/dist/utilities/pushDevSchema.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"pushDevSchema.d.ts","sourceRoot":"","sources":["../../src/utilities/pushDevSchema.ts"],"names":[],"mappings":"AAKA,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,aAAa,CAAA;AAIlD;;;;;GAKG;AACH,eAAO,MAAM,aAAa,OAAc,eAAe,
+
{"version":3,"file":"pushDevSchema.d.ts","sourceRoot":"","sources":["../../src/utilities/pushDevSchema.ts"],"names":[],"mappings":"AAKA,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,aAAa,CAAA;AAIlD;;;;;GAKG;AACH,eAAO,MAAM,aAAa,OAAc,eAAe,kBAmEtD,CAAA"}
package/dist/utilities/pushDevSchema.js
CHANGED
@@ -11,7 +11,7 @@ const require = createRequire(import.meta.url);
  */ export const pushDevSchema = async (db)=>{
     const { pushSchema } = require('drizzle-kit/payload');
     // This will prompt if clarifications are needed for Drizzle to push new schema
-    const { apply, hasDataLoss, warnings } = await pushSchema(db.schema, db.drizzle);
+    const { apply, hasDataLoss, statementsToExecute, warnings } = await pushSchema(db.schema, db.drizzle);
     if (warnings.length) {
         let message = `Warnings detected during schema push: \n\n${warnings.join('\n')}\n\n`;
         if (hasDataLoss) {
package/dist/utilities/pushDevSchema.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../../src/utilities/pushDevSchema.ts"],"sourcesContent":["import { eq } from 'drizzle-orm'\nimport { numeric, timestamp, varchar } from 'drizzle-orm/pg-core'\nimport { createRequire } from 'module'\nimport prompts from 'prompts'\n\nimport type { PostgresAdapter } from '../types.js'\n\nconst require = createRequire(import.meta.url)\n\n/**\n * Pushes the development schema to the database using Drizzle.\n *\n * @param {PostgresAdapter} db - The PostgresAdapter instance connected to the database.\n * @returns {Promise<void>} - A promise that resolves once the schema push is complete.\n */\nexport const pushDevSchema = async (db: PostgresAdapter) => {\n const { pushSchema } = require('drizzle-kit/payload')\n\n // This will prompt if clarifications are needed for Drizzle to push new schema\n const { apply, hasDataLoss, warnings } = await pushSchema(db.schema
+
{"version":3,"sources":["../../src/utilities/pushDevSchema.ts"],"sourcesContent":["import { eq } from 'drizzle-orm'\nimport { numeric, timestamp, varchar } from 'drizzle-orm/pg-core'\nimport { createRequire } from 'module'\nimport prompts from 'prompts'\n\nimport type { PostgresAdapter } from '../types.js'\n\nconst require = createRequire(import.meta.url)\n\n/**\n * Pushes the development schema to the database using Drizzle.\n *\n * @param {PostgresAdapter} db - The PostgresAdapter instance connected to the database.\n * @returns {Promise<void>} - A promise that resolves once the schema push is complete.\n */\nexport const pushDevSchema = async (db: PostgresAdapter) => {\n const { pushSchema } = require('drizzle-kit/payload')\n\n // This will prompt if clarifications are needed for Drizzle to push new schema\n const { apply, hasDataLoss, statementsToExecute, warnings } = await pushSchema(\n db.schema,\n db.drizzle,\n )\n\n if (warnings.length) {\n let message = `Warnings detected during schema push: \\n\\n${warnings.join('\\n')}\\n\\n`\n\n if (hasDataLoss) {\n message += `DATA LOSS WARNING: Possible data loss detected if schema is pushed.\\n\\n`\n }\n\n message += `Accept warnings and push schema to database?`\n\n const { confirm: acceptWarnings } = await prompts(\n {\n name: 'confirm',\n type: 'confirm',\n initial: false,\n message,\n },\n {\n onCancel: () => {\n process.exit(0)\n },\n },\n )\n\n // Exit if user does not accept warnings.\n // Q: Is this the right type of exit for this interaction?\n if (!acceptWarnings) {\n process.exit(0)\n }\n }\n\n await apply()\n\n // Migration table def in order to use query using drizzle\n const migrationsSchema = db.pgSchema.table('payload_migrations', {\n name: varchar('name'),\n batch: numeric('batch'),\n created_at: timestamp('created_at'),\n updated_at: timestamp('updated_at'),\n })\n\n const devPush = await db.drizzle\n .select()\n .from(migrationsSchema)\n .where(eq(migrationsSchema.batch, '-1'))\n\n if (!devPush.length) {\n await db.drizzle.insert(migrationsSchema).values({\n name: 'dev',\n batch: '-1',\n })\n } else {\n await db.drizzle\n .update(migrationsSchema)\n .set({\n updated_at: new Date(),\n })\n .where(eq(migrationsSchema.batch, '-1'))\n 
}\n}\n"],"names":["eq","numeric","timestamp","varchar","createRequire","prompts","require","url","pushDevSchema","db","pushSchema","apply","hasDataLoss","statementsToExecute","warnings","schema","drizzle","length","message","join","confirm","acceptWarnings","name","type","initial","onCancel","process","exit","migrationsSchema","pgSchema","table","batch","created_at","updated_at","devPush","select","from","where","insert","values","update","set","Date"],"rangeMappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;","mappings":"AAAA,SAASA,EAAE,QAAQ,cAAa;AAChC,SAASC,OAAO,EAAEC,SAAS,EAAEC,OAAO,QAAQ,sBAAqB;AACjE,SAASC,aAAa,QAAQ,SAAQ;AACtC,OAAOC,aAAa,UAAS;AAI7B,MAAMC,UAAUF,cAAc,YAAYG,GAAG;AAE7C;;;;;CAKC,GACD,OAAO,MAAMC,gBAAgB,OAAOC;IAClC,MAAM,EAAEC,UAAU,EAAE,GAAGJ,QAAQ;IAE/B,+EAA+E;IAC/E,MAAM,EAAEK,KAAK,EAAEC,WAAW,EAAEC,mBAAmB,EAAEC,QAAQ,EAAE,GAAG,MAAMJ,WAClED,GAAGM,MAAM,EACTN,GAAGO,OAAO;IAGZ,IAAIF,SAASG,MAAM,EAAE;QACnB,IAAIC,UAAU,CAAC,0CAA0C,EAAEJ,SAASK,IAAI,CAAC,MAAM,IAAI,CAAC;QAEpF,IAAIP,aAAa;YACfM,WAAW,CAAC,uEAAuE,CAAC;QACtF;QAEAA,WAAW,CAAC,4CAA4C,CAAC;QAEzD,MAAM,EAAEE,SAASC,cAAc,EAAE,GAAG,MAAMhB,QACxC;YACEiB,MAAM;YACNC,MAAM;YACNC,SAAS;YACTN;QACF,GACA;YACEO,UAAU;gBACRC,QAAQC,IAAI,CAAC;YACf;QACF;QAGF,yCAAyC;QACzC,0DAA0D;QAC1D,IAAI,CAACN,gBAAgB;YACnBK,QAAQC,IAAI,CAAC;QACf;IACF;IAEA,MAAMhB;IAEN,0DAA0D;IAC1D,MAAMiB,mBAAmBnB,GAAGoB,QAAQ,CAACC,KAAK,CAAC,sBAAsB;QAC/DR,MAAMnB,QAAQ;QACd4B,OAAO9B,QAAQ;QACf+B,YAAY9B,UAAU;QACtB+B,YAAY/B,UAAU;IACxB;IAEA,MAAMgC,UAAU,MAAMzB,GAAGO,OAAO,CAC7BmB,MAAM,GACNC,IAAI,CAACR,kBACLS,KAAK,CAACrC,GAAG4B,iBAAiBG,KAAK,EAAE;IAEpC,IAAI,CAACG,QAAQjB,MAAM,EAAE;QACnB,MAAMR,GAAGO,OAAO,CAACsB,MAAM,CAACV,kBAAkBW,MAAM,CAAC;YAC/CjB,MAAM;YACNS,OAAO;QACT;IACF,OAAO;QACL,MAAMtB,GAAGO,OAAO,CACbwB,MAAM,CAACZ,kBACPa,GAAG,CAAC;YACHR,YAAY,IAAIS;QAClB,GACCL,KAAK,CAACrC,GAAG4B,iBAAiBG,KAAK,EAAE;IACtC;AACF,EAAC"}
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@payloadcms/db-postgres",
-  "version": "3.0.0-canary.6d9b462",
+  "version": "3.0.0-canary.c13f9ba",
   "description": "The officially supported Postgres database adapter for Payload",
   "homepage": "https://payloadcms.com",
   "repository": {
@@ -42,16 +42,16 @@
     "pg": "8.11.3",
     "prompts": "2.4.2",
     "to-snake-case": "1.0.0",
-    "uuid": "
+    "uuid": "9.0.0"
   },
   "devDependencies": {
     "@types/pg": "8.10.2",
     "@types/to-snake-case": "1.0.0",
-    "
-    "
+    "payload": "3.0.0-canary.c13f9ba",
+    "@payloadcms/eslint-config": "1.1.1"
   },
   "peerDependencies": {
-    "payload": "3.0.0-canary.6d9b462"
+    "payload": "3.0.0-canary.c13f9ba"
   },
   "scripts": {
     "build": "pnpm build:swc && pnpm build:types && pnpm renamePredefinedMigrations",