@strapi/core 5.0.0-rc.21 → 5.0.0-rc.23

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/migrations/database/5.0.0-discard-drafts.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"5.0.0-discard-drafts.d.ts","sourceRoot":"","sources":["../../../src/migrations/database/5.0.0-discard-drafts.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;GAWG;AAIH,OAAO,KAAK,EAAE,QAAQ,EAAE,SAAS,EAAE,MAAM,kBAAkB,CAAC;AAG5D,KAAK,eAAe,GAAG;IAAE,UAAU,EAAE,MAAM,CAAC;IAAC,MAAM,EAAE,MAAM,CAAA;CAAE,CAAC;AAC9D,KAAK,IAAI,GAAG,UAAU,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAgF3C;;;;;GAKG;AACH,wBAAuB,iBAAiB,CAAC,EACvC,EAAE,EACF,GAAG,EACH,GAAG,EACH,SAAgB,GACjB,EAAE;IACD,EAAE,EAAE,QAAQ,CAAC;IACb,GAAG,EAAE,IAAI,CAAC;IACV,GAAG,EAAE,MAAM,CAAC;IACZ,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB,oDAuBA;AAwCD,eAAO,MAAM,qBAAqB,EAAE,SAQnC,CAAC"}
+ {"version":3,"file":"5.0.0-discard-drafts.d.ts","sourceRoot":"","sources":["../../../src/migrations/database/5.0.0-discard-drafts.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;GAWG;AAIH,OAAO,KAAK,EAAE,QAAQ,EAAE,SAAS,EAAE,MAAM,kBAAkB,CAAC;AAG5D,KAAK,eAAe,GAAG;IAAE,UAAU,EAAE,MAAM,CAAC;IAAC,MAAM,EAAE,MAAM,CAAA;CAAE,CAAC;AAC9D,KAAK,IAAI,GAAG,UAAU,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAqF3C;;;;;GAKG;AACH,wBAAuB,iBAAiB,CAAC,EACvC,EAAE,EACF,GAAG,EACH,GAAG,EACH,SAAgB,GACjB,EAAE;IACD,EAAE,EAAE,QAAQ,CAAC;IACb,GAAG,EAAE,IAAI,CAAC;IACV,GAAG,EAAE,MAAM,CAAC;IACZ,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB,oDAuBA;AAwCD,eAAO,MAAM,qBAAqB,EAAE,SAQnC,CAAC"}
package/dist/migrations/database/5.0.0-discard-drafts.js CHANGED
@@ -29,11 +29,16 @@ async function copyPublishedEntriesToDraft({
  }
  return acc;
  }, []);
- await trx.into(trx.raw(`${meta.tableName} (${scalarAttributes.join(", ")})`)).insert((subQb) => {
+ await trx.into(
+ trx.raw(`?? (${scalarAttributes.map(() => `??`).join(", ")})`, [
+ meta.tableName,
+ ...scalarAttributes
+ ])
+ ).insert((subQb) => {
  subQb.select(
  ...scalarAttributes.map((att) => {
  if (att === "published_at") {
- return trx.raw("NULL as published_at");
+ return trx.raw("NULL as ??", "published_at");
  }
  return att;
  })
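
The substantive change in this hunk: the INSERT ... SELECT that clones published rows into drafts now passes the table and column names to knex as ?? identifier bindings instead of interpolating them into the raw SQL string, so each name is escaped by the active dialect. A minimal sketch of the difference, using a hypothetical knex instance and invented table/column names (only the ?? mechanism itself comes from the package):

import knex from 'knex';

// Query building only; knex opens no connection until a query executes.
const db = knex({ client: 'pg' });

// Hypothetical stand-ins for meta.tableName / scalarAttributes above.
const tableName = 'articles';
const scalarAttributes = ['document_id', 'locale', 'published_at'];

// rc.21 style: identifiers pasted into the SQL string and emitted as-is.
const before = db.raw(`${tableName} (${scalarAttributes.join(', ')})`);
console.log(before.toQuery());
// articles (document_id, locale, published_at)

// rc.23 style: one ?? placeholder per identifier; knex substitutes each
// binding as an escaped identifier, so reserved words or unusual names
// no longer yield broken SQL.
const after = db.raw(`?? (${scalarAttributes.map(() => '??').join(', ')})`, [
  tableName,
  ...scalarAttributes,
]);
console.log(after.toQuery());
// "articles" ("document_id", "locale", "published_at")

The alias rewrite further down the hunk, trx.raw("NULL as ??", "published_at"), is the same mechanism in its single-binding form; knex accepts one value or an array.
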
package/dist/migrations/database/5.0.0-discard-drafts.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"5.0.0-discard-drafts.js","sources":["../../../src/migrations/database/5.0.0-discard-drafts.ts"],"sourcesContent":["/**\n * This migration is responsible for creating the draft counterpart for all the entries that were in a published state.\n *\n * In v4, entries could either be in a draft or published state, but not both at the same time.\n * In v5, we introduced the concept of document, and an entry can be in a draft or published state.\n *\n * This means the migration needs to create the draft counterpart if an entry was published.\n *\n * This migration performs the following steps:\n * 1. Creates draft entries for all published entries, without it's components, dynamic zones or relations.\n * 2. Using the document service, discard those same drafts to copy its relations.\n */\n\n/* eslint-disable no-continue */\nimport type { UID } from '@strapi/types';\nimport type { Database, Migration } from '@strapi/database';\nimport { async, contentTypes } from '@strapi/utils';\n\ntype DocumentVersion = { documentId: string; locale: string };\ntype Knex = Parameters<Migration['up']>[0];\n\n/**\n * Check if the model has draft and publish enabled.\n */\nconst hasDraftAndPublish = async (trx: Knex, meta: any) => {\n const hasTable = await trx.schema.hasTable(meta.tableName);\n\n if (!hasTable) {\n return false;\n }\n\n const uid = meta.uid as UID.ContentType;\n const model = strapi.getModel(uid);\n const hasDP = contentTypes.hasDraftAndPublish(model);\n if (!hasDP) {\n return false;\n }\n\n return true;\n};\n\n/**\n * Copy all the published entries to draft entries, without it's components, dynamic zones or relations.\n * This ensures all necessary draft's exist before copying it's relations.\n */\nasync function copyPublishedEntriesToDraft({\n db,\n trx,\n uid,\n}: {\n db: Database;\n trx: Knex;\n uid: string;\n}) {\n // Extract all scalar attributes to use in the insert query\n const meta = db.metadata.get(uid);\n\n // Get scalar attributes that will be copied over the new draft\n const scalarAttributes = Object.values(meta.attributes).reduce((acc, attribute: any) => {\n if (['id'].includes(attribute.columnName)) {\n return acc;\n }\n\n if (contentTypes.isScalarAttribute(attribute)) {\n acc.push(attribute.columnName);\n }\n\n return acc;\n }, [] as string[]);\n\n /**\n * Query to copy the published entries into draft entries.\n *\n * INSERT INTO tableName (columnName1, columnName2, columnName3, ...)\n * SELECT columnName1, columnName2, columnName3, ...\n * FROM tableName\n */\n await trx\n // INSERT INTO tableName (columnName1, columnName2, columnName3, ...)\n .into(trx.raw(`${meta.tableName} (${scalarAttributes.join(', ')})`))\n .insert((subQb: typeof trx) => {\n // SELECT columnName1, columnName2, columnName3, ...\n subQb\n .select(\n ...scalarAttributes.map((att: string) => {\n // Override 'publishedAt' and 'updatedAt' attributes\n if (att === 'published_at') {\n return trx.raw('NULL as published_at');\n }\n\n return att;\n })\n )\n .from(meta.tableName)\n // Only select entries that were published\n .whereNotNull('published_at');\n });\n}\n\n/**\n * Load a batch of versions to discard.\n *\n * Versions with only a draft version will be ignored.\n * Only versions with a published version (which always have a draft version) will be discarded.\n */\nexport async function* getBatchToDiscard({\n db,\n trx,\n uid,\n batchSize = 1000,\n}: {\n db: Database;\n trx: Knex;\n uid: string;\n batchSize?: number;\n}) {\n let offset = 0;\n let hasMore = true;\n\n while (hasMore) {\n // 
Look for the published entries to discard\n const batch: DocumentVersion[] = await db\n .queryBuilder(uid)\n .select(['id', 'documentId', 'locale'])\n .where({ publishedAt: { $ne: null } })\n .limit(batchSize)\n .offset(offset)\n .orderBy('id')\n .transacting(trx)\n .execute();\n\n if (batch.length < batchSize) {\n hasMore = false;\n }\n\n offset += batchSize;\n yield batch;\n }\n}\n\n/**\n * 2 pass migration to create the draft entries for all the published entries.\n * And then discard the drafts to copy the relations.\n */\nconst migrateUp = async (trx: Knex, db: Database) => {\n const dpModels = [];\n for (const meta of db.metadata.values()) {\n const hasDP = await hasDraftAndPublish(trx, meta);\n if (hasDP) {\n dpModels.push(meta);\n }\n }\n\n /**\n * Create plain draft entries for all the entries that were published.\n */\n for (const model of dpModels) {\n await copyPublishedEntriesToDraft({ db, trx, uid: model.uid });\n }\n\n /**\n * Discard the drafts will copy the relations from the published entries to the newly created drafts.\n *\n * Load a batch of entries (batched to prevent loading millions of rows at once ),\n * and discard them using the document service.\n */\n for (const model of dpModels) {\n const discardDraft = async (entry: DocumentVersion) =>\n strapi\n .documents(model.uid as UID.ContentType)\n .discardDraft({ documentId: entry.documentId, locale: entry.locale });\n\n for await (const batch of getBatchToDiscard({ db, trx, uid: model.uid })) {\n await async.map(batch, discardDraft, { concurrency: 10 });\n }\n }\n};\n\nexport const discardDocumentDrafts: Migration = {\n name: 'core::5.0.0-discard-drafts',\n async up(trx, db) {\n await migrateUp(trx, db);\n },\n async down() {\n throw new Error('not implemented');\n },\n};\n"],"names":["contentTypes","async"],"mappings":";;;AAwBA,MAAM,qBAAqB,OAAO,KAAW,SAAc;AACzD,QAAM,WAAW,MAAM,IAAI,OAAO,SAAS,KAAK,SAAS;AAEzD,MAAI,CAAC,UAAU;AACN,WAAA;AAAA,EACT;AAEA,QAAM,MAAM,KAAK;AACX,QAAA,QAAQ,OAAO,SAAS,GAAG;AAC3B,QAAA,QAAQA,YAAAA,aAAa,mBAAmB,KAAK;AACnD,MAAI,CAAC,OAAO;AACH,WAAA;AAAA,EACT;AAEO,SAAA;AACT;AAMA,eAAe,4BAA4B;AAAA,EACzC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AAED,QAAM,OAAO,GAAG,SAAS,IAAI,GAAG;AAG1B,QAAA,mBAAmB,OAAO,OAAO,KAAK,UAAU,EAAE,OAAO,CAAC,KAAK,cAAmB;AACtF,QAAI,CAAC,IAAI,EAAE,SAAS,UAAU,UAAU,GAAG;AAClC,aAAA;AAAA,IACT;AAEI,QAAAA,YAAA,aAAa,kBAAkB,SAAS,GAAG;AACzC,UAAA,KAAK,UAAU,UAAU;AAAA,IAC/B;AAEO,WAAA;AAAA,EACT,GAAG,CAAc,CAAA;AASjB,QAAM,IAEH,KAAK,IAAI,IAAI,GAAG,KAAK,SAAS,KAAK,iBAAiB,KAAK,IAAI,CAAC,GAAG,CAAC,EAClE,OAAO,CAAC,UAAsB;AAG1B,UAAA;AAAA,MACC,GAAG,iBAAiB,IAAI,CAAC,QAAgB;AAEvC,YAAI,QAAQ,gBAAgB;AACnB,iBAAA,IAAI,IAAI,sBAAsB;AAAA,QACvC;AAEO,eAAA;AAAA,MAAA,CACR;AAAA,IAAA,EAEF,KAAK,KAAK,SAAS,EAEnB,aAAa,cAAc;AAAA,EAAA,CAC/B;AACL;AAQA,gBAAuB,kBAAkB;AAAA,EACvC;AAAA,EACA;AAAA,EACA;AAAA,EACA,YAAY;AACd,GAKG;AACD,MAAI,SAAS;AACb,MAAI,UAAU;AAEd,SAAO,SAAS;AAEd,UAAM,QAA2B,MAAM,GACpC,aAAa,GAAG,EAChB,OAAO,CAAC,MAAM,cAAc,QAAQ,CAAC,EACrC,MAAM,EAAE,aAAa,EAAE,KAAK,KAAO,EAAA,CAAC,EACpC,MAAM,SAAS,EACf,OAAO,MAAM,EACb,QAAQ,IAAI,EACZ,YAAY,GAAG,EACf,QAAQ;AAEP,QAAA,MAAM,SAAS,WAAW;AAClB,gBAAA;AAAA,IACZ;AAEU,cAAA;AACJ,UAAA;AAAA,EACR;AACF;AAMA,MAAM,YAAY,OAAO,KAAW,OAAiB;AACnD,QAAM,WAAW,CAAA;AACjB,aAAW,QAAQ,GAAG,SAAS,OAAA,GAAU;AACvC,UAAM,QAAQ,MAAM,mBAAmB,KAAK,IAAI;AAChD,QAAI,OAAO;AACT,eAAS,KAAK,IAAI;AAAA,IACpB;AAAA,EACF;AAKA,aAAW,SAAS,UAAU;AAC5B,UAAM,4BAA4B,EAAE,IAAI,KAAK,KAAK,MAAM,KAAK;AAAA,EAC/D;AAQA,aAAW,SAAS,UAAU;AAC5B,UAAM,eAAe,OAAO,UAC1B,OACG,UAAU,MAAM,GAAsB,EACtC,aAAa,EAAE,YAAY,MAAM,YAAY,QAAQ,MAAM,QAAQ;AAEvD,qBAAA,SAAS,kBAAkB,EAAE,IAAI,KAAK,KAAK,MAAM,I
AAI,CAAC,GAAG;AACxE,YAAMC,YAAAA,MAAM,IAAI,OAAO,cAAc,EAAE,aAAa,IAAI;AAAA,IAC1D;AAAA,EACF;AACF;AAEO,MAAM,wBAAmC;AAAA,EAC9C,MAAM;AAAA,EACN,MAAM,GAAG,KAAK,IAAI;AACV,UAAA,UAAU,KAAK,EAAE;AAAA,EACzB;AAAA,EACA,MAAM,OAAO;AACL,UAAA,IAAI,MAAM,iBAAiB;AAAA,EACnC;AACF;;;"}
+ {"version":3,"file":"5.0.0-discard-drafts.js","sources":["../../../src/migrations/database/5.0.0-discard-drafts.ts"],"sourcesContent":["/**\n * This migration is responsible for creating the draft counterpart for all the entries that were in a published state.\n *\n * In v4, entries could either be in a draft or published state, but not both at the same time.\n * In v5, we introduced the concept of document, and an entry can be in a draft or published state.\n *\n * This means the migration needs to create the draft counterpart if an entry was published.\n *\n * This migration performs the following steps:\n * 1. Creates draft entries for all published entries, without it's components, dynamic zones or relations.\n * 2. Using the document service, discard those same drafts to copy its relations.\n */\n\n/* eslint-disable no-continue */\nimport type { UID } from '@strapi/types';\nimport type { Database, Migration } from '@strapi/database';\nimport { async, contentTypes } from '@strapi/utils';\n\ntype DocumentVersion = { documentId: string; locale: string };\ntype Knex = Parameters<Migration['up']>[0];\n\n/**\n * Check if the model has draft and publish enabled.\n */\nconst hasDraftAndPublish = async (trx: Knex, meta: any) => {\n const hasTable = await trx.schema.hasTable(meta.tableName);\n\n if (!hasTable) {\n return false;\n }\n\n const uid = meta.uid as UID.ContentType;\n const model = strapi.getModel(uid);\n const hasDP = contentTypes.hasDraftAndPublish(model);\n if (!hasDP) {\n return false;\n }\n\n return true;\n};\n\n/**\n * Copy all the published entries to draft entries, without it's components, dynamic zones or relations.\n * This ensures all necessary draft's exist before copying it's relations.\n */\nasync function copyPublishedEntriesToDraft({\n db,\n trx,\n uid,\n}: {\n db: Database;\n trx: Knex;\n uid: string;\n}) {\n // Extract all scalar attributes to use in the insert query\n const meta = db.metadata.get(uid);\n\n // Get scalar attributes that will be copied over the new draft\n const scalarAttributes = Object.values(meta.attributes).reduce((acc, attribute: any) => {\n if (['id'].includes(attribute.columnName)) {\n return acc;\n }\n\n if (contentTypes.isScalarAttribute(attribute)) {\n acc.push(attribute.columnName);\n }\n\n return acc;\n }, [] as string[]);\n\n /**\n * Query to copy the published entries into draft entries.\n *\n * INSERT INTO tableName (columnName1, columnName2, columnName3, ...)\n * SELECT columnName1, columnName2, columnName3, ...\n * FROM tableName\n */\n await trx\n // INSERT INTO tableName (columnName1, columnName2, columnName3, ...)\n .into(\n trx.raw(`?? 
(${scalarAttributes.map(() => `??`).join(', ')})`, [\n meta.tableName,\n ...scalarAttributes,\n ])\n )\n .insert((subQb: typeof trx) => {\n // SELECT columnName1, columnName2, columnName3, ...\n subQb\n .select(\n ...scalarAttributes.map((att: string) => {\n // Override 'publishedAt' and 'updatedAt' attributes\n if (att === 'published_at') {\n return trx.raw('NULL as ??', 'published_at');\n }\n\n return att;\n })\n )\n .from(meta.tableName)\n // Only select entries that were published\n .whereNotNull('published_at');\n });\n}\n\n/**\n * Load a batch of versions to discard.\n *\n * Versions with only a draft version will be ignored.\n * Only versions with a published version (which always have a draft version) will be discarded.\n */\nexport async function* getBatchToDiscard({\n db,\n trx,\n uid,\n batchSize = 1000,\n}: {\n db: Database;\n trx: Knex;\n uid: string;\n batchSize?: number;\n}) {\n let offset = 0;\n let hasMore = true;\n\n while (hasMore) {\n // Look for the published entries to discard\n const batch: DocumentVersion[] = await db\n .queryBuilder(uid)\n .select(['id', 'documentId', 'locale'])\n .where({ publishedAt: { $ne: null } })\n .limit(batchSize)\n .offset(offset)\n .orderBy('id')\n .transacting(trx)\n .execute();\n\n if (batch.length < batchSize) {\n hasMore = false;\n }\n\n offset += batchSize;\n yield batch;\n }\n}\n\n/**\n * 2 pass migration to create the draft entries for all the published entries.\n * And then discard the drafts to copy the relations.\n */\nconst migrateUp = async (trx: Knex, db: Database) => {\n const dpModels = [];\n for (const meta of db.metadata.values()) {\n const hasDP = await hasDraftAndPublish(trx, meta);\n if (hasDP) {\n dpModels.push(meta);\n }\n }\n\n /**\n * Create plain draft entries for all the entries that were published.\n */\n for (const model of dpModels) {\n await copyPublishedEntriesToDraft({ db, trx, uid: model.uid });\n }\n\n /**\n * Discard the drafts will copy the relations from the published entries to the newly created drafts.\n *\n * Load a batch of entries (batched to prevent loading millions of rows at once ),\n * and discard them using the document service.\n */\n for (const model of dpModels) {\n const discardDraft = async (entry: DocumentVersion) =>\n strapi\n .documents(model.uid as UID.ContentType)\n .discardDraft({ documentId: entry.documentId, locale: entry.locale });\n\n for await (const batch of getBatchToDiscard({ db, trx, uid: model.uid })) {\n await async.map(batch, discardDraft, { concurrency: 10 });\n }\n }\n};\n\nexport const discardDocumentDrafts: Migration = {\n name: 'core::5.0.0-discard-drafts',\n async up(trx, db) {\n await migrateUp(trx, db);\n },\n async down() {\n throw new Error('not implemented');\n 
},\n};\n"],"names":["contentTypes","async"],"mappings":";;;AAwBA,MAAM,qBAAqB,OAAO,KAAW,SAAc;AACzD,QAAM,WAAW,MAAM,IAAI,OAAO,SAAS,KAAK,SAAS;AAEzD,MAAI,CAAC,UAAU;AACN,WAAA;AAAA,EACT;AAEA,QAAM,MAAM,KAAK;AACX,QAAA,QAAQ,OAAO,SAAS,GAAG;AAC3B,QAAA,QAAQA,YAAAA,aAAa,mBAAmB,KAAK;AACnD,MAAI,CAAC,OAAO;AACH,WAAA;AAAA,EACT;AAEO,SAAA;AACT;AAMA,eAAe,4BAA4B;AAAA,EACzC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AAED,QAAM,OAAO,GAAG,SAAS,IAAI,GAAG;AAG1B,QAAA,mBAAmB,OAAO,OAAO,KAAK,UAAU,EAAE,OAAO,CAAC,KAAK,cAAmB;AACtF,QAAI,CAAC,IAAI,EAAE,SAAS,UAAU,UAAU,GAAG;AAClC,aAAA;AAAA,IACT;AAEI,QAAAA,YAAA,aAAa,kBAAkB,SAAS,GAAG;AACzC,UAAA,KAAK,UAAU,UAAU;AAAA,IAC/B;AAEO,WAAA;AAAA,EACT,GAAG,CAAc,CAAA;AASjB,QAAM,IAEH;AAAA,IACC,IAAI,IAAI,OAAO,iBAAiB,IAAI,MAAM,IAAI,EAAE,KAAK,IAAI,CAAC,KAAK;AAAA,MAC7D,KAAK;AAAA,MACL,GAAG;AAAA,IAAA,CACJ;AAAA,EAAA,EAEF,OAAO,CAAC,UAAsB;AAG1B,UAAA;AAAA,MACC,GAAG,iBAAiB,IAAI,CAAC,QAAgB;AAEvC,YAAI,QAAQ,gBAAgB;AACnB,iBAAA,IAAI,IAAI,cAAc,cAAc;AAAA,QAC7C;AAEO,eAAA;AAAA,MAAA,CACR;AAAA,IAAA,EAEF,KAAK,KAAK,SAAS,EAEnB,aAAa,cAAc;AAAA,EAAA,CAC/B;AACL;AAQA,gBAAuB,kBAAkB;AAAA,EACvC;AAAA,EACA;AAAA,EACA;AAAA,EACA,YAAY;AACd,GAKG;AACD,MAAI,SAAS;AACb,MAAI,UAAU;AAEd,SAAO,SAAS;AAEd,UAAM,QAA2B,MAAM,GACpC,aAAa,GAAG,EAChB,OAAO,CAAC,MAAM,cAAc,QAAQ,CAAC,EACrC,MAAM,EAAE,aAAa,EAAE,KAAK,KAAO,EAAA,CAAC,EACpC,MAAM,SAAS,EACf,OAAO,MAAM,EACb,QAAQ,IAAI,EACZ,YAAY,GAAG,EACf,QAAQ;AAEP,QAAA,MAAM,SAAS,WAAW;AAClB,gBAAA;AAAA,IACZ;AAEU,cAAA;AACJ,UAAA;AAAA,EACR;AACF;AAMA,MAAM,YAAY,OAAO,KAAW,OAAiB;AACnD,QAAM,WAAW,CAAA;AACjB,aAAW,QAAQ,GAAG,SAAS,OAAA,GAAU;AACvC,UAAM,QAAQ,MAAM,mBAAmB,KAAK,IAAI;AAChD,QAAI,OAAO;AACT,eAAS,KAAK,IAAI;AAAA,IACpB;AAAA,EACF;AAKA,aAAW,SAAS,UAAU;AAC5B,UAAM,4BAA4B,EAAE,IAAI,KAAK,KAAK,MAAM,KAAK;AAAA,EAC/D;AAQA,aAAW,SAAS,UAAU;AAC5B,UAAM,eAAe,OAAO,UAC1B,OACG,UAAU,MAAM,GAAsB,EACtC,aAAa,EAAE,YAAY,MAAM,YAAY,QAAQ,MAAM,QAAQ;AAEvD,qBAAA,SAAS,kBAAkB,EAAE,IAAI,KAAK,KAAK,MAAM,IAAI,CAAC,GAAG;AACxE,YAAMC,YAAAA,MAAM,IAAI,OAAO,cAAc,EAAE,aAAa,IAAI;AAAA,IAC1D;AAAA,EACF;AACF;AAEO,MAAM,wBAAmC;AAAA,EAC9C,MAAM;AAAA,EACN,MAAM,GAAG,KAAK,IAAI;AACV,UAAA,UAAU,KAAK,EAAE;AAAA,EACzB;AAAA,EACA,MAAM,OAAO;AACL,UAAA,IAAI,MAAM,iBAAiB;AAAA,EACnC;AACF;;;"}
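
The source map above embeds the migration's full TypeScript source, including getBatchToDiscard, which pages through published entries with limit/offset so the migration never holds millions of rows at once. Reduced to a self-contained sketch, with the Strapi query builder replaced by a plain fetchPage function (an assumption for illustration, not the Strapi API):

type DocumentVersion = { documentId: string; locale: string };

// Yields fixed-size pages until a short page signals the end of the result
// set, mirroring the offset/limit loop in getBatchToDiscard.
async function* getBatches(
  fetchPage: (limit: number, offset: number) => Promise<DocumentVersion[]>,
  batchSize = 1000
): AsyncGenerator<DocumentVersion[]> {
  let offset = 0;
  let hasMore = true;
  while (hasMore) {
    const batch = await fetchPage(batchSize, offset);
    if (batch.length < batchSize) {
      hasMore = false; // a short page means nothing lies beyond it
    }
    offset += batchSize;
    yield batch;
  }
}

// Stubbed usage: 2500 rows arrive as pages of 1000, 1000, 500.
// (Run as an ES module so top-level await is available.)
const rows: DocumentVersion[] = Array.from({ length: 2500 }, (_, i) => ({
  documentId: `doc-${i}`,
  locale: 'en',
}));

for await (const batch of getBatches(async (limit, offset) =>
  rows.slice(offset, offset + limit)
)) {
  console.log(batch.length);
}
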
package/dist/migrations/database/5.0.0-discard-drafts.mjs CHANGED
@@ -27,11 +27,16 @@ async function copyPublishedEntriesToDraft({
  }
  return acc;
  }, []);
- await trx.into(trx.raw(`${meta.tableName} (${scalarAttributes.join(", ")})`)).insert((subQb) => {
+ await trx.into(
+ trx.raw(`?? (${scalarAttributes.map(() => `??`).join(", ")})`, [
+ meta.tableName,
+ ...scalarAttributes
+ ])
+ ).insert((subQb) => {
  subQb.select(
  ...scalarAttributes.map((att) => {
  if (att === "published_at") {
- return trx.raw("NULL as published_at");
+ return trx.raw("NULL as ??", "published_at");
  }
  return att;
  })
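
The ESM bundle above carries the identical fix as the CommonJS build. Its alias case uses the single-binding form of raw; continuing the illustrative pg-flavoured setup from the earlier sketch:

import knex from 'knex';

const db = knex({ client: 'pg' }); // query building only, as before

// An alias is an identifier too, so it is bound with ?? rather than interpolated.
console.log(db.raw('NULL as ??', 'published_at').toQuery());
// NULL as "published_at"
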
package/dist/migrations/database/5.0.0-discard-drafts.mjs.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"5.0.0-discard-drafts.mjs","sources":["../../../src/migrations/database/5.0.0-discard-drafts.ts"],"sourcesContent":["/**\n * This migration is responsible for creating the draft counterpart for all the entries that were in a published state.\n *\n * In v4, entries could either be in a draft or published state, but not both at the same time.\n * In v5, we introduced the concept of document, and an entry can be in a draft or published state.\n *\n * This means the migration needs to create the draft counterpart if an entry was published.\n *\n * This migration performs the following steps:\n * 1. Creates draft entries for all published entries, without it's components, dynamic zones or relations.\n * 2. Using the document service, discard those same drafts to copy its relations.\n */\n\n/* eslint-disable no-continue */\nimport type { UID } from '@strapi/types';\nimport type { Database, Migration } from '@strapi/database';\nimport { async, contentTypes } from '@strapi/utils';\n\ntype DocumentVersion = { documentId: string; locale: string };\ntype Knex = Parameters<Migration['up']>[0];\n\n/**\n * Check if the model has draft and publish enabled.\n */\nconst hasDraftAndPublish = async (trx: Knex, meta: any) => {\n const hasTable = await trx.schema.hasTable(meta.tableName);\n\n if (!hasTable) {\n return false;\n }\n\n const uid = meta.uid as UID.ContentType;\n const model = strapi.getModel(uid);\n const hasDP = contentTypes.hasDraftAndPublish(model);\n if (!hasDP) {\n return false;\n }\n\n return true;\n};\n\n/**\n * Copy all the published entries to draft entries, without it's components, dynamic zones or relations.\n * This ensures all necessary draft's exist before copying it's relations.\n */\nasync function copyPublishedEntriesToDraft({\n db,\n trx,\n uid,\n}: {\n db: Database;\n trx: Knex;\n uid: string;\n}) {\n // Extract all scalar attributes to use in the insert query\n const meta = db.metadata.get(uid);\n\n // Get scalar attributes that will be copied over the new draft\n const scalarAttributes = Object.values(meta.attributes).reduce((acc, attribute: any) => {\n if (['id'].includes(attribute.columnName)) {\n return acc;\n }\n\n if (contentTypes.isScalarAttribute(attribute)) {\n acc.push(attribute.columnName);\n }\n\n return acc;\n }, [] as string[]);\n\n /**\n * Query to copy the published entries into draft entries.\n *\n * INSERT INTO tableName (columnName1, columnName2, columnName3, ...)\n * SELECT columnName1, columnName2, columnName3, ...\n * FROM tableName\n */\n await trx\n // INSERT INTO tableName (columnName1, columnName2, columnName3, ...)\n .into(trx.raw(`${meta.tableName} (${scalarAttributes.join(', ')})`))\n .insert((subQb: typeof trx) => {\n // SELECT columnName1, columnName2, columnName3, ...\n subQb\n .select(\n ...scalarAttributes.map((att: string) => {\n // Override 'publishedAt' and 'updatedAt' attributes\n if (att === 'published_at') {\n return trx.raw('NULL as published_at');\n }\n\n return att;\n })\n )\n .from(meta.tableName)\n // Only select entries that were published\n .whereNotNull('published_at');\n });\n}\n\n/**\n * Load a batch of versions to discard.\n *\n * Versions with only a draft version will be ignored.\n * Only versions with a published version (which always have a draft version) will be discarded.\n */\nexport async function* getBatchToDiscard({\n db,\n trx,\n uid,\n batchSize = 1000,\n}: {\n db: Database;\n trx: Knex;\n uid: string;\n batchSize?: number;\n}) {\n let offset = 0;\n let hasMore = true;\n\n while (hasMore) {\n 
// Look for the published entries to discard\n const batch: DocumentVersion[] = await db\n .queryBuilder(uid)\n .select(['id', 'documentId', 'locale'])\n .where({ publishedAt: { $ne: null } })\n .limit(batchSize)\n .offset(offset)\n .orderBy('id')\n .transacting(trx)\n .execute();\n\n if (batch.length < batchSize) {\n hasMore = false;\n }\n\n offset += batchSize;\n yield batch;\n }\n}\n\n/**\n * 2 pass migration to create the draft entries for all the published entries.\n * And then discard the drafts to copy the relations.\n */\nconst migrateUp = async (trx: Knex, db: Database) => {\n const dpModels = [];\n for (const meta of db.metadata.values()) {\n const hasDP = await hasDraftAndPublish(trx, meta);\n if (hasDP) {\n dpModels.push(meta);\n }\n }\n\n /**\n * Create plain draft entries for all the entries that were published.\n */\n for (const model of dpModels) {\n await copyPublishedEntriesToDraft({ db, trx, uid: model.uid });\n }\n\n /**\n * Discard the drafts will copy the relations from the published entries to the newly created drafts.\n *\n * Load a batch of entries (batched to prevent loading millions of rows at once ),\n * and discard them using the document service.\n */\n for (const model of dpModels) {\n const discardDraft = async (entry: DocumentVersion) =>\n strapi\n .documents(model.uid as UID.ContentType)\n .discardDraft({ documentId: entry.documentId, locale: entry.locale });\n\n for await (const batch of getBatchToDiscard({ db, trx, uid: model.uid })) {\n await async.map(batch, discardDraft, { concurrency: 10 });\n }\n }\n};\n\nexport const discardDocumentDrafts: Migration = {\n name: 'core::5.0.0-discard-drafts',\n async up(trx, db) {\n await migrateUp(trx, db);\n },\n async down() {\n throw new Error('not implemented');\n },\n};\n"],"names":[],"mappings":";AAwBA,MAAM,qBAAqB,OAAO,KAAW,SAAc;AACzD,QAAM,WAAW,MAAM,IAAI,OAAO,SAAS,KAAK,SAAS;AAEzD,MAAI,CAAC,UAAU;AACN,WAAA;AAAA,EACT;AAEA,QAAM,MAAM,KAAK;AACX,QAAA,QAAQ,OAAO,SAAS,GAAG;AAC3B,QAAA,QAAQ,aAAa,mBAAmB,KAAK;AACnD,MAAI,CAAC,OAAO;AACH,WAAA;AAAA,EACT;AAEO,SAAA;AACT;AAMA,eAAe,4BAA4B;AAAA,EACzC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AAED,QAAM,OAAO,GAAG,SAAS,IAAI,GAAG;AAG1B,QAAA,mBAAmB,OAAO,OAAO,KAAK,UAAU,EAAE,OAAO,CAAC,KAAK,cAAmB;AACtF,QAAI,CAAC,IAAI,EAAE,SAAS,UAAU,UAAU,GAAG;AAClC,aAAA;AAAA,IACT;AAEI,QAAA,aAAa,kBAAkB,SAAS,GAAG;AACzC,UAAA,KAAK,UAAU,UAAU;AAAA,IAC/B;AAEO,WAAA;AAAA,EACT,GAAG,CAAc,CAAA;AASjB,QAAM,IAEH,KAAK,IAAI,IAAI,GAAG,KAAK,SAAS,KAAK,iBAAiB,KAAK,IAAI,CAAC,GAAG,CAAC,EAClE,OAAO,CAAC,UAAsB;AAG1B,UAAA;AAAA,MACC,GAAG,iBAAiB,IAAI,CAAC,QAAgB;AAEvC,YAAI,QAAQ,gBAAgB;AACnB,iBAAA,IAAI,IAAI,sBAAsB;AAAA,QACvC;AAEO,eAAA;AAAA,MAAA,CACR;AAAA,IAAA,EAEF,KAAK,KAAK,SAAS,EAEnB,aAAa,cAAc;AAAA,EAAA,CAC/B;AACL;AAQA,gBAAuB,kBAAkB;AAAA,EACvC;AAAA,EACA;AAAA,EACA;AAAA,EACA,YAAY;AACd,GAKG;AACD,MAAI,SAAS;AACb,MAAI,UAAU;AAEd,SAAO,SAAS;AAEd,UAAM,QAA2B,MAAM,GACpC,aAAa,GAAG,EAChB,OAAO,CAAC,MAAM,cAAc,QAAQ,CAAC,EACrC,MAAM,EAAE,aAAa,EAAE,KAAK,KAAO,EAAA,CAAC,EACpC,MAAM,SAAS,EACf,OAAO,MAAM,EACb,QAAQ,IAAI,EACZ,YAAY,GAAG,EACf,QAAQ;AAEP,QAAA,MAAM,SAAS,WAAW;AAClB,gBAAA;AAAA,IACZ;AAEU,cAAA;AACJ,UAAA;AAAA,EACR;AACF;AAMA,MAAM,YAAY,OAAO,KAAW,OAAiB;AACnD,QAAM,WAAW,CAAA;AACjB,aAAW,QAAQ,GAAG,SAAS,OAAA,GAAU;AACvC,UAAM,QAAQ,MAAM,mBAAmB,KAAK,IAAI;AAChD,QAAI,OAAO;AACT,eAAS,KAAK,IAAI;AAAA,IACpB;AAAA,EACF;AAKA,aAAW,SAAS,UAAU;AAC5B,UAAM,4BAA4B,EAAE,IAAI,KAAK,KAAK,MAAM,KAAK;AAAA,EAC/D;AAQA,aAAW,SAAS,UAAU;AAC5B,UAAM,eAAe,OAAO,UAC1B,OACG,UAAU,MAAM,GAAsB,EACtC,aAAa,EAAE,YAAY,MAAM,YAAY,QAAQ,MAAM,QAAQ;AAEvD,qBAAA,SAAS,kBAAkB,EAAE,IAAI,KAAK,KAAK,MAAM,IAAI,CAAC,GAAG;AACxE,YAAM,MAAM,IAAI
,OAAO,cAAc,EAAE,aAAa,IAAI;AAAA,IAC1D;AAAA,EACF;AACF;AAEO,MAAM,wBAAmC;AAAA,EAC9C,MAAM;AAAA,EACN,MAAM,GAAG,KAAK,IAAI;AACV,UAAA,UAAU,KAAK,EAAE;AAAA,EACzB;AAAA,EACA,MAAM,OAAO;AACL,UAAA,IAAI,MAAM,iBAAiB;AAAA,EACnC;AACF;"}
+ {"version":3,"file":"5.0.0-discard-drafts.mjs","sources":["../../../src/migrations/database/5.0.0-discard-drafts.ts"],"sourcesContent":["/**\n * This migration is responsible for creating the draft counterpart for all the entries that were in a published state.\n *\n * In v4, entries could either be in a draft or published state, but not both at the same time.\n * In v5, we introduced the concept of document, and an entry can be in a draft or published state.\n *\n * This means the migration needs to create the draft counterpart if an entry was published.\n *\n * This migration performs the following steps:\n * 1. Creates draft entries for all published entries, without it's components, dynamic zones or relations.\n * 2. Using the document service, discard those same drafts to copy its relations.\n */\n\n/* eslint-disable no-continue */\nimport type { UID } from '@strapi/types';\nimport type { Database, Migration } from '@strapi/database';\nimport { async, contentTypes } from '@strapi/utils';\n\ntype DocumentVersion = { documentId: string; locale: string };\ntype Knex = Parameters<Migration['up']>[0];\n\n/**\n * Check if the model has draft and publish enabled.\n */\nconst hasDraftAndPublish = async (trx: Knex, meta: any) => {\n const hasTable = await trx.schema.hasTable(meta.tableName);\n\n if (!hasTable) {\n return false;\n }\n\n const uid = meta.uid as UID.ContentType;\n const model = strapi.getModel(uid);\n const hasDP = contentTypes.hasDraftAndPublish(model);\n if (!hasDP) {\n return false;\n }\n\n return true;\n};\n\n/**\n * Copy all the published entries to draft entries, without it's components, dynamic zones or relations.\n * This ensures all necessary draft's exist before copying it's relations.\n */\nasync function copyPublishedEntriesToDraft({\n db,\n trx,\n uid,\n}: {\n db: Database;\n trx: Knex;\n uid: string;\n}) {\n // Extract all scalar attributes to use in the insert query\n const meta = db.metadata.get(uid);\n\n // Get scalar attributes that will be copied over the new draft\n const scalarAttributes = Object.values(meta.attributes).reduce((acc, attribute: any) => {\n if (['id'].includes(attribute.columnName)) {\n return acc;\n }\n\n if (contentTypes.isScalarAttribute(attribute)) {\n acc.push(attribute.columnName);\n }\n\n return acc;\n }, [] as string[]);\n\n /**\n * Query to copy the published entries into draft entries.\n *\n * INSERT INTO tableName (columnName1, columnName2, columnName3, ...)\n * SELECT columnName1, columnName2, columnName3, ...\n * FROM tableName\n */\n await trx\n // INSERT INTO tableName (columnName1, columnName2, columnName3, ...)\n .into(\n trx.raw(`?? 
(${scalarAttributes.map(() => `??`).join(', ')})`, [\n meta.tableName,\n ...scalarAttributes,\n ])\n )\n .insert((subQb: typeof trx) => {\n // SELECT columnName1, columnName2, columnName3, ...\n subQb\n .select(\n ...scalarAttributes.map((att: string) => {\n // Override 'publishedAt' and 'updatedAt' attributes\n if (att === 'published_at') {\n return trx.raw('NULL as ??', 'published_at');\n }\n\n return att;\n })\n )\n .from(meta.tableName)\n // Only select entries that were published\n .whereNotNull('published_at');\n });\n}\n\n/**\n * Load a batch of versions to discard.\n *\n * Versions with only a draft version will be ignored.\n * Only versions with a published version (which always have a draft version) will be discarded.\n */\nexport async function* getBatchToDiscard({\n db,\n trx,\n uid,\n batchSize = 1000,\n}: {\n db: Database;\n trx: Knex;\n uid: string;\n batchSize?: number;\n}) {\n let offset = 0;\n let hasMore = true;\n\n while (hasMore) {\n // Look for the published entries to discard\n const batch: DocumentVersion[] = await db\n .queryBuilder(uid)\n .select(['id', 'documentId', 'locale'])\n .where({ publishedAt: { $ne: null } })\n .limit(batchSize)\n .offset(offset)\n .orderBy('id')\n .transacting(trx)\n .execute();\n\n if (batch.length < batchSize) {\n hasMore = false;\n }\n\n offset += batchSize;\n yield batch;\n }\n}\n\n/**\n * 2 pass migration to create the draft entries for all the published entries.\n * And then discard the drafts to copy the relations.\n */\nconst migrateUp = async (trx: Knex, db: Database) => {\n const dpModels = [];\n for (const meta of db.metadata.values()) {\n const hasDP = await hasDraftAndPublish(trx, meta);\n if (hasDP) {\n dpModels.push(meta);\n }\n }\n\n /**\n * Create plain draft entries for all the entries that were published.\n */\n for (const model of dpModels) {\n await copyPublishedEntriesToDraft({ db, trx, uid: model.uid });\n }\n\n /**\n * Discard the drafts will copy the relations from the published entries to the newly created drafts.\n *\n * Load a batch of entries (batched to prevent loading millions of rows at once ),\n * and discard them using the document service.\n */\n for (const model of dpModels) {\n const discardDraft = async (entry: DocumentVersion) =>\n strapi\n .documents(model.uid as UID.ContentType)\n .discardDraft({ documentId: entry.documentId, locale: entry.locale });\n\n for await (const batch of getBatchToDiscard({ db, trx, uid: model.uid })) {\n await async.map(batch, discardDraft, { concurrency: 10 });\n }\n }\n};\n\nexport const discardDocumentDrafts: Migration = {\n name: 'core::5.0.0-discard-drafts',\n async up(trx, db) {\n await migrateUp(trx, db);\n },\n async down() {\n throw new Error('not implemented');\n 
},\n};\n"],"names":[],"mappings":";AAwBA,MAAM,qBAAqB,OAAO,KAAW,SAAc;AACzD,QAAM,WAAW,MAAM,IAAI,OAAO,SAAS,KAAK,SAAS;AAEzD,MAAI,CAAC,UAAU;AACN,WAAA;AAAA,EACT;AAEA,QAAM,MAAM,KAAK;AACX,QAAA,QAAQ,OAAO,SAAS,GAAG;AAC3B,QAAA,QAAQ,aAAa,mBAAmB,KAAK;AACnD,MAAI,CAAC,OAAO;AACH,WAAA;AAAA,EACT;AAEO,SAAA;AACT;AAMA,eAAe,4BAA4B;AAAA,EACzC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AAED,QAAM,OAAO,GAAG,SAAS,IAAI,GAAG;AAG1B,QAAA,mBAAmB,OAAO,OAAO,KAAK,UAAU,EAAE,OAAO,CAAC,KAAK,cAAmB;AACtF,QAAI,CAAC,IAAI,EAAE,SAAS,UAAU,UAAU,GAAG;AAClC,aAAA;AAAA,IACT;AAEI,QAAA,aAAa,kBAAkB,SAAS,GAAG;AACzC,UAAA,KAAK,UAAU,UAAU;AAAA,IAC/B;AAEO,WAAA;AAAA,EACT,GAAG,CAAc,CAAA;AASjB,QAAM,IAEH;AAAA,IACC,IAAI,IAAI,OAAO,iBAAiB,IAAI,MAAM,IAAI,EAAE,KAAK,IAAI,CAAC,KAAK;AAAA,MAC7D,KAAK;AAAA,MACL,GAAG;AAAA,IAAA,CACJ;AAAA,EAAA,EAEF,OAAO,CAAC,UAAsB;AAG1B,UAAA;AAAA,MACC,GAAG,iBAAiB,IAAI,CAAC,QAAgB;AAEvC,YAAI,QAAQ,gBAAgB;AACnB,iBAAA,IAAI,IAAI,cAAc,cAAc;AAAA,QAC7C;AAEO,eAAA;AAAA,MAAA,CACR;AAAA,IAAA,EAEF,KAAK,KAAK,SAAS,EAEnB,aAAa,cAAc;AAAA,EAAA,CAC/B;AACL;AAQA,gBAAuB,kBAAkB;AAAA,EACvC;AAAA,EACA;AAAA,EACA;AAAA,EACA,YAAY;AACd,GAKG;AACD,MAAI,SAAS;AACb,MAAI,UAAU;AAEd,SAAO,SAAS;AAEd,UAAM,QAA2B,MAAM,GACpC,aAAa,GAAG,EAChB,OAAO,CAAC,MAAM,cAAc,QAAQ,CAAC,EACrC,MAAM,EAAE,aAAa,EAAE,KAAK,KAAO,EAAA,CAAC,EACpC,MAAM,SAAS,EACf,OAAO,MAAM,EACb,QAAQ,IAAI,EACZ,YAAY,GAAG,EACf,QAAQ;AAEP,QAAA,MAAM,SAAS,WAAW;AAClB,gBAAA;AAAA,IACZ;AAEU,cAAA;AACJ,UAAA;AAAA,EACR;AACF;AAMA,MAAM,YAAY,OAAO,KAAW,OAAiB;AACnD,QAAM,WAAW,CAAA;AACjB,aAAW,QAAQ,GAAG,SAAS,OAAA,GAAU;AACvC,UAAM,QAAQ,MAAM,mBAAmB,KAAK,IAAI;AAChD,QAAI,OAAO;AACT,eAAS,KAAK,IAAI;AAAA,IACpB;AAAA,EACF;AAKA,aAAW,SAAS,UAAU;AAC5B,UAAM,4BAA4B,EAAE,IAAI,KAAK,KAAK,MAAM,KAAK;AAAA,EAC/D;AAQA,aAAW,SAAS,UAAU;AAC5B,UAAM,eAAe,OAAO,UAC1B,OACG,UAAU,MAAM,GAAsB,EACtC,aAAa,EAAE,YAAY,MAAM,YAAY,QAAQ,MAAM,QAAQ;AAEvD,qBAAA,SAAS,kBAAkB,EAAE,IAAI,KAAK,KAAK,MAAM,IAAI,CAAC,GAAG;AACxE,YAAM,MAAM,IAAI,OAAO,cAAc,EAAE,aAAa,IAAI;AAAA,IAC1D;AAAA,EACF;AACF;AAEO,MAAM,wBAAmC;AAAA,EAC9C,MAAM;AAAA,EACN,MAAM,GAAG,KAAK,IAAI;AACV,UAAA,UAAU,KAAK,EAAE;AAAA,EACzB;AAAA,EACA,MAAM,OAAO;AACL,UAAA,IAAI,MAAM,iBAAiB;AAAA,EACnC;AACF;"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@strapi/core",
- "version": "5.0.0-rc.21",
+ "version": "5.0.0-rc.23",
  "description": "Core of Strapi",
  "homepage": "https://strapi.io",
  "bugs": {
@@ -55,15 +55,15 @@
  "@koa/cors": "5.0.0",
  "@koa/router": "12.0.1",
  "@paralleldrive/cuid2": "2.2.2",
- "@strapi/admin": "5.0.0-rc.21",
- "@strapi/database": "5.0.0-rc.21",
- "@strapi/generators": "5.0.0-rc.21",
- "@strapi/logger": "5.0.0-rc.21",
+ "@strapi/admin": "5.0.0-rc.23",
+ "@strapi/database": "5.0.0-rc.23",
+ "@strapi/generators": "5.0.0-rc.23",
+ "@strapi/logger": "5.0.0-rc.23",
  "@strapi/pack-up": "5.0.0",
- "@strapi/permissions": "5.0.0-rc.21",
- "@strapi/types": "5.0.0-rc.21",
- "@strapi/typescript-utils": "5.0.0-rc.21",
- "@strapi/utils": "5.0.0-rc.21",
+ "@strapi/permissions": "5.0.0-rc.23",
+ "@strapi/types": "5.0.0-rc.23",
+ "@strapi/typescript-utils": "5.0.0-rc.23",
+ "@strapi/utils": "5.0.0-rc.23",
  "bcryptjs": "2.4.3",
  "boxen": "5.1.2",
  "chalk": "4.1.2",
@@ -126,13 +126,13 @@
  "@types/node": "18.19.24",
  "@types/node-schedule": "2.1.7",
  "@types/statuses": "2.0.1",
- "eslint-config-custom": "5.0.0-rc.21",
+ "eslint-config-custom": "5.0.0-rc.23",
  "supertest": "6.3.3",
- "tsconfig": "5.0.0-rc.21"
+ "tsconfig": "5.0.0-rc.23"
  },
  "engines": {
  "node": ">=18.0.0 <=20.x.x",
  "npm": ">=6.0.0"
  },
- "gitHead": "b9dffe079793a0da8d0dd880395df4e90822db15"
+ "gitHead": "4eadad69ba4010caa82dd8f30eec0b5aa1cabde9"
  }