@strapi/core 0.0.0-next.b558642be856459a3e6c076f5d76fffbfc5fc5a1 → 0.0.0-next.bb6ff32f5168f3e380d3d9acba90a9d53bfcfb89
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/migrations/database/5.0.0-discard-drafts.d.ts.map +1 -1
- package/dist/migrations/database/5.0.0-discard-drafts.js +1 -1
- package/dist/migrations/database/5.0.0-discard-drafts.js.map +1 -1
- package/dist/migrations/database/5.0.0-discard-drafts.mjs +1 -1
- package/dist/migrations/database/5.0.0-discard-drafts.mjs.map +1 -1
- package/dist/services/cron.js +9 -4
- package/dist/services/cron.js.map +1 -1
- package/dist/services/cron.mjs +9 -4
- package/dist/services/cron.mjs.map +1 -1
- package/dist/services/document-service/transform/id-map.d.ts.map +1 -1
- package/dist/services/document-service/transform/id-map.js +13 -4
- package/dist/services/document-service/transform/id-map.js.map +1 -1
- package/dist/services/document-service/transform/id-map.mjs +14 -5
- package/dist/services/document-service/transform/id-map.mjs.map +1 -1
- package/dist/services/document-service/utils/populate.d.ts.map +1 -1
- package/dist/services/document-service/utils/populate.js +1 -1
- package/dist/services/document-service/utils/populate.js.map +1 -1
- package/dist/services/document-service/utils/populate.mjs +1 -1
- package/dist/services/document-service/utils/populate.mjs.map +1 -1
- package/dist/utils/transform-content-types-to-models.d.ts +28 -0
- package/dist/utils/transform-content-types-to-models.d.ts.map +1 -1
- package/package.json +14 -14
package/dist/migrations/database/5.0.0-discard-drafts.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"5.0.0-discard-drafts.d.ts","sourceRoot":"","sources":["../../../src/migrations/database/5.0.0-discard-drafts.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;GAWG;AAIH,OAAO,KAAK,EAAE,QAAQ,EAAE,SAAS,EAAE,MAAM,kBAAkB,CAAC;AAI5D,KAAK,eAAe,GAAG;IAAE,UAAU,EAAE,MAAM,CAAC;IAAC,MAAM,EAAE,MAAM,CAAA;CAAE,CAAC;AAC9D,KAAK,IAAI,GAAG,UAAU,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAqF3C;;;;;GAKG;AACH,wBAAuB,iBAAiB,CAAC,EACvC,EAAE,EACF,GAAG,EACH,GAAG,EACH,SAAgB,GACjB,EAAE;IACD,EAAE,EAAE,QAAQ,CAAC;IACb,GAAG,EAAE,IAAI,CAAC;IACV,GAAG,EAAE,MAAM,CAAC;IACZ,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB,oDAuBA;
+
{"version":3,"file":"5.0.0-discard-drafts.d.ts","sourceRoot":"","sources":["../../../src/migrations/database/5.0.0-discard-drafts.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;GAWG;AAIH,OAAO,KAAK,EAAE,QAAQ,EAAE,SAAS,EAAE,MAAM,kBAAkB,CAAC;AAI5D,KAAK,eAAe,GAAG;IAAE,UAAU,EAAE,MAAM,CAAC;IAAC,MAAM,EAAE,MAAM,CAAA;CAAE,CAAC;AAC9D,KAAK,IAAI,GAAG,UAAU,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAqF3C;;;;;GAKG;AACH,wBAAuB,iBAAiB,CAAC,EACvC,EAAE,EACF,GAAG,EACH,GAAG,EACH,SAAgB,GACjB,EAAE;IACD,EAAE,EAAE,QAAQ,CAAC;IACb,GAAG,EAAE,IAAI,CAAC;IACV,GAAG,EAAE,MAAM,CAAC;IACZ,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB,oDAuBA;AA2DD,eAAO,MAAM,qBAAqB,EAAE,SAQnC,CAAC"}
package/dist/migrations/database/5.0.0-discard-drafts.js
CHANGED
@@ -88,7 +88,7 @@ const migrateUp = async (trx, db) => {
      locale: entry.locale
    });
    for await (const batch of getBatchToDiscard({ db, trx, uid: model.uid })) {
-      await strapiUtils.async.map(batch, discardDraft, { concurrency:
+      await strapiUtils.async.map(batch, discardDraft, { concurrency: 1 });
    }
  }
};
package/dist/migrations/database/5.0.0-discard-drafts.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"5.0.0-discard-drafts.js","sources":["../../../src/migrations/database/5.0.0-discard-drafts.ts"],"sourcesContent":["/**\n * This migration is responsible for creating the draft counterpart for all the entries that were in a published state.\n *\n * In v4, entries could either be in a draft or published state, but not both at the same time.\n * In v5, we introduced the concept of document, and an entry can be in a draft or published state.\n *\n * This means the migration needs to create the draft counterpart if an entry was published.\n *\n * This migration performs the following steps:\n * 1. Creates draft entries for all published entries, without it's components, dynamic zones or relations.\n * 2. Using the document service, discard those same drafts to copy its relations.\n */\n\n/* eslint-disable no-continue */\nimport type { UID } from '@strapi/types';\nimport type { Database, Migration } from '@strapi/database';\nimport { async, contentTypes } from '@strapi/utils';\nimport { createDocumentService } from '../../services/document-service';\n\ntype DocumentVersion = { documentId: string; locale: string };\ntype Knex = Parameters<Migration['up']>[0];\n\n/**\n * Check if the model has draft and publish enabled.\n */\nconst hasDraftAndPublish = async (trx: Knex, meta: any) => {\n const hasTable = await trx.schema.hasTable(meta.tableName);\n\n if (!hasTable) {\n return false;\n }\n\n const uid = meta.uid as UID.ContentType;\n const model = strapi.getModel(uid);\n const hasDP = contentTypes.hasDraftAndPublish(model);\n if (!hasDP) {\n return false;\n }\n\n return true;\n};\n\n/**\n * Copy all the published entries to draft entries, without it's components, dynamic zones or relations.\n * This ensures all necessary draft's exist before copying it's relations.\n */\nasync function copyPublishedEntriesToDraft({\n db,\n trx,\n uid,\n}: {\n db: Database;\n trx: Knex;\n uid: string;\n}) {\n // Extract all scalar attributes to use in the insert query\n const meta = db.metadata.get(uid);\n\n // Get scalar attributes that will be copied over the new draft\n const scalarAttributes = Object.values(meta.attributes).reduce((acc, attribute: any) => {\n if (['id'].includes(attribute.columnName)) {\n return acc;\n }\n\n if (contentTypes.isScalarAttribute(attribute)) {\n acc.push(attribute.columnName);\n }\n\n return acc;\n }, [] as string[]);\n\n /**\n * Query to copy the published entries into draft entries.\n *\n * INSERT INTO tableName (columnName1, columnName2, columnName3, ...)\n * SELECT columnName1, columnName2, columnName3, ...\n * FROM tableName\n */\n await trx\n // INSERT INTO tableName (columnName1, columnName2, columnName3, ...)\n .into(\n trx.raw(`?? 
(${scalarAttributes.map(() => `??`).join(', ')})`, [\n meta.tableName,\n ...scalarAttributes,\n ])\n )\n .insert((subQb: typeof trx) => {\n // SELECT columnName1, columnName2, columnName3, ...\n subQb\n .select(\n ...scalarAttributes.map((att: string) => {\n // Override 'publishedAt' and 'updatedAt' attributes\n if (att === 'published_at') {\n return trx.raw('NULL as ??', 'published_at');\n }\n\n return att;\n })\n )\n .from(meta.tableName)\n // Only select entries that were published\n .whereNotNull('published_at');\n });\n}\n\n/**\n * Load a batch of versions to discard.\n *\n * Versions with only a draft version will be ignored.\n * Only versions with a published version (which always have a draft version) will be discarded.\n */\nexport async function* getBatchToDiscard({\n db,\n trx,\n uid,\n batchSize = 1000,\n}: {\n db: Database;\n trx: Knex;\n uid: string;\n batchSize?: number;\n}) {\n let offset = 0;\n let hasMore = true;\n\n while (hasMore) {\n // Look for the published entries to discard\n const batch: DocumentVersion[] = await db\n .queryBuilder(uid)\n .select(['id', 'documentId', 'locale'])\n .where({ publishedAt: { $ne: null } })\n .limit(batchSize)\n .offset(offset)\n .orderBy('id')\n .transacting(trx)\n .execute();\n\n if (batch.length < batchSize) {\n hasMore = false;\n }\n\n offset += batchSize;\n yield batch;\n }\n}\n\n/**\n * 2 pass migration to create the draft entries for all the published entries.\n * And then discard the drafts to copy the relations.\n */\nconst migrateUp = async (trx: Knex, db: Database) => {\n const dpModels = [];\n for (const meta of db.metadata.values()) {\n const hasDP = await hasDraftAndPublish(trx, meta);\n if (hasDP) {\n dpModels.push(meta);\n }\n }\n\n /**\n * Create plain draft entries for all the entries that were published.\n */\n for (const model of dpModels) {\n await copyPublishedEntriesToDraft({ db, trx, uid: model.uid });\n }\n\n /**\n * Discard the drafts will copy the relations from the published entries to the newly created drafts.\n *\n * Load a batch of entries (batched to prevent loading millions of rows at once ),\n * and discard them using the document service.\n *\n * NOTE: This is using a custom document service without any validations,\n * to prevent the migration from failing if users already had invalid data in V4.\n * E.g. @see https://github.com/strapi/strapi/issues/21583\n */\n const documentService = createDocumentService(strapi, {\n async validateEntityCreation(_, data) {\n return data;\n },\n async validateEntityUpdate(_, data) {\n // Data can be partially empty on partial updates\n // This migration doesn't trigger any update (or partial update),\n // so it's safe to return the data as is.\n return data as any;\n },\n });\n\n for (const model of dpModels) {\n const discardDraft = async (entry: DocumentVersion) =>\n documentService(model.uid as UID.ContentType).discardDraft({\n documentId: entry.documentId,\n locale: entry.locale,\n });\n\n for await (const batch of getBatchToDiscard({ db, trx, uid: model.uid })) {\n await async.map(batch, discardDraft, { concurrency:
+
{"version":3,"file":"5.0.0-discard-drafts.js","sources":["../../../src/migrations/database/5.0.0-discard-drafts.ts"],"sourcesContent":["/**\n * This migration is responsible for creating the draft counterpart for all the entries that were in a published state.\n *\n * In v4, entries could either be in a draft or published state, but not both at the same time.\n * In v5, we introduced the concept of document, and an entry can be in a draft or published state.\n *\n * This means the migration needs to create the draft counterpart if an entry was published.\n *\n * This migration performs the following steps:\n * 1. Creates draft entries for all published entries, without it's components, dynamic zones or relations.\n * 2. Using the document service, discard those same drafts to copy its relations.\n */\n\n/* eslint-disable no-continue */\nimport type { UID } from '@strapi/types';\nimport type { Database, Migration } from '@strapi/database';\nimport { async, contentTypes } from '@strapi/utils';\nimport { createDocumentService } from '../../services/document-service';\n\ntype DocumentVersion = { documentId: string; locale: string };\ntype Knex = Parameters<Migration['up']>[0];\n\n/**\n * Check if the model has draft and publish enabled.\n */\nconst hasDraftAndPublish = async (trx: Knex, meta: any) => {\n const hasTable = await trx.schema.hasTable(meta.tableName);\n\n if (!hasTable) {\n return false;\n }\n\n const uid = meta.uid as UID.ContentType;\n const model = strapi.getModel(uid);\n const hasDP = contentTypes.hasDraftAndPublish(model);\n if (!hasDP) {\n return false;\n }\n\n return true;\n};\n\n/**\n * Copy all the published entries to draft entries, without it's components, dynamic zones or relations.\n * This ensures all necessary draft's exist before copying it's relations.\n */\nasync function copyPublishedEntriesToDraft({\n db,\n trx,\n uid,\n}: {\n db: Database;\n trx: Knex;\n uid: string;\n}) {\n // Extract all scalar attributes to use in the insert query\n const meta = db.metadata.get(uid);\n\n // Get scalar attributes that will be copied over the new draft\n const scalarAttributes = Object.values(meta.attributes).reduce((acc, attribute: any) => {\n if (['id'].includes(attribute.columnName)) {\n return acc;\n }\n\n if (contentTypes.isScalarAttribute(attribute)) {\n acc.push(attribute.columnName);\n }\n\n return acc;\n }, [] as string[]);\n\n /**\n * Query to copy the published entries into draft entries.\n *\n * INSERT INTO tableName (columnName1, columnName2, columnName3, ...)\n * SELECT columnName1, columnName2, columnName3, ...\n * FROM tableName\n */\n await trx\n // INSERT INTO tableName (columnName1, columnName2, columnName3, ...)\n .into(\n trx.raw(`?? 
(${scalarAttributes.map(() => `??`).join(', ')})`, [\n meta.tableName,\n ...scalarAttributes,\n ])\n )\n .insert((subQb: typeof trx) => {\n // SELECT columnName1, columnName2, columnName3, ...\n subQb\n .select(\n ...scalarAttributes.map((att: string) => {\n // Override 'publishedAt' and 'updatedAt' attributes\n if (att === 'published_at') {\n return trx.raw('NULL as ??', 'published_at');\n }\n\n return att;\n })\n )\n .from(meta.tableName)\n // Only select entries that were published\n .whereNotNull('published_at');\n });\n}\n\n/**\n * Load a batch of versions to discard.\n *\n * Versions with only a draft version will be ignored.\n * Only versions with a published version (which always have a draft version) will be discarded.\n */\nexport async function* getBatchToDiscard({\n db,\n trx,\n uid,\n batchSize = 1000,\n}: {\n db: Database;\n trx: Knex;\n uid: string;\n batchSize?: number;\n}) {\n let offset = 0;\n let hasMore = true;\n\n while (hasMore) {\n // Look for the published entries to discard\n const batch: DocumentVersion[] = await db\n .queryBuilder(uid)\n .select(['id', 'documentId', 'locale'])\n .where({ publishedAt: { $ne: null } })\n .limit(batchSize)\n .offset(offset)\n .orderBy('id')\n .transacting(trx)\n .execute();\n\n if (batch.length < batchSize) {\n hasMore = false;\n }\n\n offset += batchSize;\n yield batch;\n }\n}\n\n/**\n * 2 pass migration to create the draft entries for all the published entries.\n * And then discard the drafts to copy the relations.\n */\nconst migrateUp = async (trx: Knex, db: Database) => {\n const dpModels = [];\n for (const meta of db.metadata.values()) {\n const hasDP = await hasDraftAndPublish(trx, meta);\n if (hasDP) {\n dpModels.push(meta);\n }\n }\n\n /**\n * Create plain draft entries for all the entries that were published.\n */\n for (const model of dpModels) {\n await copyPublishedEntriesToDraft({ db, trx, uid: model.uid });\n }\n\n /**\n * Discard the drafts will copy the relations from the published entries to the newly created drafts.\n *\n * Load a batch of entries (batched to prevent loading millions of rows at once ),\n * and discard them using the document service.\n *\n * NOTE: This is using a custom document service without any validations,\n * to prevent the migration from failing if users already had invalid data in V4.\n * E.g. 
@see https://github.com/strapi/strapi/issues/21583\n */\n const documentService = createDocumentService(strapi, {\n async validateEntityCreation(_, data) {\n return data;\n },\n async validateEntityUpdate(_, data) {\n // Data can be partially empty on partial updates\n // This migration doesn't trigger any update (or partial update),\n // so it's safe to return the data as is.\n return data as any;\n },\n });\n\n for (const model of dpModels) {\n const discardDraft = async (entry: DocumentVersion) =>\n documentService(model.uid as UID.ContentType).discardDraft({\n documentId: entry.documentId,\n locale: entry.locale,\n });\n\n for await (const batch of getBatchToDiscard({ db, trx, uid: model.uid })) {\n // NOTE: concurrency had to be disabled to prevent a race condition with self-references\n // TODO: improve performance in a safe way\n await async.map(batch, discardDraft, { concurrency: 1 });\n }\n }\n};\n\nexport const discardDocumentDrafts: Migration = {\n name: 'core::5.0.0-discard-drafts',\n async up(trx, db) {\n await migrateUp(trx, db);\n },\n async down() {\n throw new Error('not implemented');\n },\n};\n"],"names":["contentTypes","createDocumentService","async"],"mappings":";;;;AAyBA,MAAM,qBAAqB,OAAO,KAAW,SAAc;AACzD,QAAM,WAAW,MAAM,IAAI,OAAO,SAAS,KAAK,SAAS;AAEzD,MAAI,CAAC,UAAU;AACN,WAAA;AAAA,EACT;AAEA,QAAM,MAAM,KAAK;AACX,QAAA,QAAQ,OAAO,SAAS,GAAG;AAC3B,QAAA,QAAQA,YAAAA,aAAa,mBAAmB,KAAK;AACnD,MAAI,CAAC,OAAO;AACH,WAAA;AAAA,EACT;AAEO,SAAA;AACT;AAMA,eAAe,4BAA4B;AAAA,EACzC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AAED,QAAM,OAAO,GAAG,SAAS,IAAI,GAAG;AAG1B,QAAA,mBAAmB,OAAO,OAAO,KAAK,UAAU,EAAE,OAAO,CAAC,KAAK,cAAmB;AACtF,QAAI,CAAC,IAAI,EAAE,SAAS,UAAU,UAAU,GAAG;AAClC,aAAA;AAAA,IACT;AAEI,QAAAA,YAAA,aAAa,kBAAkB,SAAS,GAAG;AACzC,UAAA,KAAK,UAAU,UAAU;AAAA,IAC/B;AAEO,WAAA;AAAA,EACT,GAAG,CAAc,CAAA;AASjB,QAAM,IAEH;AAAA,IACC,IAAI,IAAI,OAAO,iBAAiB,IAAI,MAAM,IAAI,EAAE,KAAK,IAAI,CAAC,KAAK;AAAA,MAC7D,KAAK;AAAA,MACL,GAAG;AAAA,IAAA,CACJ;AAAA,EAAA,EAEF,OAAO,CAAC,UAAsB;AAG1B,UAAA;AAAA,MACC,GAAG,iBAAiB,IAAI,CAAC,QAAgB;AAEvC,YAAI,QAAQ,gBAAgB;AACnB,iBAAA,IAAI,IAAI,cAAc,cAAc;AAAA,QAC7C;AAEO,eAAA;AAAA,MAAA,CACR;AAAA,IAAA,EAEF,KAAK,KAAK,SAAS,EAEnB,aAAa,cAAc;AAAA,EAAA,CAC/B;AACL;AAQA,gBAAuB,kBAAkB;AAAA,EACvC;AAAA,EACA;AAAA,EACA;AAAA,EACA,YAAY;AACd,GAKG;AACD,MAAI,SAAS;AACb,MAAI,UAAU;AAEd,SAAO,SAAS;AAEd,UAAM,QAA2B,MAAM,GACpC,aAAa,GAAG,EAChB,OAAO,CAAC,MAAM,cAAc,QAAQ,CAAC,EACrC,MAAM,EAAE,aAAa,EAAE,KAAK,KAAO,EAAA,CAAC,EACpC,MAAM,SAAS,EACf,OAAO,MAAM,EACb,QAAQ,IAAI,EACZ,YAAY,GAAG,EACf,QAAQ;AAEP,QAAA,MAAM,SAAS,WAAW;AAClB,gBAAA;AAAA,IACZ;AAEU,cAAA;AACJ,UAAA;AAAA,EACR;AACF;AAMA,MAAM,YAAY,OAAO,KAAW,OAAiB;AACnD,QAAM,WAAW,CAAA;AACjB,aAAW,QAAQ,GAAG,SAAS,OAAA,GAAU;AACvC,UAAM,QAAQ,MAAM,mBAAmB,KAAK,IAAI;AAChD,QAAI,OAAO;AACT,eAAS,KAAK,IAAI;AAAA,IACpB;AAAA,EACF;AAKA,aAAW,SAAS,UAAU;AAC5B,UAAM,4BAA4B,EAAE,IAAI,KAAK,KAAK,MAAM,KAAK;AAAA,EAC/D;AAYM,QAAA,kBAAkBC,4BAAsB,QAAQ;AAAA,IACpD,MAAM,uBAAuB,GAAG,MAAM;AAC7B,aAAA;AAAA,IACT;AAAA,IACA,MAAM,qBAAqB,GAAG,MAAM;AAI3B,aAAA;AAAA,IACT;AAAA,EAAA,CACD;AAED,aAAW,SAAS,UAAU;AAC5B,UAAM,eAAe,OAAO,UAC1B,gBAAgB,MAAM,GAAsB,EAAE,aAAa;AAAA,MACzD,YAAY,MAAM;AAAA,MAClB,QAAQ,MAAM;AAAA,IAAA,CACf;AAEc,qBAAA,SAAS,kBAAkB,EAAE,IAAI,KAAK,KAAK,MAAM,IAAI,CAAC,GAAG;AAGxE,YAAMC,YAAAA,MAAM,IAAI,OAAO,cAAc,EAAE,aAAa,GAAG;AAAA,IACzD;AAAA,EACF;AACF;AAEO,MAAM,wBAAmC;AAAA,EAC9C,MAAM;AAAA,EACN,MAAM,GAAG,KAAK,IAAI;AACV,UAAA,UAAU,KAAK,EAAE;AAAA,EACzB;AAAA,EACA,MAAM,OAAO;AACL,UAAA,IAAI,MAAM,iBAAiB;AAAA,EACnC;AACF;;;"}
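The embedded source above shows how this migration pages through published entries: `getBatchToDiscard` is an async generator that yields batches of up to `batchSize` rows, and the caller drains one batch at a time with `for await`. Below is a self-contained sketch of that pattern, using a plain array instead of the Strapi query builder; the names and data are illustrative, not part of the package.

```ts
// Minimal stand-in for the paged async generator used by the migration.
async function* getBatches<T>(rows: T[], batchSize = 1000): AsyncGenerator<T[]> {
  let offset = 0;
  let hasMore = true;
  while (hasMore) {
    const batch = rows.slice(offset, offset + batchSize);
    // A short (or empty) page means the source is exhausted.
    if (batch.length < batchSize) {
      hasMore = false;
    }
    offset += batchSize;
    yield batch;
  }
}

// The caller processes one page before pulling the next, like the for await loop in the diff.
async function discardAll(documentIds: string[]) {
  for await (const batch of getBatches(documentIds, 2)) {
    for (const documentId of batch) {
      console.log('discarding draft for', documentId);
    }
  }
}

discardAll(['a', 'b', 'c', 'd', 'e']);
```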
package/dist/migrations/database/5.0.0-discard-drafts.mjs
CHANGED
@@ -86,7 +86,7 @@ const migrateUp = async (trx, db) => {
      locale: entry.locale
    });
    for await (const batch of getBatchToDiscard({ db, trx, uid: model.uid })) {
-      await async.map(batch, discardDraft, { concurrency:
+      await async.map(batch, discardDraft, { concurrency: 1 });
    }
  }
};
package/dist/migrations/database/5.0.0-discard-drafts.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"5.0.0-discard-drafts.mjs","sources":["../../../src/migrations/database/5.0.0-discard-drafts.ts"],"sourcesContent":["/**\n * This migration is responsible for creating the draft counterpart for all the entries that were in a published state.\n *\n * In v4, entries could either be in a draft or published state, but not both at the same time.\n * In v5, we introduced the concept of document, and an entry can be in a draft or published state.\n *\n * This means the migration needs to create the draft counterpart if an entry was published.\n *\n * This migration performs the following steps:\n * 1. Creates draft entries for all published entries, without it's components, dynamic zones or relations.\n * 2. Using the document service, discard those same drafts to copy its relations.\n */\n\n/* eslint-disable no-continue */\nimport type { UID } from '@strapi/types';\nimport type { Database, Migration } from '@strapi/database';\nimport { async, contentTypes } from '@strapi/utils';\nimport { createDocumentService } from '../../services/document-service';\n\ntype DocumentVersion = { documentId: string; locale: string };\ntype Knex = Parameters<Migration['up']>[0];\n\n/**\n * Check if the model has draft and publish enabled.\n */\nconst hasDraftAndPublish = async (trx: Knex, meta: any) => {\n const hasTable = await trx.schema.hasTable(meta.tableName);\n\n if (!hasTable) {\n return false;\n }\n\n const uid = meta.uid as UID.ContentType;\n const model = strapi.getModel(uid);\n const hasDP = contentTypes.hasDraftAndPublish(model);\n if (!hasDP) {\n return false;\n }\n\n return true;\n};\n\n/**\n * Copy all the published entries to draft entries, without it's components, dynamic zones or relations.\n * This ensures all necessary draft's exist before copying it's relations.\n */\nasync function copyPublishedEntriesToDraft({\n db,\n trx,\n uid,\n}: {\n db: Database;\n trx: Knex;\n uid: string;\n}) {\n // Extract all scalar attributes to use in the insert query\n const meta = db.metadata.get(uid);\n\n // Get scalar attributes that will be copied over the new draft\n const scalarAttributes = Object.values(meta.attributes).reduce((acc, attribute: any) => {\n if (['id'].includes(attribute.columnName)) {\n return acc;\n }\n\n if (contentTypes.isScalarAttribute(attribute)) {\n acc.push(attribute.columnName);\n }\n\n return acc;\n }, [] as string[]);\n\n /**\n * Query to copy the published entries into draft entries.\n *\n * INSERT INTO tableName (columnName1, columnName2, columnName3, ...)\n * SELECT columnName1, columnName2, columnName3, ...\n * FROM tableName\n */\n await trx\n // INSERT INTO tableName (columnName1, columnName2, columnName3, ...)\n .into(\n trx.raw(`?? 
(${scalarAttributes.map(() => `??`).join(', ')})`, [\n meta.tableName,\n ...scalarAttributes,\n ])\n )\n .insert((subQb: typeof trx) => {\n // SELECT columnName1, columnName2, columnName3, ...\n subQb\n .select(\n ...scalarAttributes.map((att: string) => {\n // Override 'publishedAt' and 'updatedAt' attributes\n if (att === 'published_at') {\n return trx.raw('NULL as ??', 'published_at');\n }\n\n return att;\n })\n )\n .from(meta.tableName)\n // Only select entries that were published\n .whereNotNull('published_at');\n });\n}\n\n/**\n * Load a batch of versions to discard.\n *\n * Versions with only a draft version will be ignored.\n * Only versions with a published version (which always have a draft version) will be discarded.\n */\nexport async function* getBatchToDiscard({\n db,\n trx,\n uid,\n batchSize = 1000,\n}: {\n db: Database;\n trx: Knex;\n uid: string;\n batchSize?: number;\n}) {\n let offset = 0;\n let hasMore = true;\n\n while (hasMore) {\n // Look for the published entries to discard\n const batch: DocumentVersion[] = await db\n .queryBuilder(uid)\n .select(['id', 'documentId', 'locale'])\n .where({ publishedAt: { $ne: null } })\n .limit(batchSize)\n .offset(offset)\n .orderBy('id')\n .transacting(trx)\n .execute();\n\n if (batch.length < batchSize) {\n hasMore = false;\n }\n\n offset += batchSize;\n yield batch;\n }\n}\n\n/**\n * 2 pass migration to create the draft entries for all the published entries.\n * And then discard the drafts to copy the relations.\n */\nconst migrateUp = async (trx: Knex, db: Database) => {\n const dpModels = [];\n for (const meta of db.metadata.values()) {\n const hasDP = await hasDraftAndPublish(trx, meta);\n if (hasDP) {\n dpModels.push(meta);\n }\n }\n\n /**\n * Create plain draft entries for all the entries that were published.\n */\n for (const model of dpModels) {\n await copyPublishedEntriesToDraft({ db, trx, uid: model.uid });\n }\n\n /**\n * Discard the drafts will copy the relations from the published entries to the newly created drafts.\n *\n * Load a batch of entries (batched to prevent loading millions of rows at once ),\n * and discard them using the document service.\n *\n * NOTE: This is using a custom document service without any validations,\n * to prevent the migration from failing if users already had invalid data in V4.\n * E.g. @see https://github.com/strapi/strapi/issues/21583\n */\n const documentService = createDocumentService(strapi, {\n async validateEntityCreation(_, data) {\n return data;\n },\n async validateEntityUpdate(_, data) {\n // Data can be partially empty on partial updates\n // This migration doesn't trigger any update (or partial update),\n // so it's safe to return the data as is.\n return data as any;\n },\n });\n\n for (const model of dpModels) {\n const discardDraft = async (entry: DocumentVersion) =>\n documentService(model.uid as UID.ContentType).discardDraft({\n documentId: entry.documentId,\n locale: entry.locale,\n });\n\n for await (const batch of getBatchToDiscard({ db, trx, uid: model.uid })) {\n await async.map(batch, discardDraft, { concurrency:
+
{"version":3,"file":"5.0.0-discard-drafts.mjs","sources":["../../../src/migrations/database/5.0.0-discard-drafts.ts"],"sourcesContent":["/**\n * This migration is responsible for creating the draft counterpart for all the entries that were in a published state.\n *\n * In v4, entries could either be in a draft or published state, but not both at the same time.\n * In v5, we introduced the concept of document, and an entry can be in a draft or published state.\n *\n * This means the migration needs to create the draft counterpart if an entry was published.\n *\n * This migration performs the following steps:\n * 1. Creates draft entries for all published entries, without it's components, dynamic zones or relations.\n * 2. Using the document service, discard those same drafts to copy its relations.\n */\n\n/* eslint-disable no-continue */\nimport type { UID } from '@strapi/types';\nimport type { Database, Migration } from '@strapi/database';\nimport { async, contentTypes } from '@strapi/utils';\nimport { createDocumentService } from '../../services/document-service';\n\ntype DocumentVersion = { documentId: string; locale: string };\ntype Knex = Parameters<Migration['up']>[0];\n\n/**\n * Check if the model has draft and publish enabled.\n */\nconst hasDraftAndPublish = async (trx: Knex, meta: any) => {\n const hasTable = await trx.schema.hasTable(meta.tableName);\n\n if (!hasTable) {\n return false;\n }\n\n const uid = meta.uid as UID.ContentType;\n const model = strapi.getModel(uid);\n const hasDP = contentTypes.hasDraftAndPublish(model);\n if (!hasDP) {\n return false;\n }\n\n return true;\n};\n\n/**\n * Copy all the published entries to draft entries, without it's components, dynamic zones or relations.\n * This ensures all necessary draft's exist before copying it's relations.\n */\nasync function copyPublishedEntriesToDraft({\n db,\n trx,\n uid,\n}: {\n db: Database;\n trx: Knex;\n uid: string;\n}) {\n // Extract all scalar attributes to use in the insert query\n const meta = db.metadata.get(uid);\n\n // Get scalar attributes that will be copied over the new draft\n const scalarAttributes = Object.values(meta.attributes).reduce((acc, attribute: any) => {\n if (['id'].includes(attribute.columnName)) {\n return acc;\n }\n\n if (contentTypes.isScalarAttribute(attribute)) {\n acc.push(attribute.columnName);\n }\n\n return acc;\n }, [] as string[]);\n\n /**\n * Query to copy the published entries into draft entries.\n *\n * INSERT INTO tableName (columnName1, columnName2, columnName3, ...)\n * SELECT columnName1, columnName2, columnName3, ...\n * FROM tableName\n */\n await trx\n // INSERT INTO tableName (columnName1, columnName2, columnName3, ...)\n .into(\n trx.raw(`?? 
(${scalarAttributes.map(() => `??`).join(', ')})`, [\n meta.tableName,\n ...scalarAttributes,\n ])\n )\n .insert((subQb: typeof trx) => {\n // SELECT columnName1, columnName2, columnName3, ...\n subQb\n .select(\n ...scalarAttributes.map((att: string) => {\n // Override 'publishedAt' and 'updatedAt' attributes\n if (att === 'published_at') {\n return trx.raw('NULL as ??', 'published_at');\n }\n\n return att;\n })\n )\n .from(meta.tableName)\n // Only select entries that were published\n .whereNotNull('published_at');\n });\n}\n\n/**\n * Load a batch of versions to discard.\n *\n * Versions with only a draft version will be ignored.\n * Only versions with a published version (which always have a draft version) will be discarded.\n */\nexport async function* getBatchToDiscard({\n db,\n trx,\n uid,\n batchSize = 1000,\n}: {\n db: Database;\n trx: Knex;\n uid: string;\n batchSize?: number;\n}) {\n let offset = 0;\n let hasMore = true;\n\n while (hasMore) {\n // Look for the published entries to discard\n const batch: DocumentVersion[] = await db\n .queryBuilder(uid)\n .select(['id', 'documentId', 'locale'])\n .where({ publishedAt: { $ne: null } })\n .limit(batchSize)\n .offset(offset)\n .orderBy('id')\n .transacting(trx)\n .execute();\n\n if (batch.length < batchSize) {\n hasMore = false;\n }\n\n offset += batchSize;\n yield batch;\n }\n}\n\n/**\n * 2 pass migration to create the draft entries for all the published entries.\n * And then discard the drafts to copy the relations.\n */\nconst migrateUp = async (trx: Knex, db: Database) => {\n const dpModels = [];\n for (const meta of db.metadata.values()) {\n const hasDP = await hasDraftAndPublish(trx, meta);\n if (hasDP) {\n dpModels.push(meta);\n }\n }\n\n /**\n * Create plain draft entries for all the entries that were published.\n */\n for (const model of dpModels) {\n await copyPublishedEntriesToDraft({ db, trx, uid: model.uid });\n }\n\n /**\n * Discard the drafts will copy the relations from the published entries to the newly created drafts.\n *\n * Load a batch of entries (batched to prevent loading millions of rows at once ),\n * and discard them using the document service.\n *\n * NOTE: This is using a custom document service without any validations,\n * to prevent the migration from failing if users already had invalid data in V4.\n * E.g. 
@see https://github.com/strapi/strapi/issues/21583\n */\n const documentService = createDocumentService(strapi, {\n async validateEntityCreation(_, data) {\n return data;\n },\n async validateEntityUpdate(_, data) {\n // Data can be partially empty on partial updates\n // This migration doesn't trigger any update (or partial update),\n // so it's safe to return the data as is.\n return data as any;\n },\n });\n\n for (const model of dpModels) {\n const discardDraft = async (entry: DocumentVersion) =>\n documentService(model.uid as UID.ContentType).discardDraft({\n documentId: entry.documentId,\n locale: entry.locale,\n });\n\n for await (const batch of getBatchToDiscard({ db, trx, uid: model.uid })) {\n // NOTE: concurrency had to be disabled to prevent a race condition with self-references\n // TODO: improve performance in a safe way\n await async.map(batch, discardDraft, { concurrency: 1 });\n }\n }\n};\n\nexport const discardDocumentDrafts: Migration = {\n name: 'core::5.0.0-discard-drafts',\n async up(trx, db) {\n await migrateUp(trx, db);\n },\n async down() {\n throw new Error('not implemented');\n },\n};\n"],"names":[],"mappings":";;AAyBA,MAAM,qBAAqB,OAAO,KAAW,SAAc;AACzD,QAAM,WAAW,MAAM,IAAI,OAAO,SAAS,KAAK,SAAS;AAEzD,MAAI,CAAC,UAAU;AACN,WAAA;AAAA,EACT;AAEA,QAAM,MAAM,KAAK;AACX,QAAA,QAAQ,OAAO,SAAS,GAAG;AAC3B,QAAA,QAAQ,aAAa,mBAAmB,KAAK;AACnD,MAAI,CAAC,OAAO;AACH,WAAA;AAAA,EACT;AAEO,SAAA;AACT;AAMA,eAAe,4BAA4B;AAAA,EACzC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AAED,QAAM,OAAO,GAAG,SAAS,IAAI,GAAG;AAG1B,QAAA,mBAAmB,OAAO,OAAO,KAAK,UAAU,EAAE,OAAO,CAAC,KAAK,cAAmB;AACtF,QAAI,CAAC,IAAI,EAAE,SAAS,UAAU,UAAU,GAAG;AAClC,aAAA;AAAA,IACT;AAEI,QAAA,aAAa,kBAAkB,SAAS,GAAG;AACzC,UAAA,KAAK,UAAU,UAAU;AAAA,IAC/B;AAEO,WAAA;AAAA,EACT,GAAG,CAAc,CAAA;AASjB,QAAM,IAEH;AAAA,IACC,IAAI,IAAI,OAAO,iBAAiB,IAAI,MAAM,IAAI,EAAE,KAAK,IAAI,CAAC,KAAK;AAAA,MAC7D,KAAK;AAAA,MACL,GAAG;AAAA,IAAA,CACJ;AAAA,EAAA,EAEF,OAAO,CAAC,UAAsB;AAG1B,UAAA;AAAA,MACC,GAAG,iBAAiB,IAAI,CAAC,QAAgB;AAEvC,YAAI,QAAQ,gBAAgB;AACnB,iBAAA,IAAI,IAAI,cAAc,cAAc;AAAA,QAC7C;AAEO,eAAA;AAAA,MAAA,CACR;AAAA,IAAA,EAEF,KAAK,KAAK,SAAS,EAEnB,aAAa,cAAc;AAAA,EAAA,CAC/B;AACL;AAQA,gBAAuB,kBAAkB;AAAA,EACvC;AAAA,EACA;AAAA,EACA;AAAA,EACA,YAAY;AACd,GAKG;AACD,MAAI,SAAS;AACb,MAAI,UAAU;AAEd,SAAO,SAAS;AAEd,UAAM,QAA2B,MAAM,GACpC,aAAa,GAAG,EAChB,OAAO,CAAC,MAAM,cAAc,QAAQ,CAAC,EACrC,MAAM,EAAE,aAAa,EAAE,KAAK,KAAO,EAAA,CAAC,EACpC,MAAM,SAAS,EACf,OAAO,MAAM,EACb,QAAQ,IAAI,EACZ,YAAY,GAAG,EACf,QAAQ;AAEP,QAAA,MAAM,SAAS,WAAW;AAClB,gBAAA;AAAA,IACZ;AAEU,cAAA;AACJ,UAAA;AAAA,EACR;AACF;AAMA,MAAM,YAAY,OAAO,KAAW,OAAiB;AACnD,QAAM,WAAW,CAAA;AACjB,aAAW,QAAQ,GAAG,SAAS,OAAA,GAAU;AACvC,UAAM,QAAQ,MAAM,mBAAmB,KAAK,IAAI;AAChD,QAAI,OAAO;AACT,eAAS,KAAK,IAAI;AAAA,IACpB;AAAA,EACF;AAKA,aAAW,SAAS,UAAU;AAC5B,UAAM,4BAA4B,EAAE,IAAI,KAAK,KAAK,MAAM,KAAK;AAAA,EAC/D;AAYM,QAAA,kBAAkB,sBAAsB,QAAQ;AAAA,IACpD,MAAM,uBAAuB,GAAG,MAAM;AAC7B,aAAA;AAAA,IACT;AAAA,IACA,MAAM,qBAAqB,GAAG,MAAM;AAI3B,aAAA;AAAA,IACT;AAAA,EAAA,CACD;AAED,aAAW,SAAS,UAAU;AAC5B,UAAM,eAAe,OAAO,UAC1B,gBAAgB,MAAM,GAAsB,EAAE,aAAa;AAAA,MACzD,YAAY,MAAM;AAAA,MAClB,QAAQ,MAAM;AAAA,IAAA,CACf;AAEc,qBAAA,SAAS,kBAAkB,EAAE,IAAI,KAAK,KAAK,MAAM,IAAI,CAAC,GAAG;AAGxE,YAAM,MAAM,IAAI,OAAO,cAAc,EAAE,aAAa,GAAG;AAAA,IACzD;AAAA,EACF;AACF;AAEO,MAAM,wBAAmC;AAAA,EAC9C,MAAM;AAAA,EACN,MAAM,GAAG,KAAK,IAAI;AACV,UAAA,UAAU,KAAK,EAAE;AAAA,EACzB;AAAA,EACA,MAAM,OAAO;AACL,UAAA,IAAI,MAAM,iBAAiB;AAAA,EACnC;AACF;"}
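For context on the `concurrency: 1` change in the two hunks above: the discard step now processes each batch entry strictly sequentially, which the new embedded source attributes to a race condition with self-references. The sketch below shows what a concurrency-limited map does in general; the helper is illustrative only and is not the actual `async.map` from `@strapi/utils`.

```ts
// Illustrative concurrency-limited map. With limit = 1 it degenerates to
// strictly sequential processing, which is what the migration now relies on.
async function mapWithConcurrency<T, R>(
  items: T[],
  fn: (item: T) => Promise<R>,
  limit: number
): Promise<R[]> {
  const results: R[] = new Array(items.length);
  let cursor = 0;

  // Each worker pulls the next unclaimed index; at most `limit` run at once.
  const worker = async () => {
    while (cursor < items.length) {
      const index = cursor;
      cursor += 1;
      results[index] = await fn(items[index]);
    }
  };

  await Promise.all(Array.from({ length: Math.min(limit, items.length) }, worker));
  return results;
}

// Sequential, like the migration's `{ concurrency: 1 }`:
// await mapWithConcurrency(batch, discardDraft, 1);
```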
package/dist/services/cron.js
CHANGED
@@ -34,11 +34,16 @@ const createCronService = () => {
      return this;
    },
    remove(name) {
-      if (!name)
+      if (!name)
        throw new Error("You must provide a name to remove a cron job.");
-      }
-
-
+      const matchingJobsSpecs = jobsSpecs.filter(({ name: jobSpecName }, index) => {
+        if (jobSpecName === name) {
+          jobsSpecs.splice(index, 1);
+          return true;
+        }
+        return false;
+      });
+      matchingJobsSpecs.forEach(({ job }) => job.cancel());
      return this;
    },
    start() {
package/dist/services/cron.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"cron.js","sources":["../../src/services/cron.ts"],"sourcesContent":["import { Job, Spec } from 'node-schedule';\nimport { isFunction } from 'lodash/fp';\nimport type { Core } from '@strapi/types';\n\ninterface JobSpec {\n job: Job;\n options: Spec;\n name: string | null;\n}\n\ntype TaskFn = ({ strapi }: { strapi: Core.Strapi }, ...args: unknown[]) => Promise<unknown>;\n\ntype Task =\n | TaskFn\n | {\n task: TaskFn;\n options: Spec;\n };\n\ninterface Tasks {\n [key: string]: Task;\n}\n\nconst createCronService = () => {\n let jobsSpecs: JobSpec[] = [];\n let running = false;\n\n return {\n add(tasks: Tasks = {}) {\n for (const taskExpression of Object.keys(tasks)) {\n const taskValue = tasks[taskExpression];\n\n let fn: TaskFn;\n let options: Spec;\n let taskName: string | null;\n if (isFunction(taskValue)) {\n // don't use task name if key is the rule\n taskName = null;\n fn = taskValue.bind(tasks);\n options = taskExpression;\n } else if (isFunction(taskValue.task)) {\n // set task name if key is not the rule\n taskName = taskExpression;\n fn = taskValue.task.bind(taskValue);\n options = taskValue.options;\n } else {\n throw new Error(\n `Could not schedule a cron job for \"${taskExpression}\": no function found.`\n );\n }\n\n const fnWithStrapi = (...args: unknown[]) => fn({ strapi }, ...args);\n\n // const job = new Job(null, fnWithStrapi);\n const job = new Job(fnWithStrapi);\n jobsSpecs.push({ job, options, name: taskName });\n\n if (running) {\n job.schedule(options);\n }\n }\n return this;\n },\n\n remove(name: string) {\n if (!name)
+
{"version":3,"file":"cron.js","sources":["../../src/services/cron.ts"],"sourcesContent":["import { Job, Spec } from 'node-schedule';\nimport { isFunction } from 'lodash/fp';\nimport type { Core } from '@strapi/types';\n\ninterface JobSpec {\n job: Job;\n options: Spec;\n name: string | null;\n}\n\ntype TaskFn = ({ strapi }: { strapi: Core.Strapi }, ...args: unknown[]) => Promise<unknown>;\n\ntype Task =\n | TaskFn\n | {\n task: TaskFn;\n options: Spec;\n };\n\ninterface Tasks {\n [key: string]: Task;\n}\n\nconst createCronService = () => {\n let jobsSpecs: JobSpec[] = [];\n let running = false;\n\n return {\n add(tasks: Tasks = {}) {\n for (const taskExpression of Object.keys(tasks)) {\n const taskValue = tasks[taskExpression];\n\n let fn: TaskFn;\n let options: Spec;\n let taskName: string | null;\n if (isFunction(taskValue)) {\n // don't use task name if key is the rule\n taskName = null;\n fn = taskValue.bind(tasks);\n options = taskExpression;\n } else if (isFunction(taskValue.task)) {\n // set task name if key is not the rule\n taskName = taskExpression;\n fn = taskValue.task.bind(taskValue);\n options = taskValue.options;\n } else {\n throw new Error(\n `Could not schedule a cron job for \"${taskExpression}\": no function found.`\n );\n }\n\n const fnWithStrapi = (...args: unknown[]) => fn({ strapi }, ...args);\n\n // const job = new Job(null, fnWithStrapi);\n const job = new Job(fnWithStrapi);\n jobsSpecs.push({ job, options, name: taskName });\n\n if (running) {\n job.schedule(options);\n }\n }\n return this;\n },\n\n remove(name: string) {\n if (!name) throw new Error('You must provide a name to remove a cron job.');\n const matchingJobsSpecs = jobsSpecs.filter(({ name: jobSpecName }, index) => {\n if (jobSpecName === name) {\n jobsSpecs.splice(index, 1);\n return true;\n }\n return false;\n });\n matchingJobsSpecs.forEach(({ job }) => job.cancel());\n return this;\n },\n\n start() {\n jobsSpecs.forEach(({ job, options }) => job.schedule(options));\n running = true;\n return this;\n },\n\n stop() {\n jobsSpecs.forEach(({ job }) => job.cancel());\n running = false;\n return this;\n },\n\n destroy() {\n this.stop();\n jobsSpecs = [];\n return this;\n },\n jobs: jobsSpecs,\n };\n};\n\nexport default createCronService;\n"],"names":["isFunction","Job"],"mappings":";;;AAuBA,MAAM,oBAAoB,MAAM;AAC9B,MAAI,YAAuB,CAAA;AAC3B,MAAI,UAAU;AAEP,SAAA;AAAA,IACL,IAAI,QAAe,IAAI;AACrB,iBAAW,kBAAkB,OAAO,KAAK,KAAK,GAAG;AACzC,cAAA,YAAY,MAAM,cAAc;AAElC,YAAA;AACA,YAAA;AACA,YAAA;AACA,YAAAA,GAAAA,WAAW,SAAS,GAAG;AAEd,qBAAA;AACN,eAAA,UAAU,KAAK,KAAK;AACf,oBAAA;AAAA,QACD,WAAAA,GAAA,WAAW,UAAU,IAAI,GAAG;AAE1B,qBAAA;AACN,eAAA,UAAU,KAAK,KAAK,SAAS;AAClC,oBAAU,UAAU;AAAA,QAAA,OACf;AACL,gBAAM,IAAI;AAAA,YACR,sCAAsC,cAAc;AAAA,UAAA;AAAA,QAExD;AAEM,cAAA,eAAe,IAAI,SAAoB,GAAG,EAAE,OAAO,GAAG,GAAG,IAAI;AAG7D,cAAA,MAAM,IAAIC,iBAAI,YAAY;AAChC,kBAAU,KAAK,EAAE,KAAK,SAAS,MAAM,UAAU;AAE/C,YAAI,SAAS;AACX,cAAI,SAAS,OAAO;AAAA,QACtB;AAAA,MACF;AACO,aAAA;AAAA,IACT;AAAA,IAEA,OAAO,MAAc;AACnB,UAAI,CAAC;AAAY,cAAA,IAAI,MAAM,+CAA+C;AACpE,YAAA,oBAAoB,UAAU,OAAO,CAAC,EAAE,MAAM,eAAe,UAAU;AAC3E,YAAI,gBAAgB,MAAM;AACd,oBAAA,OAAO,OAAO,CAAC;AAClB,iBAAA;AAAA,QACT;AACO,eAAA;AAAA,MAAA,CACR;AACD,wBAAkB,QAAQ,CAAC,EAAE,IAAU,MAAA,IAAI,QAAQ;AAC5C,aAAA;AAAA,IACT;AAAA,IAEA,QAAQ;AACI,gBAAA,QAAQ,CAAC,EAAE,KAAK,QAAc,MAAA,IAAI,SAAS,OAAO,CAAC;AACnD,gBAAA;AACH,aAAA;AAAA,IACT;AAAA,IAEA,OAAO;AACL,gBAAU,QAAQ,CAAC,EAAE,IAAU,MAAA,IAAI,QAAQ;AACjC,gBAAA;AACH,aAAA;AAAA,IACT;AAAA,IAEA,UAAU;AACR,WAAK,KAAK;AACV,kBAAY,CAAA;AACL,aAAA;AAAA,IACT;AAAA,IACA,MAAM;AAAA,EAAA;AAEV;;"}
package/dist/services/cron.mjs
CHANGED
@@ -33,11 +33,16 @@ const createCronService = () => {
      return this;
    },
    remove(name) {
-      if (!name)
+      if (!name)
        throw new Error("You must provide a name to remove a cron job.");
-      }
-
-
+      const matchingJobsSpecs = jobsSpecs.filter(({ name: jobSpecName }, index) => {
+        if (jobSpecName === name) {
+          jobsSpecs.splice(index, 1);
+          return true;
+        }
+        return false;
+      });
+      matchingJobsSpecs.forEach(({ job }) => job.cancel());
      return this;
    },
    start() {
package/dist/services/cron.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"cron.mjs","sources":["../../src/services/cron.ts"],"sourcesContent":["import { Job, Spec } from 'node-schedule';\nimport { isFunction } from 'lodash/fp';\nimport type { Core } from '@strapi/types';\n\ninterface JobSpec {\n job: Job;\n options: Spec;\n name: string | null;\n}\n\ntype TaskFn = ({ strapi }: { strapi: Core.Strapi }, ...args: unknown[]) => Promise<unknown>;\n\ntype Task =\n | TaskFn\n | {\n task: TaskFn;\n options: Spec;\n };\n\ninterface Tasks {\n [key: string]: Task;\n}\n\nconst createCronService = () => {\n let jobsSpecs: JobSpec[] = [];\n let running = false;\n\n return {\n add(tasks: Tasks = {}) {\n for (const taskExpression of Object.keys(tasks)) {\n const taskValue = tasks[taskExpression];\n\n let fn: TaskFn;\n let options: Spec;\n let taskName: string | null;\n if (isFunction(taskValue)) {\n // don't use task name if key is the rule\n taskName = null;\n fn = taskValue.bind(tasks);\n options = taskExpression;\n } else if (isFunction(taskValue.task)) {\n // set task name if key is not the rule\n taskName = taskExpression;\n fn = taskValue.task.bind(taskValue);\n options = taskValue.options;\n } else {\n throw new Error(\n `Could not schedule a cron job for \"${taskExpression}\": no function found.`\n );\n }\n\n const fnWithStrapi = (...args: unknown[]) => fn({ strapi }, ...args);\n\n // const job = new Job(null, fnWithStrapi);\n const job = new Job(fnWithStrapi);\n jobsSpecs.push({ job, options, name: taskName });\n\n if (running) {\n job.schedule(options);\n }\n }\n return this;\n },\n\n remove(name: string) {\n if (!name)
+
{"version":3,"file":"cron.mjs","sources":["../../src/services/cron.ts"],"sourcesContent":["import { Job, Spec } from 'node-schedule';\nimport { isFunction } from 'lodash/fp';\nimport type { Core } from '@strapi/types';\n\ninterface JobSpec {\n job: Job;\n options: Spec;\n name: string | null;\n}\n\ntype TaskFn = ({ strapi }: { strapi: Core.Strapi }, ...args: unknown[]) => Promise<unknown>;\n\ntype Task =\n | TaskFn\n | {\n task: TaskFn;\n options: Spec;\n };\n\ninterface Tasks {\n [key: string]: Task;\n}\n\nconst createCronService = () => {\n let jobsSpecs: JobSpec[] = [];\n let running = false;\n\n return {\n add(tasks: Tasks = {}) {\n for (const taskExpression of Object.keys(tasks)) {\n const taskValue = tasks[taskExpression];\n\n let fn: TaskFn;\n let options: Spec;\n let taskName: string | null;\n if (isFunction(taskValue)) {\n // don't use task name if key is the rule\n taskName = null;\n fn = taskValue.bind(tasks);\n options = taskExpression;\n } else if (isFunction(taskValue.task)) {\n // set task name if key is not the rule\n taskName = taskExpression;\n fn = taskValue.task.bind(taskValue);\n options = taskValue.options;\n } else {\n throw new Error(\n `Could not schedule a cron job for \"${taskExpression}\": no function found.`\n );\n }\n\n const fnWithStrapi = (...args: unknown[]) => fn({ strapi }, ...args);\n\n // const job = new Job(null, fnWithStrapi);\n const job = new Job(fnWithStrapi);\n jobsSpecs.push({ job, options, name: taskName });\n\n if (running) {\n job.schedule(options);\n }\n }\n return this;\n },\n\n remove(name: string) {\n if (!name) throw new Error('You must provide a name to remove a cron job.');\n const matchingJobsSpecs = jobsSpecs.filter(({ name: jobSpecName }, index) => {\n if (jobSpecName === name) {\n jobsSpecs.splice(index, 1);\n return true;\n }\n return false;\n });\n matchingJobsSpecs.forEach(({ job }) => job.cancel());\n return this;\n },\n\n start() {\n jobsSpecs.forEach(({ job, options }) => job.schedule(options));\n running = true;\n return this;\n },\n\n stop() {\n jobsSpecs.forEach(({ job }) => job.cancel());\n running = false;\n return this;\n },\n\n destroy() {\n this.stop();\n jobsSpecs = [];\n return this;\n },\n jobs: jobsSpecs,\n };\n};\n\nexport default createCronService;\n"],"names":[],"mappings":";;AAuBA,MAAM,oBAAoB,MAAM;AAC9B,MAAI,YAAuB,CAAA;AAC3B,MAAI,UAAU;AAEP,SAAA;AAAA,IACL,IAAI,QAAe,IAAI;AACrB,iBAAW,kBAAkB,OAAO,KAAK,KAAK,GAAG;AACzC,cAAA,YAAY,MAAM,cAAc;AAElC,YAAA;AACA,YAAA;AACA,YAAA;AACA,YAAA,WAAW,SAAS,GAAG;AAEd,qBAAA;AACN,eAAA,UAAU,KAAK,KAAK;AACf,oBAAA;AAAA,QACD,WAAA,WAAW,UAAU,IAAI,GAAG;AAE1B,qBAAA;AACN,eAAA,UAAU,KAAK,KAAK,SAAS;AAClC,oBAAU,UAAU;AAAA,QAAA,OACf;AACL,gBAAM,IAAI;AAAA,YACR,sCAAsC,cAAc;AAAA,UAAA;AAAA,QAExD;AAEM,cAAA,eAAe,IAAI,SAAoB,GAAG,EAAE,OAAO,GAAG,GAAG,IAAI;AAG7D,cAAA,MAAM,IAAI,IAAI,YAAY;AAChC,kBAAU,KAAK,EAAE,KAAK,SAAS,MAAM,UAAU;AAE/C,YAAI,SAAS;AACX,cAAI,SAAS,OAAO;AAAA,QACtB;AAAA,MACF;AACO,aAAA;AAAA,IACT;AAAA,IAEA,OAAO,MAAc;AACnB,UAAI,CAAC;AAAY,cAAA,IAAI,MAAM,+CAA+C;AACpE,YAAA,oBAAoB,UAAU,OAAO,CAAC,EAAE,MAAM,eAAe,UAAU;AAC3E,YAAI,gBAAgB,MAAM;AACd,oBAAA,OAAO,OAAO,CAAC;AAClB,iBAAA;AAAA,QACT;AACO,eAAA;AAAA,MAAA,CACR;AACD,wBAAkB,QAAQ,CAAC,EAAE,IAAU,MAAA,IAAI,QAAQ;AAC5C,aAAA;AAAA,IACT;AAAA,IAEA,QAAQ;AACI,gBAAA,QAAQ,CAAC,EAAE,KAAK,QAAc,MAAA,IAAI,SAAS,OAAO,CAAC;AACnD,gBAAA;AACH,aAAA;AAAA,IACT;AAAA,IAEA,OAAO;AACL,gBAAU,QAAQ,CAAC,EAAE,IAAU,MAAA,IAAI,QAAQ;AACjC,gBAAA;AACH,aAAA;AAAA,IACT;AAAA,IAEA,UAAU;AACR,WAAK,KAAK;AACV,kBAAY,CAAA;AACL,aAAA;AAAA,IACT;AAAA,IACA,MAAM;AAAA,EAAA;AAEV;"}
package/dist/services/document-service/transform/id-map.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"id-map.d.ts","sourceRoot":"","sources":["../../../../src/services/document-service/transform/id-map.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,IAAI,
+
{"version":3,"file":"id-map.d.ts","sourceRoot":"","sources":["../../../../src/services/document-service/transform/id-map.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,IAAI,EAAO,MAAM,eAAe,CAAC;AA6BhD,UAAU,SAAS;IACjB,GAAG,EAAE,MAAM,CAAC;IACZ,UAAU,EAAE,IAAI,CAAC,EAAE,CAAC;IACpB,MAAM,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IACvB,MAAM,CAAC,EAAE,OAAO,GAAG,WAAW,CAAC;CAChC;AAED,MAAM,WAAW,KAAK;IACpB,SAAS,EAAE,GAAG,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAC/B,SAAS,EAAE,GAAG,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;IAElC,GAAG,CAAC,IAAI,EAAE,SAAS,GAAG,IAAI,CAAC;IAC3B,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC;IACtB,GAAG,CAAC,IAAI,EAAE,SAAS,GAAG,MAAM,GAAG,SAAS,CAAC;IACzC,KAAK,IAAI,IAAI,CAAC;CACf;AAED;;GAEG;AACH,QAAA,MAAM,WAAW,eAAgB;IAAE,MAAM,EAAE,KAAK,MAAM,CAAA;CAAE,KAAG,KAyF1D,CAAC;AAEF,OAAO,EAAE,WAAW,EAAE,CAAC"}
package/dist/services/document-service/transform/id-map.js
CHANGED
@@ -1,11 +1,18 @@
"use strict";
Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
const strapiUtils = require("@strapi/utils");
+const hasDraftAndPublish = (uid) => {
+  const model = strapi.getModel(uid);
+  return strapiUtils.contentTypes.hasDraftAndPublish(model);
+};
const encodeKey = (obj) => {
+  if (!hasDraftAndPublish(obj.uid)) {
+    delete obj.status;
+  }
  const keys = Object.keys(obj).sort();
  return keys.map((key) => `${key}:::${obj[key]}`).join("&&");
};
-const createIdMap = ({ strapi }) => {
+const createIdMap = ({ strapi: strapi2 }) => {
  const loadedIds = /* @__PURE__ */ new Map();
  const toLoadIds = /* @__PURE__ */ new Map();
  return {
@@ -40,11 +47,13 @@ const createIdMap = ({ strapi }) => {
          select: ["id", "documentId", "locale", "publishedAt"],
          where: {
            documentId: { $in: documentIds },
-            locale: locale || null
-            publishedAt: status === "draft" ? null : { $ne: null }
+            locale: locale || null
          }
        };
-
+        if (hasDraftAndPublish(uid)) {
+          findParams.where.publishedAt = status === "draft" ? null : { $ne: null };
+        }
+        const result = await strapi2?.db?.query(uid).findMany(findParams);
        result?.forEach(({ documentId, id, locale: locale2, publishedAt }) => {
          const key = encodeKey({
            documentId,
package/dist/services/document-service/transform/id-map.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"id-map.js","sources":["../../../../src/services/document-service/transform/id-map.ts"],"sourcesContent":["import { Core, Data } from '@strapi/types';\nimport { async } from '@strapi/utils';\n\n/**\n * TODO: Find a better way to encode keys than this\n * This converts an object into a string by joining its keys and values,\n * so it can be used as a key in a Map.\n *\n * @example\n * const obj = { a: 1, b: 2 };\n * const key = encodeKey(obj);\n * ^ \"a:::1&&b:::2\"\n */\nconst encodeKey = (obj: any) => {\n // Sort keys to always keep the same order when encoding\n const keys = Object.keys(obj).sort();\n return keys.map((key) => `${key}:::${obj[key]}`).join('&&');\n};\n\ninterface KeyFields {\n uid: string;\n documentId: Data.ID;\n locale?: string | null;\n status?: 'draft' | 'published';\n}\n\nexport interface IdMap {\n loadedIds: Map<string, string>;\n toLoadIds: Map<string, KeyFields>;\n // Make the Keys type to be the params of add\n add(keys: KeyFields): void;\n load(): Promise<void>;\n get(keys: KeyFields): string | undefined;\n clear(): void;\n}\n\n/**\n * Holds a registry of document ids and their corresponding entity ids.\n */\nconst createIdMap = ({ strapi }: { strapi: Core.Strapi }): IdMap => {\n const loadedIds = new Map();\n const toLoadIds = new Map();\n\n return {\n loadedIds,\n toLoadIds,\n /**\n * Register a new document id and its corresponding entity id.\n */\n add(keyFields: KeyFields) {\n const key = encodeKey({ status: 'published', locale: null, ...keyFields });\n\n // If the id is already loaded, do nothing\n if (loadedIds.has(key)) return;\n // If the id is already in the toLoadIds, do nothing\n if (toLoadIds.has(key)) return;\n\n // Add the id to the toLoadIds\n toLoadIds.set(key, keyFields);\n },\n\n /**\n * Load all ids from the registry.\n */\n async load() {\n // Document Id to Entry Id queries are batched by its uid and locale\n // TODO: Add publication state too\n const loadIdValues = Array.from(toLoadIds.values());\n\n // 1. Group ids to query together\n const idsByUidAndLocale = loadIdValues.reduce((acc, { documentId, ...rest }) => {\n const key = encodeKey(rest);\n const ids = acc[key] || { ...rest, documentIds: [] };\n ids.documentIds.push(documentId);\n return { ...acc, [key]: ids };\n }, {});\n\n // 2. Query ids\n await async.map(\n Object.values(idsByUidAndLocale),\n async ({ uid, locale, documentIds, status }: any) => {\n const findParams = {\n select: ['id', 'documentId', 'locale', 'publishedAt'],\n where: {\n documentId: { $in: documentIds },\n locale: locale || null,\n
+
{"version":3,"file":"id-map.js","sources":["../../../../src/services/document-service/transform/id-map.ts"],"sourcesContent":["import { Core, Data, UID } from '@strapi/types';\nimport { async, contentTypes } from '@strapi/utils';\n\nconst hasDraftAndPublish = (uid: UID.CollectionType) => {\n const model = strapi.getModel(uid);\n return contentTypes.hasDraftAndPublish(model);\n};\n\n/**\n * TODO: Find a better way to encode keys than this\n * This converts an object into a string by joining its keys and values,\n * so it can be used as a key in a Map.\n *\n * @example\n * const obj = { a: 1, b: 2 };\n * const key = encodeKey(obj);\n * ^ \"a:::1&&b:::2\"\n */\nconst encodeKey = (obj: any) => {\n // Ignore status field for models without draft and publish\n if (!hasDraftAndPublish(obj.uid)) {\n delete obj.status;\n }\n\n // Sort keys to always keep the same order when encoding\n const keys = Object.keys(obj).sort();\n return keys.map((key) => `${key}:::${obj[key]}`).join('&&');\n};\n\ninterface KeyFields {\n uid: string;\n documentId: Data.ID;\n locale?: string | null;\n status?: 'draft' | 'published';\n}\n\nexport interface IdMap {\n loadedIds: Map<string, string>;\n toLoadIds: Map<string, KeyFields>;\n // Make the Keys type to be the params of add\n add(keys: KeyFields): void;\n load(): Promise<void>;\n get(keys: KeyFields): string | undefined;\n clear(): void;\n}\n\n/**\n * Holds a registry of document ids and their corresponding entity ids.\n */\nconst createIdMap = ({ strapi }: { strapi: Core.Strapi }): IdMap => {\n const loadedIds = new Map();\n const toLoadIds = new Map();\n\n return {\n loadedIds,\n toLoadIds,\n /**\n * Register a new document id and its corresponding entity id.\n */\n add(keyFields: KeyFields) {\n const key = encodeKey({ status: 'published', locale: null, ...keyFields });\n\n // If the id is already loaded, do nothing\n if (loadedIds.has(key)) return;\n // If the id is already in the toLoadIds, do nothing\n if (toLoadIds.has(key)) return;\n\n // Add the id to the toLoadIds\n toLoadIds.set(key, keyFields);\n },\n\n /**\n * Load all ids from the registry.\n */\n async load() {\n // Document Id to Entry Id queries are batched by its uid and locale\n // TODO: Add publication state too\n const loadIdValues = Array.from(toLoadIds.values());\n\n // 1. Group ids to query together\n const idsByUidAndLocale = loadIdValues.reduce((acc, { documentId, ...rest }) => {\n const key = encodeKey(rest);\n const ids = acc[key] || { ...rest, documentIds: [] };\n ids.documentIds.push(documentId);\n return { ...acc, [key]: ids };\n }, {});\n\n // 2. Query ids\n await async.map(\n Object.values(idsByUidAndLocale),\n async ({ uid, locale, documentIds, status }: any) => {\n const findParams = {\n select: ['id', 'documentId', 'locale', 'publishedAt'],\n where: {\n documentId: { $in: documentIds },\n locale: locale || null,\n },\n } as any;\n\n if (hasDraftAndPublish(uid)) {\n findParams.where.publishedAt = status === 'draft' ? null : { $ne: null };\n }\n\n const result = await strapi?.db?.query(uid).findMany(findParams);\n\n // 3. Store result in loadedIds\n result?.forEach(({ documentId, id, locale, publishedAt }: any) => {\n const key = encodeKey({\n documentId,\n uid,\n locale,\n status: publishedAt ? 'published' : 'draft',\n });\n loadedIds.set(key, id);\n });\n }\n );\n\n // 4. 
Clear toLoadIds\n toLoadIds.clear();\n },\n\n /**\n * Get the entity id for a given document id.\n */\n get(keys: KeyFields) {\n const key = encodeKey({ status: 'published', locale: null, ...keys });\n return loadedIds.get(key);\n },\n\n /**\n * Clear the registry.\n */\n clear() {\n loadedIds.clear();\n toLoadIds.clear();\n },\n };\n};\n\nexport { createIdMap };\n"],"names":["contentTypes","strapi","async","locale"],"mappings":";;;AAGA,MAAM,qBAAqB,CAAC,QAA4B;AAChD,QAAA,QAAQ,OAAO,SAAS,GAAG;AAC1B,SAAAA,YAAA,aAAa,mBAAmB,KAAK;AAC9C;AAYA,MAAM,YAAY,CAAC,QAAa;AAE9B,MAAI,CAAC,mBAAmB,IAAI,GAAG,GAAG;AAChC,WAAO,IAAI;AAAA,EACb;AAGA,QAAM,OAAO,OAAO,KAAK,GAAG,EAAE,KAAK;AACnC,SAAO,KAAK,IAAI,CAAC,QAAQ,GAAG,GAAG,MAAM,IAAI,GAAG,CAAC,EAAE,EAAE,KAAK,IAAI;AAC5D;AAsBA,MAAM,cAAc,CAAC,EAAE,QAAAC,cAA6C;AAC5D,QAAA,gCAAgB;AAChB,QAAA,gCAAgB;AAEf,SAAA;AAAA,IACL;AAAA,IACA;AAAA;AAAA;AAAA;AAAA,IAIA,IAAI,WAAsB;AAClB,YAAA,MAAM,UAAU,EAAE,QAAQ,aAAa,QAAQ,MAAM,GAAG,UAAA,CAAW;AAGrE,UAAA,UAAU,IAAI,GAAG;AAAG;AAEpB,UAAA,UAAU,IAAI,GAAG;AAAG;AAGd,gBAAA,IAAI,KAAK,SAAS;AAAA,IAC9B;AAAA;AAAA;AAAA;AAAA,IAKA,MAAM,OAAO;AAGX,YAAM,eAAe,MAAM,KAAK,UAAU,OAAQ,CAAA;AAG5C,YAAA,oBAAoB,aAAa,OAAO,CAAC,KAAK,EAAE,YAAY,GAAG,WAAW;AACxE,cAAA,MAAM,UAAU,IAAI;AACpB,cAAA,MAAM,IAAI,GAAG,KAAK,EAAE,GAAG,MAAM,aAAa,CAAA;AAC5C,YAAA,YAAY,KAAK,UAAU;AAC/B,eAAO,EAAE,GAAG,KAAK,CAAC,GAAG,GAAG,IAAI;AAAA,MAC9B,GAAG,CAAE,CAAA;AAGL,YAAMC,YAAM,MAAA;AAAA,QACV,OAAO,OAAO,iBAAiB;AAAA,QAC/B,OAAO,EAAE,KAAK,QAAQ,aAAa,aAAkB;AACnD,gBAAM,aAAa;AAAA,YACjB,QAAQ,CAAC,MAAM,cAAc,UAAU,aAAa;AAAA,YACpD,OAAO;AAAA,cACL,YAAY,EAAE,KAAK,YAAY;AAAA,cAC/B,QAAQ,UAAU;AAAA,YACpB;AAAA,UAAA;AAGE,cAAA,mBAAmB,GAAG,GAAG;AAC3B,uBAAW,MAAM,cAAc,WAAW,UAAU,OAAO,EAAE,KAAK;UACpE;AAEM,gBAAA,SAAS,MAAMD,SAAQ,IAAI,MAAM,GAAG,EAAE,SAAS,UAAU;AAGvD,kBAAA,QAAQ,CAAC,EAAE,YAAY,IAAI,QAAAE,SAAQ,kBAAuB;AAChE,kBAAM,MAAM,UAAU;AAAA,cACpB;AAAA,cACA;AAAA,cACA,QAAAA;AAAAA,cACA,QAAQ,cAAc,cAAc;AAAA,YAAA,CACrC;AACS,sBAAA,IAAI,KAAK,EAAE;AAAA,UAAA,CACtB;AAAA,QACH;AAAA,MAAA;AAIF,gBAAU,MAAM;AAAA,IAClB;AAAA;AAAA;AAAA;AAAA,IAKA,IAAI,MAAiB;AACb,YAAA,MAAM,UAAU,EAAE,QAAQ,aAAa,QAAQ,MAAM,GAAG,KAAA,CAAM;AAC7D,aAAA,UAAU,IAAI,GAAG;AAAA,IAC1B;AAAA;AAAA;AAAA;AAAA,IAKA,QAAQ;AACN,gBAAU,MAAM;AAChB,gBAAU,MAAM;AAAA,IAClB;AAAA,EAAA;AAEJ;;"}
package/dist/services/document-service/transform/id-map.mjs
CHANGED
@@ -1,9 +1,16 @@
-import { async } from "@strapi/utils";
+import { async, contentTypes } from "@strapi/utils";
+const hasDraftAndPublish = (uid) => {
+  const model = strapi.getModel(uid);
+  return contentTypes.hasDraftAndPublish(model);
+};
const encodeKey = (obj) => {
+  if (!hasDraftAndPublish(obj.uid)) {
+    delete obj.status;
+  }
  const keys = Object.keys(obj).sort();
  return keys.map((key) => `${key}:::${obj[key]}`).join("&&");
};
-const createIdMap = ({ strapi }) => {
+const createIdMap = ({ strapi: strapi2 }) => {
  const loadedIds = /* @__PURE__ */ new Map();
  const toLoadIds = /* @__PURE__ */ new Map();
  return {
@@ -38,11 +45,13 @@ const createIdMap = ({ strapi }) => {
          select: ["id", "documentId", "locale", "publishedAt"],
          where: {
            documentId: { $in: documentIds },
-            locale: locale || null
-            publishedAt: status === "draft" ? null : { $ne: null }
+            locale: locale || null
          }
        };
-
+        if (hasDraftAndPublish(uid)) {
+          findParams.where.publishedAt = status === "draft" ? null : { $ne: null };
+        }
+        const result = await strapi2?.db?.query(uid).findMany(findParams);
        result?.forEach(({ documentId, id, locale: locale2, publishedAt }) => {
          const key = encodeKey({
            documentId,
package/dist/services/document-service/transform/id-map.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"id-map.mjs","sources":["../../../../src/services/document-service/transform/id-map.ts"],"sourcesContent":["import { Core, Data } from '@strapi/types';\nimport { async } from '@strapi/utils';\n\n/**\n * TODO: Find a better way to encode keys than this\n * This converts an object into a string by joining its keys and values,\n * so it can be used as a key in a Map.\n *\n * @example\n * const obj = { a: 1, b: 2 };\n * const key = encodeKey(obj);\n * ^ \"a:::1&&b:::2\"\n */\nconst encodeKey = (obj: any) => {\n // Sort keys to always keep the same order when encoding\n const keys = Object.keys(obj).sort();\n return keys.map((key) => `${key}:::${obj[key]}`).join('&&');\n};\n\ninterface KeyFields {\n uid: string;\n documentId: Data.ID;\n locale?: string | null;\n status?: 'draft' | 'published';\n}\n\nexport interface IdMap {\n loadedIds: Map<string, string>;\n toLoadIds: Map<string, KeyFields>;\n // Make the Keys type to be the params of add\n add(keys: KeyFields): void;\n load(): Promise<void>;\n get(keys: KeyFields): string | undefined;\n clear(): void;\n}\n\n/**\n * Holds a registry of document ids and their corresponding entity ids.\n */\nconst createIdMap = ({ strapi }: { strapi: Core.Strapi }): IdMap => {\n const loadedIds = new Map();\n const toLoadIds = new Map();\n\n return {\n loadedIds,\n toLoadIds,\n /**\n * Register a new document id and its corresponding entity id.\n */\n add(keyFields: KeyFields) {\n const key = encodeKey({ status: 'published', locale: null, ...keyFields });\n\n // If the id is already loaded, do nothing\n if (loadedIds.has(key)) return;\n // If the id is already in the toLoadIds, do nothing\n if (toLoadIds.has(key)) return;\n\n // Add the id to the toLoadIds\n toLoadIds.set(key, keyFields);\n },\n\n /**\n * Load all ids from the registry.\n */\n async load() {\n // Document Id to Entry Id queries are batched by its uid and locale\n // TODO: Add publication state too\n const loadIdValues = Array.from(toLoadIds.values());\n\n // 1. Group ids to query together\n const idsByUidAndLocale = loadIdValues.reduce((acc, { documentId, ...rest }) => {\n const key = encodeKey(rest);\n const ids = acc[key] || { ...rest, documentIds: [] };\n ids.documentIds.push(documentId);\n return { ...acc, [key]: ids };\n }, {});\n\n // 2. Query ids\n await async.map(\n Object.values(idsByUidAndLocale),\n async ({ uid, locale, documentIds, status }: any) => {\n const findParams = {\n select: ['id', 'documentId', 'locale', 'publishedAt'],\n where: {\n documentId: { $in: documentIds },\n locale: locale || null,\n
+
{"version":3,"file":"id-map.mjs","sources":["../../../../src/services/document-service/transform/id-map.ts"],"sourcesContent":["import { Core, Data, UID } from '@strapi/types';\nimport { async, contentTypes } from '@strapi/utils';\n\nconst hasDraftAndPublish = (uid: UID.CollectionType) => {\n const model = strapi.getModel(uid);\n return contentTypes.hasDraftAndPublish(model);\n};\n\n/**\n * TODO: Find a better way to encode keys than this\n * This converts an object into a string by joining its keys and values,\n * so it can be used as a key in a Map.\n *\n * @example\n * const obj = { a: 1, b: 2 };\n * const key = encodeKey(obj);\n * ^ \"a:::1&&b:::2\"\n */\nconst encodeKey = (obj: any) => {\n // Ignore status field for models without draft and publish\n if (!hasDraftAndPublish(obj.uid)) {\n delete obj.status;\n }\n\n // Sort keys to always keep the same order when encoding\n const keys = Object.keys(obj).sort();\n return keys.map((key) => `${key}:::${obj[key]}`).join('&&');\n};\n\ninterface KeyFields {\n uid: string;\n documentId: Data.ID;\n locale?: string | null;\n status?: 'draft' | 'published';\n}\n\nexport interface IdMap {\n loadedIds: Map<string, string>;\n toLoadIds: Map<string, KeyFields>;\n // Make the Keys type to be the params of add\n add(keys: KeyFields): void;\n load(): Promise<void>;\n get(keys: KeyFields): string | undefined;\n clear(): void;\n}\n\n/**\n * Holds a registry of document ids and their corresponding entity ids.\n */\nconst createIdMap = ({ strapi }: { strapi: Core.Strapi }): IdMap => {\n const loadedIds = new Map();\n const toLoadIds = new Map();\n\n return {\n loadedIds,\n toLoadIds,\n /**\n * Register a new document id and its corresponding entity id.\n */\n add(keyFields: KeyFields) {\n const key = encodeKey({ status: 'published', locale: null, ...keyFields });\n\n // If the id is already loaded, do nothing\n if (loadedIds.has(key)) return;\n // If the id is already in the toLoadIds, do nothing\n if (toLoadIds.has(key)) return;\n\n // Add the id to the toLoadIds\n toLoadIds.set(key, keyFields);\n },\n\n /**\n * Load all ids from the registry.\n */\n async load() {\n // Document Id to Entry Id queries are batched by its uid and locale\n // TODO: Add publication state too\n const loadIdValues = Array.from(toLoadIds.values());\n\n // 1. Group ids to query together\n const idsByUidAndLocale = loadIdValues.reduce((acc, { documentId, ...rest }) => {\n const key = encodeKey(rest);\n const ids = acc[key] || { ...rest, documentIds: [] };\n ids.documentIds.push(documentId);\n return { ...acc, [key]: ids };\n }, {});\n\n // 2. Query ids\n await async.map(\n Object.values(idsByUidAndLocale),\n async ({ uid, locale, documentIds, status }: any) => {\n const findParams = {\n select: ['id', 'documentId', 'locale', 'publishedAt'],\n where: {\n documentId: { $in: documentIds },\n locale: locale || null,\n },\n } as any;\n\n if (hasDraftAndPublish(uid)) {\n findParams.where.publishedAt = status === 'draft' ? null : { $ne: null };\n }\n\n const result = await strapi?.db?.query(uid).findMany(findParams);\n\n // 3. Store result in loadedIds\n result?.forEach(({ documentId, id, locale, publishedAt }: any) => {\n const key = encodeKey({\n documentId,\n uid,\n locale,\n status: publishedAt ? 'published' : 'draft',\n });\n loadedIds.set(key, id);\n });\n }\n );\n\n // 4. 
Clear toLoadIds\n toLoadIds.clear();\n },\n\n /**\n * Get the entity id for a given document id.\n */\n get(keys: KeyFields) {\n const key = encodeKey({ status: 'published', locale: null, ...keys });\n return loadedIds.get(key);\n },\n\n /**\n * Clear the registry.\n */\n clear() {\n loadedIds.clear();\n toLoadIds.clear();\n },\n };\n};\n\nexport { createIdMap };\n"],"names":["strapi","locale"],"mappings":";AAGA,MAAM,qBAAqB,CAAC,QAA4B;AAChD,QAAA,QAAQ,OAAO,SAAS,GAAG;AAC1B,SAAA,aAAa,mBAAmB,KAAK;AAC9C;AAYA,MAAM,YAAY,CAAC,QAAa;AAE9B,MAAI,CAAC,mBAAmB,IAAI,GAAG,GAAG;AAChC,WAAO,IAAI;AAAA,EACb;AAGA,QAAM,OAAO,OAAO,KAAK,GAAG,EAAE,KAAK;AACnC,SAAO,KAAK,IAAI,CAAC,QAAQ,GAAG,GAAG,MAAM,IAAI,GAAG,CAAC,EAAE,EAAE,KAAK,IAAI;AAC5D;AAsBA,MAAM,cAAc,CAAC,EAAE,QAAAA,cAA6C;AAC5D,QAAA,gCAAgB;AAChB,QAAA,gCAAgB;AAEf,SAAA;AAAA,IACL;AAAA,IACA;AAAA;AAAA;AAAA;AAAA,IAIA,IAAI,WAAsB;AAClB,YAAA,MAAM,UAAU,EAAE,QAAQ,aAAa,QAAQ,MAAM,GAAG,UAAA,CAAW;AAGrE,UAAA,UAAU,IAAI,GAAG;AAAG;AAEpB,UAAA,UAAU,IAAI,GAAG;AAAG;AAGd,gBAAA,IAAI,KAAK,SAAS;AAAA,IAC9B;AAAA;AAAA;AAAA;AAAA,IAKA,MAAM,OAAO;AAGX,YAAM,eAAe,MAAM,KAAK,UAAU,OAAQ,CAAA;AAG5C,YAAA,oBAAoB,aAAa,OAAO,CAAC,KAAK,EAAE,YAAY,GAAG,WAAW;AACxE,cAAA,MAAM,UAAU,IAAI;AACpB,cAAA,MAAM,IAAI,GAAG,KAAK,EAAE,GAAG,MAAM,aAAa,CAAA;AAC5C,YAAA,YAAY,KAAK,UAAU;AAC/B,eAAO,EAAE,GAAG,KAAK,CAAC,GAAG,GAAG,IAAI;AAAA,MAC9B,GAAG,CAAE,CAAA;AAGL,YAAM,MAAM;AAAA,QACV,OAAO,OAAO,iBAAiB;AAAA,QAC/B,OAAO,EAAE,KAAK,QAAQ,aAAa,aAAkB;AACnD,gBAAM,aAAa;AAAA,YACjB,QAAQ,CAAC,MAAM,cAAc,UAAU,aAAa;AAAA,YACpD,OAAO;AAAA,cACL,YAAY,EAAE,KAAK,YAAY;AAAA,cAC/B,QAAQ,UAAU;AAAA,YACpB;AAAA,UAAA;AAGE,cAAA,mBAAmB,GAAG,GAAG;AAC3B,uBAAW,MAAM,cAAc,WAAW,UAAU,OAAO,EAAE,KAAK;UACpE;AAEM,gBAAA,SAAS,MAAMA,SAAQ,IAAI,MAAM,GAAG,EAAE,SAAS,UAAU;AAGvD,kBAAA,QAAQ,CAAC,EAAE,YAAY,IAAI,QAAAC,SAAQ,kBAAuB;AAChE,kBAAM,MAAM,UAAU;AAAA,cACpB;AAAA,cACA;AAAA,cACA,QAAAA;AAAAA,cACA,QAAQ,cAAc,cAAc;AAAA,YAAA,CACrC;AACS,sBAAA,IAAI,KAAK,EAAE;AAAA,UAAA,CACtB;AAAA,QACH;AAAA,MAAA;AAIF,gBAAU,MAAM;AAAA,IAClB;AAAA;AAAA;AAAA;AAAA,IAKA,IAAI,MAAiB;AACb,YAAA,MAAM,UAAU,EAAE,QAAQ,aAAa,QAAQ,MAAM,GAAG,KAAA,CAAM;AAC7D,aAAA,UAAU,IAAI,GAAG;AAAA,IAC1B;AAAA;AAAA;AAAA;AAAA,IAKA,QAAQ;AACN,gBAAU,MAAM;AAChB,gBAAU,MAAM;AAAA,IAClB;AAAA,EAAA;AAEJ;"}
@@ -1 +1 @@
-
{"version":3,"file":"populate.d.ts","sourceRoot":"","sources":["../../../../src/services/document-service/utils/populate.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAE,MAAM,eAAe,CAAC;AAGpC,UAAU,OAAO;IACf;;OAEG;IACH,gBAAgB,CAAC,EAAE,MAAM,EAAE,CAAC;CAC7B;AAKD,eAAO,MAAM,eAAe,QAAS,IAAI,MAAM,SAAQ,OAAO,
+
{"version":3,"file":"populate.d.ts","sourceRoot":"","sources":["../../../../src/services/document-service/utils/populate.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAE,MAAM,eAAe,CAAC;AAGpC,UAAU,OAAO;IACf;;OAEG;IACH,gBAAgB,CAAC,EAAE,MAAM,EAAE,CAAC;CAC7B;AAKD,eAAO,MAAM,eAAe,QAAS,IAAI,MAAM,SAAQ,OAAO,QAwD7D,CAAC"}
@@ -1 +1 @@
-
{"version":3,"file":"populate.js","sources":["../../../../src/services/document-service/utils/populate.ts"],"sourcesContent":["import { UID } from '@strapi/types';\nimport { contentTypes } from '@strapi/utils';\n\ninterface Options {\n /**\n * Fields to select when populating relations\n */\n relationalFields?: string[];\n}\n\nconst { CREATED_BY_ATTRIBUTE, UPDATED_BY_ATTRIBUTE } = contentTypes.constants;\n\n// We want to build a populate object based on the schema\nexport const getDeepPopulate = (uid: UID.Schema, opts: Options = {}) => {\n const model = strapi.getModel(uid);\n const attributes = Object.entries(model.attributes);\n\n return attributes.reduce((acc: any, [attributeName, attribute]) => {\n switch (attribute.type) {\n case 'relation': {\n // TODO: Support polymorphic relations\n const isMorphRelation = attribute.relation.toLowerCase().startsWith('morph');\n if (isMorphRelation) {\n break;\n }\n\n // Ignore not visible fields other than createdBy and updatedBy\n const isVisible = contentTypes.isVisibleAttribute(model, attributeName);\n const isCreatorField = [CREATED_BY_ATTRIBUTE, UPDATED_BY_ATTRIBUTE].includes(attributeName);\n\n if (isVisible || isCreatorField) {\n acc[attributeName] = { select: opts.relationalFields };\n }\n\n break;\n }\n\n case 'media': {\n acc[attributeName] = { select: ['
+
{"version":3,"file":"populate.js","sources":["../../../../src/services/document-service/utils/populate.ts"],"sourcesContent":["import { UID } from '@strapi/types';\nimport { contentTypes } from '@strapi/utils';\n\ninterface Options {\n /**\n * Fields to select when populating relations\n */\n relationalFields?: string[];\n}\n\nconst { CREATED_BY_ATTRIBUTE, UPDATED_BY_ATTRIBUTE } = contentTypes.constants;\n\n// We want to build a populate object based on the schema\nexport const getDeepPopulate = (uid: UID.Schema, opts: Options = {}) => {\n const model = strapi.getModel(uid);\n const attributes = Object.entries(model.attributes);\n\n return attributes.reduce((acc: any, [attributeName, attribute]) => {\n switch (attribute.type) {\n case 'relation': {\n // TODO: Support polymorphic relations\n const isMorphRelation = attribute.relation.toLowerCase().startsWith('morph');\n if (isMorphRelation) {\n break;\n }\n\n // Ignore not visible fields other than createdBy and updatedBy\n const isVisible = contentTypes.isVisibleAttribute(model, attributeName);\n const isCreatorField = [CREATED_BY_ATTRIBUTE, UPDATED_BY_ATTRIBUTE].includes(attributeName);\n\n if (isVisible || isCreatorField) {\n acc[attributeName] = { select: opts.relationalFields };\n }\n\n break;\n }\n\n case 'media': {\n // We populate all media fields for completeness of webhook responses\n // see https://github.com/strapi/strapi/issues/21546\n acc[attributeName] = { select: ['*'] };\n break;\n }\n\n case 'component': {\n const populate = getDeepPopulate(attribute.component, opts);\n acc[attributeName] = { populate };\n break;\n }\n\n case 'dynamiczone': {\n // Use fragments to populate the dynamic zone components\n const populatedComponents = (attribute.components || []).reduce(\n (acc: any, componentUID: UID.Component) => {\n acc[componentUID] = { populate: getDeepPopulate(componentUID, opts) };\n return acc;\n },\n {}\n );\n\n acc[attributeName] = { on: populatedComponents };\n break;\n }\n default:\n break;\n }\n\n return acc;\n }, {});\n};\n"],"names":["contentTypes","acc"],"mappings":";;;AAUA,MAAM,EAAE,sBAAsB,yBAAyBA,YAAAA,aAAa;AAG7D,MAAM,kBAAkB,CAAC,KAAiB,OAAgB,OAAO;AAChE,QAAA,QAAQ,OAAO,SAAS,GAAG;AACjC,QAAM,aAAa,OAAO,QAAQ,MAAM,UAAU;AAElD,SAAO,WAAW,OAAO,CAAC,KAAU,CAAC,eAAe,SAAS,MAAM;AACjE,YAAQ,UAAU,MAAM;AAAA,MACtB,KAAK,YAAY;AAEf,cAAM,kBAAkB,UAAU,SAAS,YAAY,EAAE,WAAW,OAAO;AAC3E,YAAI,iBAAiB;AACnB;AAAA,QACF;AAGA,cAAM,YAAYA,YAAA,aAAa,mBAAmB,OAAO,aAAa;AACtE,cAAM,iBAAiB,CAAC,sBAAsB,oBAAoB,EAAE,SAAS,aAAa;AAE1F,YAAI,aAAa,gBAAgB;AAC/B,cAAI,aAAa,IAAI,EAAE,QAAQ,KAAK,iBAAiB;AAAA,QACvD;AAEA;AAAA,MACF;AAAA,MAEA,KAAK,SAAS;AAGZ,YAAI,aAAa,IAAI,EAAE,QAAQ,CAAC,GAAG,EAAE;AACrC;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,cAAM,WAAW,gBAAgB,UAAU,WAAW,IAAI;AACtD,YAAA,aAAa,IAAI,EAAE;AACvB;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAElB,cAAM,uBAAuB,UAAU,cAAc,CAAI,GAAA;AAAA,UACvD,CAACC,MAAU,iBAAgC;AACzCA,iBAAI,YAAY,IAAI,EAAE,UAAU,gBAAgB,cAAc,IAAI;AAC3DA,mBAAAA;AAAAA,UACT;AAAA,UACA,CAAC;AAAA,QAAA;AAGH,YAAI,aAAa,IAAI,EAAE,IAAI,oBAAoB;AAC/C;AAAA,MACF;AAAA,IAGF;AAEO,WAAA;AAAA,EACT,GAAG,CAAE,CAAA;AACP;;"}
@@ -1 +1 @@
-
{"version":3,"file":"populate.mjs","sources":["../../../../src/services/document-service/utils/populate.ts"],"sourcesContent":["import { UID } from '@strapi/types';\nimport { contentTypes } from '@strapi/utils';\n\ninterface Options {\n /**\n * Fields to select when populating relations\n */\n relationalFields?: string[];\n}\n\nconst { CREATED_BY_ATTRIBUTE, UPDATED_BY_ATTRIBUTE } = contentTypes.constants;\n\n// We want to build a populate object based on the schema\nexport const getDeepPopulate = (uid: UID.Schema, opts: Options = {}) => {\n const model = strapi.getModel(uid);\n const attributes = Object.entries(model.attributes);\n\n return attributes.reduce((acc: any, [attributeName, attribute]) => {\n switch (attribute.type) {\n case 'relation': {\n // TODO: Support polymorphic relations\n const isMorphRelation = attribute.relation.toLowerCase().startsWith('morph');\n if (isMorphRelation) {\n break;\n }\n\n // Ignore not visible fields other than createdBy and updatedBy\n const isVisible = contentTypes.isVisibleAttribute(model, attributeName);\n const isCreatorField = [CREATED_BY_ATTRIBUTE, UPDATED_BY_ATTRIBUTE].includes(attributeName);\n\n if (isVisible || isCreatorField) {\n acc[attributeName] = { select: opts.relationalFields };\n }\n\n break;\n }\n\n case 'media': {\n acc[attributeName] = { select: ['
+
{"version":3,"file":"populate.mjs","sources":["../../../../src/services/document-service/utils/populate.ts"],"sourcesContent":["import { UID } from '@strapi/types';\nimport { contentTypes } from '@strapi/utils';\n\ninterface Options {\n /**\n * Fields to select when populating relations\n */\n relationalFields?: string[];\n}\n\nconst { CREATED_BY_ATTRIBUTE, UPDATED_BY_ATTRIBUTE } = contentTypes.constants;\n\n// We want to build a populate object based on the schema\nexport const getDeepPopulate = (uid: UID.Schema, opts: Options = {}) => {\n const model = strapi.getModel(uid);\n const attributes = Object.entries(model.attributes);\n\n return attributes.reduce((acc: any, [attributeName, attribute]) => {\n switch (attribute.type) {\n case 'relation': {\n // TODO: Support polymorphic relations\n const isMorphRelation = attribute.relation.toLowerCase().startsWith('morph');\n if (isMorphRelation) {\n break;\n }\n\n // Ignore not visible fields other than createdBy and updatedBy\n const isVisible = contentTypes.isVisibleAttribute(model, attributeName);\n const isCreatorField = [CREATED_BY_ATTRIBUTE, UPDATED_BY_ATTRIBUTE].includes(attributeName);\n\n if (isVisible || isCreatorField) {\n acc[attributeName] = { select: opts.relationalFields };\n }\n\n break;\n }\n\n case 'media': {\n // We populate all media fields for completeness of webhook responses\n // see https://github.com/strapi/strapi/issues/21546\n acc[attributeName] = { select: ['*'] };\n break;\n }\n\n case 'component': {\n const populate = getDeepPopulate(attribute.component, opts);\n acc[attributeName] = { populate };\n break;\n }\n\n case 'dynamiczone': {\n // Use fragments to populate the dynamic zone components\n const populatedComponents = (attribute.components || []).reduce(\n (acc: any, componentUID: UID.Component) => {\n acc[componentUID] = { populate: getDeepPopulate(componentUID, opts) };\n return acc;\n },\n {}\n );\n\n acc[attributeName] = { on: populatedComponents };\n break;\n }\n default:\n break;\n }\n\n return acc;\n }, {});\n};\n"],"names":["acc"],"mappings":";AAUA,MAAM,EAAE,sBAAsB,yBAAyB,aAAa;AAG7D,MAAM,kBAAkB,CAAC,KAAiB,OAAgB,OAAO;AAChE,QAAA,QAAQ,OAAO,SAAS,GAAG;AACjC,QAAM,aAAa,OAAO,QAAQ,MAAM,UAAU;AAElD,SAAO,WAAW,OAAO,CAAC,KAAU,CAAC,eAAe,SAAS,MAAM;AACjE,YAAQ,UAAU,MAAM;AAAA,MACtB,KAAK,YAAY;AAEf,cAAM,kBAAkB,UAAU,SAAS,YAAY,EAAE,WAAW,OAAO;AAC3E,YAAI,iBAAiB;AACnB;AAAA,QACF;AAGA,cAAM,YAAY,aAAa,mBAAmB,OAAO,aAAa;AACtE,cAAM,iBAAiB,CAAC,sBAAsB,oBAAoB,EAAE,SAAS,aAAa;AAE1F,YAAI,aAAa,gBAAgB;AAC/B,cAAI,aAAa,IAAI,EAAE,QAAQ,KAAK,iBAAiB;AAAA,QACvD;AAEA;AAAA,MACF;AAAA,MAEA,KAAK,SAAS;AAGZ,YAAI,aAAa,IAAI,EAAE,QAAQ,CAAC,GAAG,EAAE;AACrC;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,cAAM,WAAW,gBAAgB,UAAU,WAAW,IAAI;AACtD,YAAA,aAAa,IAAI,EAAE;AACvB;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAElB,cAAM,uBAAuB,UAAU,cAAc,CAAI,GAAA;AAAA,UACvD,CAACA,MAAU,iBAAgC;AACzCA,iBAAI,YAAY,IAAI,EAAE,UAAU,gBAAgB,cAAc,IAAI;AAC3DA,mBAAAA;AAAAA,UACT;AAAA,UACA,CAAC;AAAA,QAAA;AAGH,YAAI,aAAa,IAAI,EAAE,IAAI,oBAAoB;AAC/C;AAAA,MACF;AAAA,IAGF;AAEO,WAAA;AAAA,EACT,GAAG,CAAE,CAAA;AACP;"}
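
The populate.js and populate.mjs source maps above carry the functional change to getDeepPopulate: media attributes are now populated with select: ['*'] so that webhook payloads contain every media field (see https://github.com/strapi/strapi/issues/21546). As a rough illustration, for a hypothetical api::article.article schema with a cover media attribute, the populate object built by getDeepPopulate would now include an entry along these lines:

// Hypothetical shape of getDeepPopulate('api::article.article') after this
// change; `cover` is an assumed media attribute on an assumed schema.
const populate = {
  cover: {
    // every media field is selected so webhook responses are complete
    // (https://github.com/strapi/strapi/issues/21546)
    select: ['*'],
  },
};
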
@@ -24,6 +24,7 @@ export declare const transformAttribute: (name: string, attribute: Schema.Attrib
 type: "biginteger";
 pluginOptions?: object | undefined;
 searchable?: boolean | undefined;
+column?: Partial<Schema.Attribute.Column> | undefined;
 configurable?: boolean | undefined;
 default?: string | (() => string) | undefined;
 min?: string | undefined;
@@ -37,6 +38,7 @@ export declare const transformAttribute: (name: string, attribute: Schema.Attrib
 type: "boolean";
 pluginOptions?: object | undefined;
 searchable?: boolean | undefined;
+column?: Partial<Schema.Attribute.Column> | undefined;
 configurable?: boolean | undefined;
 default?: boolean | (() => boolean) | undefined;
 private?: boolean | undefined;
@@ -47,6 +49,7 @@ export declare const transformAttribute: (name: string, attribute: Schema.Attrib
 type: "blocks";
 pluginOptions?: object | undefined;
 searchable?: boolean | undefined;
+column?: Partial<Schema.Attribute.Column> | undefined;
 configurable?: boolean | undefined;
 private?: boolean | undefined;
 required?: boolean | undefined;
@@ -56,6 +59,7 @@ export declare const transformAttribute: (name: string, attribute: Schema.Attrib
 type: "datetime";
 pluginOptions?: object | undefined;
 searchable?: boolean | undefined;
+column?: Partial<Schema.Attribute.Column> | undefined;
 configurable?: boolean | undefined;
 default?: Schema.Attribute.DateTimeValue | (() => Schema.Attribute.DateTimeValue) | undefined;
 private?: boolean | undefined;
@@ -67,6 +71,7 @@ export declare const transformAttribute: (name: string, attribute: Schema.Attrib
 type: "date";
 pluginOptions?: object | undefined;
 searchable?: boolean | undefined;
+column?: Partial<Schema.Attribute.Column> | undefined;
 configurable?: boolean | undefined;
 default?: Schema.Attribute.DateValue | (() => Schema.Attribute.DateValue) | undefined;
 private?: boolean | undefined;
@@ -78,6 +83,7 @@ export declare const transformAttribute: (name: string, attribute: Schema.Attrib
 type: "decimal";
 pluginOptions?: object | undefined;
 searchable?: boolean | undefined;
+column?: Partial<Schema.Attribute.Column> | undefined;
 configurable?: boolean | undefined;
 default?: number | (() => number) | undefined;
 min?: number | undefined;
@@ -91,6 +97,7 @@ export declare const transformAttribute: (name: string, attribute: Schema.Attrib
 type: "email";
 pluginOptions?: object | undefined;
 searchable?: boolean | undefined;
+column?: Partial<Schema.Attribute.Column> | undefined;
 configurable?: boolean | undefined;
 default?: string | (() => string) | undefined;
 minLength?: number | undefined;
@@ -104,6 +111,7 @@ export declare const transformAttribute: (name: string, attribute: Schema.Attrib
 type: "enumeration";
 pluginOptions?: object | undefined;
 searchable?: boolean | undefined;
+column?: Partial<Schema.Attribute.Column> | undefined;
 enum: string[];
 enumName?: string | undefined;
 configurable?: boolean | undefined;
@@ -116,6 +124,7 @@ export declare const transformAttribute: (name: string, attribute: Schema.Attrib
 type: "float";
 pluginOptions?: object | undefined;
 searchable?: boolean | undefined;
+column?: Partial<Schema.Attribute.Column> | undefined;
 configurable?: boolean | undefined;
 default?: number | (() => number) | undefined;
 min?: number | undefined;
@@ -129,6 +138,7 @@ export declare const transformAttribute: (name: string, attribute: Schema.Attrib
 type: "integer";
 pluginOptions?: object | undefined;
 searchable?: boolean | undefined;
+column?: Partial<Schema.Attribute.Column> | undefined;
 configurable?: boolean | undefined;
 default?: number | (() => number) | undefined;
 min?: number | undefined;
@@ -142,6 +152,7 @@ export declare const transformAttribute: (name: string, attribute: Schema.Attrib
 type: "json";
 pluginOptions?: object | undefined;
 searchable?: boolean | undefined;
+column?: Partial<Schema.Attribute.Column> | undefined;
 configurable?: boolean | undefined;
 required?: boolean | undefined;
 private?: boolean | undefined;
@@ -152,6 +163,7 @@ export declare const transformAttribute: (name: string, attribute: Schema.Attrib
 type: "password";
 pluginOptions?: object | undefined;
 searchable?: boolean | undefined;
+column?: Partial<Schema.Attribute.Column> | undefined;
 configurable?: boolean | undefined;
 default?: string | (() => string) | undefined;
 minLength?: number | undefined;
@@ -164,6 +176,7 @@ export declare const transformAttribute: (name: string, attribute: Schema.Attrib
 type: "relation";
 pluginOptions?: object | undefined;
 searchable?: boolean | undefined;
+column?: Partial<Schema.Attribute.Column> | undefined;
 configurable?: boolean | undefined;
 private?: boolean | undefined;
 writable?: boolean | undefined;
@@ -175,6 +188,7 @@ export declare const transformAttribute: (name: string, attribute: Schema.Attrib
 type: "relation";
 pluginOptions?: object | undefined;
 searchable?: boolean | undefined;
+column?: Partial<Schema.Attribute.Column> | undefined;
 configurable?: boolean | undefined;
 private?: boolean | undefined;
 writable?: boolean | undefined;
@@ -186,6 +200,7 @@ export declare const transformAttribute: (name: string, attribute: Schema.Attrib
 type: "relation";
 pluginOptions?: object | undefined;
 searchable?: boolean | undefined;
+column?: Partial<Schema.Attribute.Column> | undefined;
 target: import("@strapi/types/dist/uid").ContentType;
 inversedBy?: string | undefined;
 mappedBy?: string | undefined;
@@ -200,6 +215,7 @@ export declare const transformAttribute: (name: string, attribute: Schema.Attrib
 type: "relation";
 pluginOptions?: object | undefined;
 searchable?: boolean | undefined;
+column?: Partial<Schema.Attribute.Column> | undefined;
 target: import("@strapi/types/dist/uid").ContentType;
 inversedBy?: string | undefined;
 mappedBy?: string | undefined;
@@ -214,6 +230,7 @@ export declare const transformAttribute: (name: string, attribute: Schema.Attrib
 type: "relation";
 pluginOptions?: object | undefined;
 searchable?: boolean | undefined;
+column?: Partial<Schema.Attribute.Column> | undefined;
 target: import("@strapi/types/dist/uid").ContentType;
 inversedBy?: string | undefined;
 mappedBy?: string | undefined;
@@ -228,6 +245,7 @@ export declare const transformAttribute: (name: string, attribute: Schema.Attrib
 type: "relation";
 pluginOptions?: object | undefined;
 searchable?: boolean | undefined;
+column?: Partial<Schema.Attribute.Column> | undefined;
 target: import("@strapi/types/dist/uid").ContentType;
 inversedBy?: string | undefined;
 mappedBy?: string | undefined;
@@ -242,6 +260,7 @@ export declare const transformAttribute: (name: string, attribute: Schema.Attrib
 type: "relation";
 pluginOptions?: object | undefined;
 searchable?: boolean | undefined;
+column?: Partial<Schema.Attribute.Column> | undefined;
 target: import("@strapi/types/dist/uid").ContentType;
 configurable?: boolean | undefined;
 private?: boolean | undefined;
@@ -254,6 +273,7 @@ export declare const transformAttribute: (name: string, attribute: Schema.Attrib
 type: "relation";
 pluginOptions?: object | undefined;
 searchable?: boolean | undefined;
+column?: Partial<Schema.Attribute.Column> | undefined;
 target: import("@strapi/types/dist/uid").ContentType;
 configurable?: boolean | undefined;
 private?: boolean | undefined;
@@ -266,6 +286,7 @@ export declare const transformAttribute: (name: string, attribute: Schema.Attrib
 type: "relation";
 pluginOptions?: object | undefined;
 searchable?: boolean | undefined;
+column?: Partial<Schema.Attribute.Column> | undefined;
 target: import("@strapi/types/dist/uid").ContentType;
 morphBy?: string | undefined;
 configurable?: boolean | undefined;
@@ -279,6 +300,7 @@ export declare const transformAttribute: (name: string, attribute: Schema.Attrib
 type: "relation";
 pluginOptions?: object | undefined;
 searchable?: boolean | undefined;
+column?: Partial<Schema.Attribute.Column> | undefined;
 target: import("@strapi/types/dist/uid").ContentType;
 morphBy?: string | undefined;
 configurable?: boolean | undefined;
@@ -292,6 +314,7 @@ export declare const transformAttribute: (name: string, attribute: Schema.Attrib
 type: "richtext";
 pluginOptions?: object | undefined;
 searchable?: boolean | undefined;
+column?: Partial<Schema.Attribute.Column> | undefined;
 configurable?: boolean | undefined;
 default?: string | (() => string) | undefined;
 minLength?: number | undefined;
@@ -304,6 +327,7 @@ export declare const transformAttribute: (name: string, attribute: Schema.Attrib
 type: "string";
 pluginOptions?: object | undefined;
 searchable?: boolean | undefined;
+column?: Partial<Schema.Attribute.Column> | undefined;
 regex?: RegExp | undefined;
 configurable?: boolean | undefined;
 default?: string | (() => string) | undefined;
@@ -318,6 +342,7 @@ export declare const transformAttribute: (name: string, attribute: Schema.Attrib
 type: "text";
 pluginOptions?: object | undefined;
 searchable?: boolean | undefined;
+column?: Partial<Schema.Attribute.Column> | undefined;
 regex?: RegExp | undefined;
 configurable?: boolean | undefined;
 default?: string | (() => string) | undefined;
@@ -332,6 +357,7 @@ export declare const transformAttribute: (name: string, attribute: Schema.Attrib
 type: "time";
 pluginOptions?: object | undefined;
 searchable?: boolean | undefined;
+column?: Partial<Schema.Attribute.Column> | undefined;
 configurable?: boolean | undefined;
 default?: Schema.Attribute.TimeValue | (() => Schema.Attribute.TimeValue) | undefined;
 private?: boolean | undefined;
@@ -343,6 +369,7 @@ export declare const transformAttribute: (name: string, attribute: Schema.Attrib
 type: "timestamp";
 pluginOptions?: object | undefined;
 searchable?: boolean | undefined;
+column?: Partial<Schema.Attribute.Column> | undefined;
 configurable?: boolean | undefined;
 default?: Schema.Attribute.TimestampValue | (() => Schema.Attribute.TimestampValue) | undefined;
 private?: boolean | undefined;
@@ -354,6 +381,7 @@ export declare const transformAttribute: (name: string, attribute: Schema.Attrib
 type: "uid";
 pluginOptions?: object | undefined;
 searchable?: boolean | undefined;
+column?: Partial<Schema.Attribute.Column> | undefined;
 targetField?: string | undefined;
 options?: Schema.Attribute.UIDOptions | undefined;
 configurable?: boolean | undefined;
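
The declaration changes above are mechanical: every attribute variant accepted by transformAttribute gains an optional column property typed as Partial<Schema.Attribute.Column>, so a schema attribute can carry database-column options through to the generated model. The concrete members of Schema.Attribute.Column are not shown in this diff, so the sketch below only marks where such an override would sit on an attribute definition (the empty object is a placeholder, not a recommendation):

import type { Schema } from '@strapi/types';

// Sketch only: a string attribute that also carries a (partial, unspecified)
// column override, matching the new optional property in the declarations above.
const columnOverride: Partial<Schema.Attribute.Column> = {};

const titleAttribute = {
  type: 'string' as const,
  required: true,
  column: columnOverride,
};
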
@@ -1 +1 @@
-
{"version":3,"file":"transform-content-types-to-models.d.ts","sourceRoot":"","sources":["../../src/utils/transform-content-types-to-models.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,KAAK,EAAE,KAAK,WAAW,EAAE,MAAM,kBAAkB,CAAC;AAChE,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,eAAe,CAAC;AACpD,OAAO,EAAE,QAAQ,EAAE,MAAM,sBAAsB,CAAC;AAIhD;;;;;;;;;;;GAWG;AAEH,eAAO,MAAM,yBAAyB,mBAAoB,MAAM,eAAe,WAAW,WAKzF,CAAC;AAEF,eAAO,MAAM,kBAAkB,mBAAoB,MAAM,eAAe,WAAW,WAKlF,CAAC;AAEF,eAAO,MAAM,gCAAgC,gBAAiB,WAAW,WAKxE,CAAC;AAEF,eAAO,MAAM,iCAAiC,gBAAiB,WAAW,WAKzE,CAAC;AAEF,eAAO,MAAM,sBAAsB,gBAAiB,WAAW,WAE9D,CAAC;AAEF,eAAO,MAAM,uBAAuB,gBAAiB,MAAM,eAAe,WAAW,WAMpF,CAAC;AAIF,MAAM,MAAM,sBAAsB,GAAG,MAAM,CAAC,iBAAiB,GAC3D,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,iBAAiB,EAAE,gBAAgB,GAAG,KAAK,GAAG,WAAW,CAAC,CAAC,GAChF,IAAI,CAAC,KAAK,EAAE,YAAY,CAAC,CAAC;AAG5B,eAAO,MAAM,kBAAkB,SACvB,MAAM,aACD,OAAO,SAAS,CAAC,YAAY,eAC3B,sBAAsB,eACtB,WAAW
+
{"version":3,"file":"transform-content-types-to-models.d.ts","sourceRoot":"","sources":["../../src/utils/transform-content-types-to-models.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,KAAK,EAAE,KAAK,WAAW,EAAE,MAAM,kBAAkB,CAAC;AAChE,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,eAAe,CAAC;AACpD,OAAO,EAAE,QAAQ,EAAE,MAAM,sBAAsB,CAAC;AAIhD;;;;;;;;;;;GAWG;AAEH,eAAO,MAAM,yBAAyB,mBAAoB,MAAM,eAAe,WAAW,WAKzF,CAAC;AAEF,eAAO,MAAM,kBAAkB,mBAAoB,MAAM,eAAe,WAAW,WAKlF,CAAC;AAEF,eAAO,MAAM,gCAAgC,gBAAiB,WAAW,WAKxE,CAAC;AAEF,eAAO,MAAM,iCAAiC,gBAAiB,WAAW,WAKzE,CAAC;AAEF,eAAO,MAAM,sBAAsB,gBAAiB,WAAW,WAE9D,CAAC;AAEF,eAAO,MAAM,uBAAuB,gBAAiB,MAAM,eAAe,WAAW,WAMpF,CAAC;AAIF,MAAM,MAAM,sBAAsB,GAAG,MAAM,CAAC,iBAAiB,GAC3D,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,iBAAiB,EAAE,gBAAgB,GAAG,KAAK,GAAG,WAAW,CAAC,CAAC,GAChF,IAAI,CAAC,KAAK,EAAE,YAAY,CAAC,CAAC;AAG5B,eAAO,MAAM,kBAAkB,SACvB,MAAM,aACD,OAAO,SAAS,CAAC,YAAY,eAC3B,sBAAsB,eACtB,WAAW;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CA+FzB,CAAC;AAEF,eAAO,MAAM,mBAAmB,gBACjB,sBAAsB,eACtB,WAAW,OAazB,CAAC;AAEF,eAAO,MAAM,iBAAiB,gBACf,sBAAsB;UACc,aAAa,GAAG,WAAW;CAI7E,CAAC;AAEF,eAAO,MAAM,gBAAgB,iBAAW,CAAC;AAgFzC,eAAO,MAAM,6BAA6B,iBAC1B,sBAAsB,EAAE,eACzB,WAAW,KACvB,KAAK,EAkEP,CAAC"}
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "@strapi/core",
-"version": "0.0.0-next.
+"version": "0.0.0-next.bb6ff32f5168f3e380d3d9acba90a9d53bfcfb89",
 "description": "Core of Strapi",
 "homepage": "https://strapi.io",
 "bugs": {
@@ -53,17 +53,17 @@
 },
 "dependencies": {
 "@koa/cors": "5.0.0",
-"@koa/router": "12.0.
+"@koa/router": "12.0.2",
 "@paralleldrive/cuid2": "2.2.2",
-"@strapi/admin": "0.0.0-next.
-"@strapi/database": "0.0.0-next.
-"@strapi/generators": "0.0.0-next.
-"@strapi/logger": "0.0.0-next.
+"@strapi/admin": "0.0.0-next.bb6ff32f5168f3e380d3d9acba90a9d53bfcfb89",
+"@strapi/database": "0.0.0-next.bb6ff32f5168f3e380d3d9acba90a9d53bfcfb89",
+"@strapi/generators": "0.0.0-next.bb6ff32f5168f3e380d3d9acba90a9d53bfcfb89",
+"@strapi/logger": "0.0.0-next.bb6ff32f5168f3e380d3d9acba90a9d53bfcfb89",
 "@strapi/pack-up": "5.0.0",
-"@strapi/permissions": "0.0.0-next.
-"@strapi/types": "0.0.0-next.
-"@strapi/typescript-utils": "0.0.0-next.
-"@strapi/utils": "0.0.0-next.
+"@strapi/permissions": "0.0.0-next.bb6ff32f5168f3e380d3d9acba90a9d53bfcfb89",
+"@strapi/types": "0.0.0-next.bb6ff32f5168f3e380d3d9acba90a9d53bfcfb89",
+"@strapi/typescript-utils": "0.0.0-next.bb6ff32f5168f3e380d3d9acba90a9d53bfcfb89",
+"@strapi/utils": "0.0.0-next.bb6ff32f5168f3e380d3d9acba90a9d53bfcfb89",
 "bcryptjs": "2.4.3",
 "boxen": "5.1.2",
 "chalk": "4.1.2",
@@ -126,13 +126,13 @@
 "@types/node": "18.19.24",
 "@types/node-schedule": "2.1.7",
 "@types/statuses": "2.0.1",
-"eslint-config-custom": "0.0.0-next.
+"eslint-config-custom": "0.0.0-next.bb6ff32f5168f3e380d3d9acba90a9d53bfcfb89",
 "supertest": "6.3.3",
-"tsconfig": "0.0.0-next.
+"tsconfig": "0.0.0-next.bb6ff32f5168f3e380d3d9acba90a9d53bfcfb89"
 },
 "engines": {
-"node": ">=18.0.0 <=
+"node": ">=18.0.0 <=22.x.x",
 "npm": ">=6.0.0"
 },
-"gitHead": "
+"gitHead": "bb6ff32f5168f3e380d3d9acba90a9d53bfcfb89"
 }
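
Beyond the dependency pins moving to the new prerelease hash, the engines field now allows Node 22 (">=18.0.0 <=22.x.x"). If you want to assert that range in your own tooling (not something @strapi/core does in this diff), a quick check with the semver package could look like:

import semver from 'semver';

// Warn when the running Node.js version falls outside the range that
// @strapi/core now declares in its "engines" field.
const supportedRange = '>=18.0.0 <=22.x.x';

if (!semver.satisfies(process.version, supportedRange)) {
  console.warn(
    `Node ${process.version} is outside the supported range ${supportedRange}`
  );
}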