@payloadcms/plugin-import-export 3.75.0 → 3.76.0-canary.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/components/ExportPreview/index.d.ts.map +1 -1
- package/dist/components/ExportPreview/index.js +30 -12
- package/dist/components/ExportPreview/index.js.map +1 -1
- package/dist/components/ExportPreview/index.scss +12 -0
- package/dist/components/ImportPreview/index.js +3 -15
- package/dist/components/ImportPreview/index.js.map +1 -1
- package/dist/export/createExport.d.ts +5 -0
- package/dist/export/createExport.d.ts.map +1 -1
- package/dist/export/createExport.js +36 -12
- package/dist/export/createExport.js.map +1 -1
- package/dist/export/getCreateExportCollectionTask.d.ts.map +1 -1
- package/dist/export/getCreateExportCollectionTask.js +3 -0
- package/dist/export/getCreateExportCollectionTask.js.map +1 -1
- package/dist/export/getExportCollection.d.ts.map +1 -1
- package/dist/export/getExportCollection.js +21 -1
- package/dist/export/getExportCollection.js.map +1 -1
- package/dist/export/getFields.d.ts.map +1 -1
- package/dist/export/getFields.js +7 -3
- package/dist/export/getFields.js.map +1 -1
- package/dist/export/handleDownload.d.ts.map +1 -1
- package/dist/export/handleDownload.js +18 -1
- package/dist/export/handleDownload.js.map +1 -1
- package/dist/export/handlePreview.d.ts.map +1 -1
- package/dist/export/handlePreview.js +32 -10
- package/dist/export/handlePreview.js.map +1 -1
- package/dist/exports/types.d.ts +1 -1
- package/dist/exports/types.d.ts.map +1 -1
- package/dist/exports/types.js.map +1 -1
- package/dist/import/batchProcessor.js +50 -53
- package/dist/import/batchProcessor.js.map +1 -1
- package/dist/import/createImport.d.ts +6 -1
- package/dist/import/createImport.d.ts.map +1 -1
- package/dist/import/createImport.js +5 -1
- package/dist/import/createImport.js.map +1 -1
- package/dist/import/getCreateImportCollectionTask.d.ts.map +1 -1
- package/dist/import/getCreateImportCollectionTask.js +3 -0
- package/dist/import/getCreateImportCollectionTask.js.map +1 -1
- package/dist/import/getImportCollection.d.ts.map +1 -1
- package/dist/import/getImportCollection.js +15 -0
- package/dist/import/getImportCollection.js.map +1 -1
- package/dist/import/handlePreview.d.ts.map +1 -1
- package/dist/import/handlePreview.js +11 -0
- package/dist/import/handlePreview.js.map +1 -1
- package/dist/index.d.ts +22 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +26 -2
- package/dist/index.js.map +1 -1
- package/dist/translations/languages/ar.d.ts.map +1 -1
- package/dist/translations/languages/ar.js +3 -0
- package/dist/translations/languages/ar.js.map +1 -1
- package/dist/translations/languages/az.d.ts.map +1 -1
- package/dist/translations/languages/az.js +3 -0
- package/dist/translations/languages/az.js.map +1 -1
- package/dist/translations/languages/bg.d.ts.map +1 -1
- package/dist/translations/languages/bg.js +3 -0
- package/dist/translations/languages/bg.js.map +1 -1
- package/dist/translations/languages/bnBd.d.ts.map +1 -1
- package/dist/translations/languages/bnBd.js +22 -19
- package/dist/translations/languages/bnBd.js.map +1 -1
- package/dist/translations/languages/bnIn.d.ts.map +1 -1
- package/dist/translations/languages/bnIn.js +22 -19
- package/dist/translations/languages/bnIn.js.map +1 -1
- package/dist/translations/languages/ca.d.ts.map +1 -1
- package/dist/translations/languages/ca.js +3 -0
- package/dist/translations/languages/ca.js.map +1 -1
- package/dist/translations/languages/cs.d.ts.map +1 -1
- package/dist/translations/languages/cs.js +3 -0
- package/dist/translations/languages/cs.js.map +1 -1
- package/dist/translations/languages/da.d.ts.map +1 -1
- package/dist/translations/languages/da.js +3 -0
- package/dist/translations/languages/da.js.map +1 -1
- package/dist/translations/languages/de.d.ts.map +1 -1
- package/dist/translations/languages/de.js +3 -0
- package/dist/translations/languages/de.js.map +1 -1
- package/dist/translations/languages/en.d.ts +3 -0
- package/dist/translations/languages/en.d.ts.map +1 -1
- package/dist/translations/languages/en.js +3 -0
- package/dist/translations/languages/en.js.map +1 -1
- package/dist/translations/languages/es.d.ts.map +1 -1
- package/dist/translations/languages/es.js +3 -0
- package/dist/translations/languages/es.js.map +1 -1
- package/dist/translations/languages/et.d.ts.map +1 -1
- package/dist/translations/languages/et.js +3 -0
- package/dist/translations/languages/et.js.map +1 -1
- package/dist/translations/languages/fa.d.ts.map +1 -1
- package/dist/translations/languages/fa.js +3 -0
- package/dist/translations/languages/fa.js.map +1 -1
- package/dist/translations/languages/fr.d.ts.map +1 -1
- package/dist/translations/languages/fr.js +3 -0
- package/dist/translations/languages/fr.js.map +1 -1
- package/dist/translations/languages/he.d.ts.map +1 -1
- package/dist/translations/languages/he.js +3 -0
- package/dist/translations/languages/he.js.map +1 -1
- package/dist/translations/languages/hr.d.ts.map +1 -1
- package/dist/translations/languages/hr.js +3 -0
- package/dist/translations/languages/hr.js.map +1 -1
- package/dist/translations/languages/hu.d.ts.map +1 -1
- package/dist/translations/languages/hu.js +3 -0
- package/dist/translations/languages/hu.js.map +1 -1
- package/dist/translations/languages/hy.d.ts.map +1 -1
- package/dist/translations/languages/hy.js +3 -0
- package/dist/translations/languages/hy.js.map +1 -1
- package/dist/translations/languages/id.d.ts.map +1 -1
- package/dist/translations/languages/id.js +13 -10
- package/dist/translations/languages/id.js.map +1 -1
- package/dist/translations/languages/is.d.ts.map +1 -1
- package/dist/translations/languages/is.js +3 -0
- package/dist/translations/languages/is.js.map +1 -1
- package/dist/translations/languages/it.d.ts.map +1 -1
- package/dist/translations/languages/it.js +3 -0
- package/dist/translations/languages/it.js.map +1 -1
- package/dist/translations/languages/ja.d.ts.map +1 -1
- package/dist/translations/languages/ja.js +3 -0
- package/dist/translations/languages/ja.js.map +1 -1
- package/dist/translations/languages/ko.d.ts.map +1 -1
- package/dist/translations/languages/ko.js +3 -0
- package/dist/translations/languages/ko.js.map +1 -1
- package/dist/translations/languages/lt.d.ts.map +1 -1
- package/dist/translations/languages/lt.js +3 -0
- package/dist/translations/languages/lt.js.map +1 -1
- package/dist/translations/languages/lv.d.ts.map +1 -1
- package/dist/translations/languages/lv.js +16 -13
- package/dist/translations/languages/lv.js.map +1 -1
- package/dist/translations/languages/my.d.ts.map +1 -1
- package/dist/translations/languages/my.js +3 -0
- package/dist/translations/languages/my.js.map +1 -1
- package/dist/translations/languages/nb.d.ts.map +1 -1
- package/dist/translations/languages/nb.js +3 -0
- package/dist/translations/languages/nb.js.map +1 -1
- package/dist/translations/languages/nl.d.ts.map +1 -1
- package/dist/translations/languages/nl.js +3 -0
- package/dist/translations/languages/nl.js.map +1 -1
- package/dist/translations/languages/pl.d.ts.map +1 -1
- package/dist/translations/languages/pl.js +3 -0
- package/dist/translations/languages/pl.js.map +1 -1
- package/dist/translations/languages/pt.d.ts.map +1 -1
- package/dist/translations/languages/pt.js +3 -0
- package/dist/translations/languages/pt.js.map +1 -1
- package/dist/translations/languages/ro.d.ts.map +1 -1
- package/dist/translations/languages/ro.js +3 -0
- package/dist/translations/languages/ro.js.map +1 -1
- package/dist/translations/languages/rs.d.ts.map +1 -1
- package/dist/translations/languages/rs.js +3 -0
- package/dist/translations/languages/rs.js.map +1 -1
- package/dist/translations/languages/rsLatin.d.ts.map +1 -1
- package/dist/translations/languages/rsLatin.js +3 -0
- package/dist/translations/languages/rsLatin.js.map +1 -1
- package/dist/translations/languages/ru.d.ts.map +1 -1
- package/dist/translations/languages/ru.js +3 -0
- package/dist/translations/languages/ru.js.map +1 -1
- package/dist/translations/languages/sk.d.ts.map +1 -1
- package/dist/translations/languages/sk.js +3 -0
- package/dist/translations/languages/sk.js.map +1 -1
- package/dist/translations/languages/sl.d.ts.map +1 -1
- package/dist/translations/languages/sl.js +3 -0
- package/dist/translations/languages/sl.js.map +1 -1
- package/dist/translations/languages/sv.d.ts.map +1 -1
- package/dist/translations/languages/sv.js +3 -0
- package/dist/translations/languages/sv.js.map +1 -1
- package/dist/translations/languages/ta.d.ts.map +1 -1
- package/dist/translations/languages/ta.js +3 -0
- package/dist/translations/languages/ta.js.map +1 -1
- package/dist/translations/languages/th.d.ts.map +1 -1
- package/dist/translations/languages/th.js +3 -0
- package/dist/translations/languages/th.js.map +1 -1
- package/dist/translations/languages/tr.d.ts.map +1 -1
- package/dist/translations/languages/tr.js +3 -0
- package/dist/translations/languages/tr.js.map +1 -1
- package/dist/translations/languages/uk.d.ts.map +1 -1
- package/dist/translations/languages/uk.js +3 -0
- package/dist/translations/languages/uk.js.map +1 -1
- package/dist/translations/languages/vi.d.ts.map +1 -1
- package/dist/translations/languages/vi.js +3 -0
- package/dist/translations/languages/vi.js.map +1 -1
- package/dist/translations/languages/zh.d.ts.map +1 -1
- package/dist/translations/languages/zh.js +3 -0
- package/dist/translations/languages/zh.js.map +1 -1
- package/dist/translations/languages/zhTw.d.ts.map +1 -1
- package/dist/translations/languages/zhTw.js +3 -0
- package/dist/translations/languages/zhTw.js.map +1 -1
- package/dist/types.d.ts +44 -1
- package/dist/types.d.ts.map +1 -1
- package/dist/types.js.map +1 -1
- package/dist/utilities/buildDisabledFieldRegex.d.ts +11 -2
- package/dist/utilities/buildDisabledFieldRegex.d.ts.map +1 -1
- package/dist/utilities/buildDisabledFieldRegex.js +33 -7
- package/dist/utilities/buildDisabledFieldRegex.js.map +1 -1
- package/dist/utilities/buildDisabledFieldRegex.spec.js +64 -0
- package/dist/utilities/buildDisabledFieldRegex.spec.js.map +1 -0
- package/dist/utilities/collectTimezoneCompanionFields.d.ts +24 -0
- package/dist/utilities/collectTimezoneCompanionFields.d.ts.map +1 -0
- package/dist/utilities/collectTimezoneCompanionFields.js +89 -0
- package/dist/utilities/collectTimezoneCompanionFields.js.map +1 -0
- package/dist/utilities/collectTimezoneCompanionFields.spec.js +319 -0
- package/dist/utilities/collectTimezoneCompanionFields.spec.js.map +1 -0
- package/dist/utilities/fieldToRegex.d.ts +14 -0
- package/dist/utilities/fieldToRegex.d.ts.map +1 -0
- package/dist/utilities/fieldToRegex.js +34 -0
- package/dist/utilities/fieldToRegex.js.map +1 -0
- package/dist/utilities/fieldToRegex.spec.js +151 -0
- package/dist/utilities/fieldToRegex.spec.js.map +1 -0
- package/dist/utilities/flattenObject.d.ts +7 -1
- package/dist/utilities/flattenObject.d.ts.map +1 -1
- package/dist/utilities/flattenObject.js +30 -18
- package/dist/utilities/flattenObject.js.map +1 -1
- package/dist/utilities/getExportFieldFunctions.d.ts.map +1 -1
- package/dist/utilities/getExportFieldFunctions.js +7 -0
- package/dist/utilities/getExportFieldFunctions.js.map +1 -1
- package/dist/utilities/getImportFieldFunctions.d.ts.map +1 -1
- package/dist/utilities/getImportFieldFunctions.js +2 -16
- package/dist/utilities/getImportFieldFunctions.js.map +1 -1
- package/dist/utilities/getPluginCollections.d.ts +1 -0
- package/dist/utilities/getPluginCollections.d.ts.map +1 -1
- package/dist/utilities/getPluginCollections.js +43 -10
- package/dist/utilities/getPluginCollections.js.map +1 -1
- package/dist/utilities/getSchemaColumns.d.ts +8 -2
- package/dist/utilities/getSchemaColumns.d.ts.map +1 -1
- package/dist/utilities/getSchemaColumns.js +61 -27
- package/dist/utilities/getSchemaColumns.js.map +1 -1
- package/dist/utilities/parseCSV.d.ts.map +1 -1
- package/dist/utilities/parseCSV.js +4 -10
- package/dist/utilities/parseCSV.js.map +1 -1
- package/dist/utilities/resolveLimit.d.ts +15 -0
- package/dist/utilities/resolveLimit.d.ts.map +1 -0
- package/dist/utilities/resolveLimit.js +21 -0
- package/dist/utilities/resolveLimit.js.map +1 -0
- package/dist/utilities/unflattenObject.d.ts +13 -0
- package/dist/utilities/unflattenObject.d.ts.map +1 -1
- package/dist/utilities/unflattenObject.js +64 -65
- package/dist/utilities/unflattenObject.js.map +1 -1
- package/package.json +8 -8
- package/dist/utilities/getvalueAtPath.d.ts +0 -15
- package/dist/utilities/getvalueAtPath.d.ts.map +0 -1
- package/dist/utilities/getvalueAtPath.js +0 -49
- package/dist/utilities/getvalueAtPath.js.map +0 -1
package/dist/export/handlePreview.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../../src/export/handlePreview.ts"],"sourcesContent":["import type { FlattenedField, PayloadRequest, Where } from 'payload'\n\nimport { addDataAndFileToRequest } from 'payload'\n\nimport type { ExportPreviewResponse } from '../types.js'\n\nimport {\n DEFAULT_PREVIEW_LIMIT,\n MAX_PREVIEW_LIMIT,\n MIN_PREVIEW_LIMIT,\n MIN_PREVIEW_PAGE,\n} from '../constants.js'\nimport { flattenObject } from '../utilities/flattenObject.js'\nimport { getExportFieldFunctions } from '../utilities/getExportFieldFunctions.js'\nimport { getFlattenedFieldKeys } from '../utilities/getFlattenedFieldKeys.js'\nimport { getSchemaColumns } from '../utilities/getSchemaColumns.js'\nimport { getSelect } from '../utilities/getSelect.js'\nimport { getValueAtPath } from '../utilities/getvalueAtPath.js'\nimport { removeDisabledFields } from '../utilities/removeDisabledFields.js'\nimport { setNestedValue } from '../utilities/setNestedValue.js'\n\nexport const handlePreview = async (req: PayloadRequest): Promise<Response> => {\n await addDataAndFileToRequest(req)\n\n const {\n collectionSlug,\n draft: draftFromReq,\n fields,\n limit: exportLimit,\n locale,\n previewLimit: rawPreviewLimit = DEFAULT_PREVIEW_LIMIT,\n previewPage: rawPreviewPage = 1,\n sort,\n where: whereFromReq = {},\n } = req.data as {\n collectionSlug: string\n draft?: 'no' | 'yes'\n fields?: string[]\n format?: 'csv' | 'json'\n limit?: number\n locale?: string\n previewLimit?: number\n previewPage?: number\n sort?: any\n where?: any\n }\n\n // Validate and clamp pagination values to safe bounds\n const previewLimit = Math.max(MIN_PREVIEW_LIMIT, Math.min(rawPreviewLimit, MAX_PREVIEW_LIMIT))\n const previewPage = Math.max(MIN_PREVIEW_PAGE, rawPreviewPage)\n\n const targetCollection = req.payload.collections[collectionSlug]\n if (!targetCollection) {\n return Response.json(\n { error: `Collection with slug ${collectionSlug} not found` },\n { status: 400 },\n )\n }\n\n const select = Array.isArray(fields) && fields.length > 0 ? getSelect(fields) : undefined\n const draft = draftFromReq === 'yes'\n const collectionHasVersions = Boolean(targetCollection.config.versions)\n\n // Only filter by _status for versioned collections\n const publishedWhere: Where = collectionHasVersions ? { _status: { equals: 'published' } } : {}\n\n const where: Where = {\n and: [whereFromReq, draft ? {} : publishedWhere],\n }\n\n // Count total docs matching export criteria\n const countResult = await req.payload.count({\n collection: collectionSlug,\n overrideAccess: false,\n req,\n where,\n })\n\n const totalMatchingDocs = countResult.totalDocs\n\n // Calculate actual export count (respecting export limit)\n const exportTotalDocs =\n exportLimit && exportLimit > 0 ? Math.min(totalMatchingDocs, exportLimit) : totalMatchingDocs\n\n // Calculate preview pagination that respects export limit\n // Preview should only show docs that will actually be exported\n const previewStartIndex = (previewPage - 1) * previewLimit\n\n // Calculate pagination info based on export limit (not raw DB results)\n const previewTotalPages = exportTotalDocs === 0 ? 0 : Math.ceil(exportTotalDocs / previewLimit)\n\n const isCSV = req?.data?.format === 'csv'\n\n // Get locale codes for locale expansion when locale='all'\n const localeCodes =\n locale === 'all' && req.payload.config.localization\n ? 
req.payload.config.localization.localeCodes\n : undefined\n\n // Get disabled fields configuration\n const disabledFields =\n targetCollection.config.admin?.custom?.['plugin-import-export']?.disabledFields ?? []\n\n // Always compute columns for CSV (even if no docs) for consistent schema\n const columns = isCSV\n ? getSchemaColumns({\n collectionConfig: targetCollection.config,\n disabledFields,\n fields,\n locale: locale ?? undefined,\n localeCodes,\n })\n : undefined\n\n // If we're beyond the export limit, return empty docs with columns\n if (exportLimit && exportLimit > 0 && previewStartIndex >= exportLimit) {\n const response: ExportPreviewResponse = {\n columns,\n docs: [],\n exportTotalDocs,\n hasNextPage: false,\n hasPrevPage: previewPage > 1,\n limit: previewLimit,\n page: previewPage,\n totalDocs: exportTotalDocs,\n totalPages: previewTotalPages,\n }\n return Response.json(response)\n }\n\n // Fetch preview page with full previewLimit to maintain consistent pagination offsets\n // We'll trim the results afterwards if needed to respect export limit\n const result = await req.payload.find({\n collection: collectionSlug,\n depth: 1,\n draft,\n limit: previewLimit,\n locale,\n overrideAccess: false,\n page: previewPage,\n req,\n select,\n sort,\n where,\n })\n\n // Trim docs to respect export limit boundary\n let docs = result.docs\n if (exportLimit && exportLimit > 0) {\n const remainingInExport = exportLimit - previewStartIndex\n if (remainingInExport < docs.length) {\n docs = docs.slice(0, remainingInExport)\n }\n }\n\n // Transform docs based on format\n let transformed: Record<string, unknown>[]\n\n if (isCSV) {\n const toCSVFunctions = getExportFieldFunctions({\n fields: targetCollection.config.fields as FlattenedField[],\n })\n\n const possibleKeys = getFlattenedFieldKeys(\n targetCollection.config.fields as FlattenedField[],\n '',\n { localeCodes },\n )\n\n transformed = docs.map((doc) => {\n const row = flattenObject({\n doc,\n fields,\n toCSVFunctions,\n })\n\n for (const key of possibleKeys) {\n if (!(key in row)) {\n row[key] = null\n }\n }\n\n return row\n })\n } else {\n transformed = docs.map((doc) => {\n let output: Record<string, unknown> = { ...doc }\n\n // Remove disabled fields first\n output = removeDisabledFields(output, disabledFields)\n\n // Then trim to selected fields only (if fields are provided)\n if (Array.isArray(fields) && fields.length > 0) {\n const trimmed: Record<string, unknown> = {}\n\n for (const key of fields) {\n const value = getValueAtPath(output, key)\n setNestedValue(trimmed, key, value ?? 
null)\n }\n\n output = trimmed\n }\n\n return output\n })\n }\n\n const hasNextPage = previewPage < previewTotalPages\n const hasPrevPage = previewPage > 1\n\n const response: ExportPreviewResponse = {\n columns,\n docs: transformed,\n exportTotalDocs,\n hasNextPage,\n hasPrevPage,\n limit: previewLimit,\n page: previewPage,\n totalDocs: exportTotalDocs,\n totalPages: previewTotalPages,\n }\n\n return Response.json(response)\n}\n"],"names":["addDataAndFileToRequest","DEFAULT_PREVIEW_LIMIT","MAX_PREVIEW_LIMIT","MIN_PREVIEW_LIMIT","MIN_PREVIEW_PAGE","flattenObject","getExportFieldFunctions","getFlattenedFieldKeys","getSchemaColumns","getSelect","getValueAtPath","removeDisabledFields","setNestedValue","handlePreview","req","collectionSlug","draft","draftFromReq","fields","limit","exportLimit","locale","previewLimit","rawPreviewLimit","previewPage","rawPreviewPage","sort","where","whereFromReq","data","Math","max","min","targetCollection","payload","collections","Response","json","error","status","select","Array","isArray","length","undefined","collectionHasVersions","Boolean","config","versions","publishedWhere","_status","equals","and","countResult","count","collection","overrideAccess","totalMatchingDocs","totalDocs","exportTotalDocs","previewStartIndex","previewTotalPages","ceil","isCSV","format","localeCodes","localization","disabledFields","admin","custom","columns","collectionConfig","response","docs","hasNextPage","hasPrevPage","page","totalPages","result","find","depth","remainingInExport","slice","transformed","toCSVFunctions","possibleKeys","map","doc","row","key","output","trimmed","value"],"mappings":"AAEA,SAASA,uBAAuB,QAAQ,UAAS;AAIjD,SACEC,qBAAqB,EACrBC,iBAAiB,EACjBC,iBAAiB,EACjBC,gBAAgB,QACX,kBAAiB;AACxB,SAASC,aAAa,QAAQ,gCAA+B;AAC7D,SAASC,uBAAuB,QAAQ,0CAAyC;AACjF,SAASC,qBAAqB,QAAQ,wCAAuC;AAC7E,SAASC,gBAAgB,QAAQ,mCAAkC;AACnE,SAASC,SAAS,QAAQ,4BAA2B;AACrD,SAASC,cAAc,QAAQ,iCAAgC;AAC/D,SAASC,oBAAoB,QAAQ,uCAAsC;AAC3E,SAASC,cAAc,QAAQ,iCAAgC;AAE/D,OAAO,MAAMC,gBAAgB,OAAOC;IAClC,MAAMd,wBAAwBc;IAE9B,MAAM,EACJC,cAAc,EACdC,OAAOC,YAAY,EACnBC,MAAM,EACNC,OAAOC,WAAW,EAClBC,MAAM,EACNC,cAAcC,kBAAkBtB,qBAAqB,EACrDuB,aAAaC,iBAAiB,CAAC,EAC/BC,IAAI,EACJC,OAAOC,eAAe,CAAC,CAAC,EACzB,GAAGd,IAAIe,IAAI;IAaZ,sDAAsD;IACtD,MAAMP,eAAeQ,KAAKC,GAAG,CAAC5B,mBAAmB2B,KAAKE,GAAG,CAACT,iBAAiBrB;IAC3E,MAAMsB,cAAcM,KAAKC,GAAG,CAAC3B,kBAAkBqB;IAE/C,MAAMQ,mBAAmBnB,IAAIoB,OAAO,CAACC,WAAW,CAACpB,eAAe;IAChE,IAAI,CAACkB,kBAAkB;QACrB,OAAOG,SAASC,IAAI,CAClB;YAAEC,OAAO,CAAC,qBAAqB,EAAEvB,eAAe,UAAU,CAAC;QAAC,GAC5D;YAAEwB,QAAQ;QAAI;IAElB;IAEA,MAAMC,SAASC,MAAMC,OAAO,CAACxB,WAAWA,OAAOyB,MAAM,GAAG,IAAIlC,UAAUS,UAAU0B;IAChF,MAAM5B,QAAQC,iBAAiB;IAC/B,MAAM4B,wBAAwBC,QAAQb,iBAAiBc,MAAM,CAACC,QAAQ;IAEtE,mDAAmD;IACnD,MAAMC,iBAAwBJ,wBAAwB;QAAEK,SAAS;YAAEC,QAAQ;QAAY;IAAE,IAAI,CAAC;IAE9F,MAAMxB,QAAe;QACnByB,KAAK;YAACxB;YAAcZ,QAAQ,CAAC,IAAIiC;SAAe;IAClD;IAEA,4CAA4C;IAC5C,MAAMI,cAAc,MAAMvC,IAAIoB,OAAO,CAACoB,KAAK,CAAC;QAC1CC,YAAYxC;QACZyC,gBAAgB;QAChB1C;QACAa;IACF;IAEA,MAAM8B,oBAAoBJ,YAAYK,SAAS;IAE/C,0DAA0D;IAC1D,MAAMC,kBACJvC,eAAeA,cAAc,IAAIU,KAAKE,GAAG,CAACyB,mBAAmBrC,eAAeqC;IAE9E,0DAA0D;IAC1D,+DAA+D;IAC/D,MAAMG,oBAAoB,AAACpC,CAAAA,cAAc,CAAA,IAAKF;IAE9C,uEAAuE;IACvE,MAAMuC,oBAAoBF,oBAAoB,IAAI,IAAI7B,KAAKgC,IAAI,CAACH,kBAAkBrC;IAElF,MAAMyC,QAAQjD,KAAKe,MAAMmC,WAAW;IAEpC,0DAA0D;IAC1D,MAAMC,cACJ5C,WAAW,SAASP,IAAIoB,OAAO,CAACa,MAAM,CAACmB,YAAY,GAC/CpD,IAAIoB,OAAO,CAACa,MAAM,CAACmB,YAAY,CAACD,WAAW,GAC3CrB;IAEN,oCAAoC;IACpC,MAAMuB,iBACJlC,iBAAiBc,MAAM,CAACqB,KAAK,EAAEC,QAAQ,CAAC,uBAAuB,EAAEF,kBAAkB,EAAE;IAEvF,yEAAyE;IACzE,MAAMG,UAAUP,QACZvD,iBAAiB;
QACf+D,kBAAkBtC,iBAAiBc,MAAM;QACzCoB;QACAjD;QACAG,QAAQA,UAAUuB;QAClBqB;IACF,KACArB;IAEJ,mEAAmE;IACnE,IAAIxB,eAAeA,cAAc,KAAKwC,qBAAqBxC,aAAa;QACtE,MAAMoD,WAAkC;YACtCF;YACAG,MAAM,EAAE;YACRd;YACAe,aAAa;YACbC,aAAanD,cAAc;YAC3BL,OAAOG;YACPsD,MAAMpD;YACNkC,WAAWC;YACXkB,YAAYhB;QACd;QACA,OAAOzB,SAASC,IAAI,CAACmC;IACvB;IAEA,sFAAsF;IACtF,sEAAsE;IACtE,MAAMM,SAAS,MAAMhE,IAAIoB,OAAO,CAAC6C,IAAI,CAAC;QACpCxB,YAAYxC;QACZiE,OAAO;QACPhE;QACAG,OAAOG;QACPD;QACAmC,gBAAgB;QAChBoB,MAAMpD;QACNV;QACA0B;QACAd;QACAC;IACF;IAEA,6CAA6C;IAC7C,IAAI8C,OAAOK,OAAOL,IAAI;IACtB,IAAIrD,eAAeA,cAAc,GAAG;QAClC,MAAM6D,oBAAoB7D,cAAcwC;QACxC,IAAIqB,oBAAoBR,KAAK9B,MAAM,EAAE;YACnC8B,OAAOA,KAAKS,KAAK,CAAC,GAAGD;QACvB;IACF;IAEA,iCAAiC;IACjC,IAAIE;IAEJ,IAAIpB,OAAO;QACT,MAAMqB,iBAAiB9E,wBAAwB;YAC7CY,QAAQe,iBAAiBc,MAAM,CAAC7B,MAAM;QACxC;QAEA,MAAMmE,eAAe9E,sBACnB0B,iBAAiBc,MAAM,CAAC7B,MAAM,EAC9B,IACA;YAAE+C;QAAY;QAGhBkB,cAAcV,KAAKa,GAAG,CAAC,CAACC;YACtB,MAAMC,MAAMnF,cAAc;gBACxBkF;gBACArE;gBACAkE;YACF;YAEA,KAAK,MAAMK,OAAOJ,aAAc;gBAC9B,IAAI,CAAEI,CAAAA,OAAOD,GAAE,GAAI;oBACjBA,GAAG,CAACC,IAAI,GAAG;gBACb;YACF;YAEA,OAAOD;QACT;IACF,OAAO;QACLL,cAAcV,KAAKa,GAAG,CAAC,CAACC;YACtB,IAAIG,SAAkC;gBAAE,GAAGH,GAAG;YAAC;YAE/C,+BAA+B;YAC/BG,SAAS/E,qBAAqB+E,QAAQvB;YAEtC,6DAA6D;YAC7D,IAAI1B,MAAMC,OAAO,CAACxB,WAAWA,OAAOyB,MAAM,GAAG,GAAG;gBAC9C,MAAMgD,UAAmC,CAAC;gBAE1C,KAAK,MAAMF,OAAOvE,OAAQ;oBACxB,MAAM0E,QAAQlF,eAAegF,QAAQD;oBACrC7E,eAAe+E,SAASF,KAAKG,SAAS;gBACxC;gBAEAF,SAASC;YACX;YAEA,OAAOD;QACT;IACF;IAEA,MAAMhB,cAAclD,cAAcqC;IAClC,MAAMc,cAAcnD,cAAc;IAElC,MAAMgD,WAAkC;QACtCF;QACAG,MAAMU;QACNxB;QACAe;QACAC;QACAxD,OAAOG;QACPsD,MAAMpD;QACNkC,WAAWC;QACXkB,YAAYhB;IACd;IAEA,OAAOzB,SAASC,IAAI,CAACmC;AACvB,EAAC"}
+
{"version":3,"sources":["../../src/export/handlePreview.ts"],"sourcesContent":["import type { FlattenedField, PayloadRequest, Where } from 'payload'\n\nimport { addDataAndFileToRequest } from 'payload'\nimport { getObjectDotNotation } from 'payload/shared'\n\nimport type { ExportPreviewResponse } from '../types.js'\n\nimport {\n DEFAULT_PREVIEW_LIMIT,\n MAX_PREVIEW_LIMIT,\n MIN_PREVIEW_LIMIT,\n MIN_PREVIEW_PAGE,\n} from '../constants.js'\nimport { collectTimezoneCompanionFields } from '../utilities/collectTimezoneCompanionFields.js'\nimport { flattenObject } from '../utilities/flattenObject.js'\nimport { getExportFieldFunctions } from '../utilities/getExportFieldFunctions.js'\nimport { getFlattenedFieldKeys } from '../utilities/getFlattenedFieldKeys.js'\nimport { getSchemaColumns } from '../utilities/getSchemaColumns.js'\nimport { getSelect } from '../utilities/getSelect.js'\nimport { removeDisabledFields } from '../utilities/removeDisabledFields.js'\nimport { resolveLimit } from '../utilities/resolveLimit.js'\nimport { setNestedValue } from '../utilities/setNestedValue.js'\n\nexport const handlePreview = async (req: PayloadRequest): Promise<Response> => {\n await addDataAndFileToRequest(req)\n\n const {\n collectionSlug,\n draft: draftFromReq,\n fields,\n limit: exportLimit,\n locale,\n previewLimit: rawPreviewLimit = DEFAULT_PREVIEW_LIMIT,\n previewPage: rawPreviewPage = 1,\n sort,\n where: whereFromReq = {},\n } = req.data as {\n collectionSlug: string\n draft?: 'no' | 'yes'\n fields?: string[]\n format?: 'csv' | 'json'\n limit?: number\n locale?: string\n previewLimit?: number\n previewPage?: number\n sort?: any\n where?: any\n }\n\n // Validate and clamp pagination values to safe bounds\n const previewLimit = Math.max(MIN_PREVIEW_LIMIT, Math.min(rawPreviewLimit, MAX_PREVIEW_LIMIT))\n const previewPage = Math.max(MIN_PREVIEW_PAGE, rawPreviewPage)\n\n const targetCollection = req.payload.collections[collectionSlug]\n if (!targetCollection) {\n return Response.json(\n { error: `Collection with slug ${collectionSlug} not found` },\n { status: 400 },\n )\n }\n\n const pluginConfig = targetCollection.config.custom?.['plugin-import-export']\n const maxLimit = await resolveLimit({\n limit: pluginConfig?.exportLimit,\n req,\n })\n\n const select = Array.isArray(fields) && fields.length > 0 ? getSelect(fields) : undefined\n const draft = draftFromReq === 'yes'\n const collectionHasVersions = Boolean(targetCollection.config.versions)\n\n // Only filter by _status for versioned collections\n const publishedWhere: Where = collectionHasVersions ? { _status: { equals: 'published' } } : {}\n\n const where: Where = {\n and: [whereFromReq, draft ? 
{} : publishedWhere],\n }\n\n // Count total docs matching export criteria\n const countResult = await req.payload.count({\n collection: collectionSlug,\n overrideAccess: false,\n req,\n where,\n })\n\n const totalMatchingDocs = countResult.totalDocs\n\n // Calculate actual export count (respecting both export limit and max limit)\n let effectiveLimit = totalMatchingDocs\n\n // Apply user's export limit if provided\n if (exportLimit && exportLimit > 0) {\n effectiveLimit = Math.min(effectiveLimit, exportLimit)\n }\n\n // Apply max limit if configured\n if (typeof maxLimit === 'number' && maxLimit > 0) {\n effectiveLimit = Math.min(effectiveLimit, maxLimit)\n }\n\n const exportTotalDocs = effectiveLimit\n\n // Calculate preview pagination that respects export limit\n // Preview should only show docs that will actually be exported\n const previewStartIndex = (previewPage - 1) * previewLimit\n\n // Calculate pagination info based on export limit (not raw DB results)\n const previewTotalPages = exportTotalDocs === 0 ? 0 : Math.ceil(exportTotalDocs / previewLimit)\n\n const isCSV = req?.data?.format === 'csv'\n\n // Get locale codes for locale expansion when locale='all'\n const localeCodes =\n locale === 'all' && req.payload.config.localization\n ? req.payload.config.localization.localeCodes\n : undefined\n\n // Get disabled fields configuration\n const disabledFields =\n targetCollection.config.admin?.custom?.['plugin-import-export']?.disabledFields ?? []\n\n // Collect auto-generated timezone companion fields from schema\n const timezoneCompanionFields = collectTimezoneCompanionFields(\n targetCollection.config.flattenedFields,\n )\n\n // Always compute columns for CSV (even if no docs) for consistent schema\n const columns = isCSV\n ? getSchemaColumns({\n collectionConfig: targetCollection.config,\n disabledFields,\n fields,\n locale: locale ?? 
undefined,\n localeCodes,\n timezoneCompanionFields,\n })\n : undefined\n\n // If we're beyond the effective limit (considering both user limit and maxLimit), return empty docs\n if (exportTotalDocs > 0 && previewStartIndex >= exportTotalDocs) {\n const response: ExportPreviewResponse = {\n columns,\n docs: [],\n exportTotalDocs,\n hasNextPage: false,\n hasPrevPage: previewPage > 1,\n limit: previewLimit,\n maxLimit,\n page: previewPage,\n totalDocs: exportTotalDocs,\n totalPages: previewTotalPages,\n }\n return Response.json(response)\n }\n\n // Fetch preview page with full previewLimit to maintain consistent pagination offsets\n // We'll trim the results afterwards if needed to respect export limit\n const result = await req.payload.find({\n collection: collectionSlug,\n depth: 1,\n draft,\n limit: previewLimit,\n locale,\n overrideAccess: false,\n page: previewPage,\n req,\n select,\n sort,\n where,\n })\n\n // Trim docs to respect effective limit boundary (user limit clamped by maxLimit)\n let docs = result.docs\n if (exportTotalDocs > 0) {\n const remainingInExport = exportTotalDocs - previewStartIndex\n if (remainingInExport < docs.length) {\n docs = docs.slice(0, remainingInExport)\n }\n }\n\n // Transform docs based on format\n let transformed: Record<string, unknown>[]\n\n if (isCSV) {\n const toCSVFunctions = getExportFieldFunctions({\n fields: targetCollection.config.fields as FlattenedField[],\n })\n\n const possibleKeys = getFlattenedFieldKeys(\n targetCollection.config.fields as FlattenedField[],\n '',\n { localeCodes },\n )\n\n transformed = docs.map((doc) => {\n const row = flattenObject({\n doc,\n fields,\n timezoneCompanionFields,\n toCSVFunctions,\n })\n\n for (const key of possibleKeys) {\n if (!(key in row)) {\n row[key] = null\n }\n }\n\n return row\n })\n } else {\n transformed = docs.map((doc) => {\n let output: Record<string, unknown> = { ...doc }\n\n // Remove disabled fields first\n output = removeDisabledFields(output, disabledFields)\n\n // Then trim to selected fields only (if fields are provided)\n if (Array.isArray(fields) && fields.length > 0) {\n const trimmed: Record<string, unknown> = {}\n\n for (const key of fields) {\n const value = getObjectDotNotation(output, key)\n setNestedValue(trimmed, key, value ?? 
null)\n }\n\n output = trimmed\n }\n\n return output\n })\n }\n\n const hasNextPage = previewPage < previewTotalPages\n const hasPrevPage = previewPage > 1\n\n const response: ExportPreviewResponse = {\n columns,\n docs: transformed,\n exportTotalDocs,\n hasNextPage,\n hasPrevPage,\n limit: previewLimit,\n maxLimit,\n page: previewPage,\n totalDocs: exportTotalDocs,\n totalPages: previewTotalPages,\n }\n\n return Response.json(response)\n}\n"],"names":["addDataAndFileToRequest","getObjectDotNotation","DEFAULT_PREVIEW_LIMIT","MAX_PREVIEW_LIMIT","MIN_PREVIEW_LIMIT","MIN_PREVIEW_PAGE","collectTimezoneCompanionFields","flattenObject","getExportFieldFunctions","getFlattenedFieldKeys","getSchemaColumns","getSelect","removeDisabledFields","resolveLimit","setNestedValue","handlePreview","req","collectionSlug","draft","draftFromReq","fields","limit","exportLimit","locale","previewLimit","rawPreviewLimit","previewPage","rawPreviewPage","sort","where","whereFromReq","data","Math","max","min","targetCollection","payload","collections","Response","json","error","status","pluginConfig","config","custom","maxLimit","select","Array","isArray","length","undefined","collectionHasVersions","Boolean","versions","publishedWhere","_status","equals","and","countResult","count","collection","overrideAccess","totalMatchingDocs","totalDocs","effectiveLimit","exportTotalDocs","previewStartIndex","previewTotalPages","ceil","isCSV","format","localeCodes","localization","disabledFields","admin","timezoneCompanionFields","flattenedFields","columns","collectionConfig","response","docs","hasNextPage","hasPrevPage","page","totalPages","result","find","depth","remainingInExport","slice","transformed","toCSVFunctions","possibleKeys","map","doc","row","key","output","trimmed","value"],"mappings":"AAEA,SAASA,uBAAuB,QAAQ,UAAS;AACjD,SAASC,oBAAoB,QAAQ,iBAAgB;AAIrD,SACEC,qBAAqB,EACrBC,iBAAiB,EACjBC,iBAAiB,EACjBC,gBAAgB,QACX,kBAAiB;AACxB,SAASC,8BAA8B,QAAQ,iDAAgD;AAC/F,SAASC,aAAa,QAAQ,gCAA+B;AAC7D,SAASC,uBAAuB,QAAQ,0CAAyC;AACjF,SAASC,qBAAqB,QAAQ,wCAAuC;AAC7E,SAASC,gBAAgB,QAAQ,mCAAkC;AACnE,SAASC,SAAS,QAAQ,4BAA2B;AACrD,SAASC,oBAAoB,QAAQ,uCAAsC;AAC3E,SAASC,YAAY,QAAQ,+BAA8B;AAC3D,SAASC,cAAc,QAAQ,iCAAgC;AAE/D,OAAO,MAAMC,gBAAgB,OAAOC;IAClC,MAAMhB,wBAAwBgB;IAE9B,MAAM,EACJC,cAAc,EACdC,OAAOC,YAAY,EACnBC,MAAM,EACNC,OAAOC,WAAW,EAClBC,MAAM,EACNC,cAAcC,kBAAkBvB,qBAAqB,EACrDwB,aAAaC,iBAAiB,CAAC,EAC/BC,IAAI,EACJC,OAAOC,eAAe,CAAC,CAAC,EACzB,GAAGd,IAAIe,IAAI;IAaZ,sDAAsD;IACtD,MAAMP,eAAeQ,KAAKC,GAAG,CAAC7B,mBAAmB4B,KAAKE,GAAG,CAACT,iBAAiBtB;IAC3E,MAAMuB,cAAcM,KAAKC,GAAG,CAAC5B,kBAAkBsB;IAE/C,MAAMQ,mBAAmBnB,IAAIoB,OAAO,CAACC,WAAW,CAACpB,eAAe;IAChE,IAAI,CAACkB,kBAAkB;QACrB,OAAOG,SAASC,IAAI,CAClB;YAAEC,OAAO,CAAC,qBAAqB,EAAEvB,eAAe,UAAU,CAAC;QAAC,GAC5D;YAAEwB,QAAQ;QAAI;IAElB;IAEA,MAAMC,eAAeP,iBAAiBQ,MAAM,CAACC,MAAM,EAAE,CAAC,uBAAuB;IAC7E,MAAMC,WAAW,MAAMhC,aAAa;QAClCQ,OAAOqB,cAAcpB;QACrBN;IACF;IAEA,MAAM8B,SAASC,MAAMC,OAAO,CAAC5B,WAAWA,OAAO6B,MAAM,GAAG,IAAItC,UAAUS,UAAU8B;IAChF,MAAMhC,QAAQC,iBAAiB;IAC/B,MAAMgC,wBAAwBC,QAAQjB,iBAAiBQ,MAAM,CAACU,QAAQ;IAEtE,mDAAmD;IACnD,MAAMC,iBAAwBH,wBAAwB;QAAEI,SAAS;YAAEC,QAAQ;QAAY;IAAE,IAAI,CAAC;IAE9F,MAAM3B,QAAe;QACnB4B,KAAK;YAAC3B;YAAcZ,QAAQ,CAAC,IAAIoC;SAAe;IAClD;IAEA,4CAA4C;IAC5C,MAAMI,cAAc,MAAM1C,IAAIoB,OAAO,CAACuB,KAAK,CAAC;QAC1CC,YAAY3C;QACZ4C,gBAAgB;QAChB7C;QACAa;IACF;IAEA,MAAMiC,oBAAoBJ,YAAYK,SAAS;IAE/C,6EAA6E;IAC7E,IAAIC,iBAAiBF;IAErB,wCAAwC;IACxC,IAAIxC,eAAeA,cAAc,GAAG;QAClC0C,iBAAiBhC,KAAKE,GAAG,CAAC8B,gBAAgB1C;IAC5C;IAEA,gCAAgC;IAChC,IAAI,OAAOuB,aAAa,YAAYA,WAAW,GAAG;QAChDmB,iBAAiBhC,KAAKE,GAAG,CAAC8B,gBAAgBnB;I
AC5C;IAEA,MAAMoB,kBAAkBD;IAExB,0DAA0D;IAC1D,+DAA+D;IAC/D,MAAME,oBAAoB,AAACxC,CAAAA,cAAc,CAAA,IAAKF;IAE9C,uEAAuE;IACvE,MAAM2C,oBAAoBF,oBAAoB,IAAI,IAAIjC,KAAKoC,IAAI,CAACH,kBAAkBzC;IAElF,MAAM6C,QAAQrD,KAAKe,MAAMuC,WAAW;IAEpC,0DAA0D;IAC1D,MAAMC,cACJhD,WAAW,SAASP,IAAIoB,OAAO,CAACO,MAAM,CAAC6B,YAAY,GAC/CxD,IAAIoB,OAAO,CAACO,MAAM,CAAC6B,YAAY,CAACD,WAAW,GAC3CrB;IAEN,oCAAoC;IACpC,MAAMuB,iBACJtC,iBAAiBQ,MAAM,CAAC+B,KAAK,EAAE9B,QAAQ,CAAC,uBAAuB,EAAE6B,kBAAkB,EAAE;IAEvF,+DAA+D;IAC/D,MAAME,0BAA0BrE,+BAC9B6B,iBAAiBQ,MAAM,CAACiC,eAAe;IAGzC,yEAAyE;IACzE,MAAMC,UAAUR,QACZ3D,iBAAiB;QACfoE,kBAAkB3C,iBAAiBQ,MAAM;QACzC8B;QACArD;QACAG,QAAQA,UAAU2B;QAClBqB;QACAI;IACF,KACAzB;IAEJ,oGAAoG;IACpG,IAAIe,kBAAkB,KAAKC,qBAAqBD,iBAAiB;QAC/D,MAAMc,WAAkC;YACtCF;YACAG,MAAM,EAAE;YACRf;YACAgB,aAAa;YACbC,aAAaxD,cAAc;YAC3BL,OAAOG;YACPqB;YACAsC,MAAMzD;YACNqC,WAAWE;YACXmB,YAAYjB;QACd;QACA,OAAO7B,SAASC,IAAI,CAACwC;IACvB;IAEA,sFAAsF;IACtF,sEAAsE;IACtE,MAAMM,SAAS,MAAMrE,IAAIoB,OAAO,CAACkD,IAAI,CAAC;QACpC1B,YAAY3C;QACZsE,OAAO;QACPrE;QACAG,OAAOG;QACPD;QACAsC,gBAAgB;QAChBsB,MAAMzD;QACNV;QACA8B;QACAlB;QACAC;IACF;IAEA,iFAAiF;IACjF,IAAImD,OAAOK,OAAOL,IAAI;IACtB,IAAIf,kBAAkB,GAAG;QACvB,MAAMuB,oBAAoBvB,kBAAkBC;QAC5C,IAAIsB,oBAAoBR,KAAK/B,MAAM,EAAE;YACnC+B,OAAOA,KAAKS,KAAK,CAAC,GAAGD;QACvB;IACF;IAEA,iCAAiC;IACjC,IAAIE;IAEJ,IAAIrB,OAAO;QACT,MAAMsB,iBAAiBnF,wBAAwB;YAC7CY,QAAQe,iBAAiBQ,MAAM,CAACvB,MAAM;QACxC;QAEA,MAAMwE,eAAenF,sBACnB0B,iBAAiBQ,MAAM,CAACvB,MAAM,EAC9B,IACA;YAAEmD;QAAY;QAGhBmB,cAAcV,KAAKa,GAAG,CAAC,CAACC;YACtB,MAAMC,MAAMxF,cAAc;gBACxBuF;gBACA1E;gBACAuD;gBACAgB;YACF;YAEA,KAAK,MAAMK,OAAOJ,aAAc;gBAC9B,IAAI,CAAEI,CAAAA,OAAOD,GAAE,GAAI;oBACjBA,GAAG,CAACC,IAAI,GAAG;gBACb;YACF;YAEA,OAAOD;QACT;IACF,OAAO;QACLL,cAAcV,KAAKa,GAAG,CAAC,CAACC;YACtB,IAAIG,SAAkC;gBAAE,GAAGH,GAAG;YAAC;YAE/C,+BAA+B;YAC/BG,SAASrF,qBAAqBqF,QAAQxB;YAEtC,6DAA6D;YAC7D,IAAI1B,MAAMC,OAAO,CAAC5B,WAAWA,OAAO6B,MAAM,GAAG,GAAG;gBAC9C,MAAMiD,UAAmC,CAAC;gBAE1C,KAAK,MAAMF,OAAO5E,OAAQ;oBACxB,MAAM+E,QAAQlG,qBAAqBgG,QAAQD;oBAC3ClF,eAAeoF,SAASF,KAAKG,SAAS;gBACxC;gBAEAF,SAASC;YACX;YAEA,OAAOD;QACT;IACF;IAEA,MAAMhB,cAAcvD,cAAcyC;IAClC,MAAMe,cAAcxD,cAAc;IAElC,MAAMqD,WAAkC;QACtCF;QACAG,MAAMU;QACNzB;QACAgB;QACAC;QACA7D,OAAOG;QACPqB;QACAsC,MAAMzD;QACNqC,WAAWE;QACXmB,YAAYjB;IACd;IAEA,OAAO7B,SAASC,IAAI,CAACwC;AACvB,EAAC"}
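The regenerated source map above embeds the updated handlePreview source, which now clamps the preview to both the caller's export limit and a plugin-level maxLimit resolved via resolveLimit. A minimal TypeScript sketch of that clamping (the helper name here is illustrative, not part of the package):

    // Start from the number of matching docs, then apply the user's export limit
    // and the configured max limit, taking the smaller value at each step.
    function clampExportTotal(totalMatchingDocs: number, exportLimit?: number, maxLimit?: number): number {
      let effective = totalMatchingDocs
      if (exportLimit && exportLimit > 0) {
        effective = Math.min(effective, exportLimit)
      }
      if (typeof maxLimit === 'number' && maxLimit > 0) {
        effective = Math.min(effective, maxLimit)
      }
      return effective
    }

    // Example: 10000 matching docs, a requested limit of 500, and a plugin maxLimit of 200:
    // clampExportTotal(10000, 500, 200) === 200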
package/dist/exports/types.d.ts
CHANGED
@@ -1,2 +1,2 @@
-export type { ImportExportPluginConfig, ToCSVFunction } from '../types.js';
+export type { ImportExportPluginConfig, Limit, LimitFunction, ToCSVFunction } from '../types.js';
 //# sourceMappingURL=types.d.ts.map
package/dist/exports/types.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../src/exports/types.ts"],"names":[],"mappings":"AAAA,YAAY,EAAE,wBAAwB,EAAE,aAAa,EAAE,MAAM,aAAa,CAAA"}
+{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../src/exports/types.ts"],"names":[],"mappings":"AAAA,YAAY,EAAE,wBAAwB,EAAE,KAAK,EAAE,aAAa,EAAE,aAAa,EAAE,MAAM,aAAa,CAAA"}
package/dist/exports/types.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"sources":["../../src/exports/types.ts"],"sourcesContent":["export type { ImportExportPluginConfig, ToCSVFunction } from '../types.js'\n"],"names":[],"mappings":"AAAA,
+{"version":3,"sources":["../../src/exports/types.ts"],"sourcesContent":["export type { ImportExportPluginConfig, Limit, LimitFunction, ToCSVFunction } from '../types.js'\n"],"names":[],"mappings":"AAAA,WAAgG"}
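The Limit and LimitFunction types re-exported above are defined in package/dist/types.d.ts, which is not reproduced in this section. Based on how the updated handlePreview calls resolveLimit({ limit: pluginConfig?.exportLimit, req }) and then compares the result as a number, a plausible shape is roughly the following (an assumption, not the package's published definition):

    import type { PayloadRequest } from 'payload'

    // Assumed shapes only: an export limit can be a fixed number or a function of the request.
    type LimitFunction = (args: { req: PayloadRequest }) => number | Promise<number>
    type Limit = LimitFunction | number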
package/dist/import/batchProcessor.js
CHANGED
@@ -1,10 +1,19 @@
 import { categorizeError, createBatches, extractErrorMessage } from '../utilities/useBatchProcessor.js';
-// Helper function to handle multi-locale data
-function extractMultiLocaleData(data, configuredLocales) {
+/**
+ * Separates multi-locale data from a document for sequential locale updates.
+ *
+ * When a field has locale-keyed values (e.g., { title: { en: 'Hello', es: 'Hola' } }),
+ * this extracts the first locale's data for initial create/update, and stores
+ * remaining locales for subsequent update calls.
+ *
+ * @returns
+ * - flatData: Document with first locale values extracted (for initial operation)
+ * - hasMultiLocale: Whether any multi-locale fields were found
+ * - localeUpdates: Map of locale -> field data for follow-up updates
+ */ function extractMultiLocaleData(data, configuredLocales) {
 const flatData = {};
 const localeUpdates = {};
 let hasMultiLocale = false;
-// If no locales configured, skip multi-locale processing
 if (!configuredLocales || configuredLocales.length === 0) {
 return {
 flatData: {
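To illustrate the behaviour described in the new doc comment above, a small example of what the extraction yields for one locale-keyed field (shapes taken from the JSDoc; this is not code from the package):

    // Input row with one locale-keyed field and one plain field.
    const data = {
      slug: 'hello-world',
      title: { en: 'Hello', es: 'Hola' },
    }

    // With configured locales ['en', 'es'], extractMultiLocaleData(data, ['en', 'es'])
    // is documented to produce roughly:
    const expected = {
      flatData: { slug: 'hello-world', title: 'Hello' }, // first locale, used for the initial create/update
      hasMultiLocale: true,
      localeUpdates: { es: { title: 'Hola' } }, // applied afterwards, one update call per extra locale
    }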
@@ -18,16 +27,12 @@ function extractMultiLocaleData(data, configuredLocales) {
 for (const [key, value] of Object.entries(data)){
 if (value && typeof value === 'object' && !Array.isArray(value)) {
 const valueObj = value;
-// Check if this object has keys matching configured locales
 const localeKeys = Object.keys(valueObj).filter((k)=>localeSet.has(k));
 if (localeKeys.length > 0) {
 hasMultiLocale = true;
-// This is a localized field with explicit locale keys
-// Use the first locale for initial creation, then update others
 const firstLocale = localeKeys[0];
 if (firstLocale) {
 flatData[key] = valueObj[firstLocale];
-// Store other locales for later update
 for (const locale of localeKeys){
 if (locale !== firstLocale) {
 if (!localeUpdates[locale]) {
@@ -38,11 +43,9 @@ function extractMultiLocaleData(data, configuredLocales) {
 }
 }
 } else {
-// Not locale data, keep as is
 flatData[key] = value;
 }
 } else {
-// Not an object, keep as is. this includes localized fields without locale suffix; ie default locale
 flatData[key] = value;
 }
 }
@@ -52,17 +55,24 @@ function extractMultiLocaleData(data, configuredLocales) {
 localeUpdates
 };
 }
-async function processImportBatch({ batch, batchIndex, collectionSlug, importMode, matchField, options, req, user }) {
+/**
+ * Processes a batch of documents for import based on the import mode.
+ *
+ * For each document in the batch:
+ * - create: Creates a new document (removes any existing ID)
+ * - update: Finds existing document by matchField and updates it
+ * - upsert: Updates if found, creates if not found
+ *
+ * Handles versioned collections, multi-locale data, and MongoDB ObjectID validation.
+ * Continues processing remaining documents even if individual imports fail.
+ */ async function processImportBatch({ batch, batchIndex, collectionSlug, importMode, matchField, options, req, user }) {
 const result = {
 failed: [],
 successful: []
 };
-// Check if the collection has versions enabled
 const collectionConfig = req.payload.collections[collectionSlug]?.config;
 const collectionHasVersions = Boolean(collectionConfig?.versions);
-// Get configured locales for multi-locale data detection
 const configuredLocales = req.payload.config.localization ? req.payload.config.localization.localeCodes : undefined;
-// Calculate the starting row number for this batch
 const startingRowNumber = batchIndex * options.batchSize;
 for(let i = 0; i < batch.length; i++){
 const document = batch[i];
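A hedged sketch of the per-row decision that the doc comment above describes, ignoring versions, locales, and ID validation (those are handled in the hunks that follow); this mirrors the JSDoc rather than the full implementation:

    type ImportMode = 'create' | 'update' | 'upsert'

    // matchFound: whether a document matching matchField already exists.
    function plannedOperation(mode: ImportMode, matchFound: boolean): 'created' | 'updated' | 'no-match' {
      if (mode === 'create') return 'created' // any incoming id is dropped, a new doc is created
      if (mode === 'upsert') return matchFound ? 'updated' : 'created'
      return matchFound ? 'updated' : 'no-match' // 'update' only proceeds when a match exists
    }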
@@ -71,23 +81,18 @@ async function processImportBatch({ batch, batchIndex, collectionSlug, importMod
 }
 const rowNumber = startingRowNumber + i + 1;
 try {
-let processedDoc;
-let existing;
+let savedDocument;
+let existingDocResult;
 if (importMode === 'create') {
-// Remove ID field when creating new document
 const createData = {
 ...document
 };
 delete createData.id;
-// Only handle _status for versioned collections
 let draftOption;
 if (collectionHasVersions) {
-// Check if _status is set - use defaultVersionStatus from config
-// If no _status field provided, use the configured default
 const statusValue = createData._status || options.defaultVersionStatus;
 const isPublished = statusValue !== 'draft';
 draftOption = !isPublished;
-// Debug: log status handling
 if (req.payload.config.debug) {
 req.payload.logger.info({
 _status: createData._status,
@@ -98,7 +103,6 @@ async function processImportBatch({ batch, batchIndex, collectionSlug, importMod
 }
 delete createData._status; // Remove _status from data - it's controlled via draft option
 }
-// Debug: log what we're about to create
 if (req.payload.config.debug && 'title' in createData) {
 req.payload.logger.info({
 msg: 'Creating document',
@@ -111,7 +115,7 @@ async function processImportBatch({ batch, batchIndex, collectionSlug, importMod
 const { flatData, hasMultiLocale, localeUpdates } = extractMultiLocaleData(createData, configuredLocales);
 if (hasMultiLocale) {
 // Create with default locale data
-processedDoc = await req.payload.create({
+savedDocument = await req.payload.create({
 collection: collectionSlug,
 data: flatData,
 draft: draftOption,
@@ -120,7 +124,7 @@ async function processImportBatch({ batch, batchIndex, collectionSlug, importMod
 user
 });
 // Update for other locales
-if (processedDoc && Object.keys(localeUpdates).length > 0) {
+if (savedDocument && Object.keys(localeUpdates).length > 0) {
 for (const [locale, localeData] of Object.entries(localeUpdates)){
 try {
 const localeReq = {
@@ -128,7 +132,7 @@ async function processImportBatch({ batch, batchIndex, collectionSlug, importMod
 locale
 };
 await req.payload.update({
-id: processedDoc.id,
+id: savedDocument.id,
 collection: collectionSlug,
 data: localeData,
 draft: collectionHasVersions ? false : undefined,
@@ -140,14 +144,14 @@ async function processImportBatch({ batch, batchIndex, collectionSlug, importMod
 // Log but don't fail the entire import if a locale update fails
 req.payload.logger.error({
 err: error,
-msg: `Failed to update locale ${locale} for document ${String(processedDoc.id)}`
+msg: `Failed to update locale ${locale} for document ${String(savedDocument.id)}`
 });
 }
 }
 }
 } else {
 // No multi-locale data, create normally
-processedDoc = await req.payload.create({
+savedDocument = await req.payload.create({
 collection: collectionSlug,
 data: createData,
 draft: draftOption,
@@ -178,9 +182,8 @@ async function processImportBatch({ batch, batchIndex, collectionSlug, importMod
 matchValueStr = JSON.stringify(matchValue);
 }
 const isValidObjectIdFormat = /^[0-9a-f]{24}$/i.test(matchValueStr);
-// Try to search normally first, catch errors for invalid IDs
 try {
-existing = await req.payload.find({
+existingDocResult = await req.payload.find({
 collection: collectionSlug,
 depth: 0,
 limit: 1,
@@ -194,23 +197,19 @@ async function processImportBatch({ batch, batchIndex, collectionSlug, importMod
 }
 });
 } catch (error) {
-// If we get an error when searching by ID (e.g., invalid ObjectID format)
-// and we're in upsert mode, treat as non-existent
+// MongoDB may throw for invalid ObjectID format - handle gracefully for upsert
 if (isMatchingById && importMode === 'upsert' && !isValidObjectIdFormat) {
-existing = {
+existingDocResult = {
 docs: []
 };
 } else if (isMatchingById && importMode === 'update' && !isValidObjectIdFormat) {
-// For update mode with invalid ID, this should fail
 throw new Error(`Invalid ID format for update: ${matchValueStr}`);
 } else {
-// Re-throw other errors
 throw error;
 }
 }
-if (existing.docs.length > 0) {
-// Update existing
-const existingDoc = existing.docs[0];
+if (existingDocResult.docs.length > 0) {
+const existingDoc = existingDocResult.docs[0];
 if (!existingDoc) {
 throw new Error(`Document not found`);
 }
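The 24-hex-character check used above is the same plain regex the batch processor applies before querying by id; a short illustration (not package code):

    // A MongoDB ObjectID is 24 hexadecimal characters.
    const looksLikeObjectId = (value: string): boolean => /^[0-9a-f]{24}$/i.test(value)

    // looksLikeObjectId('507f1f77bcf86cd799439011') -> true: safe to query by id
    // looksLikeObjectId('my-custom-id') -> false: in upsert mode the row is treated as not found and created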
@@ -250,7 +249,7 @@ async function processImportBatch({ batch, batchIndex, collectionSlug, importMod
 }
 if (hasMultiLocale) {
 // Update with default locale data
-processedDoc = await req.payload.update({
+savedDocument = await req.payload.update({
 id: existingDoc.id,
 collection: collectionSlug,
 data: flatData,
@@ -261,7 +260,7 @@ async function processImportBatch({ batch, batchIndex, collectionSlug, importMod
 user
 });
 // Update for other locales
-if (processedDoc && Object.keys(localeUpdates).length > 0) {
+if (savedDocument && Object.keys(localeUpdates).length > 0) {
 for (const [locale, localeData] of Object.entries(localeUpdates)){
 try {
 // Clone the request with the specific locale
@@ -302,7 +301,7 @@ async function processImportBatch({ batch, batchIndex, collectionSlug, importMod
 }
 // Update the document - don't specify draft to let Payload handle versions properly
 // This will create a new draft version for collections with versions enabled
-processedDoc = await req.payload.update({
+savedDocument = await req.payload.update({
 id: existingDoc.id,
 collection: collectionSlug,
 data: updateData,
@@ -312,13 +311,12 @@ async function processImportBatch({ batch, batchIndex, collectionSlug, importMod
 req,
 user
 });
-// Debug: log what was returned
-if (req.payload.config.debug && processedDoc) {
+if (req.payload.config.debug && savedDocument) {
 req.payload.logger.info({
-id: processedDoc.id,
+id: savedDocument.id,
 msg: 'Update completed',
-status: processedDoc._status,
-title: processedDoc.title
+status: savedDocument._status,
+title: savedDocument.title
 });
 }
 } catch (updateError) {
@@ -357,7 +355,7 @@ async function processImportBatch({ batch, batchIndex, collectionSlug, importMod
 const { flatData, hasMultiLocale, localeUpdates } = extractMultiLocaleData(createData, configuredLocales);
 if (hasMultiLocale) {
 // Create with default locale data
-processedDoc = await req.payload.create({
+savedDocument = await req.payload.create({
 collection: collectionSlug,
 data: flatData,
 draft: draftOption,
@@ -366,7 +364,7 @@ async function processImportBatch({ batch, batchIndex, collectionSlug, importMod
 user
 });
 // Update for other locales
-if (processedDoc && Object.keys(localeUpdates).length > 0) {
+if (savedDocument && Object.keys(localeUpdates).length > 0) {
 for (const [locale, localeData] of Object.entries(localeUpdates)){
 try {
 // Clone the request with the specific locale
@@ -375,7 +373,7 @@ async function processImportBatch({ batch, batchIndex, collectionSlug, importMod
 locale
 };
 await req.payload.update({
-id: processedDoc.id,
+id: savedDocument.id,
 collection: collectionSlug,
 data: localeData,
 draft: collectionHasVersions ? false : undefined,
@@ -386,14 +384,14 @@ async function processImportBatch({ batch, batchIndex, collectionSlug, importMod
 // Log but don't fail the entire import if a locale update fails
 req.payload.logger.error({
 err: error,
-msg: `Failed to update locale ${locale} for document ${String(processedDoc.id)}`
+msg: `Failed to update locale ${locale} for document ${String(savedDocument.id)}`
 });
 }
 }
 }
 } else {
 // No multi-locale data, create normally
-processedDoc = await req.payload.create({
+savedDocument = await req.payload.create({
 collection: collectionSlug,
 data: createData,
 draft: draftOption,
@@ -420,7 +418,7 @@ async function processImportBatch({ batch, batchIndex, collectionSlug, importMod
 } else {
 throw new Error(`Unknown import mode: ${String(importMode)}`);
 }
-if (processedDoc) {
+if (savedDocument) {
 // Determine operation type for proper counting
 let operation;
 if (importMode === 'create') {
@@ -428,8 +426,7 @@ async function processImportBatch({ batch, batchIndex, collectionSlug, importMod
 } else if (importMode === 'update') {
 operation = 'updated';
 } else if (importMode === 'upsert') {
-
-if (existing && existing.docs.length > 0) {
+if (existingDocResult && existingDocResult.docs.length > 0) {
 operation = 'updated';
 } else {
 operation = 'created';
@@ -439,7 +436,7 @@ async function processImportBatch({ batch, batchIndex, collectionSlug, importMod
 document,
 index: rowNumber - 1,
 operation,
-result: processedDoc
+result: savedDocument
 });
 }
 } catch (error) {
package/dist/import/batchProcessor.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../../src/import/batchProcessor.ts"],"sourcesContent":["import type { PayloadRequest, TypedUser } from 'payload'\n\nimport type { ImportMode, ImportResult } from './createImport.js'\n\nimport {\n type BatchError,\n categorizeError,\n createBatches,\n extractErrorMessage,\n} from '../utilities/useBatchProcessor.js'\n\n/**\n * Import-specific batch processor options\n */\nexport interface ImportBatchProcessorOptions {\n batchSize?: number\n defaultVersionStatus?: 'draft' | 'published'\n}\n\n/**\n * Import-specific error type extending the generic BatchError\n */\nexport interface ImportError extends BatchError<Record<string, unknown>> {\n documentData: Record<string, unknown>\n field?: string\n fieldLabel?: string\n rowNumber: number // 1-indexed for user clarity\n}\n\n/**\n * Result from processing a single import batch\n */\nexport interface ImportBatchResult {\n failed: Array<ImportError>\n successful: Array<{\n document: Record<string, unknown>\n index: number\n operation?: 'created' | 'updated'\n result: Record<string, unknown>\n }>\n}\n\n/**\n * Options for processing an import operation\n */\nexport interface ImportProcessOptions {\n collectionSlug: string\n documents: Record<string, unknown>[]\n importMode: ImportMode\n matchField?: string\n req: PayloadRequest\n user?: TypedUser\n}\n\n// Helper function to handle multi-locale data\nfunction extractMultiLocaleData(\n data: Record<string, unknown>,\n configuredLocales?: string[],\n): {\n flatData: Record<string, unknown>\n hasMultiLocale: boolean\n localeUpdates: Record<string, Record<string, unknown>>\n} {\n const flatData: Record<string, unknown> = {}\n const localeUpdates: Record<string, Record<string, unknown>> = {}\n let hasMultiLocale = false\n\n // If no locales configured, skip multi-locale processing\n if (!configuredLocales || configuredLocales.length === 0) {\n return { flatData: { ...data }, hasMultiLocale: false, localeUpdates: {} }\n }\n\n const localeSet = new Set(configuredLocales)\n\n for (const [key, value] of Object.entries(data)) {\n if (value && typeof value === 'object' && !Array.isArray(value)) {\n const valueObj = value as Record<string, unknown>\n // Check if this object has keys matching configured locales\n const localeKeys = Object.keys(valueObj).filter((k) => localeSet.has(k))\n if (localeKeys.length > 0) {\n hasMultiLocale = true\n // This is a localized field with explicit locale keys\n // Use the first locale for initial creation, then update others\n const firstLocale = localeKeys[0]\n if (firstLocale) {\n flatData[key] = valueObj[firstLocale]\n // Store other locales for later update\n for (const locale of localeKeys) {\n if (locale !== firstLocale) {\n if (!localeUpdates[locale]) {\n localeUpdates[locale] = {}\n }\n localeUpdates[locale][key] = valueObj[locale]\n }\n }\n }\n } else {\n // Not locale data, keep as is\n flatData[key] = value\n }\n } else {\n // Not an object, keep as is. 
this includes localized fields without locale suffix; ie default locale\n flatData[key] = value\n }\n }\n\n return { flatData, hasMultiLocale, localeUpdates }\n}\n\ntype ProcessImportBatchOptions = {\n batch: Record<string, unknown>[]\n batchIndex: number\n collectionSlug: string\n importMode: ImportMode\n matchField: string | undefined\n options: { batchSize: number; defaultVersionStatus: 'draft' | 'published' }\n req: PayloadRequest\n user?: TypedUser\n}\n\nasync function processImportBatch({\n batch,\n batchIndex,\n collectionSlug,\n importMode,\n matchField,\n options,\n req,\n user,\n}: ProcessImportBatchOptions): Promise<ImportBatchResult> {\n const result: ImportBatchResult = {\n failed: [],\n successful: [],\n }\n\n // Check if the collection has versions enabled\n const collectionConfig = req.payload.collections[collectionSlug]?.config\n const collectionHasVersions = Boolean(collectionConfig?.versions)\n\n // Get configured locales for multi-locale data detection\n const configuredLocales = req.payload.config.localization\n ? req.payload.config.localization.localeCodes\n : undefined\n\n // Calculate the starting row number for this batch\n const startingRowNumber = batchIndex * options.batchSize\n\n for (let i = 0; i < batch.length; i++) {\n const document = batch[i]\n if (!document) {\n continue\n }\n const rowNumber = startingRowNumber + i + 1\n\n try {\n let processedDoc: Record<string, unknown> | undefined\n let existing: { docs: Array<Record<string, unknown>> } | undefined\n\n if (importMode === 'create') {\n // Remove ID field when creating new document\n const createData = { ...document }\n delete createData.id\n\n // Only handle _status for versioned collections\n let draftOption: boolean | undefined\n if (collectionHasVersions) {\n // Check if _status is set - use defaultVersionStatus from config\n // If no _status field provided, use the configured default\n const statusValue = createData._status || options.defaultVersionStatus\n const isPublished = statusValue !== 'draft'\n draftOption = !isPublished\n\n // Debug: log status handling\n if (req.payload.config.debug) {\n req.payload.logger.info({\n _status: createData._status,\n isPublished,\n msg: 'Status handling in create',\n willSetDraft: draftOption,\n })\n }\n\n delete createData._status // Remove _status from data - it's controlled via draft option\n }\n\n // Debug: log what we're about to create\n if (req.payload.config.debug && 'title' in createData) {\n req.payload.logger.info({\n msg: 'Creating document',\n title: createData.title,\n titleIsNull: createData.title === null,\n titleType: typeof createData.title,\n })\n }\n\n // Check if we have multi-locale data and extract it\n const { flatData, hasMultiLocale, localeUpdates } = extractMultiLocaleData(\n createData,\n configuredLocales,\n )\n\n if (hasMultiLocale) {\n // Create with default locale data\n processedDoc = await req.payload.create({\n collection: collectionSlug,\n data: flatData,\n draft: draftOption,\n overrideAccess: false,\n req,\n user,\n })\n\n // Update for other locales\n if (processedDoc && Object.keys(localeUpdates).length > 0) {\n for (const [locale, localeData] of Object.entries(localeUpdates)) {\n try {\n const localeReq = { ...req, locale }\n await req.payload.update({\n id: processedDoc.id as number | string,\n collection: collectionSlug,\n data: localeData,\n draft: collectionHasVersions ? 
false : undefined,\n overrideAccess: false,\n req: localeReq,\n user,\n })\n } catch (error) {\n // Log but don't fail the entire import if a locale update fails\n req.payload.logger.error({\n err: error,\n msg: `Failed to update locale ${locale} for document ${String(processedDoc.id)}`,\n })\n }\n }\n }\n } else {\n // No multi-locale data, create normally\n processedDoc = await req.payload.create({\n collection: collectionSlug,\n data: createData,\n draft: draftOption,\n overrideAccess: false,\n req,\n user,\n })\n }\n } else if (importMode === 'update' || importMode === 'upsert') {\n const matchValue = document[matchField || 'id']\n if (!matchValue) {\n throw new Error(`Match field \"${matchField || 'id'}\" not found in document`)\n }\n\n // Special handling for ID field with MongoDB\n // If matching by 'id' and it's not a valid ObjectID format, handle specially\n const isMatchingById = (matchField || 'id') === 'id'\n\n // Check if it's a valid MongoDB ObjectID format (24 hex chars)\n // Note: matchValue could be string, number, or ObjectID object\n let matchValueStr: string\n if (typeof matchValue === 'object' && matchValue !== null) {\n matchValueStr = JSON.stringify(matchValue)\n } else if (typeof matchValue === 'string') {\n matchValueStr = matchValue\n } else if (typeof matchValue === 'number') {\n matchValueStr = matchValue.toString()\n } else {\n // For other types, use JSON.stringify\n matchValueStr = JSON.stringify(matchValue)\n }\n const isValidObjectIdFormat = /^[0-9a-f]{24}$/i.test(matchValueStr)\n\n // Try to search normally first, catch errors for invalid IDs\n try {\n existing = await req.payload.find({\n collection: collectionSlug,\n depth: 0,\n limit: 1,\n overrideAccess: false,\n req,\n user,\n where: {\n [matchField || 'id']: {\n equals: matchValue,\n },\n },\n })\n } catch (error) {\n // If we get an error when searching by ID (e.g., invalid ObjectID format)\n // and we're in upsert mode, treat as non-existent\n if (isMatchingById && importMode === 'upsert' && !isValidObjectIdFormat) {\n existing = { docs: [] }\n } else if (isMatchingById && importMode === 'update' && !isValidObjectIdFormat) {\n // For update mode with invalid ID, this should fail\n throw new Error(`Invalid ID format for update: ${matchValueStr}`)\n } else {\n // Re-throw other errors\n throw error\n }\n }\n\n if (existing.docs.length > 0) {\n // Update existing\n const existingDoc = existing.docs[0]\n if (!existingDoc) {\n throw new Error(`Document not found`)\n }\n\n // Debug: log what we found\n if (req.payload.config.debug) {\n req.payload.logger.info({\n existingId: existingDoc.id,\n existingStatus: existingDoc._status,\n existingTitle: existingDoc.title,\n incomingDocument: document,\n mode: importMode,\n msg: 'Found existing document for update',\n })\n }\n\n const updateData = { ...document }\n // Remove ID and internal fields from update data\n delete updateData.id\n delete updateData._id\n delete updateData.createdAt\n delete updateData.updatedAt\n\n // Check if we have multi-locale data and extract it\n const { flatData, hasMultiLocale, localeUpdates } = extractMultiLocaleData(\n updateData,\n configuredLocales,\n )\n\n if (req.payload.config.debug) {\n req.payload.logger.info({\n existingId: existingDoc.id,\n hasMultiLocale,\n mode: importMode,\n msg: 'Updating document in upsert/update mode',\n updateData: Object.keys(hasMultiLocale ? flatData : updateData).reduce(\n (acc, key) => {\n const val = (hasMultiLocale ? 
flatData : updateData)[key]\n acc[key] =\n typeof val === 'string' && val.length > 50 ? val.substring(0, 50) + '...' : val\n return acc\n },\n {} as Record<string, unknown>,\n ),\n })\n }\n\n if (hasMultiLocale) {\n // Update with default locale data\n processedDoc = await req.payload.update({\n id: existingDoc.id as number | string,\n collection: collectionSlug,\n data: flatData,\n depth: 0,\n // Don't specify draft - this creates a new draft for versioned collections\n overrideAccess: false,\n req,\n user,\n })\n\n // Update for other locales\n if (processedDoc && Object.keys(localeUpdates).length > 0) {\n for (const [locale, localeData] of Object.entries(localeUpdates)) {\n try {\n // Clone the request with the specific locale\n const localeReq = { ...req, locale }\n await req.payload.update({\n id: existingDoc.id as number | string,\n collection: collectionSlug,\n data: localeData,\n depth: 0,\n // Don't specify draft - this creates a new draft for versioned collections\n overrideAccess: false,\n req: localeReq,\n user,\n })\n } catch (error) {\n // Log but don't fail the entire import if a locale update fails\n req.payload.logger.error({\n err: error,\n msg: `Failed to update locale ${locale} for document ${String(existingDoc.id)}`,\n })\n }\n }\n }\n } else {\n // No multi-locale data, update normally\n try {\n // Extra debug: log before update\n if (req.payload.config.debug) {\n req.payload.logger.info({\n existingId: existingDoc.id,\n existingTitle: existingDoc.title,\n msg: 'About to update document',\n newData: updateData,\n })\n }\n\n // Update the document - don't specify draft to let Payload handle versions properly\n // This will create a new draft version for collections with versions enabled\n processedDoc = await req.payload.update({\n id: existingDoc.id as number | string,\n collection: collectionSlug,\n data: updateData,\n depth: 0,\n // Don't specify draft - this creates a new draft for versioned collections\n overrideAccess: false,\n req,\n user,\n })\n\n // Debug: log what was returned\n if (req.payload.config.debug && processedDoc) {\n req.payload.logger.info({\n id: processedDoc.id,\n msg: 'Update completed',\n status: processedDoc._status,\n title: processedDoc.title,\n })\n }\n } catch (updateError) {\n req.payload.logger.error({\n id: existingDoc.id,\n err: updateError,\n msg: 'Update failed',\n })\n throw updateError\n }\n }\n } else if (importMode === 'upsert') {\n // Create new in upsert mode\n if (req.payload.config.debug) {\n req.payload.logger.info({\n document,\n matchField: matchField || 'id',\n matchValue: document[matchField || 'id'],\n msg: 'No existing document found, creating new in upsert mode',\n })\n }\n\n const createData = { ...document }\n delete createData.id\n\n // Only handle _status for versioned collections\n let draftOption: boolean | undefined\n if (collectionHasVersions) {\n // Use defaultVersionStatus from config if _status not provided\n const statusValue = createData._status || options.defaultVersionStatus\n const isPublished = statusValue !== 'draft'\n draftOption = !isPublished\n delete createData._status // Remove _status from data - it's controlled via draft option\n }\n\n // Check if we have multi-locale data and extract it\n const { flatData, hasMultiLocale, localeUpdates } = extractMultiLocaleData(\n createData,\n configuredLocales,\n )\n\n if (hasMultiLocale) {\n // Create with default locale data\n processedDoc = await req.payload.create({\n collection: collectionSlug,\n data: flatData,\n draft: draftOption,\n 
overrideAccess: false,\n req,\n user,\n })\n\n // Update for other locales\n if (processedDoc && Object.keys(localeUpdates).length > 0) {\n for (const [locale, localeData] of Object.entries(localeUpdates)) {\n try {\n // Clone the request with the specific locale\n const localeReq = { ...req, locale }\n await req.payload.update({\n id: processedDoc.id as number | string,\n collection: collectionSlug,\n data: localeData,\n draft: collectionHasVersions ? false : undefined,\n overrideAccess: false,\n req: localeReq,\n })\n } catch (error) {\n // Log but don't fail the entire import if a locale update fails\n req.payload.logger.error({\n err: error,\n msg: `Failed to update locale ${locale} for document ${String(processedDoc.id)}`,\n })\n }\n }\n }\n } else {\n // No multi-locale data, create normally\n processedDoc = await req.payload.create({\n collection: collectionSlug,\n data: createData,\n draft: draftOption,\n overrideAccess: false,\n req,\n user,\n })\n }\n } else {\n // Update mode but document not found\n let matchValueDisplay: string\n if (typeof matchValue === 'object' && matchValue !== null) {\n matchValueDisplay = JSON.stringify(matchValue)\n } else if (typeof matchValue === 'string') {\n matchValueDisplay = matchValue\n } else if (typeof matchValue === 'number') {\n matchValueDisplay = matchValue.toString()\n } else {\n // For other types, use JSON.stringify to avoid [object Object]\n matchValueDisplay = JSON.stringify(matchValue)\n }\n throw new Error(`Document with ${matchField || 'id'}=\"${matchValueDisplay}\" not found`)\n }\n } else {\n throw new Error(`Unknown import mode: ${String(importMode)}`)\n }\n\n if (processedDoc) {\n // Determine operation type for proper counting\n let operation: 'created' | 'updated' | undefined\n if (importMode === 'create') {\n operation = 'created'\n } else if (importMode === 'update') {\n operation = 'updated'\n } else if (importMode === 'upsert') {\n // In upsert mode, check if we found an existing document\n if (existing && existing.docs.length > 0) {\n operation = 'updated'\n } else {\n operation = 'created'\n }\n }\n\n result.successful.push({\n document,\n index: rowNumber - 1, // Store as 0-indexed\n operation,\n result: processedDoc,\n })\n }\n } catch (error) {\n const importError: ImportError = {\n type: categorizeError(error),\n documentData: document || {},\n error: extractErrorMessage(error),\n item: document || {},\n itemIndex: rowNumber - 1,\n rowNumber,\n }\n\n // Try to extract field information from validation errors\n if (error && typeof error === 'object' && 'data' in error) {\n const errorData = error as { data?: { errors?: Array<{ path?: string }> } }\n if (errorData.data?.errors && Array.isArray(errorData.data.errors)) {\n const firstError = errorData.data.errors[0]\n if (firstError?.path) {\n importError.field = firstError.path\n }\n }\n }\n\n result.failed.push(importError)\n // Always continue processing all rows\n }\n }\n\n return result\n}\n\nexport function createImportBatchProcessor(options: ImportBatchProcessorOptions = {}) {\n const processorOptions = {\n batchSize: options.batchSize ?? 100,\n defaultVersionStatus: options.defaultVersionStatus ?? 
'published',\n }\n\n const processImport = async (processOptions: ImportProcessOptions): Promise<ImportResult> => {\n const { collectionSlug, documents, importMode, matchField, req, user } = processOptions\n const batches = createBatches(documents, processorOptions.batchSize)\n\n const result: ImportResult = {\n errors: [],\n imported: 0,\n total: documents.length,\n updated: 0,\n }\n\n for (let i = 0; i < batches.length; i++) {\n const currentBatch = batches[i]\n if (!currentBatch) {\n continue\n }\n\n const batchResult = await processImportBatch({\n batch: currentBatch,\n batchIndex: i,\n collectionSlug,\n importMode,\n matchField,\n options: processorOptions,\n req,\n user,\n })\n\n // Update results\n for (const success of batchResult.successful) {\n if (success.operation === 'created') {\n result.imported++\n } else if (success.operation === 'updated') {\n result.updated++\n } else {\n // Fallback\n if (importMode === 'create') {\n result.imported++\n } else {\n result.updated++\n }\n }\n }\n\n for (const error of batchResult.failed) {\n result.errors.push({\n doc: error.documentData,\n error: error.error,\n index: error.rowNumber - 1, // Convert back to 0-indexed\n })\n }\n }\n\n return result\n }\n\n return {\n processImport,\n }\n}\n"],"names":["categorizeError","createBatches","extractErrorMessage","extractMultiLocaleData","data","configuredLocales","flatData","localeUpdates","hasMultiLocale","length","localeSet","Set","key","value","Object","entries","Array","isArray","valueObj","localeKeys","keys","filter","k","has","firstLocale","locale","processImportBatch","batch","batchIndex","collectionSlug","importMode","matchField","options","req","user","result","failed","successful","collectionConfig","payload","collections","config","collectionHasVersions","Boolean","versions","localization","localeCodes","undefined","startingRowNumber","batchSize","i","document","rowNumber","processedDoc","existing","createData","id","draftOption","statusValue","_status","defaultVersionStatus","isPublished","debug","logger","info","msg","willSetDraft","title","titleIsNull","titleType","create","collection","draft","overrideAccess","localeData","localeReq","update","error","err","String","matchValue","Error","isMatchingById","matchValueStr","JSON","stringify","toString","isValidObjectIdFormat","test","find","depth","limit","where","equals","docs","existingDoc","existingId","existingStatus","existingTitle","incomingDocument","mode","updateData","_id","createdAt","updatedAt","reduce","acc","val","substring","newData","status","updateError","matchValueDisplay","operation","push","index","importError","type","documentData","item","itemIndex","errorData","errors","firstError","path","field","createImportBatchProcessor","processorOptions","processImport","processOptions","documents","batches","imported","total","updated","currentBatch","batchResult","success","doc"],"mappings":"AAIA,SAEEA,eAAe,EACfC,aAAa,EACbC,mBAAmB,QACd,oCAAmC;AA6C1C,8CAA8C;AAC9C,SAASC,uBACPC,IAA6B,EAC7BC,iBAA4B;IAM5B,MAAMC,WAAoC,CAAC;IAC3C,MAAMC,gBAAyD,CAAC;IAChE,IAAIC,iBAAiB;IAErB,yDAAyD;IACzD,IAAI,CAACH,qBAAqBA,kBAAkBI,MAAM,KAAK,GAAG;QACxD,OAAO;YAAEH,UAAU;gBAAE,GAAGF,IAAI;YAAC;YAAGI,gBAAgB;YAAOD,eAAe,CAAC;QAAE;IAC3E;IAEA,MAAMG,YAAY,IAAIC,IAAIN;IAE1B,KAAK,MAAM,CAACO,KAAKC,MAAM,IAAIC,OAAOC,OAAO,CAACX,MAAO;QAC/C,IAAIS,SAAS,OAAOA,UAAU,YAAY,CAACG,MAAMC,OAAO,CAACJ,QAAQ;YAC/D,MAAMK,WAAWL;YACjB,4DAA4D;YAC5D,MAAMM,aAAaL,OAAOM,IAAI,CAACF,UAAUG,MAAM,CAAC,CAACC,IAAMZ,UAAUa,GAAG,CAACD;YACrE,IAAIH,WAAWV,MAAM,GAAG,GAAG;gBACzBD,iBAAiB;gBACjB,sDAAsD;gBAC
tD,gEAAgE;gBAChE,MAAMgB,cAAcL,UAAU,CAAC,EAAE;gBACjC,IAAIK,aAAa;oBACflB,QAAQ,CAACM,IAAI,GAAGM,QAAQ,CAACM,YAAY;oBACrC,uCAAuC;oBACvC,KAAK,MAAMC,UAAUN,WAAY;wBAC/B,IAAIM,WAAWD,aAAa;4BAC1B,IAAI,CAACjB,aAAa,CAACkB,OAAO,EAAE;gCAC1BlB,aAAa,CAACkB,OAAO,GAAG,CAAC;4BAC3B;4BACAlB,aAAa,CAACkB,OAAO,CAACb,IAAI,GAAGM,QAAQ,CAACO,OAAO;wBAC/C;oBACF;gBACF;YACF,OAAO;gBACL,8BAA8B;gBAC9BnB,QAAQ,CAACM,IAAI,GAAGC;YAClB;QACF,OAAO;YACL,qGAAqG;YACrGP,QAAQ,CAACM,IAAI,GAAGC;QAClB;IACF;IAEA,OAAO;QAAEP;QAAUE;QAAgBD;IAAc;AACnD;AAaA,eAAemB,mBAAmB,EAChCC,KAAK,EACLC,UAAU,EACVC,cAAc,EACdC,UAAU,EACVC,UAAU,EACVC,OAAO,EACPC,GAAG,EACHC,IAAI,EACsB;IAC1B,MAAMC,SAA4B;QAChCC,QAAQ,EAAE;QACVC,YAAY,EAAE;IAChB;IAEA,+CAA+C;IAC/C,MAAMC,mBAAmBL,IAAIM,OAAO,CAACC,WAAW,CAACX,eAAe,EAAEY;IAClE,MAAMC,wBAAwBC,QAAQL,kBAAkBM;IAExD,yDAAyD;IACzD,MAAMvC,oBAAoB4B,IAAIM,OAAO,CAACE,MAAM,CAACI,YAAY,GACrDZ,IAAIM,OAAO,CAACE,MAAM,CAACI,YAAY,CAACC,WAAW,GAC3CC;IAEJ,mDAAmD;IACnD,MAAMC,oBAAoBpB,aAAaI,QAAQiB,SAAS;IAExD,IAAK,IAAIC,IAAI,GAAGA,IAAIvB,MAAMlB,MAAM,EAAEyC,IAAK;QACrC,MAAMC,WAAWxB,KAAK,CAACuB,EAAE;QACzB,IAAI,CAACC,UAAU;YACb;QACF;QACA,MAAMC,YAAYJ,oBAAoBE,IAAI;QAE1C,IAAI;YACF,IAAIG;YACJ,IAAIC;YAEJ,IAAIxB,eAAe,UAAU;gBAC3B,6CAA6C;gBAC7C,MAAMyB,aAAa;oBAAE,GAAGJ,QAAQ;gBAAC;gBACjC,OAAOI,WAAWC,EAAE;gBAEpB,gDAAgD;gBAChD,IAAIC;gBACJ,IAAIf,uBAAuB;oBACzB,iEAAiE;oBACjE,2DAA2D;oBAC3D,MAAMgB,cAAcH,WAAWI,OAAO,IAAI3B,QAAQ4B,oBAAoB;oBACtE,MAAMC,cAAcH,gBAAgB;oBACpCD,cAAc,CAACI;oBAEf,6BAA6B;oBAC7B,IAAI5B,IAAIM,OAAO,CAACE,MAAM,CAACqB,KAAK,EAAE;wBAC5B7B,IAAIM,OAAO,CAACwB,MAAM,CAACC,IAAI,CAAC;4BACtBL,SAASJ,WAAWI,OAAO;4BAC3BE;4BACAI,KAAK;4BACLC,cAAcT;wBAChB;oBACF;oBAEA,OAAOF,WAAWI,OAAO,EAAC,8DAA8D;gBAC1F;gBAEA,wCAAwC;gBACxC,IAAI1B,IAAIM,OAAO,CAACE,MAAM,CAACqB,KAAK,IAAI,WAAWP,YAAY;oBACrDtB,IAAIM,OAAO,CAACwB,MAAM,CAACC,IAAI,CAAC;wBACtBC,KAAK;wBACLE,OAAOZ,WAAWY,KAAK;wBACvBC,aAAab,WAAWY,KAAK,KAAK;wBAClCE,WAAW,OAAOd,WAAWY,KAAK;oBACpC;gBACF;gBAEA,oDAAoD;gBACpD,MAAM,EAAE7D,QAAQ,EAAEE,cAAc,EAAED,aAAa,EAAE,GAAGJ,uBAClDoD,YACAlD;gBAGF,IAAIG,gBAAgB;oBAClB,kCAAkC;oBAClC6C,eAAe,MAAMpB,IAAIM,OAAO,CAAC+B,MAAM,CAAC;wBACtCC,YAAY1C;wBACZzB,MAAME;wBACNkE,OAAOf;wBACPgB,gBAAgB;wBAChBxC;wBACAC;oBACF;oBAEA,2BAA2B;oBAC3B,IAAImB,gBAAgBvC,OAAOM,IAAI,CAACb,eAAeE,MAAM,GAAG,GAAG;wBACzD,KAAK,MAAM,CAACgB,QAAQiD,WAAW,IAAI5D,OAAOC,OAAO,CAACR,eAAgB;4BAChE,IAAI;gCACF,MAAMoE,YAAY;oCAAE,GAAG1C,GAAG;oCAAER;gCAAO;gCACnC,MAAMQ,IAAIM,OAAO,CAACqC,MAAM,CAAC;oCACvBpB,IAAIH,aAAaG,EAAE;oCACnBe,YAAY1C;oCACZzB,MAAMsE;oCACNF,OAAO9B,wBAAwB,QAAQK;oCACvC0B,gBAAgB;oCAChBxC,KAAK0C;oCACLzC;gCACF;4BACF,EAAE,OAAO2C,OAAO;gCACd,gEAAgE;gCAChE5C,IAAIM,OAAO,CAACwB,MAAM,CAACc,KAAK,CAAC;oCACvBC,KAAKD;oCACLZ,KAAK,CAAC,wBAAwB,EAAExC,OAAO,cAAc,EAAEsD,OAAO1B,aAAaG,EAAE,GAAG;gCAClF;4BACF;wBACF;oBACF;gBACF,OAAO;oBACL,wCAAwC;oBACxCH,eAAe,MAAMpB,IAAIM,OAAO,CAAC+B,MAAM,CAAC;wBACtCC,YAAY1C;wBACZzB,MAAMmD;wBACNiB,OAAOf;wBACPgB,gBAAgB;wBAChBxC;wBACAC;oBACF;gBACF;YACF,OAAO,IAAIJ,eAAe,YAAYA,eAAe,UAAU;gBAC7D,MAAMkD,aAAa7B,QAAQ,CAACpB,cAAc,KAAK;gBAC/C,IAAI,CAACiD,YAAY;oBACf,MAAM,IAAIC,MAAM,CAAC,aAAa,EAAElD,cAAc,KAAK,uBAAuB,CAAC;gBAC7E;gBAEA,6CAA6C;gBAC7C,6EAA6E;gBAC7E,MAAMmD,iBAAiB,AAACnD,CAAAA,cAAc,IAAG,MAAO;gBAEhD,+DAA+D;gBAC/D,+DAA+D;gBAC/D,IAAIoD;gBACJ,IAAI,OAAOH,eAAe,YAAYA,eAAe,MAAM;oBACzDG,gBAAgBC,KAAKC,SAAS,CAACL;gBACjC,OAAO,IAAI,OAAOA,eAAe,UAAU;oBACzCG,gBAAgBH;gBAClB,OAAO,IAAI,OAAOA,eAAe,UAAU;oBACzCG,gBAAgBH,WAAWM,QAAQ;gBACrC,OAAO;oBACL,sCAAsC;oBACtCH,gBAAgBC,KAAKC,SAAS,CAACL;gBACjC;gBACA,MAAMO,wBAAwB,kBAAkBC,IAAI,CAACL;gBAErD,6DAA6D;gBAC7D,IAAI;oBACF7B,WAAW,MAAMrB,IAAIM,OAAO,CAACkD,IAAI,CAAC;wBAChClB,YAAY1C;wBACZ6D,OAAO;wBACPC,OAAO;wBACPlB,gBAAgB;wBA
ChBxC;wBACAC;wBACA0D,OAAO;4BACL,CAAC7D,cAAc,KAAK,EAAE;gCACpB8D,QAAQb;4BACV;wBACF;oBACF;gBACF,EAAE,OAAOH,OAAO;oBACd,0EAA0E;oBAC1E,kDAAkD;oBAClD,IAAIK,kBAAkBpD,eAAe,YAAY,CAACyD,uBAAuB;wBACvEjC,WAAW;4BAAEwC,MAAM,EAAE;wBAAC;oBACxB,OAAO,IAAIZ,kBAAkBpD,eAAe,YAAY,CAACyD,uBAAuB;wBAC9E,oDAAoD;wBACpD,MAAM,IAAIN,MAAM,CAAC,8BAA8B,EAAEE,eAAe;oBAClE,OAAO;wBACL,wBAAwB;wBACxB,MAAMN;oBACR;gBACF;gBAEA,IAAIvB,SAASwC,IAAI,CAACrF,MAAM,GAAG,GAAG;oBAC5B,kBAAkB;oBAClB,MAAMsF,cAAczC,SAASwC,IAAI,CAAC,EAAE;oBACpC,IAAI,CAACC,aAAa;wBAChB,MAAM,IAAId,MAAM,CAAC,kBAAkB,CAAC;oBACtC;oBAEA,2BAA2B;oBAC3B,IAAIhD,IAAIM,OAAO,CAACE,MAAM,CAACqB,KAAK,EAAE;wBAC5B7B,IAAIM,OAAO,CAACwB,MAAM,CAACC,IAAI,CAAC;4BACtBgC,YAAYD,YAAYvC,EAAE;4BAC1ByC,gBAAgBF,YAAYpC,OAAO;4BACnCuC,eAAeH,YAAY5B,KAAK;4BAChCgC,kBAAkBhD;4BAClBiD,MAAMtE;4BACNmC,KAAK;wBACP;oBACF;oBAEA,MAAMoC,aAAa;wBAAE,GAAGlD,QAAQ;oBAAC;oBACjC,iDAAiD;oBACjD,OAAOkD,WAAW7C,EAAE;oBACpB,OAAO6C,WAAWC,GAAG;oBACrB,OAAOD,WAAWE,SAAS;oBAC3B,OAAOF,WAAWG,SAAS;oBAE3B,oDAAoD;oBACpD,MAAM,EAAElG,QAAQ,EAAEE,cAAc,EAAED,aAAa,EAAE,GAAGJ,uBAClDkG,YACAhG;oBAGF,IAAI4B,IAAIM,OAAO,CAACE,MAAM,CAACqB,KAAK,EAAE;wBAC5B7B,IAAIM,OAAO,CAACwB,MAAM,CAACC,IAAI,CAAC;4BACtBgC,YAAYD,YAAYvC,EAAE;4BAC1BhD;4BACA4F,MAAMtE;4BACNmC,KAAK;4BACLoC,YAAYvF,OAAOM,IAAI,CAACZ,iBAAiBF,WAAW+F,YAAYI,MAAM,CACpE,CAACC,KAAK9F;gCACJ,MAAM+F,MAAM,AAACnG,CAAAA,iBAAiBF,WAAW+F,UAAS,CAAE,CAACzF,IAAI;gCACzD8F,GAAG,CAAC9F,IAAI,GACN,OAAO+F,QAAQ,YAAYA,IAAIlG,MAAM,GAAG,KAAKkG,IAAIC,SAAS,CAAC,GAAG,MAAM,QAAQD;gCAC9E,OAAOD;4BACT,GACA,CAAC;wBAEL;oBACF;oBAEA,IAAIlG,gBAAgB;wBAClB,kCAAkC;wBAClC6C,eAAe,MAAMpB,IAAIM,OAAO,CAACqC,MAAM,CAAC;4BACtCpB,IAAIuC,YAAYvC,EAAE;4BAClBe,YAAY1C;4BACZzB,MAAME;4BACNoF,OAAO;4BACP,2EAA2E;4BAC3EjB,gBAAgB;4BAChBxC;4BACAC;wBACF;wBAEA,2BAA2B;wBAC3B,IAAImB,gBAAgBvC,OAAOM,IAAI,CAACb,eAAeE,MAAM,GAAG,GAAG;4BACzD,KAAK,MAAM,CAACgB,QAAQiD,WAAW,IAAI5D,OAAOC,OAAO,CAACR,eAAgB;gCAChE,IAAI;oCACF,6CAA6C;oCAC7C,MAAMoE,YAAY;wCAAE,GAAG1C,GAAG;wCAAER;oCAAO;oCACnC,MAAMQ,IAAIM,OAAO,CAACqC,MAAM,CAAC;wCACvBpB,IAAIuC,YAAYvC,EAAE;wCAClBe,YAAY1C;wCACZzB,MAAMsE;wCACNgB,OAAO;wCACP,2EAA2E;wCAC3EjB,gBAAgB;wCAChBxC,KAAK0C;wCACLzC;oCACF;gCACF,EAAE,OAAO2C,OAAO;oCACd,gEAAgE;oCAChE5C,IAAIM,OAAO,CAACwB,MAAM,CAACc,KAAK,CAAC;wCACvBC,KAAKD;wCACLZ,KAAK,CAAC,wBAAwB,EAAExC,OAAO,cAAc,EAAEsD,OAAOgB,YAAYvC,EAAE,GAAG;oCACjF;gCACF;4BACF;wBACF;oBACF,OAAO;wBACL,wCAAwC;wBACxC,IAAI;4BACF,iCAAiC;4BACjC,IAAIvB,IAAIM,OAAO,CAACE,MAAM,CAACqB,KAAK,EAAE;gCAC5B7B,IAAIM,OAAO,CAACwB,MAAM,CAACC,IAAI,CAAC;oCACtBgC,YAAYD,YAAYvC,EAAE;oCAC1B0C,eAAeH,YAAY5B,KAAK;oCAChCF,KAAK;oCACL4C,SAASR;gCACX;4BACF;4BAEA,oFAAoF;4BACpF,6EAA6E;4BAC7EhD,eAAe,MAAMpB,IAAIM,OAAO,CAACqC,MAAM,CAAC;gCACtCpB,IAAIuC,YAAYvC,EAAE;gCAClBe,YAAY1C;gCACZzB,MAAMiG;gCACNX,OAAO;gCACP,2EAA2E;gCAC3EjB,gBAAgB;gCAChBxC;gCACAC;4BACF;4BAEA,+BAA+B;4BAC/B,IAAID,IAAIM,OAAO,CAACE,MAAM,CAACqB,KAAK,IAAIT,cAAc;gCAC5CpB,IAAIM,OAAO,CAACwB,MAAM,CAACC,IAAI,CAAC;oCACtBR,IAAIH,aAAaG,EAAE;oCACnBS,KAAK;oCACL6C,QAAQzD,aAAaM,OAAO;oCAC5BQ,OAAOd,aAAac,KAAK;gCAC3B;4BACF;wBACF,EAAE,OAAO4C,aAAa;4BACpB9E,IAAIM,OAAO,CAACwB,MAAM,CAACc,KAAK,CAAC;gCACvBrB,IAAIuC,YAAYvC,EAAE;gCAClBsB,KAAKiC;gCACL9C,KAAK;4BACP;4BACA,MAAM8C;wBACR;oBACF;gBACF,OAAO,IAAIjF,eAAe,UAAU;oBAClC,4BAA4B;oBAC5B,IAAIG,IAAIM,OAAO,CAACE,MAAM,CAACqB,KAAK,EAAE;wBAC5B7B,IAAIM,OAAO,CAACwB,MAAM,CAACC,IAAI,CAAC;4BACtBb;4BACApB,YAAYA,cAAc;4BAC1BiD,YAAY7B,QAAQ,CAACpB,cAAc,KAAK;4BACxCkC,KAAK;wBACP;oBACF;oBAEA,MAAMV,aAAa;wBAAE,GAAGJ,QAAQ;oBAAC;oBACjC,OAAOI,WAAWC,EAAE;oBAEpB,gDAAgD;oBAChD,IAAIC;oBACJ,IAAIf,uBAAuB;wBACzB,+DAA+D;wBAC/D,MAAMgB,cAAcH,WAAWI,OAAO,IAAI3B,QAAQ4B,oBAAoB;wBACtE,MAAMC,cAAcH,gBAAgB;wBACpCD
,cAAc,CAACI;wBACf,OAAON,WAAWI,OAAO,EAAC,8DAA8D;oBAC1F;oBAEA,oDAAoD;oBACpD,MAAM,EAAErD,QAAQ,EAAEE,cAAc,EAAED,aAAa,EAAE,GAAGJ,uBAClDoD,YACAlD;oBAGF,IAAIG,gBAAgB;wBAClB,kCAAkC;wBAClC6C,eAAe,MAAMpB,IAAIM,OAAO,CAAC+B,MAAM,CAAC;4BACtCC,YAAY1C;4BACZzB,MAAME;4BACNkE,OAAOf;4BACPgB,gBAAgB;4BAChBxC;4BACAC;wBACF;wBAEA,2BAA2B;wBAC3B,IAAImB,gBAAgBvC,OAAOM,IAAI,CAACb,eAAeE,MAAM,GAAG,GAAG;4BACzD,KAAK,MAAM,CAACgB,QAAQiD,WAAW,IAAI5D,OAAOC,OAAO,CAACR,eAAgB;gCAChE,IAAI;oCACF,6CAA6C;oCAC7C,MAAMoE,YAAY;wCAAE,GAAG1C,GAAG;wCAAER;oCAAO;oCACnC,MAAMQ,IAAIM,OAAO,CAACqC,MAAM,CAAC;wCACvBpB,IAAIH,aAAaG,EAAE;wCACnBe,YAAY1C;wCACZzB,MAAMsE;wCACNF,OAAO9B,wBAAwB,QAAQK;wCACvC0B,gBAAgB;wCAChBxC,KAAK0C;oCACP;gCACF,EAAE,OAAOE,OAAO;oCACd,gEAAgE;oCAChE5C,IAAIM,OAAO,CAACwB,MAAM,CAACc,KAAK,CAAC;wCACvBC,KAAKD;wCACLZ,KAAK,CAAC,wBAAwB,EAAExC,OAAO,cAAc,EAAEsD,OAAO1B,aAAaG,EAAE,GAAG;oCAClF;gCACF;4BACF;wBACF;oBACF,OAAO;wBACL,wCAAwC;wBACxCH,eAAe,MAAMpB,IAAIM,OAAO,CAAC+B,MAAM,CAAC;4BACtCC,YAAY1C;4BACZzB,MAAMmD;4BACNiB,OAAOf;4BACPgB,gBAAgB;4BAChBxC;4BACAC;wBACF;oBACF;gBACF,OAAO;oBACL,qCAAqC;oBACrC,IAAI8E;oBACJ,IAAI,OAAOhC,eAAe,YAAYA,eAAe,MAAM;wBACzDgC,oBAAoB5B,KAAKC,SAAS,CAACL;oBACrC,OAAO,IAAI,OAAOA,eAAe,UAAU;wBACzCgC,oBAAoBhC;oBACtB,OAAO,IAAI,OAAOA,eAAe,UAAU;wBACzCgC,oBAAoBhC,WAAWM,QAAQ;oBACzC,OAAO;wBACL,+DAA+D;wBAC/D0B,oBAAoB5B,KAAKC,SAAS,CAACL;oBACrC;oBACA,MAAM,IAAIC,MAAM,CAAC,cAAc,EAAElD,cAAc,KAAK,EAAE,EAAEiF,kBAAkB,WAAW,CAAC;gBACxF;YACF,OAAO;gBACL,MAAM,IAAI/B,MAAM,CAAC,qBAAqB,EAAEF,OAAOjD,aAAa;YAC9D;YAEA,IAAIuB,cAAc;gBAChB,+CAA+C;gBAC/C,IAAI4D;gBACJ,IAAInF,eAAe,UAAU;oBAC3BmF,YAAY;gBACd,OAAO,IAAInF,eAAe,UAAU;oBAClCmF,YAAY;gBACd,OAAO,IAAInF,eAAe,UAAU;oBAClC,yDAAyD;oBACzD,IAAIwB,YAAYA,SAASwC,IAAI,CAACrF,MAAM,GAAG,GAAG;wBACxCwG,YAAY;oBACd,OAAO;wBACLA,YAAY;oBACd;gBACF;gBAEA9E,OAAOE,UAAU,CAAC6E,IAAI,CAAC;oBACrB/D;oBACAgE,OAAO/D,YAAY;oBACnB6D;oBACA9E,QAAQkB;gBACV;YACF;QACF,EAAE,OAAOwB,OAAO;YACd,MAAMuC,cAA2B;gBAC/BC,MAAMrH,gBAAgB6E;gBACtByC,cAAcnE,YAAY,CAAC;gBAC3B0B,OAAO3E,oBAAoB2E;gBAC3B0C,MAAMpE,YAAY,CAAC;gBACnBqE,WAAWpE,YAAY;gBACvBA;YACF;YAEA,0DAA0D;YAC1D,IAAIyB,SAAS,OAAOA,UAAU,YAAY,UAAUA,OAAO;gBACzD,MAAM4C,YAAY5C;gBAClB,IAAI4C,UAAUrH,IAAI,EAAEsH,UAAU1G,MAAMC,OAAO,CAACwG,UAAUrH,IAAI,CAACsH,MAAM,GAAG;oBAClE,MAAMC,aAAaF,UAAUrH,IAAI,CAACsH,MAAM,CAAC,EAAE;oBAC3C,IAAIC,YAAYC,MAAM;wBACpBR,YAAYS,KAAK,GAAGF,WAAWC,IAAI;oBACrC;gBACF;YACF;YAEAzF,OAAOC,MAAM,CAAC8E,IAAI,CAACE;QACnB,sCAAsC;QACxC;IACF;IAEA,OAAOjF;AACT;AAEA,OAAO,SAAS2F,2BAA2B9F,UAAuC,CAAC,CAAC;IAClF,MAAM+F,mBAAmB;QACvB9E,WAAWjB,QAAQiB,SAAS,IAAI;QAChCW,sBAAsB5B,QAAQ4B,oBAAoB,IAAI;IACxD;IAEA,MAAMoE,gBAAgB,OAAOC;QAC3B,MAAM,EAAEpG,cAAc,EAAEqG,SAAS,EAAEpG,UAAU,EAAEC,UAAU,EAAEE,GAAG,EAAEC,IAAI,EAAE,GAAG+F;QACzE,MAAME,UAAUlI,cAAciI,WAAWH,iBAAiB9E,SAAS;QAEnE,MAAMd,SAAuB;YAC3BuF,QAAQ,EAAE;YACVU,UAAU;YACVC,OAAOH,UAAUzH,MAAM;YACvB6H,SAAS;QACX;QAEA,IAAK,IAAIpF,IAAI,GAAGA,IAAIiF,QAAQ1H,MAAM,EAAEyC,IAAK;YACvC,MAAMqF,eAAeJ,OAAO,CAACjF,EAAE;YAC/B,IAAI,CAACqF,cAAc;gBACjB;YACF;YAEA,MAAMC,cAAc,MAAM9G,mBAAmB;gBAC3CC,OAAO4G;gBACP3G,YAAYsB;gBACZrB;gBACAC;gBACAC;gBACAC,SAAS+F;gBACT9F;gBACAC;YACF;YAEA,iBAAiB;YACjB,KAAK,MAAMuG,WAAWD,YAAYnG,UAAU,CAAE;gBAC5C,IAAIoG,QAAQxB,SAAS,KAAK,WAAW;oBACnC9E,OAAOiG,QAAQ;gBACjB,OAAO,IAAIK,QAAQxB,SAAS,KAAK,WAAW;oBAC1C9E,OAAOmG,OAAO;gBAChB,OAAO;oBACL,WAAW;oBACX,IAAIxG,eAAe,UAAU;wBAC3BK,OAAOiG,QAAQ;oBACjB,OAAO;wBACLjG,OAAOmG,OAAO;oBAChB;gBACF;YACF;YAEA,KAAK,MAAMzD,SAAS2D,YAAYpG,MAAM,CAAE;gBACtCD,OAAOuF,MAAM,CAACR,IAAI,CAAC;oBACjBwB,KAAK7D,MAAMyC,YAAY;oBACvBzC,OAAOA,MAAMA,KAAK;oBAClBsC,OAAOtC,MAAMzB,SAAS,GAAG;gBAC3B;YACF;QACF;QAEA,OAAOjB;IACT;IAEA,OAAO;QACL6F;IACF;AACF"}
+
{"version":3,"sources":["../../src/import/batchProcessor.ts"],"sourcesContent":["import type { PayloadRequest, TypedUser } from 'payload'\n\nimport type { ImportMode, ImportResult } from './createImport.js'\n\nimport {\n type BatchError,\n categorizeError,\n createBatches,\n extractErrorMessage,\n} from '../utilities/useBatchProcessor.js'\n\n/**\n * Import-specific batch processor options\n */\nexport interface ImportBatchProcessorOptions {\n batchSize?: number\n defaultVersionStatus?: 'draft' | 'published'\n}\n\n/**\n * Import-specific error type extending the generic BatchError\n */\nexport interface ImportError extends BatchError<Record<string, unknown>> {\n documentData: Record<string, unknown>\n field?: string\n fieldLabel?: string\n rowNumber: number // 1-indexed for user clarity\n}\n\n/**\n * Result from processing a single import batch\n */\nexport interface ImportBatchResult {\n failed: Array<ImportError>\n successful: Array<{\n document: Record<string, unknown>\n index: number\n operation?: 'created' | 'updated'\n result: Record<string, unknown>\n }>\n}\n\n/**\n * Options for processing an import operation\n */\nexport interface ImportProcessOptions {\n collectionSlug: string\n documents: Record<string, unknown>[]\n importMode: ImportMode\n matchField?: string\n req: PayloadRequest\n user?: TypedUser\n}\n\n/**\n * Separates multi-locale data from a document for sequential locale updates.\n *\n * When a field has locale-keyed values (e.g., { title: { en: 'Hello', es: 'Hola' } }),\n * this extracts the first locale's data for initial create/update, and stores\n * remaining locales for subsequent update calls.\n *\n * @returns\n * - flatData: Document with first locale values extracted (for initial operation)\n * - hasMultiLocale: Whether any multi-locale fields were found\n * - localeUpdates: Map of locale -> field data for follow-up updates\n */\nfunction extractMultiLocaleData(\n data: Record<string, unknown>,\n configuredLocales?: string[],\n): {\n flatData: Record<string, unknown>\n hasMultiLocale: boolean\n localeUpdates: Record<string, Record<string, unknown>>\n} {\n const flatData: Record<string, unknown> = {}\n const localeUpdates: Record<string, Record<string, unknown>> = {}\n let hasMultiLocale = false\n\n if (!configuredLocales || configuredLocales.length === 0) {\n return { flatData: { ...data }, hasMultiLocale: false, localeUpdates: {} }\n }\n\n const localeSet = new Set(configuredLocales)\n\n for (const [key, value] of Object.entries(data)) {\n if (value && typeof value === 'object' && !Array.isArray(value)) {\n const valueObj = value as Record<string, unknown>\n const localeKeys = Object.keys(valueObj).filter((k) => localeSet.has(k))\n\n if (localeKeys.length > 0) {\n hasMultiLocale = true\n const firstLocale = localeKeys[0]\n if (firstLocale) {\n flatData[key] = valueObj[firstLocale]\n for (const locale of localeKeys) {\n if (locale !== firstLocale) {\n if (!localeUpdates[locale]) {\n localeUpdates[locale] = {}\n }\n localeUpdates[locale][key] = valueObj[locale]\n }\n }\n }\n } else {\n flatData[key] = value\n }\n } else {\n flatData[key] = value\n }\n }\n\n return { flatData, hasMultiLocale, localeUpdates }\n}\n\ntype ProcessImportBatchOptions = {\n batch: Record<string, unknown>[]\n batchIndex: number\n collectionSlug: string\n importMode: ImportMode\n matchField: string | undefined\n options: { batchSize: number; defaultVersionStatus: 'draft' | 'published' }\n req: PayloadRequest\n user?: TypedUser\n}\n\n/**\n * Processes a batch of documents for import based on 
the import mode.\n *\n * For each document in the batch:\n * - create: Creates a new document (removes any existing ID)\n * - update: Finds existing document by matchField and updates it\n * - upsert: Updates if found, creates if not found\n *\n * Handles versioned collections, multi-locale data, and MongoDB ObjectID validation.\n * Continues processing remaining documents even if individual imports fail.\n */\nasync function processImportBatch({\n batch,\n batchIndex,\n collectionSlug,\n importMode,\n matchField,\n options,\n req,\n user,\n}: ProcessImportBatchOptions): Promise<ImportBatchResult> {\n const result: ImportBatchResult = {\n failed: [],\n successful: [],\n }\n\n const collectionConfig = req.payload.collections[collectionSlug]?.config\n const collectionHasVersions = Boolean(collectionConfig?.versions)\n\n const configuredLocales = req.payload.config.localization\n ? req.payload.config.localization.localeCodes\n : undefined\n\n const startingRowNumber = batchIndex * options.batchSize\n\n for (let i = 0; i < batch.length; i++) {\n const document = batch[i]\n if (!document) {\n continue\n }\n const rowNumber = startingRowNumber + i + 1\n\n try {\n let savedDocument: Record<string, unknown> | undefined\n let existingDocResult: { docs: Array<Record<string, unknown>> } | undefined\n\n if (importMode === 'create') {\n const createData = { ...document }\n delete createData.id\n\n let draftOption: boolean | undefined\n if (collectionHasVersions) {\n const statusValue = createData._status || options.defaultVersionStatus\n const isPublished = statusValue !== 'draft'\n draftOption = !isPublished\n\n if (req.payload.config.debug) {\n req.payload.logger.info({\n _status: createData._status,\n isPublished,\n msg: 'Status handling in create',\n willSetDraft: draftOption,\n })\n }\n\n delete createData._status // Remove _status from data - it's controlled via draft option\n }\n\n if (req.payload.config.debug && 'title' in createData) {\n req.payload.logger.info({\n msg: 'Creating document',\n title: createData.title,\n titleIsNull: createData.title === null,\n titleType: typeof createData.title,\n })\n }\n\n // Check if we have multi-locale data and extract it\n const { flatData, hasMultiLocale, localeUpdates } = extractMultiLocaleData(\n createData,\n configuredLocales,\n )\n\n if (hasMultiLocale) {\n // Create with default locale data\n savedDocument = await req.payload.create({\n collection: collectionSlug,\n data: flatData,\n draft: draftOption,\n overrideAccess: false,\n req,\n user,\n })\n\n // Update for other locales\n if (savedDocument && Object.keys(localeUpdates).length > 0) {\n for (const [locale, localeData] of Object.entries(localeUpdates)) {\n try {\n const localeReq = { ...req, locale }\n await req.payload.update({\n id: savedDocument.id as number | string,\n collection: collectionSlug,\n data: localeData,\n draft: collectionHasVersions ? 
false : undefined,\n overrideAccess: false,\n req: localeReq,\n user,\n })\n } catch (error) {\n // Log but don't fail the entire import if a locale update fails\n req.payload.logger.error({\n err: error,\n msg: `Failed to update locale ${locale} for document ${String(savedDocument.id)}`,\n })\n }\n }\n }\n } else {\n // No multi-locale data, create normally\n savedDocument = await req.payload.create({\n collection: collectionSlug,\n data: createData,\n draft: draftOption,\n overrideAccess: false,\n req,\n user,\n })\n }\n } else if (importMode === 'update' || importMode === 'upsert') {\n const matchValue = document[matchField || 'id']\n if (!matchValue) {\n throw new Error(`Match field \"${matchField || 'id'}\" not found in document`)\n }\n\n // Special handling for ID field with MongoDB\n // If matching by 'id' and it's not a valid ObjectID format, handle specially\n const isMatchingById = (matchField || 'id') === 'id'\n\n // Check if it's a valid MongoDB ObjectID format (24 hex chars)\n // Note: matchValue could be string, number, or ObjectID object\n let matchValueStr: string\n if (typeof matchValue === 'object' && matchValue !== null) {\n matchValueStr = JSON.stringify(matchValue)\n } else if (typeof matchValue === 'string') {\n matchValueStr = matchValue\n } else if (typeof matchValue === 'number') {\n matchValueStr = matchValue.toString()\n } else {\n // For other types, use JSON.stringify\n matchValueStr = JSON.stringify(matchValue)\n }\n const isValidObjectIdFormat = /^[0-9a-f]{24}$/i.test(matchValueStr)\n\n try {\n existingDocResult = await req.payload.find({\n collection: collectionSlug,\n depth: 0,\n limit: 1,\n overrideAccess: false,\n req,\n user,\n where: {\n [matchField || 'id']: {\n equals: matchValue,\n },\n },\n })\n } catch (error) {\n // MongoDB may throw for invalid ObjectID format - handle gracefully for upsert\n if (isMatchingById && importMode === 'upsert' && !isValidObjectIdFormat) {\n existingDocResult = { docs: [] }\n } else if (isMatchingById && importMode === 'update' && !isValidObjectIdFormat) {\n throw new Error(`Invalid ID format for update: ${matchValueStr}`)\n } else {\n throw error\n }\n }\n\n if (existingDocResult.docs.length > 0) {\n const existingDoc = existingDocResult.docs[0]\n if (!existingDoc) {\n throw new Error(`Document not found`)\n }\n\n // Debug: log what we found\n if (req.payload.config.debug) {\n req.payload.logger.info({\n existingId: existingDoc.id,\n existingStatus: existingDoc._status,\n existingTitle: existingDoc.title,\n incomingDocument: document,\n mode: importMode,\n msg: 'Found existing document for update',\n })\n }\n\n const updateData = { ...document }\n // Remove ID and internal fields from update data\n delete updateData.id\n delete updateData._id\n delete updateData.createdAt\n delete updateData.updatedAt\n\n // Check if we have multi-locale data and extract it\n const { flatData, hasMultiLocale, localeUpdates } = extractMultiLocaleData(\n updateData,\n configuredLocales,\n )\n\n if (req.payload.config.debug) {\n req.payload.logger.info({\n existingId: existingDoc.id,\n hasMultiLocale,\n mode: importMode,\n msg: 'Updating document in upsert/update mode',\n updateData: Object.keys(hasMultiLocale ? flatData : updateData).reduce(\n (acc, key) => {\n const val = (hasMultiLocale ? flatData : updateData)[key]\n acc[key] =\n typeof val === 'string' && val.length > 50 ? val.substring(0, 50) + '...' 
: val\n return acc\n },\n {} as Record<string, unknown>,\n ),\n })\n }\n\n if (hasMultiLocale) {\n // Update with default locale data\n savedDocument = await req.payload.update({\n id: existingDoc.id as number | string,\n collection: collectionSlug,\n data: flatData,\n depth: 0,\n // Don't specify draft - this creates a new draft for versioned collections\n overrideAccess: false,\n req,\n user,\n })\n\n // Update for other locales\n if (savedDocument && Object.keys(localeUpdates).length > 0) {\n for (const [locale, localeData] of Object.entries(localeUpdates)) {\n try {\n // Clone the request with the specific locale\n const localeReq = { ...req, locale }\n await req.payload.update({\n id: existingDoc.id as number | string,\n collection: collectionSlug,\n data: localeData,\n depth: 0,\n // Don't specify draft - this creates a new draft for versioned collections\n overrideAccess: false,\n req: localeReq,\n user,\n })\n } catch (error) {\n // Log but don't fail the entire import if a locale update fails\n req.payload.logger.error({\n err: error,\n msg: `Failed to update locale ${locale} for document ${String(existingDoc.id)}`,\n })\n }\n }\n }\n } else {\n // No multi-locale data, update normally\n try {\n // Extra debug: log before update\n if (req.payload.config.debug) {\n req.payload.logger.info({\n existingId: existingDoc.id,\n existingTitle: existingDoc.title,\n msg: 'About to update document',\n newData: updateData,\n })\n }\n\n // Update the document - don't specify draft to let Payload handle versions properly\n // This will create a new draft version for collections with versions enabled\n savedDocument = await req.payload.update({\n id: existingDoc.id as number | string,\n collection: collectionSlug,\n data: updateData,\n depth: 0,\n // Don't specify draft - this creates a new draft for versioned collections\n overrideAccess: false,\n req,\n user,\n })\n\n if (req.payload.config.debug && savedDocument) {\n req.payload.logger.info({\n id: savedDocument.id,\n msg: 'Update completed',\n status: savedDocument._status,\n title: savedDocument.title,\n })\n }\n } catch (updateError) {\n req.payload.logger.error({\n id: existingDoc.id,\n err: updateError,\n msg: 'Update failed',\n })\n throw updateError\n }\n }\n } else if (importMode === 'upsert') {\n // Create new in upsert mode\n if (req.payload.config.debug) {\n req.payload.logger.info({\n document,\n matchField: matchField || 'id',\n matchValue: document[matchField || 'id'],\n msg: 'No existing document found, creating new in upsert mode',\n })\n }\n\n const createData = { ...document }\n delete createData.id\n\n // Only handle _status for versioned collections\n let draftOption: boolean | undefined\n if (collectionHasVersions) {\n // Use defaultVersionStatus from config if _status not provided\n const statusValue = createData._status || options.defaultVersionStatus\n const isPublished = statusValue !== 'draft'\n draftOption = !isPublished\n delete createData._status // Remove _status from data - it's controlled via draft option\n }\n\n // Check if we have multi-locale data and extract it\n const { flatData, hasMultiLocale, localeUpdates } = extractMultiLocaleData(\n createData,\n configuredLocales,\n )\n\n if (hasMultiLocale) {\n // Create with default locale data\n savedDocument = await req.payload.create({\n collection: collectionSlug,\n data: flatData,\n draft: draftOption,\n overrideAccess: false,\n req,\n user,\n })\n\n // Update for other locales\n if (savedDocument && Object.keys(localeUpdates).length > 0) {\n for (const 
[locale, localeData] of Object.entries(localeUpdates)) {\n try {\n // Clone the request with the specific locale\n const localeReq = { ...req, locale }\n await req.payload.update({\n id: savedDocument.id as number | string,\n collection: collectionSlug,\n data: localeData,\n draft: collectionHasVersions ? false : undefined,\n overrideAccess: false,\n req: localeReq,\n })\n } catch (error) {\n // Log but don't fail the entire import if a locale update fails\n req.payload.logger.error({\n err: error,\n msg: `Failed to update locale ${locale} for document ${String(savedDocument.id)}`,\n })\n }\n }\n }\n } else {\n // No multi-locale data, create normally\n savedDocument = await req.payload.create({\n collection: collectionSlug,\n data: createData,\n draft: draftOption,\n overrideAccess: false,\n req,\n user,\n })\n }\n } else {\n // Update mode but document not found\n let matchValueDisplay: string\n if (typeof matchValue === 'object' && matchValue !== null) {\n matchValueDisplay = JSON.stringify(matchValue)\n } else if (typeof matchValue === 'string') {\n matchValueDisplay = matchValue\n } else if (typeof matchValue === 'number') {\n matchValueDisplay = matchValue.toString()\n } else {\n // For other types, use JSON.stringify to avoid [object Object]\n matchValueDisplay = JSON.stringify(matchValue)\n }\n throw new Error(`Document with ${matchField || 'id'}=\"${matchValueDisplay}\" not found`)\n }\n } else {\n throw new Error(`Unknown import mode: ${String(importMode)}`)\n }\n\n if (savedDocument) {\n // Determine operation type for proper counting\n let operation: 'created' | 'updated' | undefined\n if (importMode === 'create') {\n operation = 'created'\n } else if (importMode === 'update') {\n operation = 'updated'\n } else if (importMode === 'upsert') {\n if (existingDocResult && existingDocResult.docs.length > 0) {\n operation = 'updated'\n } else {\n operation = 'created'\n }\n }\n\n result.successful.push({\n document,\n index: rowNumber - 1, // Store as 0-indexed\n operation,\n result: savedDocument,\n })\n }\n } catch (error) {\n const importError: ImportError = {\n type: categorizeError(error),\n documentData: document || {},\n error: extractErrorMessage(error),\n item: document || {},\n itemIndex: rowNumber - 1,\n rowNumber,\n }\n\n // Try to extract field information from validation errors\n if (error && typeof error === 'object' && 'data' in error) {\n const errorData = error as { data?: { errors?: Array<{ path?: string }> } }\n if (errorData.data?.errors && Array.isArray(errorData.data.errors)) {\n const firstError = errorData.data.errors[0]\n if (firstError?.path) {\n importError.field = firstError.path\n }\n }\n }\n\n result.failed.push(importError)\n // Always continue processing all rows\n }\n }\n\n return result\n}\n\nexport function createImportBatchProcessor(options: ImportBatchProcessorOptions = {}) {\n const processorOptions = {\n batchSize: options.batchSize ?? 100,\n defaultVersionStatus: options.defaultVersionStatus ?? 
'published',\n }\n\n const processImport = async (processOptions: ImportProcessOptions): Promise<ImportResult> => {\n const { collectionSlug, documents, importMode, matchField, req, user } = processOptions\n const batches = createBatches(documents, processorOptions.batchSize)\n\n const result: ImportResult = {\n errors: [],\n imported: 0,\n total: documents.length,\n updated: 0,\n }\n\n for (let i = 0; i < batches.length; i++) {\n const currentBatch = batches[i]\n if (!currentBatch) {\n continue\n }\n\n const batchResult = await processImportBatch({\n batch: currentBatch,\n batchIndex: i,\n collectionSlug,\n importMode,\n matchField,\n options: processorOptions,\n req,\n user,\n })\n\n // Update results\n for (const success of batchResult.successful) {\n if (success.operation === 'created') {\n result.imported++\n } else if (success.operation === 'updated') {\n result.updated++\n } else {\n // Fallback\n if (importMode === 'create') {\n result.imported++\n } else {\n result.updated++\n }\n }\n }\n\n for (const error of batchResult.failed) {\n result.errors.push({\n doc: error.documentData,\n error: error.error,\n index: error.rowNumber - 1, // Convert back to 0-indexed\n })\n }\n }\n\n return result\n }\n\n return {\n processImport,\n }\n}\n"],"names":["categorizeError","createBatches","extractErrorMessage","extractMultiLocaleData","data","configuredLocales","flatData","localeUpdates","hasMultiLocale","length","localeSet","Set","key","value","Object","entries","Array","isArray","valueObj","localeKeys","keys","filter","k","has","firstLocale","locale","processImportBatch","batch","batchIndex","collectionSlug","importMode","matchField","options","req","user","result","failed","successful","collectionConfig","payload","collections","config","collectionHasVersions","Boolean","versions","localization","localeCodes","undefined","startingRowNumber","batchSize","i","document","rowNumber","savedDocument","existingDocResult","createData","id","draftOption","statusValue","_status","defaultVersionStatus","isPublished","debug","logger","info","msg","willSetDraft","title","titleIsNull","titleType","create","collection","draft","overrideAccess","localeData","localeReq","update","error","err","String","matchValue","Error","isMatchingById","matchValueStr","JSON","stringify","toString","isValidObjectIdFormat","test","find","depth","limit","where","equals","docs","existingDoc","existingId","existingStatus","existingTitle","incomingDocument","mode","updateData","_id","createdAt","updatedAt","reduce","acc","val","substring","newData","status","updateError","matchValueDisplay","operation","push","index","importError","type","documentData","item","itemIndex","errorData","errors","firstError","path","field","createImportBatchProcessor","processorOptions","processImport","processOptions","documents","batches","imported","total","updated","currentBatch","batchResult","success","doc"],"mappings":"AAIA,SAEEA,eAAe,EACfC,aAAa,EACbC,mBAAmB,QACd,oCAAmC;AA6C1C;;;;;;;;;;;CAWC,GACD,SAASC,uBACPC,IAA6B,EAC7BC,iBAA4B;IAM5B,MAAMC,WAAoC,CAAC;IAC3C,MAAMC,gBAAyD,CAAC;IAChE,IAAIC,iBAAiB;IAErB,IAAI,CAACH,qBAAqBA,kBAAkBI,MAAM,KAAK,GAAG;QACxD,OAAO;YAAEH,UAAU;gBAAE,GAAGF,IAAI;YAAC;YAAGI,gBAAgB;YAAOD,eAAe,CAAC;QAAE;IAC3E;IAEA,MAAMG,YAAY,IAAIC,IAAIN;IAE1B,KAAK,MAAM,CAACO,KAAKC,MAAM,IAAIC,OAAOC,OAAO,CAACX,MAAO;QAC/C,IAAIS,SAAS,OAAOA,UAAU,YAAY,CAACG,MAAMC,OAAO,CAACJ,QAAQ;YAC/D,MAAMK,WAAWL;YACjB,MAAMM,aAAaL,OAAOM,IAAI,CAACF,UAAUG,MAAM,CAAC,CAACC,IAAMZ,UAAUa,GAAG,CAACD;YAErE,IAAIH,WAAWV,MAAM,GAAG,GAAG;gBACzBD,iBAAiB;gBACjB,MAAMgB,cAAcL,UAAU,CA
AC,EAAE;gBACjC,IAAIK,aAAa;oBACflB,QAAQ,CAACM,IAAI,GAAGM,QAAQ,CAACM,YAAY;oBACrC,KAAK,MAAMC,UAAUN,WAAY;wBAC/B,IAAIM,WAAWD,aAAa;4BAC1B,IAAI,CAACjB,aAAa,CAACkB,OAAO,EAAE;gCAC1BlB,aAAa,CAACkB,OAAO,GAAG,CAAC;4BAC3B;4BACAlB,aAAa,CAACkB,OAAO,CAACb,IAAI,GAAGM,QAAQ,CAACO,OAAO;wBAC/C;oBACF;gBACF;YACF,OAAO;gBACLnB,QAAQ,CAACM,IAAI,GAAGC;YAClB;QACF,OAAO;YACLP,QAAQ,CAACM,IAAI,GAAGC;QAClB;IACF;IAEA,OAAO;QAAEP;QAAUE;QAAgBD;IAAc;AACnD;AAaA;;;;;;;;;;CAUC,GACD,eAAemB,mBAAmB,EAChCC,KAAK,EACLC,UAAU,EACVC,cAAc,EACdC,UAAU,EACVC,UAAU,EACVC,OAAO,EACPC,GAAG,EACHC,IAAI,EACsB;IAC1B,MAAMC,SAA4B;QAChCC,QAAQ,EAAE;QACVC,YAAY,EAAE;IAChB;IAEA,MAAMC,mBAAmBL,IAAIM,OAAO,CAACC,WAAW,CAACX,eAAe,EAAEY;IAClE,MAAMC,wBAAwBC,QAAQL,kBAAkBM;IAExD,MAAMvC,oBAAoB4B,IAAIM,OAAO,CAACE,MAAM,CAACI,YAAY,GACrDZ,IAAIM,OAAO,CAACE,MAAM,CAACI,YAAY,CAACC,WAAW,GAC3CC;IAEJ,MAAMC,oBAAoBpB,aAAaI,QAAQiB,SAAS;IAExD,IAAK,IAAIC,IAAI,GAAGA,IAAIvB,MAAMlB,MAAM,EAAEyC,IAAK;QACrC,MAAMC,WAAWxB,KAAK,CAACuB,EAAE;QACzB,IAAI,CAACC,UAAU;YACb;QACF;QACA,MAAMC,YAAYJ,oBAAoBE,IAAI;QAE1C,IAAI;YACF,IAAIG;YACJ,IAAIC;YAEJ,IAAIxB,eAAe,UAAU;gBAC3B,MAAMyB,aAAa;oBAAE,GAAGJ,QAAQ;gBAAC;gBACjC,OAAOI,WAAWC,EAAE;gBAEpB,IAAIC;gBACJ,IAAIf,uBAAuB;oBACzB,MAAMgB,cAAcH,WAAWI,OAAO,IAAI3B,QAAQ4B,oBAAoB;oBACtE,MAAMC,cAAcH,gBAAgB;oBACpCD,cAAc,CAACI;oBAEf,IAAI5B,IAAIM,OAAO,CAACE,MAAM,CAACqB,KAAK,EAAE;wBAC5B7B,IAAIM,OAAO,CAACwB,MAAM,CAACC,IAAI,CAAC;4BACtBL,SAASJ,WAAWI,OAAO;4BAC3BE;4BACAI,KAAK;4BACLC,cAAcT;wBAChB;oBACF;oBAEA,OAAOF,WAAWI,OAAO,EAAC,8DAA8D;gBAC1F;gBAEA,IAAI1B,IAAIM,OAAO,CAACE,MAAM,CAACqB,KAAK,IAAI,WAAWP,YAAY;oBACrDtB,IAAIM,OAAO,CAACwB,MAAM,CAACC,IAAI,CAAC;wBACtBC,KAAK;wBACLE,OAAOZ,WAAWY,KAAK;wBACvBC,aAAab,WAAWY,KAAK,KAAK;wBAClCE,WAAW,OAAOd,WAAWY,KAAK;oBACpC;gBACF;gBAEA,oDAAoD;gBACpD,MAAM,EAAE7D,QAAQ,EAAEE,cAAc,EAAED,aAAa,EAAE,GAAGJ,uBAClDoD,YACAlD;gBAGF,IAAIG,gBAAgB;oBAClB,kCAAkC;oBAClC6C,gBAAgB,MAAMpB,IAAIM,OAAO,CAAC+B,MAAM,CAAC;wBACvCC,YAAY1C;wBACZzB,MAAME;wBACNkE,OAAOf;wBACPgB,gBAAgB;wBAChBxC;wBACAC;oBACF;oBAEA,2BAA2B;oBAC3B,IAAImB,iBAAiBvC,OAAOM,IAAI,CAACb,eAAeE,MAAM,GAAG,GAAG;wBAC1D,KAAK,MAAM,CAACgB,QAAQiD,WAAW,IAAI5D,OAAOC,OAAO,CAACR,eAAgB;4BAChE,IAAI;gCACF,MAAMoE,YAAY;oCAAE,GAAG1C,GAAG;oCAAER;gCAAO;gCACnC,MAAMQ,IAAIM,OAAO,CAACqC,MAAM,CAAC;oCACvBpB,IAAIH,cAAcG,EAAE;oCACpBe,YAAY1C;oCACZzB,MAAMsE;oCACNF,OAAO9B,wBAAwB,QAAQK;oCACvC0B,gBAAgB;oCAChBxC,KAAK0C;oCACLzC;gCACF;4BACF,EAAE,OAAO2C,OAAO;gCACd,gEAAgE;gCAChE5C,IAAIM,OAAO,CAACwB,MAAM,CAACc,KAAK,CAAC;oCACvBC,KAAKD;oCACLZ,KAAK,CAAC,wBAAwB,EAAExC,OAAO,cAAc,EAAEsD,OAAO1B,cAAcG,EAAE,GAAG;gCACnF;4BACF;wBACF;oBACF;gBACF,OAAO;oBACL,wCAAwC;oBACxCH,gBAAgB,MAAMpB,IAAIM,OAAO,CAAC+B,MAAM,CAAC;wBACvCC,YAAY1C;wBACZzB,MAAMmD;wBACNiB,OAAOf;wBACPgB,gBAAgB;wBAChBxC;wBACAC;oBACF;gBACF;YACF,OAAO,IAAIJ,eAAe,YAAYA,eAAe,UAAU;gBAC7D,MAAMkD,aAAa7B,QAAQ,CAACpB,cAAc,KAAK;gBAC/C,IAAI,CAACiD,YAAY;oBACf,MAAM,IAAIC,MAAM,CAAC,aAAa,EAAElD,cAAc,KAAK,uBAAuB,CAAC;gBAC7E;gBAEA,6CAA6C;gBAC7C,6EAA6E;gBAC7E,MAAMmD,iBAAiB,AAACnD,CAAAA,cAAc,IAAG,MAAO;gBAEhD,+DAA+D;gBAC/D,+DAA+D;gBAC/D,IAAIoD;gBACJ,IAAI,OAAOH,eAAe,YAAYA,eAAe,MAAM;oBACzDG,gBAAgBC,KAAKC,SAAS,CAACL;gBACjC,OAAO,IAAI,OAAOA,eAAe,UAAU;oBACzCG,gBAAgBH;gBAClB,OAAO,IAAI,OAAOA,eAAe,UAAU;oBACzCG,gBAAgBH,WAAWM,QAAQ;gBACrC,OAAO;oBACL,sCAAsC;oBACtCH,gBAAgBC,KAAKC,SAAS,CAACL;gBACjC;gBACA,MAAMO,wBAAwB,kBAAkBC,IAAI,CAACL;gBAErD,IAAI;oBACF7B,oBAAoB,MAAMrB,IAAIM,OAAO,CAACkD,IAAI,CAAC;wBACzClB,YAAY1C;wBACZ6D,OAAO;wBACPC,OAAO;wBACPlB,gBAAgB;wBAChBxC;wBACAC;wBACA0D,OAAO;4BACL,CAAC7D,cAAc,KAAK,EAAE;gCACpB8D,QAAQb;4BACV;wBACF;oBACF;gBACF,EAAE,OAAOH,OAAO;oBACd,+EAA+E;oBAC/E,IAAIK,kBAAkBpD,eAAe,YAAY,CAACyD,uBAAuB;wBACvEjC,oBAAoB;4BAAEw
C,MAAM,EAAE;wBAAC;oBACjC,OAAO,IAAIZ,kBAAkBpD,eAAe,YAAY,CAACyD,uBAAuB;wBAC9E,MAAM,IAAIN,MAAM,CAAC,8BAA8B,EAAEE,eAAe;oBAClE,OAAO;wBACL,MAAMN;oBACR;gBACF;gBAEA,IAAIvB,kBAAkBwC,IAAI,CAACrF,MAAM,GAAG,GAAG;oBACrC,MAAMsF,cAAczC,kBAAkBwC,IAAI,CAAC,EAAE;oBAC7C,IAAI,CAACC,aAAa;wBAChB,MAAM,IAAId,MAAM,CAAC,kBAAkB,CAAC;oBACtC;oBAEA,2BAA2B;oBAC3B,IAAIhD,IAAIM,OAAO,CAACE,MAAM,CAACqB,KAAK,EAAE;wBAC5B7B,IAAIM,OAAO,CAACwB,MAAM,CAACC,IAAI,CAAC;4BACtBgC,YAAYD,YAAYvC,EAAE;4BAC1ByC,gBAAgBF,YAAYpC,OAAO;4BACnCuC,eAAeH,YAAY5B,KAAK;4BAChCgC,kBAAkBhD;4BAClBiD,MAAMtE;4BACNmC,KAAK;wBACP;oBACF;oBAEA,MAAMoC,aAAa;wBAAE,GAAGlD,QAAQ;oBAAC;oBACjC,iDAAiD;oBACjD,OAAOkD,WAAW7C,EAAE;oBACpB,OAAO6C,WAAWC,GAAG;oBACrB,OAAOD,WAAWE,SAAS;oBAC3B,OAAOF,WAAWG,SAAS;oBAE3B,oDAAoD;oBACpD,MAAM,EAAElG,QAAQ,EAAEE,cAAc,EAAED,aAAa,EAAE,GAAGJ,uBAClDkG,YACAhG;oBAGF,IAAI4B,IAAIM,OAAO,CAACE,MAAM,CAACqB,KAAK,EAAE;wBAC5B7B,IAAIM,OAAO,CAACwB,MAAM,CAACC,IAAI,CAAC;4BACtBgC,YAAYD,YAAYvC,EAAE;4BAC1BhD;4BACA4F,MAAMtE;4BACNmC,KAAK;4BACLoC,YAAYvF,OAAOM,IAAI,CAACZ,iBAAiBF,WAAW+F,YAAYI,MAAM,CACpE,CAACC,KAAK9F;gCACJ,MAAM+F,MAAM,AAACnG,CAAAA,iBAAiBF,WAAW+F,UAAS,CAAE,CAACzF,IAAI;gCACzD8F,GAAG,CAAC9F,IAAI,GACN,OAAO+F,QAAQ,YAAYA,IAAIlG,MAAM,GAAG,KAAKkG,IAAIC,SAAS,CAAC,GAAG,MAAM,QAAQD;gCAC9E,OAAOD;4BACT,GACA,CAAC;wBAEL;oBACF;oBAEA,IAAIlG,gBAAgB;wBAClB,kCAAkC;wBAClC6C,gBAAgB,MAAMpB,IAAIM,OAAO,CAACqC,MAAM,CAAC;4BACvCpB,IAAIuC,YAAYvC,EAAE;4BAClBe,YAAY1C;4BACZzB,MAAME;4BACNoF,OAAO;4BACP,2EAA2E;4BAC3EjB,gBAAgB;4BAChBxC;4BACAC;wBACF;wBAEA,2BAA2B;wBAC3B,IAAImB,iBAAiBvC,OAAOM,IAAI,CAACb,eAAeE,MAAM,GAAG,GAAG;4BAC1D,KAAK,MAAM,CAACgB,QAAQiD,WAAW,IAAI5D,OAAOC,OAAO,CAACR,eAAgB;gCAChE,IAAI;oCACF,6CAA6C;oCAC7C,MAAMoE,YAAY;wCAAE,GAAG1C,GAAG;wCAAER;oCAAO;oCACnC,MAAMQ,IAAIM,OAAO,CAACqC,MAAM,CAAC;wCACvBpB,IAAIuC,YAAYvC,EAAE;wCAClBe,YAAY1C;wCACZzB,MAAMsE;wCACNgB,OAAO;wCACP,2EAA2E;wCAC3EjB,gBAAgB;wCAChBxC,KAAK0C;wCACLzC;oCACF;gCACF,EAAE,OAAO2C,OAAO;oCACd,gEAAgE;oCAChE5C,IAAIM,OAAO,CAACwB,MAAM,CAACc,KAAK,CAAC;wCACvBC,KAAKD;wCACLZ,KAAK,CAAC,wBAAwB,EAAExC,OAAO,cAAc,EAAEsD,OAAOgB,YAAYvC,EAAE,GAAG;oCACjF;gCACF;4BACF;wBACF;oBACF,OAAO;wBACL,wCAAwC;wBACxC,IAAI;4BACF,iCAAiC;4BACjC,IAAIvB,IAAIM,OAAO,CAACE,MAAM,CAACqB,KAAK,EAAE;gCAC5B7B,IAAIM,OAAO,CAACwB,MAAM,CAACC,IAAI,CAAC;oCACtBgC,YAAYD,YAAYvC,EAAE;oCAC1B0C,eAAeH,YAAY5B,KAAK;oCAChCF,KAAK;oCACL4C,SAASR;gCACX;4BACF;4BAEA,oFAAoF;4BACpF,6EAA6E;4BAC7EhD,gBAAgB,MAAMpB,IAAIM,OAAO,CAACqC,MAAM,CAAC;gCACvCpB,IAAIuC,YAAYvC,EAAE;gCAClBe,YAAY1C;gCACZzB,MAAMiG;gCACNX,OAAO;gCACP,2EAA2E;gCAC3EjB,gBAAgB;gCAChBxC;gCACAC;4BACF;4BAEA,IAAID,IAAIM,OAAO,CAACE,MAAM,CAACqB,KAAK,IAAIT,eAAe;gCAC7CpB,IAAIM,OAAO,CAACwB,MAAM,CAACC,IAAI,CAAC;oCACtBR,IAAIH,cAAcG,EAAE;oCACpBS,KAAK;oCACL6C,QAAQzD,cAAcM,OAAO;oCAC7BQ,OAAOd,cAAcc,KAAK;gCAC5B;4BACF;wBACF,EAAE,OAAO4C,aAAa;4BACpB9E,IAAIM,OAAO,CAACwB,MAAM,CAACc,KAAK,CAAC;gCACvBrB,IAAIuC,YAAYvC,EAAE;gCAClBsB,KAAKiC;gCACL9C,KAAK;4BACP;4BACA,MAAM8C;wBACR;oBACF;gBACF,OAAO,IAAIjF,eAAe,UAAU;oBAClC,4BAA4B;oBAC5B,IAAIG,IAAIM,OAAO,CAACE,MAAM,CAACqB,KAAK,EAAE;wBAC5B7B,IAAIM,OAAO,CAACwB,MAAM,CAACC,IAAI,CAAC;4BACtBb;4BACApB,YAAYA,cAAc;4BAC1BiD,YAAY7B,QAAQ,CAACpB,cAAc,KAAK;4BACxCkC,KAAK;wBACP;oBACF;oBAEA,MAAMV,aAAa;wBAAE,GAAGJ,QAAQ;oBAAC;oBACjC,OAAOI,WAAWC,EAAE;oBAEpB,gDAAgD;oBAChD,IAAIC;oBACJ,IAAIf,uBAAuB;wBACzB,+DAA+D;wBAC/D,MAAMgB,cAAcH,WAAWI,OAAO,IAAI3B,QAAQ4B,oBAAoB;wBACtE,MAAMC,cAAcH,gBAAgB;wBACpCD,cAAc,CAACI;wBACf,OAAON,WAAWI,OAAO,EAAC,8DAA8D;oBAC1F;oBAEA,oDAAoD;oBACpD,MAAM,EAAErD,QAAQ,EAAEE,cAAc,EAAED,aAAa,EAAE,GAAGJ,uBAClDoD,YACAlD;oBAGF,IAAIG,gBAAgB;wBAClB,kCAAkC;wBAClC6C,gBAAgB,MAAMpB,IAAIM,OAAO,CAAC+B,MAAM,CAAC;4BACvCC,YAAY1C;4BACZzB,MAA
ME;4BACNkE,OAAOf;4BACPgB,gBAAgB;4BAChBxC;4BACAC;wBACF;wBAEA,2BAA2B;wBAC3B,IAAImB,iBAAiBvC,OAAOM,IAAI,CAACb,eAAeE,MAAM,GAAG,GAAG;4BAC1D,KAAK,MAAM,CAACgB,QAAQiD,WAAW,IAAI5D,OAAOC,OAAO,CAACR,eAAgB;gCAChE,IAAI;oCACF,6CAA6C;oCAC7C,MAAMoE,YAAY;wCAAE,GAAG1C,GAAG;wCAAER;oCAAO;oCACnC,MAAMQ,IAAIM,OAAO,CAACqC,MAAM,CAAC;wCACvBpB,IAAIH,cAAcG,EAAE;wCACpBe,YAAY1C;wCACZzB,MAAMsE;wCACNF,OAAO9B,wBAAwB,QAAQK;wCACvC0B,gBAAgB;wCAChBxC,KAAK0C;oCACP;gCACF,EAAE,OAAOE,OAAO;oCACd,gEAAgE;oCAChE5C,IAAIM,OAAO,CAACwB,MAAM,CAACc,KAAK,CAAC;wCACvBC,KAAKD;wCACLZ,KAAK,CAAC,wBAAwB,EAAExC,OAAO,cAAc,EAAEsD,OAAO1B,cAAcG,EAAE,GAAG;oCACnF;gCACF;4BACF;wBACF;oBACF,OAAO;wBACL,wCAAwC;wBACxCH,gBAAgB,MAAMpB,IAAIM,OAAO,CAAC+B,MAAM,CAAC;4BACvCC,YAAY1C;4BACZzB,MAAMmD;4BACNiB,OAAOf;4BACPgB,gBAAgB;4BAChBxC;4BACAC;wBACF;oBACF;gBACF,OAAO;oBACL,qCAAqC;oBACrC,IAAI8E;oBACJ,IAAI,OAAOhC,eAAe,YAAYA,eAAe,MAAM;wBACzDgC,oBAAoB5B,KAAKC,SAAS,CAACL;oBACrC,OAAO,IAAI,OAAOA,eAAe,UAAU;wBACzCgC,oBAAoBhC;oBACtB,OAAO,IAAI,OAAOA,eAAe,UAAU;wBACzCgC,oBAAoBhC,WAAWM,QAAQ;oBACzC,OAAO;wBACL,+DAA+D;wBAC/D0B,oBAAoB5B,KAAKC,SAAS,CAACL;oBACrC;oBACA,MAAM,IAAIC,MAAM,CAAC,cAAc,EAAElD,cAAc,KAAK,EAAE,EAAEiF,kBAAkB,WAAW,CAAC;gBACxF;YACF,OAAO;gBACL,MAAM,IAAI/B,MAAM,CAAC,qBAAqB,EAAEF,OAAOjD,aAAa;YAC9D;YAEA,IAAIuB,eAAe;gBACjB,+CAA+C;gBAC/C,IAAI4D;gBACJ,IAAInF,eAAe,UAAU;oBAC3BmF,YAAY;gBACd,OAAO,IAAInF,eAAe,UAAU;oBAClCmF,YAAY;gBACd,OAAO,IAAInF,eAAe,UAAU;oBAClC,IAAIwB,qBAAqBA,kBAAkBwC,IAAI,CAACrF,MAAM,GAAG,GAAG;wBAC1DwG,YAAY;oBACd,OAAO;wBACLA,YAAY;oBACd;gBACF;gBAEA9E,OAAOE,UAAU,CAAC6E,IAAI,CAAC;oBACrB/D;oBACAgE,OAAO/D,YAAY;oBACnB6D;oBACA9E,QAAQkB;gBACV;YACF;QACF,EAAE,OAAOwB,OAAO;YACd,MAAMuC,cAA2B;gBAC/BC,MAAMrH,gBAAgB6E;gBACtByC,cAAcnE,YAAY,CAAC;gBAC3B0B,OAAO3E,oBAAoB2E;gBAC3B0C,MAAMpE,YAAY,CAAC;gBACnBqE,WAAWpE,YAAY;gBACvBA;YACF;YAEA,0DAA0D;YAC1D,IAAIyB,SAAS,OAAOA,UAAU,YAAY,UAAUA,OAAO;gBACzD,MAAM4C,YAAY5C;gBAClB,IAAI4C,UAAUrH,IAAI,EAAEsH,UAAU1G,MAAMC,OAAO,CAACwG,UAAUrH,IAAI,CAACsH,MAAM,GAAG;oBAClE,MAAMC,aAAaF,UAAUrH,IAAI,CAACsH,MAAM,CAAC,EAAE;oBAC3C,IAAIC,YAAYC,MAAM;wBACpBR,YAAYS,KAAK,GAAGF,WAAWC,IAAI;oBACrC;gBACF;YACF;YAEAzF,OAAOC,MAAM,CAAC8E,IAAI,CAACE;QACnB,sCAAsC;QACxC;IACF;IAEA,OAAOjF;AACT;AAEA,OAAO,SAAS2F,2BAA2B9F,UAAuC,CAAC,CAAC;IAClF,MAAM+F,mBAAmB;QACvB9E,WAAWjB,QAAQiB,SAAS,IAAI;QAChCW,sBAAsB5B,QAAQ4B,oBAAoB,IAAI;IACxD;IAEA,MAAMoE,gBAAgB,OAAOC;QAC3B,MAAM,EAAEpG,cAAc,EAAEqG,SAAS,EAAEpG,UAAU,EAAEC,UAAU,EAAEE,GAAG,EAAEC,IAAI,EAAE,GAAG+F;QACzE,MAAME,UAAUlI,cAAciI,WAAWH,iBAAiB9E,SAAS;QAEnE,MAAMd,SAAuB;YAC3BuF,QAAQ,EAAE;YACVU,UAAU;YACVC,OAAOH,UAAUzH,MAAM;YACvB6H,SAAS;QACX;QAEA,IAAK,IAAIpF,IAAI,GAAGA,IAAIiF,QAAQ1H,MAAM,EAAEyC,IAAK;YACvC,MAAMqF,eAAeJ,OAAO,CAACjF,EAAE;YAC/B,IAAI,CAACqF,cAAc;gBACjB;YACF;YAEA,MAAMC,cAAc,MAAM9G,mBAAmB;gBAC3CC,OAAO4G;gBACP3G,YAAYsB;gBACZrB;gBACAC;gBACAC;gBACAC,SAAS+F;gBACT9F;gBACAC;YACF;YAEA,iBAAiB;YACjB,KAAK,MAAMuG,WAAWD,YAAYnG,UAAU,CAAE;gBAC5C,IAAIoG,QAAQxB,SAAS,KAAK,WAAW;oBACnC9E,OAAOiG,QAAQ;gBACjB,OAAO,IAAIK,QAAQxB,SAAS,KAAK,WAAW;oBAC1C9E,OAAOmG,OAAO;gBAChB,OAAO;oBACL,WAAW;oBACX,IAAIxG,eAAe,UAAU;wBAC3BK,OAAOiG,QAAQ;oBACjB,OAAO;wBACLjG,OAAOmG,OAAO;oBAChB;gBACF;YACF;YAEA,KAAK,MAAMzD,SAAS2D,YAAYpG,MAAM,CAAE;gBACtCD,OAAOuF,MAAM,CAACR,IAAI,CAAC;oBACjBwB,KAAK7D,MAAMyC,YAAY;oBACvBzC,OAAOA,MAAMA,KAAK;oBAClBsC,OAAOtC,MAAMzB,SAAS,GAAG;gBAC3B;YACF;QACF;QAEA,OAAOjB;IACT;IAEA,OAAO;QACL6F;IACF;AACF"}
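Note on the change above: the updated source map embeds the revised batchProcessor.ts, which now documents how multi-locale import data is split before it reaches the Payload API. The sketch below mirrors that internal splitting step for orientation only; splitLocales is a hypothetical name, the plugin's own helper (extractMultiLocaleData) is not exported, and this is not the plugin's exact code.

// TypeScript sketch of the multi-locale split performed inside batchProcessor.ts.
type LocaleUpdates = Record<string, Record<string, unknown>>

function splitLocales(
    data: Record<string, unknown>,
    locales: string[],
): { flatData: Record<string, unknown>; localeUpdates: LocaleUpdates } {
    const flatData: Record<string, unknown> = {}
    const localeUpdates: LocaleUpdates = {}
    const localeSet = new Set(locales)

    for (const [key, value] of Object.entries(data)) {
        if (value && typeof value === 'object' && !Array.isArray(value)) {
            const localized = value as Record<string, unknown>
            const localeKeys = Object.keys(localized).filter((k) => localeSet.has(k))
            if (localeKeys.length > 0) {
                // The first locale found drives the initial create/update call;
                // the remaining locales are queued for follow-up per-locale updates.
                const [firstLocale, ...otherLocales] = localeKeys
                flatData[key] = localized[firstLocale as string]
                for (const locale of otherLocales) {
                    const bucket = localeUpdates[locale] ?? {}
                    bucket[key] = localized[locale]
                    localeUpdates[locale] = bucket
                }
                continue
            }
        }
        // Plain values (including default-locale fields without a locale suffix) pass through unchanged.
        flatData[key] = value
    }

    return { flatData, localeUpdates }
}

// Example: splitLocales({ slug: 'home', title: { en: 'Hello', es: 'Hola' } }, ['en', 'es'])
// -> flatData: { slug: 'home', title: 'Hello' }, localeUpdates: { es: { title: 'Hola' } }
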
@@ -23,6 +23,11 @@ export type Import = {
      */
     importMode: ImportMode;
     matchField?: string;
+    /**
+     * Maximum number of documents that can be imported in a single operation.
+     * This value has already been resolved from the plugin config.
+     */
+    maxLimit?: number;
     name: string;
     userCollection?: string;
     userID?: number | string;
@@ -41,5 +46,5 @@ export type ImportResult = {
     total: number;
     updated: number;
 };
-export declare const createImport: ({ batchSize, collectionSlug, debug, defaultVersionStatus, file, format, importMode, matchField, req, userCollection, userID, }: CreateImportArgs) => Promise<ImportResult>;
+export declare const createImport: ({ batchSize, collectionSlug, debug, defaultVersionStatus, file, format, importMode, matchField, maxLimit, req, userCollection, userID, }: CreateImportArgs) => Promise<ImportResult>;
 //# sourceMappingURL=createImport.d.ts.map
@@ -1 +1 @@
-
{"version":3,"file":"createImport.d.ts","sourceRoot":"","sources":["../../src/import/createImport.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAa,MAAM,SAAS,CAAA;AAWxD,MAAM,MAAM,UAAU,GAAG,QAAQ,GAAG,QAAQ,GAAG,QAAQ,CAAA;AAEvD,MAAM,MAAM,MAAM,GAAG;IACnB;;;OAGG;IACH,SAAS,CAAC,EAAE,MAAM,CAAA;IAClB,cAAc,EAAE,MAAM,CAAA;IACtB;;OAEG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IACf,IAAI,CAAC,EAAE;QACL,IAAI,EAAE,MAAM,CAAA;QACZ,QAAQ,EAAE,MAAM,CAAA;QAChB,IAAI,EAAE,MAAM,CAAA;KACb,CAAA;IACD,MAAM,EAAE,KAAK,GAAG,MAAM,CAAA;IACtB,EAAE,CAAC,EAAE,MAAM,GAAG,MAAM,CAAA;IACpB;;OAEG;IACH,UAAU,EAAE,UAAU,CAAA;IACtB,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,IAAI,EAAE,MAAM,CAAA;IACZ,cAAc,CAAC,EAAE,MAAM,CAAA;IACvB,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,CAAA;CACzB,CAAA;AAED,MAAM,MAAM,gBAAgB,GAAG;IAC7B,oBAAoB,CAAC,EAAE,OAAO,GAAG,WAAW,CAAA;IAC5C,GAAG,EAAE,cAAc,CAAA;CACpB,GAAG,MAAM,CAAA;AAEV,MAAM,MAAM,YAAY,GAAG;IACzB,MAAM,EAAE,KAAK,CAAC;QACZ,GAAG,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA;QAC5B,KAAK,EAAE,MAAM,CAAA;QACb,KAAK,EAAE,MAAM,CAAA;KACd,CAAC,CAAA;IACF,QAAQ,EAAE,MAAM,CAAA;IAChB,KAAK,EAAE,MAAM,CAAA;IACb,OAAO,EAAE,MAAM,CAAA;CAChB,CAAA;AAED,eAAO,MAAM,YAAY,
+
{"version":3,"file":"createImport.d.ts","sourceRoot":"","sources":["../../src/import/createImport.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAa,MAAM,SAAS,CAAA;AAWxD,MAAM,MAAM,UAAU,GAAG,QAAQ,GAAG,QAAQ,GAAG,QAAQ,CAAA;AAEvD,MAAM,MAAM,MAAM,GAAG;IACnB;;;OAGG;IACH,SAAS,CAAC,EAAE,MAAM,CAAA;IAClB,cAAc,EAAE,MAAM,CAAA;IACtB;;OAEG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IACf,IAAI,CAAC,EAAE;QACL,IAAI,EAAE,MAAM,CAAA;QACZ,QAAQ,EAAE,MAAM,CAAA;QAChB,IAAI,EAAE,MAAM,CAAA;KACb,CAAA;IACD,MAAM,EAAE,KAAK,GAAG,MAAM,CAAA;IACtB,EAAE,CAAC,EAAE,MAAM,GAAG,MAAM,CAAA;IACpB;;OAEG;IACH,UAAU,EAAE,UAAU,CAAA;IACtB,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB;;;OAGG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAA;IACjB,IAAI,EAAE,MAAM,CAAA;IACZ,cAAc,CAAC,EAAE,MAAM,CAAA;IACvB,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,CAAA;CACzB,CAAA;AAED,MAAM,MAAM,gBAAgB,GAAG;IAC7B,oBAAoB,CAAC,EAAE,OAAO,GAAG,WAAW,CAAA;IAC5C,GAAG,EAAE,cAAc,CAAA;CACpB,GAAG,MAAM,CAAA;AAEV,MAAM,MAAM,YAAY,GAAG;IACzB,MAAM,EAAE,KAAK,CAAC;QACZ,GAAG,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA;QAC5B,KAAK,EAAE,MAAM,CAAA;QACb,KAAK,EAAE,MAAM,CAAA;KACd,CAAC,CAAA;IACF,QAAQ,EAAE,MAAM,CAAA;IAChB,KAAK,EAAE,MAAM,CAAA;IACb,OAAO,EAAE,MAAM,CAAA;CAChB,CAAA;AAED,eAAO,MAAM,YAAY,6IAatB,gBAAgB,KAAG,OAAO,CAAC,YAAY,CAyMzC,CAAA"}
@@ -5,7 +5,7 @@ import { parseJSON } from '../utilities/parseJSON.js';
 import { removeDisabledFields } from '../utilities/removeDisabledFields.js';
 import { unflattenObject } from '../utilities/unflattenObject.js';
 import { createImportBatchProcessor } from './batchProcessor.js';
-export const createImport = async ({ batchSize = 100, collectionSlug, debug = false, defaultVersionStatus = 'published', file, format, importMode = 'create', matchField = 'id', req, userCollection, userID })=>{
+export const createImport = async ({ batchSize = 100, collectionSlug, debug = false, defaultVersionStatus = 'published', file, format, importMode = 'create', matchField = 'id', maxLimit, req, userCollection, userID })=>{
     let user;
     if (userCollection && userID) {
         user = await req.payload.findByID({
@@ -135,6 +135,10 @@ export const createImport = async ({ batchSize = 100, collectionSlug, debug = fa
     });
     }
 }
+    // Enforce maxLimit before processing to save memory/time
+    if (typeof maxLimit === 'number' && maxLimit > 0 && documents.length > maxLimit) {
+        throw new APIError(`Import file contains ${documents.length} documents but limit is ${maxLimit}`, 400, null, true);
+    }
     // Remove disabled fields from all documents
     documents = documents.map((doc)=>removeDisabledFields(doc, disabledFields));
     if (disabledFields.length > 0) {
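Note on the hunk above: the added block is the runtime enforcement of the new maxLimit option in createImport. For orientation, a standalone TypeScript sketch of the same check follows; assertWithinImportLimit is a hypothetical helper name and the import of APIError from payload is assumed to match the plugin's usage, so treat this as illustrative rather than the plugin's exact code.

// Illustrative sketch of the maxLimit guard introduced in this release.
import { APIError } from 'payload'

export function assertWithinImportLimit(documentCount: number, maxLimit?: number): void {
    // maxLimit is optional; the guard only applies when a positive limit was resolved from the plugin config.
    if (typeof maxLimit === 'number' && maxLimit > 0 && documentCount > maxLimit) {
        // Mirrors the APIError thrown by createImport: message, 400 status, no data payload, public error.
        throw new APIError(`Import file contains ${documentCount} documents but limit is ${maxLimit}`, 400, null, true)
    }
}

// Example: assertWithinImportLimit(parsedRows.length, 500) throws a 400 APIError
// once a parsed import file contains more than 500 rows.
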