@payloadcms/plugin-import-export 3.71.0-internal.e36f916 → 3.71.0

Files changed (298)
  1. package/dist/components/CollectionField/index.d.ts.map +1 -1
  2. package/dist/components/CollectionField/index.js +8 -3
  3. package/dist/components/CollectionField/index.js.map +1 -1
  4. package/dist/components/ExportListMenuItem/index.d.ts +0 -1
  5. package/dist/components/ExportListMenuItem/index.d.ts.map +1 -1
  6. package/dist/components/ExportListMenuItem/index.js +0 -1
  7. package/dist/components/ExportListMenuItem/index.js.map +1 -1
  8. package/dist/components/ExportPreview/index.d.ts +4 -0
  9. package/dist/components/ExportPreview/index.d.ts.map +1 -0
  10. package/dist/components/ExportPreview/index.js +252 -0
  11. package/dist/components/ExportPreview/index.js.map +1 -0
  12. package/dist/components/ExportPreview/index.scss +36 -0
  13. package/dist/components/ImportCollectionField/index.d.ts +3 -0
  14. package/dist/components/ImportCollectionField/index.d.ts.map +1 -0
  15. package/dist/components/ImportCollectionField/index.js +17 -0
  16. package/dist/components/ImportCollectionField/index.js.map +1 -0
  17. package/dist/components/ImportListMenuItem/index.d.ts +6 -0
  18. package/dist/components/ImportListMenuItem/index.d.ts.map +1 -0
  19. package/dist/components/ImportListMenuItem/index.js +48 -0
  20. package/dist/components/ImportListMenuItem/index.js.map +1 -0
  21. package/dist/components/ImportPreview/index.d.ts +4 -0
  22. package/dist/components/ImportPreview/index.d.ts.map +1 -0
  23. package/dist/components/ImportPreview/index.js +624 -0
  24. package/dist/components/ImportPreview/index.js.map +1 -0
  25. package/dist/components/ImportPreview/index.scss +41 -0
  26. package/dist/components/ImportSaveButton/index.d.ts +2 -0
  27. package/dist/components/ImportSaveButton/index.d.ts.map +1 -0
  28. package/dist/components/ImportSaveButton/index.js +18 -0
  29. package/dist/components/ImportSaveButton/index.js.map +1 -0
  30. package/dist/components/Page/index.scss +4 -2
  31. package/dist/constants.d.ts +21 -0
  32. package/dist/constants.d.ts.map +1 -0
  33. package/dist/constants.js +21 -0
  34. package/dist/constants.js.map +1 -0
  35. package/dist/export/batchProcessor.d.ts +107 -0
  36. package/dist/export/batchProcessor.d.ts.map +1 -0
  37. package/dist/export/batchProcessor.js +187 -0
  38. package/dist/export/batchProcessor.js.map +1 -0
  39. package/dist/export/createExport.d.ts +9 -4
  40. package/dist/export/createExport.d.ts.map +1 -1
  41. package/dist/export/createExport.js +163 -111
  42. package/dist/export/createExport.js.map +1 -1
  43. package/dist/export/getCreateExportCollectionTask.d.ts +1 -2
  44. package/dist/export/getCreateExportCollectionTask.d.ts.map +1 -1
  45. package/dist/export/getCreateExportCollectionTask.js +10 -18
  46. package/dist/export/getCreateExportCollectionTask.js.map +1 -1
  47. package/dist/export/getExportCollection.d.ts +8 -0
  48. package/dist/export/getExportCollection.d.ts.map +1 -0
  49. package/dist/export/getExportCollection.js +100 -0
  50. package/dist/export/getExportCollection.js.map +1 -0
  51. package/dist/export/getFields.d.ts +8 -2
  52. package/dist/export/getFields.d.ts.map +1 -1
  53. package/dist/export/getFields.js +7 -9
  54. package/dist/export/getFields.js.map +1 -1
  55. package/dist/export/handleDownload.d.ts +3 -0
  56. package/dist/export/handleDownload.d.ts.map +1 -0
  57. package/dist/export/handleDownload.js +42 -0
  58. package/dist/export/handleDownload.js.map +1 -0
  59. package/dist/export/handlePreview.d.ts +3 -0
  60. package/dist/export/handlePreview.d.ts.map +1 -0
  61. package/dist/export/handlePreview.js +163 -0
  62. package/dist/export/handlePreview.js.map +1 -0
  63. package/dist/exports/rsc.d.ts +5 -1
  64. package/dist/exports/rsc.d.ts.map +1 -1
  65. package/dist/exports/rsc.js +5 -1
  66. package/dist/exports/rsc.js.map +1 -1
  67. package/dist/import/batchProcessor.d.ts +46 -0
  68. package/dist/import/batchProcessor.d.ts.map +1 -0
  69. package/dist/import/batchProcessor.js +529 -0
  70. package/dist/import/batchProcessor.js.map +1 -0
  71. package/dist/import/createImport.d.ts +45 -0
  72. package/dist/import/createImport.d.ts.map +1 -0
  73. package/dist/import/createImport.js +175 -0
  74. package/dist/import/createImport.js.map +1 -0
  75. package/dist/import/getCreateImportCollectionTask.d.ts +13 -0
  76. package/dist/import/getCreateImportCollectionTask.d.ts.map +1 -0
  77. package/dist/import/getCreateImportCollectionTask.js +81 -0
  78. package/dist/import/getCreateImportCollectionTask.js.map +1 -0
  79. package/dist/import/getFields.d.ts +7 -0
  80. package/dist/import/getFields.d.ts.map +1 -0
  81. package/dist/import/getFields.js +150 -0
  82. package/dist/import/getFields.js.map +1 -0
  83. package/dist/import/getImportCollection.d.ts +8 -0
  84. package/dist/import/getImportCollection.d.ts.map +1 -0
  85. package/dist/import/getImportCollection.js +258 -0
  86. package/dist/import/getImportCollection.js.map +1 -0
  87. package/dist/import/handlePreview.d.ts +3 -0
  88. package/dist/import/handlePreview.d.ts.map +1 -0
  89. package/dist/import/handlePreview.js +94 -0
  90. package/dist/import/handlePreview.js.map +1 -0
  91. package/dist/index.d.ts +3 -2
  92. package/dist/index.d.ts.map +1 -1
  93. package/dist/index.js +68 -110
  94. package/dist/index.js.map +1 -1
  95. package/dist/translations/languages/ar.d.ts.map +1 -1
  96. package/dist/translations/languages/ar.js +21 -1
  97. package/dist/translations/languages/ar.js.map +1 -1
  98. package/dist/translations/languages/az.d.ts.map +1 -1
  99. package/dist/translations/languages/az.js +21 -1
  100. package/dist/translations/languages/az.js.map +1 -1
  101. package/dist/translations/languages/bg.d.ts.map +1 -1
  102. package/dist/translations/languages/bg.js +21 -1
  103. package/dist/translations/languages/bg.js.map +1 -1
  104. package/dist/translations/languages/bnBd.d.ts +4 -0
  105. package/dist/translations/languages/bnBd.d.ts.map +1 -0
  106. package/dist/translations/languages/bnBd.js +48 -0
  107. package/dist/translations/languages/bnBd.js.map +1 -0
  108. package/dist/translations/languages/bnIn.d.ts +4 -0
  109. package/dist/translations/languages/bnIn.d.ts.map +1 -0
  110. package/dist/translations/languages/bnIn.js +48 -0
  111. package/dist/translations/languages/bnIn.js.map +1 -0
  112. package/dist/translations/languages/ca.d.ts.map +1 -1
  113. package/dist/translations/languages/ca.js +21 -1
  114. package/dist/translations/languages/ca.js.map +1 -1
  115. package/dist/translations/languages/cs.d.ts.map +1 -1
  116. package/dist/translations/languages/cs.js +21 -1
  117. package/dist/translations/languages/cs.js.map +1 -1
  118. package/dist/translations/languages/da.d.ts.map +1 -1
  119. package/dist/translations/languages/da.js +21 -1
  120. package/dist/translations/languages/da.js.map +1 -1
  121. package/dist/translations/languages/de.d.ts.map +1 -1
  122. package/dist/translations/languages/de.js +21 -1
  123. package/dist/translations/languages/de.js.map +1 -1
  124. package/dist/translations/languages/en.d.ts +20 -0
  125. package/dist/translations/languages/en.d.ts.map +1 -1
  126. package/dist/translations/languages/en.js +21 -1
  127. package/dist/translations/languages/en.js.map +1 -1
  128. package/dist/translations/languages/es.d.ts.map +1 -1
  129. package/dist/translations/languages/es.js +21 -1
  130. package/dist/translations/languages/es.js.map +1 -1
  131. package/dist/translations/languages/et.d.ts.map +1 -1
  132. package/dist/translations/languages/et.js +21 -1
  133. package/dist/translations/languages/et.js.map +1 -1
  134. package/dist/translations/languages/fa.d.ts.map +1 -1
  135. package/dist/translations/languages/fa.js +21 -1
  136. package/dist/translations/languages/fa.js.map +1 -1
  137. package/dist/translations/languages/fr.d.ts.map +1 -1
  138. package/dist/translations/languages/fr.js +21 -1
  139. package/dist/translations/languages/fr.js.map +1 -1
  140. package/dist/translations/languages/he.d.ts.map +1 -1
  141. package/dist/translations/languages/he.js +21 -1
  142. package/dist/translations/languages/he.js.map +1 -1
  143. package/dist/translations/languages/hr.d.ts.map +1 -1
  144. package/dist/translations/languages/hr.js +21 -1
  145. package/dist/translations/languages/hr.js.map +1 -1
  146. package/dist/translations/languages/hu.d.ts.map +1 -1
  147. package/dist/translations/languages/hu.js +21 -1
  148. package/dist/translations/languages/hu.js.map +1 -1
  149. package/dist/translations/languages/hy.d.ts.map +1 -1
  150. package/dist/translations/languages/hy.js +21 -1
  151. package/dist/translations/languages/hy.js.map +1 -1
  152. package/dist/translations/languages/id.d.ts +4 -0
  153. package/dist/translations/languages/id.d.ts.map +1 -0
  154. package/dist/translations/languages/id.js +48 -0
  155. package/dist/translations/languages/id.js.map +1 -0
  156. package/dist/translations/languages/is.d.ts.map +1 -1
  157. package/dist/translations/languages/is.js +21 -1
  158. package/dist/translations/languages/is.js.map +1 -1
  159. package/dist/translations/languages/it.d.ts.map +1 -1
  160. package/dist/translations/languages/it.js +21 -1
  161. package/dist/translations/languages/it.js.map +1 -1
  162. package/dist/translations/languages/ja.d.ts.map +1 -1
  163. package/dist/translations/languages/ja.js +21 -1
  164. package/dist/translations/languages/ja.js.map +1 -1
  165. package/dist/translations/languages/ko.d.ts.map +1 -1
  166. package/dist/translations/languages/ko.js +21 -1
  167. package/dist/translations/languages/ko.js.map +1 -1
  168. package/dist/translations/languages/lt.d.ts.map +1 -1
  169. package/dist/translations/languages/lt.js +21 -1
  170. package/dist/translations/languages/lt.js.map +1 -1
  171. package/dist/translations/languages/lv.d.ts.map +1 -1
  172. package/dist/translations/languages/lv.js +26 -6
  173. package/dist/translations/languages/lv.js.map +1 -1
  174. package/dist/translations/languages/my.d.ts.map +1 -1
  175. package/dist/translations/languages/my.js +21 -1
  176. package/dist/translations/languages/my.js.map +1 -1
  177. package/dist/translations/languages/nb.d.ts.map +1 -1
  178. package/dist/translations/languages/nb.js +21 -1
  179. package/dist/translations/languages/nb.js.map +1 -1
  180. package/dist/translations/languages/nl.d.ts.map +1 -1
  181. package/dist/translations/languages/nl.js +21 -1
  182. package/dist/translations/languages/nl.js.map +1 -1
  183. package/dist/translations/languages/pl.d.ts.map +1 -1
  184. package/dist/translations/languages/pl.js +21 -1
  185. package/dist/translations/languages/pl.js.map +1 -1
  186. package/dist/translations/languages/pt.d.ts.map +1 -1
  187. package/dist/translations/languages/pt.js +21 -1
  188. package/dist/translations/languages/pt.js.map +1 -1
  189. package/dist/translations/languages/ro.d.ts.map +1 -1
  190. package/dist/translations/languages/ro.js +21 -1
  191. package/dist/translations/languages/ro.js.map +1 -1
  192. package/dist/translations/languages/rs.d.ts.map +1 -1
  193. package/dist/translations/languages/rs.js +21 -1
  194. package/dist/translations/languages/rs.js.map +1 -1
  195. package/dist/translations/languages/rsLatin.d.ts.map +1 -1
  196. package/dist/translations/languages/rsLatin.js +21 -1
  197. package/dist/translations/languages/rsLatin.js.map +1 -1
  198. package/dist/translations/languages/ru.d.ts.map +1 -1
  199. package/dist/translations/languages/ru.js +21 -1
  200. package/dist/translations/languages/ru.js.map +1 -1
  201. package/dist/translations/languages/sk.d.ts.map +1 -1
  202. package/dist/translations/languages/sk.js +21 -1
  203. package/dist/translations/languages/sk.js.map +1 -1
  204. package/dist/translations/languages/sl.d.ts.map +1 -1
  205. package/dist/translations/languages/sl.js +21 -1
  206. package/dist/translations/languages/sl.js.map +1 -1
  207. package/dist/translations/languages/sv.d.ts.map +1 -1
  208. package/dist/translations/languages/sv.js +21 -1
  209. package/dist/translations/languages/sv.js.map +1 -1
  210. package/dist/translations/languages/ta.d.ts.map +1 -1
  211. package/dist/translations/languages/ta.js +21 -1
  212. package/dist/translations/languages/ta.js.map +1 -1
  213. package/dist/translations/languages/th.d.ts.map +1 -1
  214. package/dist/translations/languages/th.js +21 -1
  215. package/dist/translations/languages/th.js.map +1 -1
  216. package/dist/translations/languages/tr.d.ts.map +1 -1
  217. package/dist/translations/languages/tr.js +21 -1
  218. package/dist/translations/languages/tr.js.map +1 -1
  219. package/dist/translations/languages/uk.d.ts.map +1 -1
  220. package/dist/translations/languages/uk.js +21 -1
  221. package/dist/translations/languages/uk.js.map +1 -1
  222. package/dist/translations/languages/vi.d.ts.map +1 -1
  223. package/dist/translations/languages/vi.js +21 -1
  224. package/dist/translations/languages/vi.js.map +1 -1
  225. package/dist/translations/languages/zh.d.ts.map +1 -1
  226. package/dist/translations/languages/zh.js +21 -1
  227. package/dist/translations/languages/zh.js.map +1 -1
  228. package/dist/translations/languages/zhTw.d.ts.map +1 -1
  229. package/dist/translations/languages/zhTw.js +21 -1
  230. package/dist/translations/languages/zhTw.js.map +1 -1
  231. package/dist/translations/types.d.ts +13 -0
  232. package/dist/translations/types.d.ts.map +1 -1
  233. package/dist/translations/types.js.map +1 -1
  234. package/dist/types.d.ts +166 -22
  235. package/dist/types.d.ts.map +1 -1
  236. package/dist/types.js +1 -1
  237. package/dist/types.js.map +1 -1
  238. package/dist/utilities/flattenObject.d.ts +11 -0
  239. package/dist/utilities/flattenObject.d.ts.map +1 -0
  240. package/dist/utilities/flattenObject.js +129 -0
  241. package/dist/utilities/flattenObject.js.map +1 -0
  242. package/dist/utilities/getExportFieldFunctions.d.ts +12 -0
  243. package/dist/utilities/getExportFieldFunctions.d.ts.map +1 -0
  244. package/dist/utilities/getExportFieldFunctions.js +102 -0
  245. package/dist/utilities/getExportFieldFunctions.js.map +1 -0
  246. package/dist/utilities/getFilename.d.ts +6 -0
  247. package/dist/utilities/getFilename.d.ts.map +1 -0
  248. package/dist/utilities/getFilename.js +13 -0
  249. package/dist/utilities/getFilename.js.map +1 -0
  250. package/dist/utilities/getFlattenedFieldKeys.d.ts +12 -1
  251. package/dist/utilities/getFlattenedFieldKeys.d.ts.map +1 -1
  252. package/dist/utilities/getFlattenedFieldKeys.js +35 -10
  253. package/dist/utilities/getFlattenedFieldKeys.js.map +1 -1
  254. package/dist/utilities/getImportFieldFunctions.d.ts +12 -0
  255. package/dist/utilities/getImportFieldFunctions.d.ts.map +1 -0
  256. package/dist/utilities/getImportFieldFunctions.js +130 -0
  257. package/dist/utilities/getImportFieldFunctions.js.map +1 -0
  258. package/dist/utilities/getPluginCollections.d.ts +39 -0
  259. package/dist/utilities/getPluginCollections.d.ts.map +1 -0
  260. package/dist/utilities/getPluginCollections.js +102 -0
  261. package/dist/utilities/getPluginCollections.js.map +1 -0
  262. package/dist/utilities/getSchemaColumns.d.ts +43 -0
  263. package/dist/utilities/getSchemaColumns.d.ts.map +1 -0
  264. package/dist/utilities/getSchemaColumns.js +163 -0
  265. package/dist/utilities/getSchemaColumns.js.map +1 -0
  266. package/dist/utilities/getSelect.d.ts +11 -0
  267. package/dist/utilities/getSelect.d.ts.map +1 -0
  268. package/dist/utilities/getSelect.js +27 -0
  269. package/dist/utilities/getSelect.js.map +1 -0
  270. package/dist/utilities/parseCSV.d.ts +11 -0
  271. package/dist/utilities/parseCSV.d.ts.map +1 -0
  272. package/dist/utilities/parseCSV.js +67 -0
  273. package/dist/utilities/parseCSV.js.map +1 -0
  274. package/dist/utilities/parseCSV.spec.js +169 -0
  275. package/dist/utilities/parseCSV.spec.js.map +1 -0
  276. package/dist/utilities/parseJSON.d.ts +11 -0
  277. package/dist/utilities/parseJSON.d.ts.map +1 -0
  278. package/dist/utilities/parseJSON.js +25 -0
  279. package/dist/utilities/parseJSON.js.map +1 -0
  280. package/dist/utilities/processRichTextField.d.ts +6 -0
  281. package/dist/utilities/processRichTextField.d.ts.map +1 -0
  282. package/dist/utilities/processRichTextField.js +45 -0
  283. package/dist/utilities/processRichTextField.js.map +1 -0
  284. package/dist/utilities/unflattenObject.d.ts +11 -0
  285. package/dist/utilities/unflattenObject.d.ts.map +1 -0
  286. package/dist/utilities/unflattenObject.js +431 -0
  287. package/dist/utilities/unflattenObject.js.map +1 -0
  288. package/dist/utilities/unflattenObject.spec.js +680 -0
  289. package/dist/utilities/unflattenObject.spec.js.map +1 -0
  290. package/dist/utilities/useBatchProcessor.d.ts +103 -0
  291. package/dist/utilities/useBatchProcessor.d.ts.map +1 -0
  292. package/dist/utilities/useBatchProcessor.js +88 -0
  293. package/dist/utilities/useBatchProcessor.js.map +1 -0
  294. package/dist/utilities/validateLimitValue.d.ts +1 -1
  295. package/dist/utilities/validateLimitValue.d.ts.map +1 -1
  296. package/dist/utilities/validateLimitValue.js +1 -4
  297. package/dist/utilities/validateLimitValue.js.map +1 -1
  298. package/package.json +9 -9
package/dist/export/createExport.js
@@ -2,30 +2,58 @@
  import { APIError } from 'payload';
  import { Readable } from 'stream';
  import { buildDisabledFieldRegex } from '../utilities/buildDisabledFieldRegex.js';
+ import { flattenObject } from '../utilities/flattenObject.js';
+ import { getExportFieldFunctions } from '../utilities/getExportFieldFunctions.js';
+ import { getFilename } from '../utilities/getFilename.js';
+ import { getSchemaColumns, mergeColumns } from '../utilities/getSchemaColumns.js';
+ import { getSelect } from '../utilities/getSelect.js';
  import { validateLimitValue } from '../utilities/validateLimitValue.js';
- import { flattenObject } from './flattenObject.js';
- import { getCustomFieldFunctions } from './getCustomFieldFunctions.js';
- import { getFilename } from './getFilename.js';
- import { getSelect } from './getSelect.js';
+ import { createExportBatchProcessor } from './batchProcessor.js';
  export const createExport = async (args)=>{
- const { download, input: { id, name: nameArg, collectionSlug, debug = false, drafts, exportsCollection, fields, format, locale: localeInput, sort, page, limit: incomingLimit, where }, req: { locale: localeArg, payload }, req, user } = args;
- if (!user) {
- throw new APIError('User authentication is required to create exports');
- }
+ const { id, name: nameArg, batchSize = 100, collectionSlug, debug = false, download, drafts: draftsFromInput, exportsCollection, fields, format, limit: incomingLimit, locale: localeFromInput, page, req, sort, userCollection, userID, where: whereFromInput = {} } = args;
+ const { locale: localeFromReq, payload } = req;
  if (debug) {
  req.payload.logger.debug({
  message: 'Starting export process with args:',
  collectionSlug,
- drafts,
+ draft: draftsFromInput,
  fields,
  format
  });
  }
- const locale = localeInput ?? localeArg;
+ const locale = localeFromInput ?? localeFromReq;
  const collectionConfig = payload.config.collections.find(({ slug })=>slug === collectionSlug);
  if (!collectionConfig) {
- throw new APIError(`Collection with slug ${collectionSlug} not found`);
+ throw new APIError(`Collection with slug ${collectionSlug} not found.`);
+ }
+ let user;
+ if (userCollection && userID) {
+ user = await req.payload.findByID({
+ id: userID,
+ collection: userCollection,
+ overrideAccess: true
+ });
+ }
+ if (!user && req.user) {
+ user = req?.user?.id ? req.user : req?.user?.user;
+ }
+ if (!user) {
+ throw new APIError('User authentication is required to create exports.');
  }
+ const draft = draftsFromInput === 'yes';
+ const hasVersions = Boolean(collectionConfig.versions);
+ // Only filter by _status for versioned collections
+ const publishedWhere = hasVersions ? {
+ _status: {
+ equals: 'published'
+ }
+ } : {};
+ const where = {
+ and: [
+ whereFromInput,
+ draft ? {} : publishedWhere
+ ]
+ };
  const name = `${nameArg ?? `${getFilename()}-${collectionSlug}`}.${format}`;
  const isCSV = format === 'csv';
  const select = Array.isArray(fields) && fields.length > 0 ? getSelect(fields) : undefined;
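The where composition added above only constrains `_status` for versioned collections, and only when drafts are excluded. A minimal sketch of how the pieces combine for a published-only export of a versioned collection (the `title` filter stands in for a hypothetical user-supplied where):

import type { Where } from 'payload'

const draft = false // drafts: 'no'
const whereFromInput: Where = { title: { contains: 'news' } } // hypothetical input filter
const publishedWhere: Where = { _status: { equals: 'published' } } // hasVersions === true

const where: Where = {
  and: [whereFromInput, draft ? {} : publishedWhere],
}
// Result: only published documents whose title contains 'news' are exported.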
@@ -37,22 +65,36 @@ export const createExport = async (args)=>{
  locale
  });
  }
- const batchSize = 100 // fixed per request
- ;
  const hardLimit = typeof incomingLimit === 'number' && incomingLimit > 0 ? incomingLimit : undefined;
- const { totalDocs } = await payload.count({
- collection: collectionSlug,
- user,
- locale,
- overrideAccess: false
- });
+ // Try to count documents - if access is denied, treat as 0 documents
+ let totalDocs = 0;
+ let accessDenied = false;
+ try {
+ const countResult = await payload.count({
+ collection: collectionSlug,
+ user,
+ locale,
+ overrideAccess: false
+ });
+ totalDocs = countResult.totalDocs;
+ } catch (error) {
+ // Access denied - user can't read from this collection
+ // We'll create an empty export file
+ accessDenied = true;
+ if (debug) {
+ req.payload.logger.debug({
+ message: 'Access denied for collection, creating empty export',
+ collectionSlug
+ });
+ }
+ }
  const totalPages = Math.max(1, Math.ceil(totalDocs / batchSize));
  const requestedPage = page || 1;
  const adjustedPage = requestedPage > totalPages ? 1 : requestedPage;
  const findArgs = {
  collection: collectionSlug,
  depth: 1,
- draft: drafts === 'yes',
+ draft,
  limit: batchSize,
  locale,
  overrideAccess: false,
@@ -68,7 +110,7 @@ export const createExport = async (args)=>{
  findArgs
  });
  }
- const toCSVFunctions = getCustomFieldFunctions({
+ const toCSVFunctions = getExportFieldFunctions({
  fields: collectionConfig.flattenedFields
  });
  const disabledFields = collectionConfig.admin?.custom?.['plugin-import-export']?.disabledFields ?? [];
@@ -102,52 +144,31 @@ export const createExport = async (args)=>{
  return filtered;
  };
  if (download) {
- if (debug) {
- req.payload.logger.debug('Pre-scanning all columns before streaming');
- }
- const limitErrorMsg = validateLimitValue(incomingLimit, req.t, batchSize);
+ const limitErrorMsg = validateLimitValue(incomingLimit, req.t);
  if (limitErrorMsg) {
  throw new APIError(limitErrorMsg);
  }
- const allColumns = [];
+ // Get schema-based columns first (provides base ordering and handles empty exports)
+ let schemaColumns = [];
  if (isCSV) {
- const allColumnsSet = new Set();
- // Use the incoming page value here, defaulting to 1 if undefined
- let scanPage = adjustedPage;
- let hasMore = true;
- let fetched = 0;
- const maxDocs = typeof hardLimit === 'number' ? hardLimit : Number.POSITIVE_INFINITY;
- while(hasMore){
- const remaining = Math.max(0, maxDocs - fetched);
- if (remaining === 0) {
- break;
- }
- const result = await payload.find({
- ...findArgs,
- page: scanPage,
- limit: Math.min(batchSize, remaining)
- });
- result.docs.forEach((doc)=>{
- const flat = filterDisabledCSV(flattenObject({
- doc,
- fields,
- toCSVFunctions
- }));
- Object.keys(flat).forEach((key)=>{
- if (!allColumnsSet.has(key)) {
- allColumnsSet.add(key);
- allColumns.push(key);
- }
- });
- });
- fetched += result.docs.length;
- scanPage += 1; // Increment page for next batch
- hasMore = result.hasNextPage && fetched < maxDocs;
- }
+ const localeCodes = locale === 'all' && payload.config.localization ? payload.config.localization.localeCodes : undefined;
+ schemaColumns = getSchemaColumns({
+ collectionConfig,
+ disabledFields,
+ fields,
+ locale,
+ localeCodes
+ });
  if (debug) {
- req.payload.logger.debug(`Discovered ${allColumns.length} columns`);
+ req.payload.logger.debug({
+ columnCount: schemaColumns.length,
+ msg: 'Schema-based column inference complete'
+ });
  }
  }
+ // allColumns will be finalized after first batch (schema + data columns merged)
+ let allColumns = [];
+ let columnsFinalized = false;
  const encoder = new TextEncoder();
  let isFirstBatch = true;
  let streamPage = adjustedPage;
@@ -158,7 +179,8 @@ export const createExport = async (args)=>{
  const remaining = Math.max(0, maxDocs - fetched);
  if (remaining === 0) {
  if (!isCSV) {
- this.push(encoder.encode(']'));
+ // If first batch with no remaining, output empty array; otherwise just close
+ this.push(encoder.encode(isFirstBatch ? '[]' : ']'));
  }
  this.push(null);
  return;
@@ -174,7 +196,8 @@ export const createExport = async (args)=>{
  if (result.docs.length === 0) {
  // Close JSON array properly if JSON
  if (!isCSV) {
- this.push(encoder.encode(']'));
+ // If first batch with no docs, output empty array; otherwise just close
+ this.push(encoder.encode(isFirstBatch ? '[]' : ']'));
  }
  this.push(null);
  return;
@@ -186,6 +209,29 @@ export const createExport = async (args)=>{
  fields,
  toCSVFunctions
  })));
+ // On first batch, discover additional columns from data and merge with schema
+ if (!columnsFinalized) {
+ const dataColumns = [];
+ const seenCols = new Set();
+ for (const row of batchRows){
+ for (const key of Object.keys(row)){
+ if (!seenCols.has(key)) {
+ seenCols.add(key);
+ dataColumns.push(key);
+ }
+ }
+ }
+ // Merge schema columns with data-discovered columns
+ allColumns = mergeColumns(schemaColumns, dataColumns);
+ columnsFinalized = true;
+ if (debug) {
+ req.payload.logger.debug({
+ dataColumnsCount: dataColumns.length,
+ finalColumnsCount: allColumns.length,
+ msg: 'Merged schema and data columns'
+ });
+ }
+ }
  const paddedRows = batchRows.map((row)=>{
  const fullRow = {};
  for (const col of allColumns){
@@ -223,7 +269,7 @@ export const createExport = async (args)=>{
  }
  }
  });
- return new Response(stream, {
+ return new Response(Readable.toWeb(stream), {
  headers: {
  'Content-Disposition': `attachment; filename="${name}"`,
  'Content-Type': isCSV ? 'text/csv' : 'application/json'
@@ -234,67 +280,71 @@ export const createExport = async (args)=>{
  if (debug) {
  req.payload.logger.debug('Starting file generation');
  }
- const outputData = [];
- const rows = [];
- const columnsSet = new Set();
- const columns = [];
- // Start from the incoming page value, defaulting to 1 if undefined
- let currentPage = adjustedPage;
- let fetched = 0;
- let hasNextPage = true;
- const maxDocs = typeof hardLimit === 'number' ? hardLimit : Number.POSITIVE_INFINITY;
- while(hasNextPage){
- const remaining = Math.max(0, maxDocs - fetched);
- if (remaining === 0) {
- break;
- }
- const result = await payload.find({
- ...findArgs,
- page: currentPage,
- limit: Math.min(batchSize, remaining)
+ // Create export batch processor
+ const processor = createExportBatchProcessor({
+ batchSize,
+ debug
+ });
+ // Transform function based on format
+ const transformDoc = (doc)=>isCSV ? filterDisabledCSV(flattenObject({
+ doc,
+ fields,
+ toCSVFunctions
+ })) : filterDisabledJSON(doc);
+ // Skip fetching if access was denied - we'll create an empty export
+ let exportResult = {
+ columns: [],
+ docs: [],
+ fetchedCount: 0
+ };
+ if (!accessDenied) {
+ exportResult = await processor.processExport({
+ collectionSlug,
+ findArgs: findArgs,
+ format,
+ maxDocs: typeof hardLimit === 'number' ? hardLimit : Number.POSITIVE_INFINITY,
+ req,
+ startPage: adjustedPage,
+ transformDoc
  });
- if (debug) {
- req.payload.logger.debug(`Processing batch ${currentPage} with ${result.docs.length} documents`);
- }
- if (isCSV) {
- const batchRows = result.docs.map((doc)=>filterDisabledCSV(flattenObject({
- doc,
- fields,
- toCSVFunctions
- })));
- // Track discovered column keys
- batchRows.forEach((row)=>{
- Object.keys(row).forEach((key)=>{
- if (!columnsSet.has(key)) {
- columnsSet.add(key);
- columns.push(key);
- }
- });
- });
- rows.push(...batchRows);
- } else {
- const batchRows = result.docs.map((doc)=>filterDisabledJSON(doc));
- outputData.push(batchRows.map((doc)=>JSON.stringify(doc)).join(',\n'));
- }
- fetched += result.docs.length;
- hasNextPage = result.hasNextPage && fetched < maxDocs;
- currentPage += 1; // Increment page for next batch
  }
+ const { columns: dataColumns, docs: rows } = exportResult;
+ const outputData = [];
  // Prepare final output
  if (isCSV) {
+ // Get schema-based columns for consistent ordering
+ const localeCodes = locale === 'all' && payload.config.localization ? payload.config.localization.localeCodes : undefined;
+ const schemaColumns = getSchemaColumns({
+ collectionConfig,
+ disabledFields,
+ fields,
+ locale,
+ localeCodes
+ });
+ // Merge schema columns with data-discovered columns
+ // Schema provides ordering, data provides additional columns (e.g., array indices > 0)
+ const finalColumns = mergeColumns(schemaColumns, dataColumns);
  const paddedRows = rows.map((row)=>{
  const fullRow = {};
- for (const col of columns){
+ for (const col of finalColumns){
  fullRow[col] = row[col] ?? '';
  }
  return fullRow;
  });
+ // Always output CSV with header, even if empty
  outputData.push(stringify(paddedRows, {
  header: true,
- columns
+ columns: finalColumns
  }));
+ } else {
+ // JSON format
+ outputData.push(rows.map((doc)=>JSON.stringify(doc)).join(',\n'));
  }
- const buffer = Buffer.from(format === 'json' ? `[${outputData.join(',')}]` : outputData.join(''));
+ // Ensure we always have valid content for the file
+ // For JSON, empty exports produce "[]"
+ // For CSV, if completely empty (no columns, no rows), produce at least a newline to ensure file creation
+ const content = format === 'json' ? `[${outputData.join(',')}]` : outputData.join('');
+ const buffer = Buffer.from(content.length > 0 ? content : '\n');
  if (debug) {
  req.payload.logger.debug(`${format} file generation complete`);
  }
@@ -322,7 +372,9 @@ export const createExport = async (args)=>{
  mimetype: isCSV ? 'text/csv' : 'application/json',
  size: buffer.length
  },
- user
+ // Override access only here so that we can be sure the export collection itself is updated as expected
+ overrideAccess: true,
+ req
  });
  }
  if (debug) {
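`mergeColumns` is imported from `../utilities/getSchemaColumns.js` (also changed in this release), but its body is not part of this diff. A minimal sketch of the contract the two call sites above imply — schema-derived columns fix the base ordering, data-discovered columns (for example array indices the schema did not predict) are appended in discovery order — assuming simple deduplication:

// Not the plugin's actual implementation; a sketch of the implied contract.
const mergeColumns = (schemaColumns: string[], dataColumns: string[]): string[] => {
  const merged = [...schemaColumns]
  const seen = new Set(schemaColumns)
  for (const col of dataColumns) {
    // Keep schema ordering; append only columns the schema did not produce
    if (!seen.has(col)) {
      seen.add(col)
      merged.push(col)
    }
  }
  return merged
}

// e.g. mergeColumns(['id', 'title'], ['title', 'tags_1', 'id'])
// => ['id', 'title', 'tags_1']   (column names here are hypothetical)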
package/dist/export/createExport.js.map
@@ -1 +1 @@
[Source map for dist/export/createExport.js (sources: ../../src/export/createExport.ts), regenerated to match the compiled changes above; minified machine-generated mappings and embedded sources omitted.]
'text/csv' : 'application/json',\n size: buffer.length,\n },\n // Override access only here so that we can be sure the export collection itself is updated as expected\n overrideAccess: true,\n req,\n })\n }\n if (debug) {\n req.payload.logger.debug('Export process completed successfully')\n }\n}\n"],"names":["stringify","APIError","Readable","buildDisabledFieldRegex","flattenObject","getExportFieldFunctions","getFilename","getSchemaColumns","mergeColumns","getSelect","validateLimitValue","createExportBatchProcessor","createExport","args","id","name","nameArg","batchSize","collectionSlug","debug","download","drafts","draftsFromInput","exportsCollection","fields","format","limit","incomingLimit","locale","localeFromInput","page","req","sort","userCollection","userID","where","whereFromInput","localeFromReq","payload","logger","message","draft","collectionConfig","config","collections","find","slug","user","findByID","collection","overrideAccess","hasVersions","Boolean","versions","publishedWhere","_status","equals","and","isCSV","select","Array","isArray","length","undefined","hardLimit","totalDocs","accessDenied","countResult","count","error","totalPages","Math","max","ceil","requestedPage","adjustedPage","findArgs","depth","toCSVFunctions","flattenedFields","disabledFields","admin","custom","disabledRegexes","map","filterDisabledCSV","row","filtered","key","value","Object","entries","isDisabled","some","regex","test","filterDisabledJSON","doc","parentPath","item","currentPath","includes","limitErrorMsg","t","schemaColumns","localeCodes","localization","columnCount","msg","allColumns","columnsFinalized","encoder","TextEncoder","isFirstBatch","streamPage","fetched","maxDocs","Number","POSITIVE_INFINITY","stream","read","remaining","push","encode","result","min","docs","batchRows","dataColumns","seenCols","Set","keys","has","add","dataColumnsCount","finalColumnsCount","paddedRows","fullRow","col","csvString","header","columns","batchJSON","JSON","join","hasNextPage","Response","toWeb","headers","processor","transformDoc","exportResult","fetchedCount","processExport","startPage","rows","outputData","finalColumns","content","buffer","Buffer","from","file","data","mimetype","size","update"],"mappings":"AAAA,6CAA6C,GAG7C,SAASA,SAAS,QAAQ,qBAAoB;AAC9C,SAASC,QAAQ,QAAQ,UAAS;AAClC,SAASC,QAAQ,QAAQ,SAAQ;AAEjC,SAASC,uBAAuB,QAAQ,0CAAyC;AACjF,SAASC,aAAa,QAAQ,gCAA+B;AAC7D,SAASC,uBAAuB,QAAQ,0CAAyC;AACjF,SAASC,WAAW,QAAQ,8BAA6B;AACzD,SAASC,gBAAgB,EAAEC,YAAY,QAAQ,mCAAkC;AACjF,SAASC,SAAS,QAAQ,4BAA2B;AACrD,SAASC,kBAAkB,QAAQ,qCAAoC;AACvE,SAASC,0BAA0B,QAA6B,sBAAqB;AAsCrF,OAAO,MAAMC,eAAe,OAAOC;IACjC,MAAM,EACJC,EAAE,EACFC,MAAMC,OAAO,EACbC,YAAY,GAAG,EACfC,cAAc,EACdC,QAAQ,KAAK,EACbC,QAAQ,EACRC,QAAQC,eAAe,EACvBC,iBAAiB,EACjBC,MAAM,EACNC,MAAM,EACNC,OAAOC,aAAa,EACpBC,QAAQC,eAAe,EACvBC,IAAI,EACJC,GAAG,EACHC,IAAI,EACJC,cAAc,EACdC,MAAM,EACNC,OAAOC,iBAAiB,CAAC,CAAC,EAC3B,GAAGvB;IACJ,MAAM,EAAEe,QAAQS,aAAa,EAAEC,OAAO,EAAE,GAAGP;IAE3C,IAAIZ,OAAO;QACTY,IAAIO,OAAO,CAACC,MAAM,CAACpB,KAAK,CAAC;YACvBqB,SAAS;YACTtB;YACAuB,OAAOnB;YACPE;YACAC;QACF;IACF;IAEA,MAAMG,SAASC,mBAAmBQ;IAClC,MAAMK,mBAAmBJ,QAAQK,MAAM,CAACC,WAAW,CAACC,IAAI,CAAC,CAAC,EAAEC,IAAI,EAAE,GAAKA,SAAS5B;IAEhF,IAAI,CAACwB,kBAAkB;QACrB,MAAM,IAAIzC,SAAS,CAAC,qBAAqB,EAAEiB,eAAe,WAAW,CAAC;IACxE;IAEA,IAAI6B;IAEJ,IAAId,kBAAkBC,QAAQ;QAC5Ba,OAAQ,MAAMhB,IAAIO,OAAO,CAACU,QAAQ,CAAC;YACjClC,IAAIoB;YACJe,YAAYhB;YACZiB,gBAAgB;QAClB;IACF;IAEA,IAAI,CAACH,QAAQhB,IAAIgB,IAAI,EAAE;QACrBA,OAAOhB,KAAKgB,MAAMjC,KAAKiB,IAAIgB,IAAI,GAAGhB,KAAKgB,MAAMA;IAC/C;IAEA,IAAI,CAACA,MAAM;QACT,MAAM,IAAI9C,SAA
S;IACrB;IAEA,MAAMwC,QAAQnB,oBAAoB;IAClC,MAAM6B,cAAcC,QAAQV,iBAAiBW,QAAQ;IAErD,mDAAmD;IACnD,MAAMC,iBAAwBH,cAAc;QAAEI,SAAS;YAAEC,QAAQ;QAAY;IAAE,IAAI,CAAC;IAEpF,MAAMrB,QAAe;QACnBsB,KAAK;YAACrB;YAAgBK,QAAQ,CAAC,IAAIa;SAAe;IACpD;IAEA,MAAMvC,OAAO,GAAGC,WAAW,GAAGV,cAAc,CAAC,EAAEY,gBAAgB,CAAC,CAAC,EAAEO,QAAQ;IAC3E,MAAMiC,QAAQjC,WAAW;IACzB,MAAMkC,SAASC,MAAMC,OAAO,CAACrC,WAAWA,OAAOsC,MAAM,GAAG,IAAIrD,UAAUe,UAAUuC;IAEhF,IAAI5C,OAAO;QACTY,IAAIO,OAAO,CAACC,MAAM,CAACpB,KAAK,CAAC;YAAEqB,SAAS;YAAyBzB;YAAM2C;YAAO9B;QAAO;IACnF;IAEA,MAAMoC,YACJ,OAAOrC,kBAAkB,YAAYA,gBAAgB,IAAIA,gBAAgBoC;IAE3E,qEAAqE;IACrE,IAAIE,YAAY;IAChB,IAAIC,eAAe;IACnB,IAAI;QACF,MAAMC,cAAc,MAAM7B,QAAQ8B,KAAK,CAAC;YACtCnB,YAAY/B;YACZ6B;YACAnB;YACAsB,gBAAgB;QAClB;QACAe,YAAYE,YAAYF,SAAS;IACnC,EAAE,OAAOI,OAAO;QACd,uDAAuD;QACvD,oCAAoC;QACpCH,eAAe;QACf,IAAI/C,OAAO;YACTY,IAAIO,OAAO,CAACC,MAAM,CAACpB,KAAK,CAAC;gBACvBqB,SAAS;gBACTtB;YACF;QACF;IACF;IAEA,MAAMoD,aAAaC,KAAKC,GAAG,CAAC,GAAGD,KAAKE,IAAI,CAACR,YAAYhD;IACrD,MAAMyD,gBAAgB5C,QAAQ;IAC9B,MAAM6C,eAAeD,gBAAgBJ,aAAa,IAAII;IAEtD,MAAME,WAAW;QACf3B,YAAY/B;QACZ2D,OAAO;QACPpC;QACAf,OAAOT;QACPW;QACAsB,gBAAgB;QAChBpB,MAAM;QACN6B;QACA3B;QACAe;QACAZ;IACF;IAEA,IAAIhB,OAAO;QACTY,IAAIO,OAAO,CAACC,MAAM,CAACpB,KAAK,CAAC;YAAEqB,SAAS;YAAmBoC;QAAS;IAClE;IAEA,MAAME,iBAAiBzE,wBAAwB;QAC7CmB,QAAQkB,iBAAiBqC,eAAe;IAC1C;IAEA,MAAMC,iBACJtC,iBAAiBuC,KAAK,EAAEC,QAAQ,CAAC,uBAAuB,EAAEF,kBAAkB,EAAE;IAEhF,MAAMG,kBAA4BH,eAAeI,GAAG,CAACjF;IAErD,MAAMkF,oBAAoB,CAACC;QACzB,MAAMC,WAAoC,CAAC;QAE3C,KAAK,MAAM,CAACC,KAAKC,MAAM,IAAIC,OAAOC,OAAO,CAACL,KAAM;YAC9C,MAAMM,aAAaT,gBAAgBU,IAAI,CAAC,CAACC,QAAUA,MAAMC,IAAI,CAACP;YAC9D,IAAI,CAACI,YAAY;gBACfL,QAAQ,CAACC,IAAI,GAAGC;YAClB;QACF;QAEA,OAAOF;IACT;IAEA,MAAMS,qBAAqB,CAACC,KAAUC,aAAa,EAAE;QACnD,IAAItC,MAAMC,OAAO,CAACoC,MAAM;YACtB,OAAOA,IAAIb,GAAG,CAAC,CAACe,OAASH,mBAAmBG,MAAMD;QACpD;QAEA,IAAI,OAAOD,QAAQ,YAAYA,QAAQ,MAAM;YAC3C,OAAOA;QACT;QAEA,MAAMV,WAAgC,CAAC;QACvC,KAAK,MAAM,CAACC,KAAKC,MAAM,IAAIC,OAAOC,OAAO,CAACM,KAAM;YAC9C,MAAMG,cAAcF,aAAa,GAAGA,WAAW,CAAC,EAAEV,KAAK,GAAGA;YAE1D,6CAA6C;YAC7C,MAAMI,aAAaZ,eAAeqB,QAAQ,CAACD;YAE3C,IAAI,CAACR,YAAY;gBACfL,QAAQ,CAACC,IAAI,GAAGQ,mBAAmBP,OAAOW;YAC5C;QACF;QAEA,OAAOb;IACT;IAEA,IAAInE,UAAU;QACZ,MAAMkF,gBAAgB5F,mBAAmBiB,eAAeI,IAAIwE,CAAC;QAC7D,IAAID,eAAe;YACjB,MAAM,IAAIrG,SAASqG;QACrB;QAEA,oFAAoF;QACpF,IAAIE,gBAA0B,EAAE;QAChC,IAAI9C,OAAO;YACT,MAAM+C,cACJ7E,WAAW,SAASU,QAAQK,MAAM,CAAC+D,YAAY,GAC3CpE,QAAQK,MAAM,CAAC+D,YAAY,CAACD,WAAW,GACvC1C;YAENyC,gBAAgBjG,iBAAiB;gBAC/BmC;gBACAsC;gBACAxD;gBACAI;gBACA6E;YACF;YAEA,IAAItF,OAAO;gBACTY,IAAIO,OAAO,CAACC,MAAM,CAACpB,KAAK,CAAC;oBACvBwF,aAAaH,cAAc1C,MAAM;oBACjC8C,KAAK;gBACP;YACF;QACF;QAEA,gFAAgF;QAChF,IAAIC,aAAuB,EAAE;QAC7B,IAAIC,mBAAmB;QAEvB,MAAMC,UAAU,IAAIC;QACpB,IAAIC,eAAe;QACnB,IAAIC,aAAavC;QACjB,IAAIwC,UAAU;QACd,MAAMC,UAAU,OAAOpD,cAAc,WAAWA,YAAYqD,OAAOC,iBAAiB;QAEpF,MAAMC,SAAS,IAAIrH,SAAS;YAC1B,MAAMsH;gBACJ,MAAMC,YAAYlD,KAAKC,GAAG,CAAC,GAAG4C,UAAUD;gBAExC,IAAIM,cAAc,GAAG;oBACnB,IAAI,CAAC/D,OAAO;wBACV,6EAA6E;wBAC7E,IAAI,CAACgE,IAAI,CAACX,QAAQY,MAAM,CAACV,eAAe,OAAO;oBACjD;oBACA,IAAI,CAACS,IAAI,CAAC;oBACV;gBACF;gBAEA,MAAME,SAAS,MAAMtF,QAAQO,IAAI,CAAC;oBAChC,GAAG+B,QAAQ;oBACX9C,MAAMoF;oBACNxF,OAAO6C,KAAKsD,GAAG,CAAC5G,WAAWwG;gBAC7B;gBAEA,IAAItG,OAAO;oBACTY,IAAIO,OAAO,CAACC,MAAM,CAACpB,KAAK,CAAC,CAAC,gBAAgB,EAAE+F,WAAW,MAAM,EAAEU,OAAOE,IAAI,CAAChE,MAAM,CAAC,KAAK,CAAC;gBAC1F;gBAEA,IAAI8D,OAAOE,IAAI,CAAChE,MAAM,KAAK,GAAG;oBAC5B,oCAAoC;oBACpC,IAAI,CAACJ,OAAO;wBACV,wEAAwE;wBACxE,IAAI,CAACgE,IAAI,CAACX,QAAQY,MAAM,CAACV,eAAe,OAAO;oBACjD;oBACA,IAAI,CAACS,IAAI,CAAC;oBACV;gBACF;gBAEA,IAAIhE,OAAO;oBACT,wBAAwB;oBACxB,MAAMqE,YA
AYH,OAAOE,IAAI,CAAC1C,GAAG,CAAC,CAACa,MACjCZ,kBAAkBjF,cAAc;4BAAE6F;4BAAKzE;4BAAQsD;wBAAe;oBAGhE,8EAA8E;oBAC9E,IAAI,CAACgC,kBAAkB;wBACrB,MAAMkB,cAAwB,EAAE;wBAChC,MAAMC,WAAW,IAAIC;wBACrB,KAAK,MAAM5C,OAAOyC,UAAW;4BAC3B,KAAK,MAAMvC,OAAOE,OAAOyC,IAAI,CAAC7C,KAAM;gCAClC,IAAI,CAAC2C,SAASG,GAAG,CAAC5C,MAAM;oCACtByC,SAASI,GAAG,CAAC7C;oCACbwC,YAAYN,IAAI,CAAClC;gCACnB;4BACF;wBACF;wBACA,oDAAoD;wBACpDqB,aAAarG,aAAagG,eAAewB;wBACzClB,mBAAmB;wBAEnB,IAAI3F,OAAO;4BACTY,IAAIO,OAAO,CAACC,MAAM,CAACpB,KAAK,CAAC;gCACvBmH,kBAAkBN,YAAYlE,MAAM;gCACpCyE,mBAAmB1B,WAAW/C,MAAM;gCACpC8C,KAAK;4BACP;wBACF;oBACF;oBAEA,MAAM4B,aAAaT,UAAU3C,GAAG,CAAC,CAACE;wBAChC,MAAMmD,UAAmC,CAAC;wBAC1C,KAAK,MAAMC,OAAO7B,WAAY;4BAC5B4B,OAAO,CAACC,IAAI,GAAGpD,GAAG,CAACoD,IAAI,IAAI;wBAC7B;wBACA,OAAOD;oBACT;oBAEA,MAAME,YAAY3I,UAAUwI,YAAY;wBACtCI,QAAQ3B;wBACR4B,SAAShC;oBACX;oBAEA,IAAI,CAACa,IAAI,CAACX,QAAQY,MAAM,CAACgB;gBAC3B,OAAO;oBACL,yBAAyB;oBACzB,MAAMZ,YAAYH,OAAOE,IAAI,CAAC1C,GAAG,CAAC,CAACa,MAAQD,mBAAmBC;oBAE9D,uDAAuD;oBACvD,MAAM6C,YAAYf,UAAU3C,GAAG,CAAC,CAACE,MAAQyD,KAAK/I,SAAS,CAACsF,MAAM0D,IAAI,CAAC;oBAEnE,IAAI/B,cAAc;wBAChB,IAAI,CAACS,IAAI,CAACX,QAAQY,MAAM,CAAC,MAAMmB;oBACjC,OAAO;wBACL,IAAI,CAACpB,IAAI,CAACX,QAAQY,MAAM,CAAC,MAAMmB;oBACjC;gBACF;gBAEA3B,WAAWS,OAAOE,IAAI,CAAChE,MAAM;gBAC7BmD,eAAe;gBACfC,cAAc,GAAE,2CAA2C;gBAE3D,IAAI,CAACU,OAAOqB,WAAW,IAAI9B,WAAWC,SAAS;oBAC7C,IAAIjG,OAAO;wBACTY,IAAIO,OAAO,CAACC,MAAM,CAACpB,KAAK,CAAC;oBAC3B;oBACA,IAAI,CAACuC,OAAO;wBACV,IAAI,CAACgE,IAAI,CAACX,QAAQY,MAAM,CAAC;oBAC3B;oBACA,IAAI,CAACD,IAAI,CAAC,OAAM,iBAAiB;gBACnC;YACF;QACF;QAEA,OAAO,IAAIwB,SAAShJ,SAASiJ,KAAK,CAAC5B,SAA2B;YAC5D6B,SAAS;gBACP,uBAAuB,CAAC,sBAAsB,EAAErI,KAAK,CAAC,CAAC;gBACvD,gBAAgB2C,QAAQ,aAAa;YACvC;QACF;IACF;IAEA,sCAAsC;IACtC,IAAIvC,OAAO;QACTY,IAAIO,OAAO,CAACC,MAAM,CAACpB,KAAK,CAAC;IAC3B;IAEA,gCAAgC;IAChC,MAAMkI,YAAY1I,2BAA2B;QAAEM;QAAWE;IAAM;IAEhE,qCAAqC;IACrC,MAAMmI,eAAe,CAACrD,MACpBvC,QACI2B,kBAAkBjF,cAAc;YAAE6F;YAAKzE;YAAQsD;QAAe,MAC9DkB,mBAAmBC;IAEzB,oEAAoE;IACpE,IAAIsD,eAAe;QACjBV,SAAS,EAAE;QACXf,MAAM,EAAE;QACR0B,cAAc;IAChB;IAEA,IAAI,CAACtF,cAAc;QACjBqF,eAAe,MAAMF,UAAUI,aAAa,CAAC;YAC3CvI;YACA0D,UAAUA;YACVnD;YACA2F,SAAS,OAAOpD,cAAc,WAAWA,YAAYqD,OAAOC,iBAAiB;YAC7EvF;YACA2H,WAAW/E;YACX2E;QACF;IACF;IAEA,MAAM,EAAET,SAASb,WAAW,EAAEF,MAAM6B,IAAI,EAAE,GAAGJ;IAC7C,MAAMK,aAAuB,EAAE;IAE/B,uBAAuB;IACvB,IAAIlG,OAAO;QACT,mDAAmD;QACnD,MAAM+C,cACJ7E,WAAW,SAASU,QAAQK,MAAM,CAAC+D,YAAY,GAC3CpE,QAAQK,MAAM,CAAC+D,YAAY,CAACD,WAAW,GACvC1C;QAEN,MAAMyC,gBAAgBjG,iBAAiB;YACrCmC;YACAsC;YACAxD;YACAI;YACA6E;QACF;QAEA,oDAAoD;QACpD,uFAAuF;QACvF,MAAMoD,eAAerJ,aAAagG,eAAewB;QAEjD,MAAMQ,aAAamB,KAAKvE,GAAG,CAAC,CAACE;YAC3B,MAAMmD,UAAmC,CAAC;YAC1C,KAAK,MAAMC,OAAOmB,aAAc;gBAC9BpB,OAAO,CAACC,IAAI,GAAGpD,GAAG,CAACoD,IAAI,IAAI;YAC7B;YACA,OAAOD;QACT;QAEA,+CAA+C;QAC/CmB,WAAWlC,IAAI,CACb1H,UAAUwI,YAAY;YACpBI,QAAQ;YACRC,SAASgB;QACX;IAEJ,OAAO;QACL,cAAc;QACdD,WAAWlC,IAAI,CAACiC,KAAKvE,GAAG,CAAC,CAACa,MAAQ8C,KAAK/I,SAAS,CAACiG,MAAM+C,IAAI,CAAC;IAC9D;IAEA,mDAAmD;IACnD,uCAAuC;IACvC,yGAAyG;IACzG,MAAMc,UAAUrI,WAAW,SAAS,CAAC,CAAC,EAAEmI,WAAWZ,IAAI,CAAC,KAAK,CAAC,CAAC,GAAGY,WAAWZ,IAAI,CAAC;IAClF,MAAMe,SAASC,OAAOC,IAAI,CAACH,QAAQhG,MAAM,GAAG,IAAIgG,UAAU;IAC1D,IAAI3I,OAAO;QACTY,IAAIO,OAAO,CAACC,MAAM,CAACpB,KAAK,CAAC,GAAGM,OAAO,yBAAyB,CAAC;IAC/D;IAEA,IAAI,CAACX,IAAI;QACP,IAAIK,OAAO;YACTY,IAAIO,OAAO,CAACC,MAAM,CAACpB,KAAK,CAAC;QAC3B;QACAY,IAAImI,IAAI,GAAG;YACTnJ;YACAoJ,MAAMJ;YACNK,UAAU1G,QAAQ,aAAa;YAC/B2G,MAAMN,OAAOjG,MAAM;QACrB;IACF,OAAO;QACL,IAAI3C,OAAO;YACTY,IAAIO,OAAO,CAACC,MAAM,CAACpB,KAAK,CAAC,CAAC,kCAAkC,EAAEL,IAAI;QACpE;QACA,MAAMiB,IAAIO,OAAO,CAACgI,MAAM,CAAC;YACvBxJ;YACAmC,YAA
Y1B;YACZ4I,MAAM,CAAC;YACPD,MAAM;gBACJnJ;gBACAoJ,MAAMJ;gBACNK,UAAU1G,QAAQ,aAAa;gBAC/B2G,MAAMN,OAAOjG,MAAM;YACrB;YACA,uGAAuG;YACvGZ,gBAAgB;YAChBnB;QACF;IACF;IACA,IAAIZ,OAAO;QACTY,IAAIO,OAAO,CAACC,MAAM,CAACpB,KAAK,CAAC;IAC3B;AACF,EAAC"}
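The sourcemap above embeds the full rewritten `createExport.ts`: the function now resolves the acting user itself from `userCollection`/`userID`, counts documents with `overrideAccess: false` (falling back to an empty export when access is denied), and, when `download` is set, streams batches through a Node `Readable` converted to a web `ReadableStream`. A minimal sketch of calling it from a custom endpoint follows; the deep dist import path, collection slugs, and handler name are illustrative assumptions, but the argument shape follows the `CreateExportArgs` type in the source above.

```ts
import type { PayloadRequest } from 'payload'
// Assumption: the plugin may not re-export createExport from its root,
// so this deep dist path is illustrative only.
import { createExport } from '@payloadcms/plugin-import-export/dist/export/createExport.js'

// Sketch of a custom endpoint handler that streams a CSV export.
export const downloadPagesExport = async (req: PayloadRequest): Promise<Response> => {
  const response = await createExport({
    id: '', // falsy id: the file is streamed rather than saved onto an existing export doc
    name: 'pages-export',
    slug: 'exports',
    collectionSlug: 'pages', // illustrative collection
    exportsCollection: 'exports',
    format: 'csv',
    sort: '-createdAt',
    userCollection: 'users',
    userID: req.user!.id, // createExport rehydrates the user via findByID
    download: true, // returns a Response with a Content-Disposition attachment header
    req,
  })
  return response as Response
}
```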
@@ -1,5 +1,4 @@
  import type { Config, TaskConfig } from 'payload';
- import type { ImportExportPluginConfig } from '../types.js';
  import type { Export } from './createExport.js';
  /**
  * Export input type for job queue serialization.
@@ -10,7 +9,7 @@ export type ExportJobInput = {
  user: number | string;
  userCollection: string;
  } & Export;
- export declare const getCreateCollectionExportTask: (config: Config, pluginConfig?: ImportExportPluginConfig) => TaskConfig<{
+ export declare const getCreateCollectionExportTask: (config: Config) => TaskConfig<{
  input: ExportJobInput;
  output: object;
  }>;
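With `pluginConfig` dropped from the factory signature, registering the task now needs only the Payload `Config`. A hedged sketch of how setup code might wire it in; the `withExportTask` wrapper and dist import path are assumptions, not the plugin's own API.

```ts
import type { Config } from 'payload'
// Illustrative deep import; the package root may not expose this module.
import { getCreateCollectionExportTask } from '@payloadcms/plugin-import-export/dist/export/getCreateExportCollectionTask.js'

// Append the export task to a config's job tasks under the new one-argument signature.
export const withExportTask = (incomingConfig: Config): Config => ({
  ...incomingConfig,
  jobs: {
    ...incomingConfig.jobs,
    tasks: [...(incomingConfig.jobs?.tasks ?? []), getCreateCollectionExportTask(incomingConfig)],
  },
})
```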
@@ -1 +1 @@
- {"version":3,"file":"getCreateExportCollectionTask.d.ts","sourceRoot":"","sources":["../../src/export/getCreateExportCollectionTask.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,EAAkB,UAAU,EAAa,MAAM,SAAS,CAAA;AAE5E,OAAO,KAAK,EAAE,wBAAwB,EAAE,MAAM,aAAa,CAAA;AAC3D,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,mBAAmB,CAAA;AAK/C;;;;GAIG;AACH,MAAM,MAAM,cAAc,GAAG;IAC3B,IAAI,EAAE,MAAM,GAAG,MAAM,CAAA;IACrB,cAAc,EAAE,MAAM,CAAA;CACvB,GAAG,MAAM,CAAA;AAEV,eAAO,MAAM,6BAA6B,WAChC,MAAM,iBACC,wBAAwB,KACtC,UAAU,CAAC;IACZ,KAAK,EAAE,cAAc,CAAA;IACrB,MAAM,EAAE,MAAM,CAAA;CACf,CA6CA,CAAA"}
+ {"version":3,"file":"getCreateExportCollectionTask.d.ts","sourceRoot":"","sources":["../../src/export/getCreateExportCollectionTask.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,SAAS,CAAA;AAEjD,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,mBAAmB,CAAA;AAK/C;;;;GAIG;AACH,MAAM,MAAM,cAAc,GAAG;IAC3B,IAAI,EAAE,MAAM,GAAG,MAAM,CAAA;IACrB,cAAc,EAAE,MAAM,CAAA;CACvB,GAAG,MAAM,CAAA;AAEV,eAAO,MAAM,6BAA6B,WAChC,MAAM,KACb,UAAU,CAAC;IACZ,KAAK,EAAE,cAAc,CAAA;IACrB,MAAM,EAAE,MAAM,CAAA;CACf,CAoCA,CAAA"}
@@ -1,8 +1,8 @@
  import { createExport } from './createExport.js';
  import { getFields } from './getFields.js';
- export const getCreateCollectionExportTask = (config, pluginConfig)=>{
- const inputSchema = getFields(config, pluginConfig).concat({
- name: 'user',
+ export const getCreateCollectionExportTask = (config)=>{
+ const inputSchema = getFields(config).concat({
+ name: 'userID',
  type: 'text'
  }, {
  name: 'userCollection',
@@ -14,23 +14,15 @@ export const getCreateCollectionExportTask = (config, pluginConfig)=>{
  return {
  slug: 'createCollectionExport',
  handler: async ({ input, req })=>{
- let user;
- if (input.userCollection && input.user) {
- user = await req.payload.findByID({
- id: input.user,
- collection: input.userCollection
- });
- req.user = user;
+ if (!input) {
+ req.payload.logger.error('No input provided to createCollectionExport task');
+ return {
+ output: {}
+ };
  }
- if (!user) {
- throw new Error('User not found');
- }
- // Strip out user and userCollection from input - they're only needed for rehydration
- const { user: _userId, userCollection: _userCollection, ...exportInput } = input;
  await createExport({
- input: exportInput,
- req,
- user
+ ...input,
+ req
  });
  return {
  output: {}
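The handler no longer rehydrates the user or strips `user`/`userCollection` before delegating: it spreads the raw input into `createExport`, which (per the `createExport.ts` source embedded in the sourcemap earlier) now performs the `findByID` rehydration itself. Enqueueing the task might look like the sketch below, using Payload's jobs API; field values are illustrative, and note that the input schema field is now named `userID` even though the `ExportJobInput` type still declares `user`.

```ts
import type { Payload, PayloadRequest } from 'payload'

// Hedged sketch: queue the export task through Payload's jobs queue.
// Slugs and values are illustrative assumptions.
export const queuePagesExport = async (payload: Payload, req: PayloadRequest) => {
  await payload.jobs.queue({
    task: 'createCollectionExport',
    input: {
      name: 'pages-export',
      collectionSlug: 'pages',
      exportsCollection: 'exports',
      format: 'json',
      sort: '-createdAt',
      userID: req.user?.id, // matches the renamed 'userID' schema field
      userCollection: 'users',
    },
  })
}
```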
@@ -1 +1 @@
- {"version":3,"sources":["../../src/export/getCreateExportCollectionTask.ts"],"sourcesContent":["import type { Config, PayloadRequest, TaskConfig, TypedUser } from 'payload'\n\nimport type { ImportExportPluginConfig } from '../types.js'\nimport type { Export } from './createExport.js'\n\nimport { createExport } from './createExport.js'\nimport { getFields } from './getFields.js'\n\n/**\n * Export input type for job queue serialization.\n * When exports are queued as jobs, the user must be serialized as an ID string or number\n * along with the collection name so it can be rehydrated when the job runs.\n */\nexport type ExportJobInput = {\n user: number | string\n userCollection: string\n} & Export\n\nexport const getCreateCollectionExportTask = (\n config: Config,\n pluginConfig?: ImportExportPluginConfig,\n): TaskConfig<{\n input: ExportJobInput\n output: object\n}> => {\n const inputSchema = getFields(config, pluginConfig).concat(\n {\n name: 'user',\n type: 'text',\n },\n {\n name: 'userCollection',\n type: 'text',\n },\n {\n name: 'exportsCollection',\n type: 'text',\n },\n )\n\n return {\n slug: 'createCollectionExport',\n handler: async ({ input, req }: { input: ExportJobInput; req: PayloadRequest }) => {\n let user: TypedUser | undefined\n\n if (input.userCollection && input.user) {\n user = (await req.payload.findByID({\n id: input.user,\n collection: input.userCollection,\n })) as TypedUser\n\n req.user = user\n }\n\n if (!user) {\n throw new Error('User not found')\n }\n\n // Strip out user and userCollection from input - they're only needed for rehydration\n const { user: _userId, userCollection: _userCollection, ...exportInput } = input\n\n await createExport({ input: exportInput, req, user })\n\n return {\n output: {},\n }\n },\n inputSchema,\n }\n}\n"],"names":["createExport","getFields","getCreateCollectionExportTask","config","pluginConfig","inputSchema","concat","name","type","slug","handler","input","req","user","userCollection","payload","findByID","id","collection","Error","_userId","_userCollection","exportInput","output"],"mappings":"AAKA,SAASA,YAAY,QAAQ,oBAAmB;AAChD,SAASC,SAAS,QAAQ,iBAAgB;AAY1C,OAAO,MAAMC,gCAAgC,CAC3CC,QACAC;IAKA,MAAMC,cAAcJ,UAAUE,QAAQC,cAAcE,MAAM,CACxD;QACEC,MAAM;QACNC,MAAM;IACR,GACA;QACED,MAAM;QACNC,MAAM;IACR,GACA;QACED,MAAM;QACNC,MAAM;IACR;IAGF,OAAO;QACLC,MAAM;QACNC,SAAS,OAAO,EAAEC,KAAK,EAAEC,GAAG,EAAkD;YAC5E,IAAIC;YAEJ,IAAIF,MAAMG,cAAc,IAAIH,MAAME,IAAI,EAAE;gBACtCA,OAAQ,MAAMD,IAAIG,OAAO,CAACC,QAAQ,CAAC;oBACjCC,IAAIN,MAAME,IAAI;oBACdK,YAAYP,MAAMG,cAAc;gBAClC;gBAEAF,IAAIC,IAAI,GAAGA;YACb;YAEA,IAAI,CAACA,MAAM;gBACT,MAAM,IAAIM,MAAM;YAClB;YAEA,qFAAqF;YACrF,MAAM,EAAEN,MAAMO,OAAO,EAAEN,gBAAgBO,eAAe,EAAE,GAAGC,aAAa,GAAGX;YAE3E,MAAMX,aAAa;gBAAEW,OAAOW;gBAAaV;gBAAKC;YAAK;YAEnD,OAAO;gBACLU,QAAQ,CAAC;YACX;QACF;QACAlB;IACF;AACF,EAAC"}
+ {"version":3,"sources":["../../src/export/getCreateExportCollectionTask.ts"],"sourcesContent":["import type { Config, TaskConfig } from 'payload'\n\nimport type { Export } from './createExport.js'\n\nimport { createExport } from './createExport.js'\nimport { getFields } from './getFields.js'\n\n/**\n * Export input type for job queue serialization.\n * When exports are queued as jobs, the user must be serialized as an ID string or number\n * along with the collection name so it can be rehydrated when the job runs.\n */\nexport type ExportJobInput = {\n user: number | string\n userCollection: string\n} & Export\n\nexport const getCreateCollectionExportTask = (\n config: Config,\n): TaskConfig<{\n input: ExportJobInput\n output: object\n}> => {\n const inputSchema = getFields(config).concat(\n {\n name: 'userID',\n type: 'text',\n },\n {\n name: 'userCollection',\n type: 'text',\n },\n {\n name: 'exportsCollection',\n type: 'text',\n },\n )\n\n return {\n slug: 'createCollectionExport',\n handler: async ({ input, req }) => {\n if (!input) {\n req.payload.logger.error('No input provided to createCollectionExport task')\n\n return { output: {} }\n }\n\n await createExport({\n ...input,\n req,\n })\n\n return {\n output: {},\n }\n },\n inputSchema,\n }\n}\n"],"names":["createExport","getFields","getCreateCollectionExportTask","config","inputSchema","concat","name","type","slug","handler","input","req","payload","logger","error","output"],"mappings":"AAIA,SAASA,YAAY,QAAQ,oBAAmB;AAChD,SAASC,SAAS,QAAQ,iBAAgB;AAY1C,OAAO,MAAMC,gCAAgC,CAC3CC;IAKA,MAAMC,cAAcH,UAAUE,QAAQE,MAAM,CAC1C;QACEC,MAAM;QACNC,MAAM;IACR,GACA;QACED,MAAM;QACNC,MAAM;IACR,GACA;QACED,MAAM;QACNC,MAAM;IACR;IAGF,OAAO;QACLC,MAAM;QACNC,SAAS,OAAO,EAAEC,KAAK,EAAEC,GAAG,EAAE;YAC5B,IAAI,CAACD,OAAO;gBACVC,IAAIC,OAAO,CAACC,MAAM,CAACC,KAAK,CAAC;gBAEzB,OAAO;oBAAEC,QAAQ,CAAC;gBAAE;YACtB;YAEA,MAAMf,aAAa;gBACjB,GAAGU,KAAK;gBACRC;YACF;YAEA,OAAO;gBACLI,QAAQ,CAAC;YACX;QACF;QACAX;IACF;AACF,EAAC"}
@@ -0,0 +1,8 @@
+ import type { CollectionConfig, Config } from 'payload';
+ import type { ExportConfig, ImportExportPluginConfig } from '../types.js';
+ export declare const getExportCollection: ({ config, exportConfig, pluginConfig, }: {
+ config: Config;
+ exportConfig?: ExportConfig;
+ pluginConfig: ImportExportPluginConfig;
+ }) => CollectionConfig;
+ //# sourceMappingURL=getExportCollection.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"getExportCollection.d.ts","sourceRoot":"","sources":["../../src/export/getExportCollection.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAGV,gBAAgB,EAChB,MAAM,EACP,MAAM,SAAS,CAAA;AAEhB,OAAO,KAAK,EAAE,YAAY,EAAE,wBAAwB,EAAE,MAAM,aAAa,CAAA;AAQzE,eAAO,MAAM,mBAAmB,4CAI7B;IACD,MAAM,EAAE,MAAM,CAAA;IACd,YAAY,CAAC,EAAE,YAAY,CAAA;IAC3B,YAAY,EAAE,wBAAwB,CAAA;CACvC,KAAG,gBAmGH,CAAA"}