@payloadcms/plugin-import-export 3.75.0 → 3.76.0-canary.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (235)
  1. package/dist/components/ExportPreview/index.d.ts.map +1 -1
  2. package/dist/components/ExportPreview/index.js +30 -12
  3. package/dist/components/ExportPreview/index.js.map +1 -1
  4. package/dist/components/ExportPreview/index.scss +12 -0
  5. package/dist/components/ImportPreview/index.js +3 -15
  6. package/dist/components/ImportPreview/index.js.map +1 -1
  7. package/dist/export/createExport.d.ts +5 -0
  8. package/dist/export/createExport.d.ts.map +1 -1
  9. package/dist/export/createExport.js +36 -12
  10. package/dist/export/createExport.js.map +1 -1
  11. package/dist/export/getCreateExportCollectionTask.d.ts.map +1 -1
  12. package/dist/export/getCreateExportCollectionTask.js +3 -0
  13. package/dist/export/getCreateExportCollectionTask.js.map +1 -1
  14. package/dist/export/getExportCollection.d.ts.map +1 -1
  15. package/dist/export/getExportCollection.js +21 -1
  16. package/dist/export/getExportCollection.js.map +1 -1
  17. package/dist/export/getFields.d.ts.map +1 -1
  18. package/dist/export/getFields.js +7 -3
  19. package/dist/export/getFields.js.map +1 -1
  20. package/dist/export/handleDownload.d.ts.map +1 -1
  21. package/dist/export/handleDownload.js +18 -1
  22. package/dist/export/handleDownload.js.map +1 -1
  23. package/dist/export/handlePreview.d.ts.map +1 -1
  24. package/dist/export/handlePreview.js +32 -10
  25. package/dist/export/handlePreview.js.map +1 -1
  26. package/dist/exports/types.d.ts +1 -1
  27. package/dist/exports/types.d.ts.map +1 -1
  28. package/dist/exports/types.js.map +1 -1
  29. package/dist/import/batchProcessor.js +50 -53
  30. package/dist/import/batchProcessor.js.map +1 -1
  31. package/dist/import/createImport.d.ts +6 -1
  32. package/dist/import/createImport.d.ts.map +1 -1
  33. package/dist/import/createImport.js +5 -1
  34. package/dist/import/createImport.js.map +1 -1
  35. package/dist/import/getCreateImportCollectionTask.d.ts.map +1 -1
  36. package/dist/import/getCreateImportCollectionTask.js +3 -0
  37. package/dist/import/getCreateImportCollectionTask.js.map +1 -1
  38. package/dist/import/getImportCollection.d.ts.map +1 -1
  39. package/dist/import/getImportCollection.js +15 -0
  40. package/dist/import/getImportCollection.js.map +1 -1
  41. package/dist/import/handlePreview.d.ts.map +1 -1
  42. package/dist/import/handlePreview.js +11 -0
  43. package/dist/import/handlePreview.js.map +1 -1
  44. package/dist/index.d.ts +22 -1
  45. package/dist/index.d.ts.map +1 -1
  46. package/dist/index.js +26 -2
  47. package/dist/index.js.map +1 -1
  48. package/dist/translations/languages/ar.d.ts.map +1 -1
  49. package/dist/translations/languages/ar.js +3 -0
  50. package/dist/translations/languages/ar.js.map +1 -1
  51. package/dist/translations/languages/az.d.ts.map +1 -1
  52. package/dist/translations/languages/az.js +3 -0
  53. package/dist/translations/languages/az.js.map +1 -1
  54. package/dist/translations/languages/bg.d.ts.map +1 -1
  55. package/dist/translations/languages/bg.js +3 -0
  56. package/dist/translations/languages/bg.js.map +1 -1
  57. package/dist/translations/languages/bnBd.d.ts.map +1 -1
  58. package/dist/translations/languages/bnBd.js +22 -19
  59. package/dist/translations/languages/bnBd.js.map +1 -1
  60. package/dist/translations/languages/bnIn.d.ts.map +1 -1
  61. package/dist/translations/languages/bnIn.js +22 -19
  62. package/dist/translations/languages/bnIn.js.map +1 -1
  63. package/dist/translations/languages/ca.d.ts.map +1 -1
  64. package/dist/translations/languages/ca.js +3 -0
  65. package/dist/translations/languages/ca.js.map +1 -1
  66. package/dist/translations/languages/cs.d.ts.map +1 -1
  67. package/dist/translations/languages/cs.js +3 -0
  68. package/dist/translations/languages/cs.js.map +1 -1
  69. package/dist/translations/languages/da.d.ts.map +1 -1
  70. package/dist/translations/languages/da.js +3 -0
  71. package/dist/translations/languages/da.js.map +1 -1
  72. package/dist/translations/languages/de.d.ts.map +1 -1
  73. package/dist/translations/languages/de.js +3 -0
  74. package/dist/translations/languages/de.js.map +1 -1
  75. package/dist/translations/languages/en.d.ts +3 -0
  76. package/dist/translations/languages/en.d.ts.map +1 -1
  77. package/dist/translations/languages/en.js +3 -0
  78. package/dist/translations/languages/en.js.map +1 -1
  79. package/dist/translations/languages/es.d.ts.map +1 -1
  80. package/dist/translations/languages/es.js +3 -0
  81. package/dist/translations/languages/es.js.map +1 -1
  82. package/dist/translations/languages/et.d.ts.map +1 -1
  83. package/dist/translations/languages/et.js +3 -0
  84. package/dist/translations/languages/et.js.map +1 -1
  85. package/dist/translations/languages/fa.d.ts.map +1 -1
  86. package/dist/translations/languages/fa.js +3 -0
  87. package/dist/translations/languages/fa.js.map +1 -1
  88. package/dist/translations/languages/fr.d.ts.map +1 -1
  89. package/dist/translations/languages/fr.js +3 -0
  90. package/dist/translations/languages/fr.js.map +1 -1
  91. package/dist/translations/languages/he.d.ts.map +1 -1
  92. package/dist/translations/languages/he.js +3 -0
  93. package/dist/translations/languages/he.js.map +1 -1
  94. package/dist/translations/languages/hr.d.ts.map +1 -1
  95. package/dist/translations/languages/hr.js +3 -0
  96. package/dist/translations/languages/hr.js.map +1 -1
  97. package/dist/translations/languages/hu.d.ts.map +1 -1
  98. package/dist/translations/languages/hu.js +3 -0
  99. package/dist/translations/languages/hu.js.map +1 -1
  100. package/dist/translations/languages/hy.d.ts.map +1 -1
  101. package/dist/translations/languages/hy.js +3 -0
  102. package/dist/translations/languages/hy.js.map +1 -1
  103. package/dist/translations/languages/id.d.ts.map +1 -1
  104. package/dist/translations/languages/id.js +13 -10
  105. package/dist/translations/languages/id.js.map +1 -1
  106. package/dist/translations/languages/is.d.ts.map +1 -1
  107. package/dist/translations/languages/is.js +3 -0
  108. package/dist/translations/languages/is.js.map +1 -1
  109. package/dist/translations/languages/it.d.ts.map +1 -1
  110. package/dist/translations/languages/it.js +3 -0
  111. package/dist/translations/languages/it.js.map +1 -1
  112. package/dist/translations/languages/ja.d.ts.map +1 -1
  113. package/dist/translations/languages/ja.js +3 -0
  114. package/dist/translations/languages/ja.js.map +1 -1
  115. package/dist/translations/languages/ko.d.ts.map +1 -1
  116. package/dist/translations/languages/ko.js +3 -0
  117. package/dist/translations/languages/ko.js.map +1 -1
  118. package/dist/translations/languages/lt.d.ts.map +1 -1
  119. package/dist/translations/languages/lt.js +3 -0
  120. package/dist/translations/languages/lt.js.map +1 -1
  121. package/dist/translations/languages/lv.d.ts.map +1 -1
  122. package/dist/translations/languages/lv.js +16 -13
  123. package/dist/translations/languages/lv.js.map +1 -1
  124. package/dist/translations/languages/my.d.ts.map +1 -1
  125. package/dist/translations/languages/my.js +3 -0
  126. package/dist/translations/languages/my.js.map +1 -1
  127. package/dist/translations/languages/nb.d.ts.map +1 -1
  128. package/dist/translations/languages/nb.js +3 -0
  129. package/dist/translations/languages/nb.js.map +1 -1
  130. package/dist/translations/languages/nl.d.ts.map +1 -1
  131. package/dist/translations/languages/nl.js +3 -0
  132. package/dist/translations/languages/nl.js.map +1 -1
  133. package/dist/translations/languages/pl.d.ts.map +1 -1
  134. package/dist/translations/languages/pl.js +3 -0
  135. package/dist/translations/languages/pl.js.map +1 -1
  136. package/dist/translations/languages/pt.d.ts.map +1 -1
  137. package/dist/translations/languages/pt.js +3 -0
  138. package/dist/translations/languages/pt.js.map +1 -1
  139. package/dist/translations/languages/ro.d.ts.map +1 -1
  140. package/dist/translations/languages/ro.js +3 -0
  141. package/dist/translations/languages/ro.js.map +1 -1
  142. package/dist/translations/languages/rs.d.ts.map +1 -1
  143. package/dist/translations/languages/rs.js +3 -0
  144. package/dist/translations/languages/rs.js.map +1 -1
  145. package/dist/translations/languages/rsLatin.d.ts.map +1 -1
  146. package/dist/translations/languages/rsLatin.js +3 -0
  147. package/dist/translations/languages/rsLatin.js.map +1 -1
  148. package/dist/translations/languages/ru.d.ts.map +1 -1
  149. package/dist/translations/languages/ru.js +3 -0
  150. package/dist/translations/languages/ru.js.map +1 -1
  151. package/dist/translations/languages/sk.d.ts.map +1 -1
  152. package/dist/translations/languages/sk.js +3 -0
  153. package/dist/translations/languages/sk.js.map +1 -1
  154. package/dist/translations/languages/sl.d.ts.map +1 -1
  155. package/dist/translations/languages/sl.js +3 -0
  156. package/dist/translations/languages/sl.js.map +1 -1
  157. package/dist/translations/languages/sv.d.ts.map +1 -1
  158. package/dist/translations/languages/sv.js +3 -0
  159. package/dist/translations/languages/sv.js.map +1 -1
  160. package/dist/translations/languages/ta.d.ts.map +1 -1
  161. package/dist/translations/languages/ta.js +3 -0
  162. package/dist/translations/languages/ta.js.map +1 -1
  163. package/dist/translations/languages/th.d.ts.map +1 -1
  164. package/dist/translations/languages/th.js +3 -0
  165. package/dist/translations/languages/th.js.map +1 -1
  166. package/dist/translations/languages/tr.d.ts.map +1 -1
  167. package/dist/translations/languages/tr.js +3 -0
  168. package/dist/translations/languages/tr.js.map +1 -1
  169. package/dist/translations/languages/uk.d.ts.map +1 -1
  170. package/dist/translations/languages/uk.js +3 -0
  171. package/dist/translations/languages/uk.js.map +1 -1
  172. package/dist/translations/languages/vi.d.ts.map +1 -1
  173. package/dist/translations/languages/vi.js +3 -0
  174. package/dist/translations/languages/vi.js.map +1 -1
  175. package/dist/translations/languages/zh.d.ts.map +1 -1
  176. package/dist/translations/languages/zh.js +3 -0
  177. package/dist/translations/languages/zh.js.map +1 -1
  178. package/dist/translations/languages/zhTw.d.ts.map +1 -1
  179. package/dist/translations/languages/zhTw.js +3 -0
  180. package/dist/translations/languages/zhTw.js.map +1 -1
  181. package/dist/types.d.ts +44 -1
  182. package/dist/types.d.ts.map +1 -1
  183. package/dist/types.js.map +1 -1
  184. package/dist/utilities/buildDisabledFieldRegex.d.ts +11 -2
  185. package/dist/utilities/buildDisabledFieldRegex.d.ts.map +1 -1
  186. package/dist/utilities/buildDisabledFieldRegex.js +33 -7
  187. package/dist/utilities/buildDisabledFieldRegex.js.map +1 -1
  188. package/dist/utilities/buildDisabledFieldRegex.spec.js +64 -0
  189. package/dist/utilities/buildDisabledFieldRegex.spec.js.map +1 -0
  190. package/dist/utilities/collectTimezoneCompanionFields.d.ts +24 -0
  191. package/dist/utilities/collectTimezoneCompanionFields.d.ts.map +1 -0
  192. package/dist/utilities/collectTimezoneCompanionFields.js +89 -0
  193. package/dist/utilities/collectTimezoneCompanionFields.js.map +1 -0
  194. package/dist/utilities/collectTimezoneCompanionFields.spec.js +319 -0
  195. package/dist/utilities/collectTimezoneCompanionFields.spec.js.map +1 -0
  196. package/dist/utilities/fieldToRegex.d.ts +14 -0
  197. package/dist/utilities/fieldToRegex.d.ts.map +1 -0
  198. package/dist/utilities/fieldToRegex.js +34 -0
  199. package/dist/utilities/fieldToRegex.js.map +1 -0
  200. package/dist/utilities/fieldToRegex.spec.js +151 -0
  201. package/dist/utilities/fieldToRegex.spec.js.map +1 -0
  202. package/dist/utilities/flattenObject.d.ts +7 -1
  203. package/dist/utilities/flattenObject.d.ts.map +1 -1
  204. package/dist/utilities/flattenObject.js +30 -18
  205. package/dist/utilities/flattenObject.js.map +1 -1
  206. package/dist/utilities/getExportFieldFunctions.d.ts.map +1 -1
  207. package/dist/utilities/getExportFieldFunctions.js +7 -0
  208. package/dist/utilities/getExportFieldFunctions.js.map +1 -1
  209. package/dist/utilities/getImportFieldFunctions.d.ts.map +1 -1
  210. package/dist/utilities/getImportFieldFunctions.js +2 -16
  211. package/dist/utilities/getImportFieldFunctions.js.map +1 -1
  212. package/dist/utilities/getPluginCollections.d.ts +1 -0
  213. package/dist/utilities/getPluginCollections.d.ts.map +1 -1
  214. package/dist/utilities/getPluginCollections.js +43 -10
  215. package/dist/utilities/getPluginCollections.js.map +1 -1
  216. package/dist/utilities/getSchemaColumns.d.ts +8 -2
  217. package/dist/utilities/getSchemaColumns.d.ts.map +1 -1
  218. package/dist/utilities/getSchemaColumns.js +61 -27
  219. package/dist/utilities/getSchemaColumns.js.map +1 -1
  220. package/dist/utilities/parseCSV.d.ts.map +1 -1
  221. package/dist/utilities/parseCSV.js +4 -10
  222. package/dist/utilities/parseCSV.js.map +1 -1
  223. package/dist/utilities/resolveLimit.d.ts +15 -0
  224. package/dist/utilities/resolveLimit.d.ts.map +1 -0
  225. package/dist/utilities/resolveLimit.js +21 -0
  226. package/dist/utilities/resolveLimit.js.map +1 -0
  227. package/dist/utilities/unflattenObject.d.ts +13 -0
  228. package/dist/utilities/unflattenObject.d.ts.map +1 -1
  229. package/dist/utilities/unflattenObject.js +64 -65
  230. package/dist/utilities/unflattenObject.js.map +1 -1
  231. package/package.json +8 -8
  232. package/dist/utilities/getvalueAtPath.d.ts +0 -15
  233. package/dist/utilities/getvalueAtPath.d.ts.map +0 -1
  234. package/dist/utilities/getvalueAtPath.js +0 -49
  235. package/dist/utilities/getvalueAtPath.js.map +0 -1
@@ -10,7 +10,7 @@ import { getFlattenedFieldKeys } from './getFlattenedFieldKeys.js';
10
10
  * - Provides consistent base columns
11
11
  * - Works for empty exports
12
12
  * - Ensures proper column ordering
13
- */ export const getSchemaColumns = ({ collectionConfig, disabledFields = [], fields: selectedFields, locale, localeCodes })=>{
13
+ */ export const getSchemaColumns = ({ collectionConfig, disabledFields = [], fields: selectedFields, locale, localeCodes, timezoneCompanionFields })=>{
14
14
  const hasVersions = Boolean(collectionConfig.versions);
15
15
  // Determine if we need locale expansion
16
16
  const expandLocales = locale === 'all' && localeCodes && localeCodes.length > 0;
@@ -30,7 +30,7 @@ import { getFlattenedFieldKeys } from './getFlattenedFieldKeys.js';
30
30
  ];
31
31
  // Filter to user-selected fields if specified
32
32
  if (selectedFields && selectedFields.length > 0) {
33
- schemaColumns = filterToSelectedFields(schemaColumns, selectedFields);
33
+ schemaColumns = filterToSelectedFields(schemaColumns, selectedFields, timezoneCompanionFields);
34
34
  }
35
35
  // Remove disabled fields
36
36
  if (disabledFields.length > 0) {
@@ -88,40 +88,64 @@ import { getFlattenedFieldKeys } from './getFlattenedFieldKeys.js';
88
88
  /**
89
89
  * Merges schema-derived columns with data-discovered columns.
90
90
  * Schema columns provide the base ordering, data columns add any additional
91
- * columns (e.g., array indices beyond 0, dynamic fields).
91
+ * columns (e.g., array indices beyond 0, dynamic fields, derived columns from toCSV).
92
92
  */ export const mergeColumns = (schemaColumns, dataColumns)=>{
93
93
  const result = [
94
94
  ...schemaColumns
95
95
  ];
96
96
  const schemaSet = new Set(schemaColumns);
97
+ const insertedDerived = new Map();
97
98
  // Add any data columns not in schema (preserves schema ordering, appends new ones)
98
99
  for (const col of dataColumns){
99
100
  if (!schemaSet.has(col)) {
100
- // Find the best position to insert this column
101
- // For array indices (e.g., field_1_*), insert after field_0_*
102
- const match = col.match(/^(.+?)_(\d+)(_.*)?$/);
103
- if (match) {
104
- const [, basePath, index, suffix] = match;
105
- if (basePath && index) {
106
- const prevIndex = parseInt(index, 10) - 1;
107
- const prevCol = `${basePath}_${prevIndex}${suffix ?? ''}`;
108
- const prevIdx = result.indexOf(prevCol);
109
- if (prevIdx !== -1) {
110
- // Insert after the previous index column
111
- result.splice(prevIdx + 1, 0, col);
112
- schemaSet.add(col);
113
- continue;
101
+ let inserted = false;
102
+ // Check if this is a derived column from a schema column (e.g., field_id, field_email)
103
+ // Pattern: schemaCol_suffix where suffix is NOT a number (array indices are handled separately)
104
+ for (const schemaCol of schemaColumns){
105
+ if (col.startsWith(`${schemaCol}_`)) {
106
+ const suffix = col.slice(schemaCol.length + 1);
107
+ // Skip if suffix starts with a digit (array index pattern like field_0_*)
108
+ if (!/^\d/.test(suffix)) {
109
+ const baseIdx = result.indexOf(schemaCol);
110
+ if (baseIdx !== -1) {
111
+ const derivedList = insertedDerived.get(schemaCol) || [];
112
+ const insertIdx = baseIdx + 1 + derivedList.length;
113
+ result.splice(insertIdx, 0, col);
114
+ derivedList.push(col);
115
+ insertedDerived.set(schemaCol, derivedList);
116
+ schemaSet.add(col);
117
+ inserted = true;
118
+ break;
119
+ }
114
120
  }
115
121
  }
116
122
  }
117
- // Otherwise append at the end (before timestamps)
118
- const createdAtIdx = result.indexOf('createdAt');
119
- if (createdAtIdx !== -1) {
120
- result.splice(createdAtIdx, 0, col);
121
- } else {
122
- result.push(col);
123
+ if (!inserted) {
124
+ // Check for array indices (e.g., field_1_*), insert after field_0_*
125
+ const match = col.match(/^(.+?)_(\d+)(_.*)?$/);
126
+ if (match) {
127
+ const [, basePath, index, suffix] = match;
128
+ if (basePath && index) {
129
+ const prevIndex = parseInt(index, 10) - 1;
130
+ const prevCol = `${basePath}_${prevIndex}${suffix ?? ''}`;
131
+ const prevIdx = result.indexOf(prevCol);
132
+ if (prevIdx !== -1) {
133
+ // Insert after the previous index column
134
+ result.splice(prevIdx + 1, 0, col);
135
+ schemaSet.add(col);
136
+ continue;
137
+ }
138
+ }
139
+ }
140
+ // Otherwise append at the end (before timestamps)
141
+ const createdAtIdx = result.indexOf('createdAt');
142
+ if (createdAtIdx !== -1) {
143
+ result.splice(createdAtIdx, 0, col);
144
+ } else {
145
+ result.push(col);
146
+ }
147
+ schemaSet.add(col);
123
148
  }
124
- schemaSet.add(col);
125
149
  }
126
150
  }
127
151
  return result;
@@ -130,7 +154,7 @@ import { getFlattenedFieldKeys } from './getFlattenedFieldKeys.js';
130
154
  * Filters schema columns to only include those matching user-selected fields.
131
155
  * Preserves the order specified by the user in selectedFields.
132
156
  * Handles nested field selection (e.g., 'group.value' includes 'group_value' and 'group_value_*')
133
- */ function filterToSelectedFields(columns, selectedFields) {
157
+ */ function filterToSelectedFields(columns, selectedFields, timezoneCompanionFields) {
134
158
  const result = [];
135
159
  const columnsSet = new Set(columns);
136
160
  // Convert selected fields to underscore notation patterns
@@ -142,15 +166,25 @@ import { getFlattenedFieldKeys } from './getFlattenedFieldKeys.js';
142
166
  prefix: `${underscored}_`
143
167
  };
144
168
  });
169
+ // Track which timezone companion fields were explicitly selected
170
+ const explicitlySelectedTzFields = new Set(selectedFields.filter((f)=>{
171
+ const underscored = f.replace(/\./g, '_');
172
+ return timezoneCompanionFields?.has(underscored);
173
+ }).map((f)=>f.replace(/\./g, '_')));
145
174
  // Iterate through user-specified fields in order to preserve their ordering
146
175
  for (const pattern of patterns){
147
- // First add the exact match if it exists
148
- if (columnsSet.has(pattern.exact)) {
176
+ // First add the exact match if it exists and not already added
177
+ // (it may have been added as a nested field of a previous pattern)
178
+ if (columnsSet.has(pattern.exact) && !result.includes(pattern.exact)) {
149
179
  result.push(pattern.exact);
150
180
  }
151
181
  // Then add any columns with the prefix (nested fields)
152
182
  for (const column of columns){
153
183
  if (column !== pattern.exact && column.startsWith(pattern.prefix)) {
184
+ // Skip auto-generated timezone companion fields unless explicitly selected
185
+ if (timezoneCompanionFields?.has(column) && !explicitlySelectedTzFields.has(column)) {
186
+ continue;
187
+ }
154
188
  if (!result.includes(column)) {
155
189
  result.push(column);
156
190
  }
@@ -1 +1 @@
1
- {"version":3,"sources":["../../src/utilities/getSchemaColumns.ts"],"sourcesContent":["import type { SanitizedCollectionConfig } from 'payload'\n\nimport { getFlattenedFieldKeys } from './getFlattenedFieldKeys.js'\n\nexport type GetSchemaColumnsArgs = {\n /**\n * The collection configuration to derive columns from\n */\n collectionConfig: SanitizedCollectionConfig\n /**\n * Array of disabled field paths from plugin config\n */\n disabledFields?: string[]\n /**\n * User-selected fields to export. If provided, only these fields (and their nested fields) will be included.\n */\n fields?: string[]\n /**\n * The locale to export. When 'all', localized fields are expanded to include all locale suffixes.\n */\n locale?: null | string\n /**\n * Available locale codes from config. Required when locale='all'.\n */\n localeCodes?: string[]\n}\n\n/**\n * Derives CSV column names from the collection schema.\n * This provides a base set of columns from field definitions.\n *\n * Note: For arrays/blocks with multiple items, the schema only generates index 0.\n * Additional indices from actual data should be merged with these columns.\n *\n * Benefits:\n * - Provides consistent base columns\n * - Works for empty exports\n * - Ensures proper column ordering\n */\nexport const getSchemaColumns = ({\n collectionConfig,\n disabledFields = [],\n fields: selectedFields,\n locale,\n localeCodes,\n}: GetSchemaColumnsArgs): string[] => {\n const hasVersions = Boolean(collectionConfig.versions)\n\n // Determine if we need locale expansion\n const expandLocales = locale === 'all' && localeCodes && localeCodes.length > 0\n\n // Get all possible columns from schema (excludes system fields like id, createdAt, updatedAt)\n let schemaColumns = getFlattenedFieldKeys(\n collectionConfig.flattenedFields,\n '',\n expandLocales ? 
{ localeCodes } : {},\n )\n\n // Add system fields that aren't in flattenedFields\n const systemFields = ['id', 'createdAt', 'updatedAt']\n schemaColumns = [...systemFields, ...schemaColumns]\n\n // Filter to user-selected fields if specified\n if (selectedFields && selectedFields.length > 0) {\n schemaColumns = filterToSelectedFields(schemaColumns, selectedFields)\n }\n\n // Remove disabled fields\n if (disabledFields.length > 0) {\n const disabledSet = new Set<string>()\n for (const path of disabledFields) {\n // Convert dot notation to underscore and add to set\n disabledSet.add(path.replace(/\\./g, '_'))\n }\n schemaColumns = schemaColumns.filter((col) => {\n // Check if column matches any disabled path\n for (const disabled of disabledSet) {\n if (col === disabled || col.startsWith(`${disabled}_`)) {\n return false\n }\n }\n return true\n })\n }\n\n // When user has selected specific fields, preserve their ordering\n // filterToSelectedFields() already returns columns in user's specified order\n if (selectedFields && selectedFields.length > 0) {\n return schemaColumns\n }\n\n // No fields selected - apply default ordering (id first, timestamps last)\n const orderedColumns: string[] = []\n\n // 1. ID always first\n if (schemaColumns.includes('id')) {\n orderedColumns.push('id')\n }\n\n // 2. Status field for versioned collections\n if (hasVersions) {\n orderedColumns.push('_status')\n }\n\n // 3. All other fields (excluding id, timestamps, status)\n const excludeFromMiddle = new Set(['_status', 'createdAt', 'id', 'updatedAt'])\n for (const col of schemaColumns) {\n if (!excludeFromMiddle.has(col)) {\n orderedColumns.push(col)\n }\n }\n\n // 4. 
Timestamps at the end\n if (schemaColumns.includes('createdAt')) {\n orderedColumns.push('createdAt')\n }\n if (schemaColumns.includes('updatedAt')) {\n orderedColumns.push('updatedAt')\n }\n\n return orderedColumns\n}\n\n/**\n * Merges schema-derived columns with data-discovered columns.\n * Schema columns provide the base ordering, data columns add any additional\n * columns (e.g., array indices beyond 0, dynamic fields).\n */\nexport const mergeColumns = (schemaColumns: string[], dataColumns: string[]): string[] => {\n const result = [...schemaColumns]\n const schemaSet = new Set(schemaColumns)\n\n // Add any data columns not in schema (preserves schema ordering, appends new ones)\n for (const col of dataColumns) {\n if (!schemaSet.has(col)) {\n // Find the best position to insert this column\n // For array indices (e.g., field_1_*), insert after field_0_*\n const match = col.match(/^(.+?)_(\\d+)(_.*)?$/)\n if (match) {\n const [, basePath, index, suffix] = match\n if (basePath && index) {\n const prevIndex = parseInt(index, 10) - 1\n const prevCol = `${basePath}_${prevIndex}${suffix ?? 
''}`\n const prevIdx = result.indexOf(prevCol)\n if (prevIdx !== -1) {\n // Insert after the previous index column\n result.splice(prevIdx + 1, 0, col)\n schemaSet.add(col)\n continue\n }\n }\n }\n // Otherwise append at the end (before timestamps)\n const createdAtIdx = result.indexOf('createdAt')\n if (createdAtIdx !== -1) {\n result.splice(createdAtIdx, 0, col)\n } else {\n result.push(col)\n }\n schemaSet.add(col)\n }\n }\n\n return result\n}\n\n/**\n * Filters schema columns to only include those matching user-selected fields.\n * Preserves the order specified by the user in selectedFields.\n * Handles nested field selection (e.g., 'group.value' includes 'group_value' and 'group_value_*')\n */\nfunction filterToSelectedFields(columns: string[], selectedFields: string[]): string[] {\n const result: string[] = []\n const columnsSet = new Set(columns)\n\n // Convert selected fields to underscore notation patterns\n const patterns = selectedFields.map((field) => {\n const underscored = field.replace(/\\./g, '_')\n return {\n exact: underscored,\n original: field,\n prefix: `${underscored}_`,\n }\n })\n\n // Iterate through user-specified fields in order to preserve their ordering\n for (const pattern of patterns) {\n // First add the exact match if it exists\n if (columnsSet.has(pattern.exact)) {\n result.push(pattern.exact)\n }\n\n // Then add any columns with the prefix (nested fields)\n for (const column of columns) {\n if (column !== pattern.exact && column.startsWith(pattern.prefix)) {\n if (!result.includes(column)) {\n result.push(column)\n }\n }\n }\n }\n\n return 
result\n}\n"],"names":["getFlattenedFieldKeys","getSchemaColumns","collectionConfig","disabledFields","fields","selectedFields","locale","localeCodes","hasVersions","Boolean","versions","expandLocales","length","schemaColumns","flattenedFields","systemFields","filterToSelectedFields","disabledSet","Set","path","add","replace","filter","col","disabled","startsWith","orderedColumns","includes","push","excludeFromMiddle","has","mergeColumns","dataColumns","result","schemaSet","match","basePath","index","suffix","prevIndex","parseInt","prevCol","prevIdx","indexOf","splice","createdAtIdx","columns","columnsSet","patterns","map","field","underscored","exact","original","prefix","pattern","column"],"mappings":"AAEA,SAASA,qBAAqB,QAAQ,6BAA4B;AAyBlE;;;;;;;;;;;CAWC,GACD,OAAO,MAAMC,mBAAmB,CAAC,EAC/BC,gBAAgB,EAChBC,iBAAiB,EAAE,EACnBC,QAAQC,cAAc,EACtBC,MAAM,EACNC,WAAW,EACU;IACrB,MAAMC,cAAcC,QAAQP,iBAAiBQ,QAAQ;IAErD,wCAAwC;IACxC,MAAMC,gBAAgBL,WAAW,SAASC,eAAeA,YAAYK,MAAM,GAAG;IAE9E,8FAA8F;IAC9F,IAAIC,gBAAgBb,sBAClBE,iBAAiBY,eAAe,EAChC,IACAH,gBAAgB;QAAEJ;IAAY,IAAI,CAAC;IAGrC,mDAAmD;IACnD,MAAMQ,eAAe;QAAC;QAAM;QAAa;KAAY;IACrDF,gBAAgB;WAAIE;WAAiBF;KAAc;IAEnD,8CAA8C;IAC9C,IAAIR,kBAAkBA,eAAeO,MAAM,GAAG,GAAG;QAC/CC,gBAAgBG,uBAAuBH,eAAeR;IACxD;IAEA,yBAAyB;IACzB,IAAIF,eAAeS,MAAM,GAAG,GAAG;QAC7B,MAAMK,cAAc,IAAIC;QACxB,KAAK,MAAMC,QAAQhB,eAAgB;YACjC,oDAAoD;YACpDc,YAAYG,GAAG,CAACD,KAAKE,OAAO,CAAC,OAAO;QACtC;QACAR,gBAAgBA,cAAcS,MAAM,CAAC,CAACC;YACpC,4CAA4C;YAC5C,KAAK,MAAMC,YAAYP,YAAa;gBAClC,IAAIM,QAAQC,YAAYD,IAAIE,UAAU,CAAC,GAAGD,SAAS,CAAC,CAAC,GAAG;oBACtD,OAAO;gBACT;YACF;YACA,OAAO;QACT;IACF;IAEA,kEAAkE;IAClE,6EAA6E;IAC7E,IAAInB,kBAAkBA,eAAeO,MAAM,GAAG,GAAG;QAC/C,OAAOC;IACT;IAEA,0EAA0E;IAC1E,MAAMa,iBAA2B,EAAE;IAEnC,qBAAqB;IACrB,IAAIb,cAAcc,QAAQ,CAAC,OAAO;QAChCD,eAAeE,IAAI,CAAC;IACtB;IAEA,4CAA4C;IAC5C,IAAIpB,aAAa;QACfkB,eAAeE,IAAI,CAAC;IACtB;IAEA,yDAAyD;IACzD,MAAMC,oBAAoB,IAAIX,IAAI;QAAC;QAAW;QAAa;QAAM;KAAY;IAC7E,KAAK,MAAMK,OAAOV,cAAe;QAC/B,IAAI,CAACgB,kBAAkBC,GAAG,CAACP,MAAM;YAC/BG,eAAeE,IAAI,CA
ACL;QACtB;IACF;IAEA,2BAA2B;IAC3B,IAAIV,cAAcc,QAAQ,CAAC,cAAc;QACvCD,eAAeE,IAAI,CAAC;IACtB;IACA,IAAIf,cAAcc,QAAQ,CAAC,cAAc;QACvCD,eAAeE,IAAI,CAAC;IACtB;IAEA,OAAOF;AACT,EAAC;AAED;;;;CAIC,GACD,OAAO,MAAMK,eAAe,CAAClB,eAAyBmB;IACpD,MAAMC,SAAS;WAAIpB;KAAc;IACjC,MAAMqB,YAAY,IAAIhB,IAAIL;IAE1B,mFAAmF;IACnF,KAAK,MAAMU,OAAOS,YAAa;QAC7B,IAAI,CAACE,UAAUJ,GAAG,CAACP,MAAM;YACvB,+CAA+C;YAC/C,8DAA8D;YAC9D,MAAMY,QAAQZ,IAAIY,KAAK,CAAC;YACxB,IAAIA,OAAO;gBACT,MAAM,GAAGC,UAAUC,OAAOC,OAAO,GAAGH;gBACpC,IAAIC,YAAYC,OAAO;oBACrB,MAAME,YAAYC,SAASH,OAAO,MAAM;oBACxC,MAAMI,UAAU,GAAGL,SAAS,CAAC,EAAEG,YAAYD,UAAU,IAAI;oBACzD,MAAMI,UAAUT,OAAOU,OAAO,CAACF;oBAC/B,IAAIC,YAAY,CAAC,GAAG;wBAClB,yCAAyC;wBACzCT,OAAOW,MAAM,CAACF,UAAU,GAAG,GAAGnB;wBAC9BW,UAAUd,GAAG,CAACG;wBACd;oBACF;gBACF;YACF;YACA,kDAAkD;YAClD,MAAMsB,eAAeZ,OAAOU,OAAO,CAAC;YACpC,IAAIE,iBAAiB,CAAC,GAAG;gBACvBZ,OAAOW,MAAM,CAACC,cAAc,GAAGtB;YACjC,OAAO;gBACLU,OAAOL,IAAI,CAACL;YACd;YACAW,UAAUd,GAAG,CAACG;QAChB;IACF;IAEA,OAAOU;AACT,EAAC;AAED;;;;CAIC,GACD,SAASjB,uBAAuB8B,OAAiB,EAAEzC,cAAwB;IACzE,MAAM4B,SAAmB,EAAE;IAC3B,MAAMc,aAAa,IAAI7B,IAAI4B;IAE3B,0DAA0D;IAC1D,MAAME,WAAW3C,eAAe4C,GAAG,CAAC,CAACC;QACnC,MAAMC,cAAcD,MAAM7B,OAAO,CAAC,OAAO;QACzC,OAAO;YACL+B,OAAOD;YACPE,UAAUH;YACVI,QAAQ,GAAGH,YAAY,CAAC,CAAC;QAC3B;IACF;IAEA,4EAA4E;IAC5E,KAAK,MAAMI,WAAWP,SAAU;QAC9B,yCAAyC;QACzC,IAAID,WAAWjB,GAAG,CAACyB,QAAQH,KAAK,GAAG;YACjCnB,OAAOL,IAAI,CAAC2B,QAAQH,KAAK;QAC3B;QAEA,uDAAuD;QACvD,KAAK,MAAMI,UAAUV,QAAS;YAC5B,IAAIU,WAAWD,QAAQH,KAAK,IAAII,OAAO/B,UAAU,CAAC8B,QAAQD,MAAM,GAAG;gBACjE,IAAI,CAACrB,OAAON,QAAQ,CAAC6B,SAAS;oBAC5BvB,OAAOL,IAAI,CAAC4B;gBACd;YACF;QACF;IACF;IAEA,OAAOvB;AACT"}
1
+ {"version":3,"sources":["../../src/utilities/getSchemaColumns.ts"],"sourcesContent":["import type { SanitizedCollectionConfig } from 'payload'\n\nimport { getFlattenedFieldKeys } from './getFlattenedFieldKeys.js'\n\nexport type GetSchemaColumnsArgs = {\n /**\n * The collection configuration to derive columns from\n */\n collectionConfig: SanitizedCollectionConfig\n /**\n * Array of disabled field paths from plugin config\n */\n disabledFields?: string[]\n /**\n * User-selected fields to export. If provided, only these fields (and their nested fields) will be included.\n */\n fields?: string[]\n /**\n * The locale to export. When 'all', localized fields are expanded to include all locale suffixes.\n */\n locale?: null | string\n /**\n * Available locale codes from config. Required when locale='all'.\n */\n localeCodes?: string[]\n /**\n * Set of auto-generated timezone companion field names (from collectTimezoneCompanionFields).\n * These fields are excluded unless explicitly selected.\n * If not provided, no timezone filtering is applied.\n */\n timezoneCompanionFields?: Set<string>\n}\n\n/**\n * Derives CSV column names from the collection schema.\n * This provides a base set of columns from field definitions.\n *\n * Note: For arrays/blocks with multiple items, the schema only generates index 0.\n * Additional indices from actual data should be merged with these columns.\n *\n * Benefits:\n * - Provides consistent base columns\n * - Works for empty exports\n * - Ensures proper column ordering\n */\nexport const getSchemaColumns = ({\n collectionConfig,\n disabledFields = [],\n fields: selectedFields,\n locale,\n localeCodes,\n timezoneCompanionFields,\n}: GetSchemaColumnsArgs): string[] => {\n const hasVersions = Boolean(collectionConfig.versions)\n\n // Determine if we need locale expansion\n const expandLocales = locale === 'all' && localeCodes && localeCodes.length > 0\n\n // Get all possible columns from schema (excludes system fields like id, createdAt, 
updatedAt)\n let schemaColumns = getFlattenedFieldKeys(\n collectionConfig.flattenedFields,\n '',\n expandLocales ? { localeCodes } : {},\n )\n\n // Add system fields that aren't in flattenedFields\n const systemFields = ['id', 'createdAt', 'updatedAt']\n schemaColumns = [...systemFields, ...schemaColumns]\n\n // Filter to user-selected fields if specified\n if (selectedFields && selectedFields.length > 0) {\n schemaColumns = filterToSelectedFields(schemaColumns, selectedFields, timezoneCompanionFields)\n }\n\n // Remove disabled fields\n if (disabledFields.length > 0) {\n const disabledSet = new Set<string>()\n for (const path of disabledFields) {\n // Convert dot notation to underscore and add to set\n disabledSet.add(path.replace(/\\./g, '_'))\n }\n schemaColumns = schemaColumns.filter((col) => {\n // Check if column matches any disabled path\n for (const disabled of disabledSet) {\n if (col === disabled || col.startsWith(`${disabled}_`)) {\n return false\n }\n }\n return true\n })\n }\n\n // When user has selected specific fields, preserve their ordering\n // filterToSelectedFields() already returns columns in user's specified order\n if (selectedFields && selectedFields.length > 0) {\n return schemaColumns\n }\n\n // No fields selected - apply default ordering (id first, timestamps last)\n const orderedColumns: string[] = []\n\n // 1. ID always first\n if (schemaColumns.includes('id')) {\n orderedColumns.push('id')\n }\n\n // 2. Status field for versioned collections\n if (hasVersions) {\n orderedColumns.push('_status')\n }\n\n // 3. All other fields (excluding id, timestamps, status)\n const excludeFromMiddle = new Set(['_status', 'createdAt', 'id', 'updatedAt'])\n for (const col of schemaColumns) {\n if (!excludeFromMiddle.has(col)) {\n orderedColumns.push(col)\n }\n }\n\n // 4. 
Timestamps at the end\n if (schemaColumns.includes('createdAt')) {\n orderedColumns.push('createdAt')\n }\n if (schemaColumns.includes('updatedAt')) {\n orderedColumns.push('updatedAt')\n }\n\n return orderedColumns\n}\n\n/**\n * Merges schema-derived columns with data-discovered columns.\n * Schema columns provide the base ordering, data columns add any additional\n * columns (e.g., array indices beyond 0, dynamic fields, derived columns from toCSV).\n */\nexport const mergeColumns = (schemaColumns: string[], dataColumns: string[]): string[] => {\n const result = [...schemaColumns]\n const schemaSet = new Set(schemaColumns)\n const insertedDerived = new Map<string, string[]>()\n\n // Add any data columns not in schema (preserves schema ordering, appends new ones)\n for (const col of dataColumns) {\n if (!schemaSet.has(col)) {\n let inserted = false\n\n // Check if this is a derived column from a schema column (e.g., field_id, field_email)\n // Pattern: schemaCol_suffix where suffix is NOT a number (array indices are handled separately)\n for (const schemaCol of schemaColumns) {\n if (col.startsWith(`${schemaCol}_`)) {\n const suffix = col.slice(schemaCol.length + 1)\n // Skip if suffix starts with a digit (array index pattern like field_0_*)\n if (!/^\\d/.test(suffix)) {\n const baseIdx = result.indexOf(schemaCol)\n if (baseIdx !== -1) {\n const derivedList = insertedDerived.get(schemaCol) || []\n const insertIdx = baseIdx + 1 + derivedList.length\n result.splice(insertIdx, 0, col)\n derivedList.push(col)\n insertedDerived.set(schemaCol, derivedList)\n schemaSet.add(col)\n inserted = true\n break\n }\n }\n }\n }\n\n if (!inserted) {\n // Check for array indices (e.g., field_1_*), insert after field_0_*\n const match = col.match(/^(.+?)_(\\d+)(_.*)?$/)\n if (match) {\n const [, basePath, index, suffix] = match\n if (basePath && index) {\n const prevIndex = parseInt(index, 10) - 1\n const prevCol = `${basePath}_${prevIndex}${suffix ?? 
''}`\n const prevIdx = result.indexOf(prevCol)\n if (prevIdx !== -1) {\n // Insert after the previous index column\n result.splice(prevIdx + 1, 0, col)\n schemaSet.add(col)\n continue\n }\n }\n }\n\n // Otherwise append at the end (before timestamps)\n const createdAtIdx = result.indexOf('createdAt')\n if (createdAtIdx !== -1) {\n result.splice(createdAtIdx, 0, col)\n } else {\n result.push(col)\n }\n schemaSet.add(col)\n }\n }\n }\n\n return result\n}\n\n/**\n * Filters schema columns to only include those matching user-selected fields.\n * Preserves the order specified by the user in selectedFields.\n * Handles nested field selection (e.g., 'group.value' includes 'group_value' and 'group_value_*')\n */\nfunction filterToSelectedFields(\n columns: string[],\n selectedFields: string[],\n timezoneCompanionFields?: Set<string>,\n): string[] {\n const result: string[] = []\n const columnsSet = new Set(columns)\n\n // Convert selected fields to underscore notation patterns\n const patterns = selectedFields.map((field) => {\n const underscored = field.replace(/\\./g, '_')\n return {\n exact: underscored,\n original: field,\n prefix: `${underscored}_`,\n }\n })\n\n // Track which timezone companion fields were explicitly selected\n const explicitlySelectedTzFields = new Set(\n selectedFields\n .filter((f) => {\n const underscored = f.replace(/\\./g, '_')\n return timezoneCompanionFields?.has(underscored)\n })\n .map((f) => f.replace(/\\./g, '_')),\n )\n\n // Iterate through user-specified fields in order to preserve their ordering\n for (const pattern of patterns) {\n // First add the exact match if it exists and not already added\n // (it may have been added as a nested field of a previous pattern)\n if (columnsSet.has(pattern.exact) && !result.includes(pattern.exact)) {\n result.push(pattern.exact)\n }\n\n // Then add any columns with the prefix (nested fields)\n for (const column of columns) {\n if (column !== pattern.exact && column.startsWith(pattern.prefix)) {\n // 
Skip auto-generated timezone companion fields unless explicitly selected\n if (timezoneCompanionFields?.has(column) && !explicitlySelectedTzFields.has(column)) {\n continue\n }\n if (!result.includes(column)) {\n result.push(column)\n }\n }\n }\n }\n\n return result\n}\n"],"names":["getFlattenedFieldKeys","getSchemaColumns","collectionConfig","disabledFields","fields","selectedFields","locale","localeCodes","timezoneCompanionFields","hasVersions","Boolean","versions","expandLocales","length","schemaColumns","flattenedFields","systemFields","filterToSelectedFields","disabledSet","Set","path","add","replace","filter","col","disabled","startsWith","orderedColumns","includes","push","excludeFromMiddle","has","mergeColumns","dataColumns","result","schemaSet","insertedDerived","Map","inserted","schemaCol","suffix","slice","test","baseIdx","indexOf","derivedList","get","insertIdx","splice","set","match","basePath","index","prevIndex","parseInt","prevCol","prevIdx","createdAtIdx","columns","columnsSet","patterns","map","field","underscored","exact","original","prefix","explicitlySelectedTzFields","f","pattern","column"],"mappings":"AAEA,SAASA,qBAAqB,QAAQ,6BAA4B;AA+BlE;;;;;;;;;;;CAWC,GACD,OAAO,MAAMC,mBAAmB,CAAC,EAC/BC,gBAAgB,EAChBC,iBAAiB,EAAE,EACnBC,QAAQC,cAAc,EACtBC,MAAM,EACNC,WAAW,EACXC,uBAAuB,EACF;IACrB,MAAMC,cAAcC,QAAQR,iBAAiBS,QAAQ;IAErD,wCAAwC;IACxC,MAAMC,gBAAgBN,WAAW,SAASC,eAAeA,YAAYM,MAAM,GAAG;IAE9E,8FAA8F;IAC9F,IAAIC,gBAAgBd,sBAClBE,iBAAiBa,eAAe,EAChC,IACAH,gBAAgB;QAAEL;IAAY,IAAI,CAAC;IAGrC,mDAAmD;IACnD,MAAMS,eAAe;QAAC;QAAM;QAAa;KAAY;IACrDF,gBAAgB;WAAIE;WAAiBF;KAAc;IAEnD,8CAA8C;IAC9C,IAAIT,kBAAkBA,eAAeQ,MAAM,GAAG,GAAG;QAC/CC,gBAAgBG,uBAAuBH,eAAeT,gBAAgBG;IACxE;IAEA,yBAAyB;IACzB,IAAIL,eAAeU,MAAM,GAAG,GAAG;QAC7B,MAAMK,cAAc,IAAIC;QACxB,KAAK,MAAMC,QAAQjB,eAAgB;YACjC,oDAAoD;YACpDe,YAAYG,GAAG,CAACD,KAAKE,OAAO,CAAC,OAAO;QACtC;QACAR,gBAAgBA,cAAcS,MAAM,CAAC,CAACC;YACpC,4CAA4C;YAC5C,KAAK,MAAMC,YAAYP,YAAa;gBAClC,IAAIM,QAAQC,YAAYD,IAAIE,UAAU,CAAC,GAAGD,SAAS,CAAC,CAAC,GAAG;oBAC
tD,OAAO;gBACT;YACF;YACA,OAAO;QACT;IACF;IAEA,kEAAkE;IAClE,6EAA6E;IAC7E,IAAIpB,kBAAkBA,eAAeQ,MAAM,GAAG,GAAG;QAC/C,OAAOC;IACT;IAEA,0EAA0E;IAC1E,MAAMa,iBAA2B,EAAE;IAEnC,qBAAqB;IACrB,IAAIb,cAAcc,QAAQ,CAAC,OAAO;QAChCD,eAAeE,IAAI,CAAC;IACtB;IAEA,4CAA4C;IAC5C,IAAIpB,aAAa;QACfkB,eAAeE,IAAI,CAAC;IACtB;IAEA,yDAAyD;IACzD,MAAMC,oBAAoB,IAAIX,IAAI;QAAC;QAAW;QAAa;QAAM;KAAY;IAC7E,KAAK,MAAMK,OAAOV,cAAe;QAC/B,IAAI,CAACgB,kBAAkBC,GAAG,CAACP,MAAM;YAC/BG,eAAeE,IAAI,CAACL;QACtB;IACF;IAEA,2BAA2B;IAC3B,IAAIV,cAAcc,QAAQ,CAAC,cAAc;QACvCD,eAAeE,IAAI,CAAC;IACtB;IACA,IAAIf,cAAcc,QAAQ,CAAC,cAAc;QACvCD,eAAeE,IAAI,CAAC;IACtB;IAEA,OAAOF;AACT,EAAC;AAED;;;;CAIC,GACD,OAAO,MAAMK,eAAe,CAAClB,eAAyBmB;IACpD,MAAMC,SAAS;WAAIpB;KAAc;IACjC,MAAMqB,YAAY,IAAIhB,IAAIL;IAC1B,MAAMsB,kBAAkB,IAAIC;IAE5B,mFAAmF;IACnF,KAAK,MAAMb,OAAOS,YAAa;QAC7B,IAAI,CAACE,UAAUJ,GAAG,CAACP,MAAM;YACvB,IAAIc,WAAW;YAEf,uFAAuF;YACvF,gGAAgG;YAChG,KAAK,MAAMC,aAAazB,cAAe;gBACrC,IAAIU,IAAIE,UAAU,CAAC,GAAGa,UAAU,CAAC,CAAC,GAAG;oBACnC,MAAMC,SAAShB,IAAIiB,KAAK,CAACF,UAAU1B,MAAM,GAAG;oBAC5C,0EAA0E;oBAC1E,IAAI,CAAC,MAAM6B,IAAI,CAACF,SAAS;wBACvB,MAAMG,UAAUT,OAAOU,OAAO,CAACL;wBAC/B,IAAII,YAAY,CAAC,GAAG;4BAClB,MAAME,cAAcT,gBAAgBU,GAAG,CAACP,cAAc,EAAE;4BACxD,MAAMQ,YAAYJ,UAAU,IAAIE,YAAYhC,MAAM;4BAClDqB,OAAOc,MAAM,CAACD,WAAW,GAAGvB;4BAC5BqB,YAAYhB,IAAI,CAACL;4BACjBY,gBAAgBa,GAAG,CAACV,WAAWM;4BAC/BV,UAAUd,GAAG,CAACG;4BACdc,WAAW;4BACX;wBACF;oBACF;gBACF;YACF;YAEA,IAAI,CAACA,UAAU;gBACb,oEAAoE;gBACpE,MAAMY,QAAQ1B,IAAI0B,KAAK,CAAC;gBACxB,IAAIA,OAAO;oBACT,MAAM,GAAGC,UAAUC,OAAOZ,OAAO,GAAGU;oBACpC,IAAIC,YAAYC,OAAO;wBACrB,MAAMC,YAAYC,SAASF,OAAO,MAAM;wBACxC,MAAMG,UAAU,GAAGJ,SAAS,CAAC,EAAEE,YAAYb,UAAU,IAAI;wBACzD,MAAMgB,UAAUtB,OAAOU,OAAO,CAACW;wBAC/B,IAAIC,YAAY,CAAC,GAAG;4BAClB,yCAAyC;4BACzCtB,OAAOc,MAAM,CAACQ,UAAU,GAAG,GAAGhC;4BAC9BW,UAAUd,GAAG,CAACG;4BACd;wBACF;oBACF;gBACF;gBAEA,kDAAkD;gBAClD,MAAMiC,eAAevB,OAAOU,OAAO,CAAC;gBACpC,IAAIa,iBAAiB,CAAC,GAAG;oBACvBvB,OAAOc,MAAM,CAACS,cAAc,GAAGjC;gBACjC,OAAO;oBACLU,OAAOL,IAAI,CAACL;gBACd;gBACAW,UAAUd,GAAG,CAACG;YAChB;QACF;
IACF;IAEA,OAAOU;AACT,EAAC;AAED;;;;CAIC,GACD,SAASjB,uBACPyC,OAAiB,EACjBrD,cAAwB,EACxBG,uBAAqC;IAErC,MAAM0B,SAAmB,EAAE;IAC3B,MAAMyB,aAAa,IAAIxC,IAAIuC;IAE3B,0DAA0D;IAC1D,MAAME,WAAWvD,eAAewD,GAAG,CAAC,CAACC;QACnC,MAAMC,cAAcD,MAAMxC,OAAO,CAAC,OAAO;QACzC,OAAO;YACL0C,OAAOD;YACPE,UAAUH;YACVI,QAAQ,GAAGH,YAAY,CAAC,CAAC;QAC3B;IACF;IAEA,iEAAiE;IACjE,MAAMI,6BAA6B,IAAIhD,IACrCd,eACGkB,MAAM,CAAC,CAAC6C;QACP,MAAML,cAAcK,EAAE9C,OAAO,CAAC,OAAO;QACrC,OAAOd,yBAAyBuB,IAAIgC;IACtC,GACCF,GAAG,CAAC,CAACO,IAAMA,EAAE9C,OAAO,CAAC,OAAO;IAGjC,4EAA4E;IAC5E,KAAK,MAAM+C,WAAWT,SAAU;QAC9B,+DAA+D;QAC/D,mEAAmE;QACnE,IAAID,WAAW5B,GAAG,CAACsC,QAAQL,KAAK,KAAK,CAAC9B,OAAON,QAAQ,CAACyC,QAAQL,KAAK,GAAG;YACpE9B,OAAOL,IAAI,CAACwC,QAAQL,KAAK;QAC3B;QAEA,uDAAuD;QACvD,KAAK,MAAMM,UAAUZ,QAAS;YAC5B,IAAIY,WAAWD,QAAQL,KAAK,IAAIM,OAAO5C,UAAU,CAAC2C,QAAQH,MAAM,GAAG;gBACjE,2EAA2E;gBAC3E,IAAI1D,yBAAyBuB,IAAIuC,WAAW,CAACH,2BAA2BpC,GAAG,CAACuC,SAAS;oBACnF;gBACF;gBACA,IAAI,CAACpC,OAAON,QAAQ,CAAC0C,SAAS;oBAC5BpC,OAAOL,IAAI,CAACyC;gBACd;YACF;QACF;IACF;IAEA,OAAOpC;AACT"}
@@ -1 +1 @@
1
- {"version":3,"file":"parseCSV.d.ts","sourceRoot":"","sources":["../../src/utilities/parseCSV.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,SAAS,CAAA;AAI7C,MAAM,MAAM,YAAY,GAAG;IACzB,IAAI,EAAE,MAAM,GAAG,MAAM,CAAA;IACrB,GAAG,EAAE,cAAc,CAAA;CACpB,CAAA;AAED;;;GAGG;AACH,eAAO,MAAM,QAAQ,kBAAyB,YAAY,KAAG,OAAO,CAAC,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,EAAE,CAmE7F,CAAA"}
1
+ {"version":3,"file":"parseCSV.d.ts","sourceRoot":"","sources":["../../src/utilities/parseCSV.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,SAAS,CAAA;AAI7C,MAAM,MAAM,YAAY,GAAG;IACzB,IAAI,EAAE,MAAM,GAAG,MAAM,CAAA;IACrB,GAAG,EAAE,cAAc,CAAA;CACpB,CAAA;AAED;;;GAGG;AACH,eAAO,MAAM,QAAQ,kBAAyB,YAAY,KAAG,OAAO,CAAC,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,EAAE,CA6D7F,CAAA"}
@@ -7,36 +7,30 @@ import { parse } from 'csv-parse';
7
7
  const records = [];
8
8
  const parser = parse({
9
9
  cast: (value, _context)=>{
10
- // Empty strings should be undefined (field not present in update)
11
- // This preserves existing data instead of overwriting with null
10
+ // Empty strings become undefined to preserve existing data during updates
12
11
  if (value === '') {
13
12
  return undefined;
14
13
  }
15
- // Handle booleans
16
14
  if (value === 'true') {
17
15
  return true;
18
16
  }
19
17
  if (value === 'false') {
20
18
  return false;
21
19
  }
22
- // Handle explicit null - user must type "null" to set field to null
20
+ // Explicit null requires typing "null" or "NULL"
23
21
  if (value === 'null' || value === 'NULL') {
24
22
  return null;
25
23
  }
26
- // Don't auto-convert to numbers if the value contains a comma
27
- // This allows hasMany fields to use comma-separated values
24
+ // Keep comma-separated values as strings for hasMany fields
28
25
  if (value.includes(',')) {
29
- return value // Keep as string for comma-separated values
30
- ;
26
+ return value;
31
27
  }
32
- // Handle numbers (only after checking for commas)
33
28
  if (!isNaN(Number(value)) && value !== '') {
34
29
  const num = Number(value);
35
30
  if (String(num) === value || value.includes('.')) {
36
31
  return num;
37
32
  }
38
33
  }
39
- // Return as string
40
34
  return value;
41
35
  },
42
36
  columns: true,
@@ -1 +1 @@
1
- {"version":3,"sources":["../../src/utilities/parseCSV.ts"],"sourcesContent":["import type { PayloadRequest } from 'payload'\n\nimport { parse } from 'csv-parse'\n\nexport type ParseCSVArgs = {\n data: Buffer | string\n req: PayloadRequest\n}\n\n/**\n * Parses CSV data into an array of record objects.\n * Handles type coercion for booleans, numbers, and null values.\n */\nexport const parseCSV = async ({ data, req }: ParseCSVArgs): Promise<Record<string, unknown>[]> => {\n return new Promise((resolve, reject) => {\n const records: Record<string, unknown>[] = []\n\n const parser = parse({\n cast: (value, _context) => {\n // Empty strings should be undefined (field not present in update)\n // This preserves existing data instead of overwriting with null\n if (value === '') {\n return undefined\n }\n\n // Handle booleans\n if (value === 'true') {\n return true\n }\n if (value === 'false') {\n return false\n }\n\n // Handle explicit null - user must type \"null\" to set field to null\n if (value === 'null' || value === 'NULL') {\n return null\n }\n\n // Don't auto-convert to numbers if the value contains a comma\n // This allows hasMany fields to use comma-separated values\n if (value.includes(',')) {\n return value // Keep as string for comma-separated values\n }\n\n // Handle numbers (only after checking for commas)\n if (!isNaN(Number(value)) && value !== '') {\n const num = Number(value)\n\n if (String(num) === value || value.includes('.')) {\n return num\n }\n }\n\n // Return as string\n return value\n },\n columns: true,\n skip_empty_lines: true,\n trim: true,\n })\n\n parser.on('readable', () => {\n let record\n while ((record = parser.read()) !== null) {\n records.push(record)\n }\n })\n\n parser.on('error', (err) => {\n req.payload.logger.error({ err, msg: 'Error parsing CSV' })\n reject(err)\n })\n\n parser.on('end', () => {\n resolve(records)\n })\n\n parser.write(data)\n parser.end()\n 
})\n}\n"],"names":["parse","parseCSV","data","req","Promise","resolve","reject","records","parser","cast","value","_context","undefined","includes","isNaN","Number","num","String","columns","skip_empty_lines","trim","on","record","read","push","err","payload","logger","error","msg","write","end"],"mappings":"AAEA,SAASA,KAAK,QAAQ,YAAW;AAOjC;;;CAGC,GACD,OAAO,MAAMC,WAAW,OAAO,EAAEC,IAAI,EAAEC,GAAG,EAAgB;IACxD,OAAO,IAAIC,QAAQ,CAACC,SAASC;QAC3B,MAAMC,UAAqC,EAAE;QAE7C,MAAMC,SAASR,MAAM;YACnBS,MAAM,CAACC,OAAOC;gBACZ,kEAAkE;gBAClE,gEAAgE;gBAChE,IAAID,UAAU,IAAI;oBAChB,OAAOE;gBACT;gBAEA,kBAAkB;gBAClB,IAAIF,UAAU,QAAQ;oBACpB,OAAO;gBACT;gBACA,IAAIA,UAAU,SAAS;oBACrB,OAAO;gBACT;gBAEA,oEAAoE;gBACpE,IAAIA,UAAU,UAAUA,UAAU,QAAQ;oBACxC,OAAO;gBACT;gBAEA,8DAA8D;gBAC9D,2DAA2D;gBAC3D,IAAIA,MAAMG,QAAQ,CAAC,MAAM;oBACvB,OAAOH,MAAM,4CAA4C;;gBAC3D;gBAEA,kDAAkD;gBAClD,IAAI,CAACI,MAAMC,OAAOL,WAAWA,UAAU,IAAI;oBACzC,MAAMM,MAAMD,OAAOL;oBAEnB,IAAIO,OAAOD,SAASN,SAASA,MAAMG,QAAQ,CAAC,MAAM;wBAChD,OAAOG;oBACT;gBACF;gBAEA,mBAAmB;gBACnB,OAAON;YACT;YACAQ,SAAS;YACTC,kBAAkB;YAClBC,MAAM;QACR;QAEAZ,OAAOa,EAAE,CAAC,YAAY;YACpB,IAAIC;YACJ,MAAO,AAACA,CAAAA,SAASd,OAAOe,IAAI,EAAC,MAAO,KAAM;gBACxChB,QAAQiB,IAAI,CAACF;YACf;QACF;QAEAd,OAAOa,EAAE,CAAC,SAAS,CAACI;YAClBtB,IAAIuB,OAAO,CAACC,MAAM,CAACC,KAAK,CAAC;gBAAEH;gBAAKI,KAAK;YAAoB;YACzDvB,OAAOmB;QACT;QAEAjB,OAAOa,EAAE,CAAC,OAAO;YACfhB,QAAQE;QACV;QAEAC,OAAOsB,KAAK,CAAC5B;QACbM,OAAOuB,GAAG;IACZ;AACF,EAAC"}
1
+ {"version":3,"sources":["../../src/utilities/parseCSV.ts"],"sourcesContent":["import type { PayloadRequest } from 'payload'\n\nimport { parse } from 'csv-parse'\n\nexport type ParseCSVArgs = {\n data: Buffer | string\n req: PayloadRequest\n}\n\n/**\n * Parses CSV data into an array of record objects.\n * Handles type coercion for booleans, numbers, and null values.\n */\nexport const parseCSV = async ({ data, req }: ParseCSVArgs): Promise<Record<string, unknown>[]> => {\n return new Promise((resolve, reject) => {\n const records: Record<string, unknown>[] = []\n\n const parser = parse({\n cast: (value, _context) => {\n // Empty strings become undefined to preserve existing data during updates\n if (value === '') {\n return undefined\n }\n\n if (value === 'true') {\n return true\n }\n if (value === 'false') {\n return false\n }\n\n // Explicit null requires typing \"null\" or \"NULL\"\n if (value === 'null' || value === 'NULL') {\n return null\n }\n\n // Keep comma-separated values as strings for hasMany fields\n if (value.includes(',')) {\n return value\n }\n\n if (!isNaN(Number(value)) && value !== '') {\n const num = Number(value)\n if (String(num) === value || value.includes('.')) {\n return num\n }\n }\n\n return value\n },\n columns: true,\n skip_empty_lines: true,\n trim: true,\n })\n\n parser.on('readable', () => {\n let record\n while ((record = parser.read()) !== null) {\n records.push(record)\n }\n })\n\n parser.on('error', (err) => {\n req.payload.logger.error({ err, msg: 'Error parsing CSV' })\n reject(err)\n })\n\n parser.on('end', () => {\n resolve(records)\n })\n\n parser.write(data)\n parser.end()\n 
})\n}\n"],"names":["parse","parseCSV","data","req","Promise","resolve","reject","records","parser","cast","value","_context","undefined","includes","isNaN","Number","num","String","columns","skip_empty_lines","trim","on","record","read","push","err","payload","logger","error","msg","write","end"],"mappings":"AAEA,SAASA,KAAK,QAAQ,YAAW;AAOjC;;;CAGC,GACD,OAAO,MAAMC,WAAW,OAAO,EAAEC,IAAI,EAAEC,GAAG,EAAgB;IACxD,OAAO,IAAIC,QAAQ,CAACC,SAASC;QAC3B,MAAMC,UAAqC,EAAE;QAE7C,MAAMC,SAASR,MAAM;YACnBS,MAAM,CAACC,OAAOC;gBACZ,0EAA0E;gBAC1E,IAAID,UAAU,IAAI;oBAChB,OAAOE;gBACT;gBAEA,IAAIF,UAAU,QAAQ;oBACpB,OAAO;gBACT;gBACA,IAAIA,UAAU,SAAS;oBACrB,OAAO;gBACT;gBAEA,iDAAiD;gBACjD,IAAIA,UAAU,UAAUA,UAAU,QAAQ;oBACxC,OAAO;gBACT;gBAEA,4DAA4D;gBAC5D,IAAIA,MAAMG,QAAQ,CAAC,MAAM;oBACvB,OAAOH;gBACT;gBAEA,IAAI,CAACI,MAAMC,OAAOL,WAAWA,UAAU,IAAI;oBACzC,MAAMM,MAAMD,OAAOL;oBACnB,IAAIO,OAAOD,SAASN,SAASA,MAAMG,QAAQ,CAAC,MAAM;wBAChD,OAAOG;oBACT;gBACF;gBAEA,OAAON;YACT;YACAQ,SAAS;YACTC,kBAAkB;YAClBC,MAAM;QACR;QAEAZ,OAAOa,EAAE,CAAC,YAAY;YACpB,IAAIC;YACJ,MAAO,AAACA,CAAAA,SAASd,OAAOe,IAAI,EAAC,MAAO,KAAM;gBACxChB,QAAQiB,IAAI,CAACF;YACf;QACF;QAEAd,OAAOa,EAAE,CAAC,SAAS,CAACI;YAClBtB,IAAIuB,OAAO,CAACC,MAAM,CAACC,KAAK,CAAC;gBAAEH;gBAAKI,KAAK;YAAoB;YACzDvB,OAAOmB;QACT;QAEAjB,OAAOa,EAAE,CAAC,OAAO;YACfhB,QAAQE;QACV;QAEAC,OAAOsB,KAAK,CAAC5B;QACbM,OAAOuB,GAAG;IACZ;AACF,EAAC"}
@@ -0,0 +1,15 @@
1
+ import type { PayloadRequest } from 'payload';
2
+ import type { Limit } from '../types.js';
3
+ /**
4
+ * Resolves a Limit value to a number.
5
+ * If the value is a function, it will be called with the request context.
6
+ * If the value is a number, it will be returned as-is.
7
+ * If the value is undefined, undefined will be returned.
8
+ *
9
+ * Note: A resolved value of 0 means unlimited (no restriction).
10
+ */
11
+ export declare function resolveLimit(args: {
12
+ limit?: Limit;
13
+ req: PayloadRequest;
14
+ }): Promise<number | undefined>;
15
+ //# sourceMappingURL=resolveLimit.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"resolveLimit.d.ts","sourceRoot":"","sources":["../../src/utilities/resolveLimit.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,SAAS,CAAA;AAE7C,OAAO,KAAK,EAAE,KAAK,EAAE,MAAM,aAAa,CAAA;AAExC;;;;;;;GAOG;AACH,wBAAsB,YAAY,CAAC,IAAI,EAAE;IACvC,KAAK,CAAC,EAAE,KAAK,CAAA;IACb,GAAG,EAAE,cAAc,CAAA;CACpB,GAAG,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC,CAY9B"}
@@ -0,0 +1,21 @@
1
+ /**
2
+ * Resolves a Limit value to a number.
3
+ * If the value is a function, it will be called with the request context.
4
+ * If the value is a number, it will be returned as-is.
5
+ * If the value is undefined, undefined will be returned.
6
+ *
7
+ * Note: A resolved value of 0 means unlimited (no restriction).
8
+ */ export async function resolveLimit(args) {
9
+ const { limit, req } = args;
10
+ if (limit === undefined) {
11
+ return undefined;
12
+ }
13
+ if (typeof limit === 'number') {
14
+ return limit;
15
+ }
16
+ return limit({
17
+ req
18
+ });
19
+ }
20
+
21
+ //# sourceMappingURL=resolveLimit.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../src/utilities/resolveLimit.ts"],"sourcesContent":["import type { PayloadRequest } from 'payload'\n\nimport type { Limit } from '../types.js'\n\n/**\n * Resolves a Limit value to a number.\n * If the value is a function, it will be called with the request context.\n * If the value is a number, it will be returned as-is.\n * If the value is undefined, undefined will be returned.\n *\n * Note: A resolved value of 0 means unlimited (no restriction).\n */\nexport async function resolveLimit(args: {\n limit?: Limit\n req: PayloadRequest\n}): Promise<number | undefined> {\n const { limit, req } = args\n\n if (limit === undefined) {\n return undefined\n }\n\n if (typeof limit === 'number') {\n return limit\n }\n\n return limit({ req })\n}\n"],"names":["resolveLimit","args","limit","req","undefined"],"mappings":"AAIA;;;;;;;CAOC,GACD,OAAO,eAAeA,aAAaC,IAGlC;IACC,MAAM,EAAEC,KAAK,EAAEC,GAAG,EAAE,GAAGF;IAEvB,IAAIC,UAAUE,WAAW;QACvB,OAAOA;IACT;IAEA,IAAI,OAAOF,UAAU,UAAU;QAC7B,OAAOA;IACT;IAEA,OAAOA,MAAM;QAAEC;IAAI;AACrB"}
@@ -6,6 +6,19 @@ type UnflattenArgs = {
6
6
  fromCSVFunctions?: Record<string, FromCSVFunction>;
7
7
  req: PayloadRequest;
8
8
  };
9
+ /**
10
+ * Converts flattened CSV data back into a nested document structure.
11
+ *
12
+ * The algorithm:
13
+ * 1. Sorts keys to ensure array indices are processed in order
14
+ * 2. For each flattened key (e.g., "blocks_0_hero_title"), splits by underscore into path segments
15
+ * 3. Traverses/builds the nested structure, handling:
16
+ * - Arrays (numeric segments like "0", "1")
17
+ * - Blocks (blockType detection from slug patterns)
18
+ * - Polymorphic relationships (_relationTo and _id suffix pairs)
19
+ * - Regular nested objects
20
+ * 4. Post-processes to handle localized fields, hasMany conversions, and relationship transforms
21
+ */
9
22
  export declare const unflattenObject: ({ data, fields, fromCSVFunctions, req, }: UnflattenArgs) => Record<string, unknown>;
10
23
  export {};
11
24
  //# sourceMappingURL=unflattenObject.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"unflattenObject.d.ts","sourceRoot":"","sources":["../../src/utilities/unflattenObject.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,cAAc,EAAE,MAAM,SAAS,CAAA;AAE7D,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,aAAa,CAAA;AAIlD,KAAK,aAAa,GAAG;IACnB,IAAI,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA;IAC7B,MAAM,EAAE,cAAc,EAAE,CAAA;IACxB,gBAAgB,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,eAAe,CAAC,CAAA;IAClD,GAAG,EAAE,cAAc,CAAA;CACpB,CAAA;AAED,eAAO,MAAM,eAAe,6CAKzB,aAAa,KAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CA0TxC,CAAA"}
1
+ {"version":3,"file":"unflattenObject.d.ts","sourceRoot":"","sources":["../../src/utilities/unflattenObject.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,cAAc,EAAE,MAAM,SAAS,CAAA;AAE7D,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,aAAa,CAAA;AAIlD,KAAK,aAAa,GAAG;IACnB,IAAI,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA;IAC7B,MAAM,EAAE,cAAc,EAAE,CAAA;IACxB,gBAAgB,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,eAAe,CAAC,CAAA;IAClD,GAAG,EAAE,cAAc,CAAA;CACpB,CAAA;AAED;;;;;;;;;;;;GAYG;AACH,eAAO,MAAM,eAAe,6CAKzB,aAAa,KAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAoSxC,CAAA"}
@@ -1,5 +1,17 @@
1
1
  import { processRichTextField } from './processRichTextField.js';
2
- export const unflattenObject = ({ data, fields, fromCSVFunctions = {}, req })=>{
2
+ /**
3
+ * Converts flattened CSV data back into a nested document structure.
4
+ *
5
+ * The algorithm:
6
+ * 1. Sorts keys to ensure array indices are processed in order
7
+ * 2. For each flattened key (e.g., "blocks_0_hero_title"), splits by underscore into path segments
8
+ * 3. Traverses/builds the nested structure, handling:
9
+ * - Arrays (numeric segments like "0", "1")
10
+ * - Blocks (blockType detection from slug patterns)
11
+ * - Polymorphic relationships (_relationTo and _id suffix pairs)
12
+ * - Regular nested objects
13
+ * 4. Post-processes to handle localized fields, hasMany conversions, and relationship transforms
14
+ */ export const unflattenObject = ({ data, fields, fromCSVFunctions = {}, req })=>{
3
15
  if (!data || typeof data !== 'object') {
4
16
  return {};
5
17
  }
@@ -31,7 +43,6 @@ export const unflattenObject = ({ data, fields, fromCSVFunctions = {}, req })=>{
31
43
  // Check if this is a polymorphic relationship field
32
44
  const isPolymorphic = fields.some((field)=>field.name === baseKey && field.type === 'relationship' && 'relationTo' in field && Array.isArray(field.relationTo));
33
45
  if (isPolymorphic) {
34
- // Check if we've already processed this field
35
46
  if (baseKey in result) {
36
47
  continue;
37
48
  }
@@ -64,29 +75,27 @@ export const unflattenObject = ({ data, fields, fromCSVFunctions = {}, req })=>{
64
75
  value
65
76
  });
66
77
  }
67
- // Parse the flat key into segments
68
78
  // Example: "blocks_0_content_text" -> ["blocks", "0", "content", "text"]
69
- const segments = flatKey.split('_');
70
- let current = result;
71
- for(let i = 0; i < segments.length; i++){
72
- const segment = segments[i];
79
+ const pathSegments = flatKey.split('_');
80
+ let currentObject = result;
81
+ for(let i = 0; i < pathSegments.length; i++){
82
+ const segment = pathSegments[i];
73
83
  if (!segment) {
74
84
  continue;
75
85
  } // Skip empty segments
76
- const nextSegment = segments[i + 1];
77
- const isLast = i === segments.length - 1;
86
+ const nextSegment = pathSegments[i + 1];
87
+ const isLast = i === pathSegments.length - 1;
78
88
  // Check if next segment is a numeric array index (e.g., "0", "1", "2")
79
89
  const isArrayIndex = nextSegment !== undefined && /^\d+$/.test(nextSegment);
80
90
  if (isLast) {
81
91
  // Special handling for blockType suffix in blocks
82
92
  if (segment === 'blockType' && i >= 3) {
83
93
  // Pattern: blocks_0_hero_blockType -> set blockType on the block
84
- const blockFieldName = segments[0] // 'blocks'
94
+ const blockFieldName = pathSegments[0] // 'blocks'
85
95
  ;
86
96
  const isBlockField = fields.some((field)=>field.name === blockFieldName && field.type === 'blocks');
87
- if (isBlockField && segments[1]?.match(/^\d+$/)) {
88
- // This is a block type field
89
- const parent = getParentObject(result, segments.slice(0, 2));
97
+ if (isBlockField && pathSegments[1]?.match(/^\d+$/)) {
98
+ const parent = getParentObject(result, pathSegments.slice(0, 2));
90
99
  if (parent && typeof parent === 'object') {
91
100
  parent.blockType = value;
92
101
  }
@@ -95,10 +104,9 @@ export const unflattenObject = ({ data, fields, fromCSVFunctions = {}, req })=>{
95
104
  }
96
105
  // Special handling for relationship fields with _id suffix
97
106
  if (segment === 'id' && i > 0) {
98
- const parentKey = segments[i - 1];
99
- // Check if the previous segment is an array index
100
- const prevIsIndex = parentKey ? /^\d+$/.test(parentKey) : false;
101
- if (!prevIsIndex) {
107
+ const parentKey = pathSegments[i - 1];
108
+ const isPreviousSegmentArrayIndex = parentKey ? /^\d+$/.test(parentKey) : false;
109
+ if (!isPreviousSegmentArrayIndex) {
102
110
  // Check if this is a relationship field
103
111
  const isRelationship = fields.some((field)=>field.name === parentKey && field.type === 'relationship');
104
112
  if (isRelationship) {
@@ -106,11 +114,9 @@ export const unflattenObject = ({ data, fields, fromCSVFunctions = {}, req })=>{
106
114
  const field = fields.find((f)=>f.name === parentKey && f.type === 'relationship');
107
115
  const isPolymorphic = field && 'relationTo' in field && Array.isArray(field.relationTo);
108
116
  if (isPolymorphic) {
109
- // For polymorphic relationships, check for the corresponding _relationTo field
110
- const relationToKey = segments.slice(0, i).concat('relationTo').join('_');
117
+ const relationToKey = pathSegments.slice(0, i).concat('relationTo').join('_');
111
118
  const relationToValue = data[relationToKey];
112
- // This is a polymorphic relationship
113
- const parent = getParentObject(result, segments.slice(0, i - 1));
119
+ const parent = getParentObject(result, pathSegments.slice(0, i - 1));
114
120
  if (parent && parentKey && typeof parent === 'object') {
115
121
  // Both fields must be defined to create/update the relationship
116
122
  // If either is undefined, skip the field entirely (preserve existing data)
@@ -132,8 +138,7 @@ export const unflattenObject = ({ data, fields, fromCSVFunctions = {}, req })=>{
132
138
  }
133
139
  continue;
134
140
  } else if (!isPolymorphic) {
135
- // Non-polymorphic relationship
136
- const parent = getParentObject(result, segments.slice(0, i - 1));
141
+ const parent = getParentObject(result, pathSegments.slice(0, i - 1));
137
142
  if (parent && parentKey && typeof parent === 'object') {
138
143
  parent[parentKey] = value;
139
144
  }
@@ -142,9 +147,9 @@ export const unflattenObject = ({ data, fields, fromCSVFunctions = {}, req })=>{
142
147
  }
143
148
  }
144
149
  }
145
- // Special handling for _relationTo suffix (skip it, handled above)
150
+ // _relationTo suffix is handled when processing the _id field above
146
151
  if (segment === 'relationTo' && i > 0) {
147
- const parentKey = segments[i - 1];
152
+ const parentKey = pathSegments[i - 1];
148
153
  if (parentKey && !parentKey.match(/^\d+$/)) {
149
154
  const field = fields.find((f)=>f.name === parentKey && f.type === 'relationship');
150
155
  const isPolymorphic = field && 'relationTo' in field && Array.isArray(field.relationTo);
@@ -153,14 +158,13 @@ export const unflattenObject = ({ data, fields, fromCSVFunctions = {}, req })=>{
153
158
  }
154
159
  }
155
160
  }
156
- current[segment] = value;
161
+ currentObject[segment] = value;
157
162
  } else if (isArrayIndex && nextSegment !== undefined) {
158
- // Initialize array if needed
159
- if (!current[segment] || !Array.isArray(current[segment])) {
160
- current[segment] = [];
163
+ if (!currentObject[segment] || !Array.isArray(currentObject[segment])) {
164
+ currentObject[segment] = [];
161
165
  }
162
166
  const arrayIndex = parseInt(nextSegment);
163
- const arr = current[segment];
167
+ const arr = currentObject[segment];
164
168
  // Ensure array has sufficient length
165
169
  while(arr.length <= arrayIndex){
166
170
  arr.push(null);
@@ -169,65 +173,55 @@ export const unflattenObject = ({ data, fields, fromCSVFunctions = {}, req })=>{
169
173
  if (arr[arrayIndex] === null || arr[arrayIndex] === undefined) {
170
174
  arr[arrayIndex] = {};
171
175
  }
172
- // Check if this is a blocks field with block slug pattern
176
+ // Handle blocks field with block slug pattern (e.g., blocks_0_hero_title)
173
177
  const isBlocksField = fields.some((f)=>f.name === segment && f.type === 'blocks');
174
- if (isBlocksField && i + 3 < segments.length) {
175
- // Pattern: blocks_0_hero_title where 'hero' is the block slug
176
- const blockSlug = segments[i + 2];
177
- const blockFieldName = segments[i + 3];
178
+ if (isBlocksField && i + 3 < pathSegments.length) {
179
+ const blockSlug = pathSegments[i + 2];
180
+ const blockFieldName = pathSegments[i + 3];
178
181
  if (blockSlug && blockFieldName) {
179
182
  const blockObject = arr[arrayIndex];
180
- // Set the blockType based on the slug
181
183
  blockObject.blockType = blockSlug;
182
- // Handle nested block fields
183
- if (i + 3 === segments.length - 1) {
184
- // Direct field on the block
184
+ if (i + 3 === pathSegments.length - 1) {
185
185
  blockObject[blockFieldName] = value;
186
186
  } else {
187
- // Nested field in the block
188
187
  if (!blockObject[blockFieldName] || typeof blockObject[blockFieldName] !== 'object') {
189
188
  blockObject[blockFieldName] = {};
190
189
  }
191
- // Continue processing remaining segments
192
- current = blockObject[blockFieldName];
193
- i = i + 3; // Skip index, slug, and field name
194
- continue; // Continue processing the remaining segments (not break!)
190
+ currentObject = blockObject[blockFieldName];
191
+ i = i + 3;
192
+ continue;
195
193
  }
196
194
  break;
197
195
  }
198
196
  }
199
- // If this is the last segment after the index, set the value
200
- if (i + 2 === segments.length - 1) {
201
- const lastSegment = segments[segments.length - 1];
197
+ if (i + 2 === pathSegments.length - 1) {
198
+ const lastSegment = pathSegments[pathSegments.length - 1];
202
199
  if (lastSegment && arr[arrayIndex] && typeof arr[arrayIndex] === 'object') {
203
200
  ;
204
201
  arr[arrayIndex][lastSegment] = value;
205
202
  }
206
203
  break;
207
- } else if (i + 1 === segments.length - 1) {
204
+ } else if (i + 1 === pathSegments.length - 1) {
208
205
  // Direct array value (e.g., tags_0 = "value")
209
206
  arr[arrayIndex] = value;
210
207
  break;
211
208
  } else {
212
- // Continue traversing into the array element
213
- current = arr[arrayIndex];
214
- i++; // skip the index segment
209
+ currentObject = arr[arrayIndex];
210
+ i++;
215
211
  }
216
212
  } else {
217
- // Regular object property
218
- // Check if this segment is already set to null (polymorphic relationship already processed)
219
- if (current[segment] === null && isLast && segment === 'relationTo') {
213
+ // Skip if already set to null (polymorphic relationship already processed)
214
+ if (currentObject[segment] === null && isLast && segment === 'relationTo') {
220
215
  continue;
221
216
  }
222
- if (!current[segment] || typeof current[segment] !== 'object' || Array.isArray(current[segment])) {
223
- current[segment] = {};
217
+ if (!currentObject[segment] || typeof currentObject[segment] !== 'object' || Array.isArray(currentObject[segment])) {
218
+ currentObject[segment] = {};
224
219
  }
225
- // Handle special cases for polymorphic relationships
226
- if (segment === 'relationTo' && i > 0 && segments[i - 1]?.match(/^\d+$/)) {
227
- // This is part of a polymorphic relationship array
228
- current[segment] = value;
229
- } else if (typeof current[segment] === 'object' && !Array.isArray(current[segment]) && current[segment] !== null) {
230
- current = current[segment];
220
+ // Handle polymorphic relationship arrays
221
+ if (segment === 'relationTo' && i > 0 && pathSegments[i - 1]?.match(/^\d+$/)) {
222
+ currentObject[segment] = value;
223
+ } else if (typeof currentObject[segment] === 'object' && !Array.isArray(currentObject[segment]) && currentObject[segment] !== null) {
224
+ currentObject = currentObject[segment];
231
225
  }
232
226
  }
233
227
  }
@@ -272,9 +266,14 @@ const getParentObject = (obj, segments)=>{
272
266
  }
273
267
  return current;
274
268
  };
275
- const postProcessDocument = (doc, fields)=>{
276
- // Handle localized fields - transform from field_locale to { field: { locale: value } }
277
- // This is the format Payload stores in the database
269
+ /**
270
+ * Post-processes the unflattened document to handle special field types:
271
+ * - Localized fields: transforms field_locale keys to nested { field: { locale: value } }
272
+ * - Number hasMany: converts comma-separated strings or arrays to number arrays
273
+ * - Relationship hasMany: converts comma-separated IDs to arrays
274
+ * - Polymorphic relationships: transforms flat {relationTo, id} to {relationTo, value}
275
+ * - Rich text fields: ensures proper data structure
276
+ */ const postProcessDocument = (doc, fields)=>{
278
277
  const localizedFields = fields.filter((field)=>field.localized);
279
278
  const processedLocalizedFields = new Set();
280
279
  for (const field of localizedFields){