@payloadcms/plugin-import-export 3.76.0-canary.0 → 3.76.0-canary.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/components/CollectionField/index.d.ts.map +1 -1
- package/dist/components/CollectionField/index.js +6 -2
- package/dist/components/CollectionField/index.js.map +1 -1
- package/dist/export/createExport.d.ts +1 -1
- package/dist/export/createExport.d.ts.map +1 -1
- package/dist/export/createExport.js +2 -2
- package/dist/export/createExport.js.map +1 -1
- package/dist/export/getCreateExportCollectionTask.js +1 -1
- package/dist/export/getCreateExportCollectionTask.js.map +1 -1
- package/dist/export/getExportCollection.js +2 -2
- package/dist/export/getExportCollection.js.map +1 -1
- package/dist/import/batchProcessor.d.ts.map +1 -1
- package/dist/import/batchProcessor.js +4 -2
- package/dist/import/batchProcessor.js.map +1 -1
- package/dist/import/getCreateImportCollectionTask.d.ts +9 -6
- package/dist/import/getCreateImportCollectionTask.d.ts.map +1 -1
- package/dist/import/getCreateImportCollectionTask.js +105 -68
- package/dist/import/getCreateImportCollectionTask.js.map +1 -1
- package/dist/import/getImportCollection.d.ts.map +1 -1
- package/dist/import/getImportCollection.js +24 -45
- package/dist/import/getImportCollection.js.map +1 -1
- package/dist/utilities/getFileFromDoc.d.ts +26 -0
- package/dist/utilities/getFileFromDoc.d.ts.map +1 -0
- package/dist/utilities/getFileFromDoc.js +67 -0
- package/dist/utilities/getFileFromDoc.js.map +1 -0
- package/dist/utilities/getPluginCollections.d.ts.map +1 -1
- package/dist/utilities/getPluginCollections.js +16 -0
- package/dist/utilities/getPluginCollections.js.map +1 -1
- package/package.json +7 -7
|
@@ -1,83 +1,120 @@
|
|
|
1
|
+
import { FileRetrievalError } from 'payload';
|
|
2
|
+
import { getFileFromDoc } from '../utilities/getFileFromDoc.js';
|
|
1
3
|
import { createImport } from './createImport.js';
|
|
2
|
-
|
|
3
|
-
export const getCreateCollectionImportTask = (config)=>{
|
|
4
|
-
const inputSchema = getFields(config).concat({
|
|
5
|
-
name: 'user',
|
|
6
|
-
type: 'text'
|
|
7
|
-
}, {
|
|
8
|
-
name: 'userCollection',
|
|
9
|
-
type: 'text'
|
|
10
|
-
}, {
|
|
11
|
-
name: 'importsCollection',
|
|
12
|
-
type: 'text'
|
|
13
|
-
}, {
|
|
14
|
-
name: 'file',
|
|
15
|
-
type: 'group',
|
|
16
|
-
fields: [
|
|
17
|
-
{
|
|
18
|
-
name: 'data',
|
|
19
|
-
type: 'text'
|
|
20
|
-
},
|
|
21
|
-
{
|
|
22
|
-
name: 'mimetype',
|
|
23
|
-
type: 'text'
|
|
24
|
-
},
|
|
25
|
-
{
|
|
26
|
-
name: 'name',
|
|
27
|
-
type: 'text'
|
|
28
|
-
}
|
|
29
|
-
]
|
|
30
|
-
}, {
|
|
31
|
-
name: 'format',
|
|
32
|
-
type: 'select',
|
|
33
|
-
options: [
|
|
34
|
-
'csv',
|
|
35
|
-
'json'
|
|
36
|
-
]
|
|
37
|
-
}, {
|
|
38
|
-
name: 'debug',
|
|
39
|
-
type: 'checkbox'
|
|
40
|
-
}, {
|
|
41
|
-
name: 'maxLimit',
|
|
42
|
-
type: 'number'
|
|
43
|
-
});
|
|
4
|
+
export const getCreateCollectionImportTask = (_config)=>{
|
|
44
5
|
return {
|
|
45
6
|
slug: 'createCollectionImport',
|
|
46
7
|
handler: async ({ input, req })=>{
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
8
|
+
const { batchSize, debug, defaultVersionStatus, importCollection, importId, maxLimit, userCollection, userID } = input;
|
|
9
|
+
// Fetch the import document to get all necessary data
|
|
10
|
+
const importDoc = await req.payload.findByID({
|
|
11
|
+
id: importId,
|
|
12
|
+
collection: importCollection
|
|
13
|
+
});
|
|
14
|
+
if (!importDoc) {
|
|
15
|
+
throw new Error(`Import document not found: ${importId}`);
|
|
50
16
|
}
|
|
51
|
-
|
|
52
|
-
|
|
17
|
+
// Get the collection config for the imports collection
|
|
18
|
+
const collectionConfig = req.payload.config.collections.find((c)=>c.slug === importCollection);
|
|
19
|
+
if (!collectionConfig) {
|
|
20
|
+
throw new Error(`Collection config not found for: ${importCollection}`);
|
|
21
|
+
}
|
|
22
|
+
// Retrieve the file using getFileFromDoc (handles both local and cloud storage)
|
|
23
|
+
const file = await getFileFromDoc({
|
|
24
|
+
collectionConfig,
|
|
25
|
+
doc: {
|
|
26
|
+
filename: importDoc.filename,
|
|
27
|
+
mimeType: importDoc.mimeType,
|
|
28
|
+
url: importDoc.url
|
|
29
|
+
},
|
|
53
30
|
req
|
|
54
31
|
});
|
|
55
|
-
|
|
56
|
-
if (
|
|
57
|
-
|
|
58
|
-
id: input.importId,
|
|
59
|
-
collection: input.importsCollection || 'imports',
|
|
60
|
-
data: {
|
|
61
|
-
status: result.errors.length === 0 ? 'completed' : result.imported + result.updated === 0 ? 'failed' : 'partial',
|
|
62
|
-
summary: {
|
|
63
|
-
imported: result.imported,
|
|
64
|
-
issueDetails: result.errors.length > 0 ? result.errors.map((e)=>({
|
|
65
|
-
data: e.doc,
|
|
66
|
-
error: e.error,
|
|
67
|
-
row: e.index + 1
|
|
68
|
-
})) : undefined,
|
|
69
|
-
issues: result.errors.length,
|
|
70
|
-
total: result.total,
|
|
71
|
-
updated: result.updated
|
|
72
|
-
}
|
|
73
|
-
}
|
|
74
|
-
});
|
|
32
|
+
const fileMimetype = file.mimetype || importDoc.mimeType;
|
|
33
|
+
if (!fileMimetype) {
|
|
34
|
+
throw new FileRetrievalError(req.t, `Unable to determine mimetype for file: ${importDoc.filename}`);
|
|
75
35
|
}
|
|
36
|
+
const result = await createImport({
|
|
37
|
+
name: importDoc.filename || 'import',
|
|
38
|
+
batchSize,
|
|
39
|
+
collectionSlug: importDoc.collectionSlug,
|
|
40
|
+
debug,
|
|
41
|
+
defaultVersionStatus,
|
|
42
|
+
file: {
|
|
43
|
+
name: importDoc.filename,
|
|
44
|
+
data: file.data,
|
|
45
|
+
mimetype: fileMimetype
|
|
46
|
+
},
|
|
47
|
+
format: fileMimetype === 'text/csv' ? 'csv' : 'json',
|
|
48
|
+
importMode: importDoc.importMode || 'create',
|
|
49
|
+
matchField: importDoc.matchField,
|
|
50
|
+
maxLimit,
|
|
51
|
+
req,
|
|
52
|
+
userCollection,
|
|
53
|
+
userID
|
|
54
|
+
});
|
|
55
|
+
// Update the import document with results
|
|
56
|
+
await req.payload.update({
|
|
57
|
+
id: importId,
|
|
58
|
+
collection: importCollection,
|
|
59
|
+
data: {
|
|
60
|
+
status: result.errors.length === 0 ? 'completed' : result.imported + result.updated === 0 ? 'failed' : 'partial',
|
|
61
|
+
summary: {
|
|
62
|
+
imported: result.imported,
|
|
63
|
+
issueDetails: result.errors.length > 0 ? result.errors.map((e)=>({
|
|
64
|
+
data: e.doc,
|
|
65
|
+
error: e.error,
|
|
66
|
+
row: e.index + 1
|
|
67
|
+
})) : undefined,
|
|
68
|
+
issues: result.errors.length,
|
|
69
|
+
total: result.total,
|
|
70
|
+
updated: result.updated
|
|
71
|
+
}
|
|
72
|
+
}
|
|
73
|
+
});
|
|
76
74
|
return {
|
|
77
75
|
output: result
|
|
78
76
|
};
|
|
79
77
|
},
|
|
80
|
-
inputSchema
|
|
78
|
+
inputSchema: [
|
|
79
|
+
{
|
|
80
|
+
name: 'importId',
|
|
81
|
+
type: 'text',
|
|
82
|
+
required: true
|
|
83
|
+
},
|
|
84
|
+
{
|
|
85
|
+
name: 'importCollection',
|
|
86
|
+
type: 'text',
|
|
87
|
+
required: true
|
|
88
|
+
},
|
|
89
|
+
{
|
|
90
|
+
name: 'userID',
|
|
91
|
+
type: 'text'
|
|
92
|
+
},
|
|
93
|
+
{
|
|
94
|
+
name: 'userCollection',
|
|
95
|
+
type: 'text'
|
|
96
|
+
},
|
|
97
|
+
{
|
|
98
|
+
name: 'batchSize',
|
|
99
|
+
type: 'number'
|
|
100
|
+
},
|
|
101
|
+
{
|
|
102
|
+
name: 'debug',
|
|
103
|
+
type: 'checkbox'
|
|
104
|
+
},
|
|
105
|
+
{
|
|
106
|
+
name: 'defaultVersionStatus',
|
|
107
|
+
type: 'select',
|
|
108
|
+
options: [
|
|
109
|
+
'draft',
|
|
110
|
+
'published'
|
|
111
|
+
]
|
|
112
|
+
},
|
|
113
|
+
{
|
|
114
|
+
name: 'maxLimit',
|
|
115
|
+
type: 'number'
|
|
116
|
+
}
|
|
117
|
+
]
|
|
81
118
|
};
|
|
82
119
|
};
|
|
83
120
|
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["../../src/import/getCreateImportCollectionTask.ts"],"sourcesContent":["import type { Config, TaskConfig
|
|
1
|
+
{"version":3,"sources":["../../src/import/getCreateImportCollectionTask.ts"],"sourcesContent":["import type { Config, TaskConfig } from 'payload'\n\nimport { FileRetrievalError } from 'payload'\n\nimport { getFileFromDoc } from '../utilities/getFileFromDoc.js'\nimport { createImport } from './createImport.js'\n\nexport type ImportTaskInput = {\n batchSize?: number\n debug?: boolean\n defaultVersionStatus?: 'draft' | 'published'\n importCollection: string\n importId: string\n maxLimit?: number\n userCollection?: string\n userID?: number | string\n}\n\nexport const getCreateCollectionImportTask = (\n _config: Config,\n): TaskConfig<{\n input: ImportTaskInput\n output: object\n}> => {\n return {\n slug: 'createCollectionImport',\n handler: async ({ input, req }) => {\n const {\n batchSize,\n debug,\n defaultVersionStatus,\n importCollection,\n importId,\n maxLimit,\n userCollection,\n userID,\n } = input\n\n // Fetch the import document to get all necessary data\n const importDoc = await req.payload.findByID({\n id: importId,\n collection: importCollection,\n })\n\n if (!importDoc) {\n throw new Error(`Import document not found: ${importId}`)\n }\n\n // Get the collection config for the imports collection\n const collectionConfig = req.payload.config.collections.find(\n (c) => c.slug === importCollection,\n )\n\n if (!collectionConfig) {\n throw new Error(`Collection config not found for: ${importCollection}`)\n }\n\n // Retrieve the file using getFileFromDoc (handles both local and cloud storage)\n const file = await getFileFromDoc({\n collectionConfig,\n doc: {\n filename: importDoc.filename as string,\n mimeType: importDoc.mimeType as string | undefined,\n url: importDoc.url as string | undefined,\n },\n req,\n })\n\n const fileMimetype = file.mimetype || (importDoc.mimeType as string)\n\n if (!fileMimetype) {\n throw new FileRetrievalError(\n req.t,\n `Unable to determine mimetype for file: ${importDoc.filename}`,\n )\n }\n\n const result = await createImport({\n 
name: (importDoc.filename as string) || 'import',\n batchSize,\n collectionSlug: importDoc.collectionSlug as string,\n debug,\n defaultVersionStatus,\n file: {\n name: importDoc.filename as string,\n data: file.data,\n mimetype: fileMimetype,\n },\n format: fileMimetype === 'text/csv' ? 'csv' : 'json',\n importMode: (importDoc.importMode as 'create' | 'update' | 'upsert') || 'create',\n matchField: importDoc.matchField as string | undefined,\n maxLimit,\n req,\n userCollection,\n userID,\n })\n\n // Update the import document with results\n await req.payload.update({\n id: importId,\n collection: importCollection,\n data: {\n status:\n result.errors.length === 0\n ? 'completed'\n : result.imported + result.updated === 0\n ? 'failed'\n : 'partial',\n summary: {\n imported: result.imported,\n issueDetails:\n result.errors.length > 0\n ? result.errors.map((e) => ({\n data: e.doc,\n error: e.error,\n row: e.index + 1,\n }))\n : undefined,\n issues: result.errors.length,\n total: result.total,\n updated: result.updated,\n },\n },\n })\n\n return {\n output: result,\n }\n },\n inputSchema: [\n {\n name: 'importId',\n type: 'text',\n required: true,\n },\n {\n name: 'importCollection',\n type: 'text',\n required: true,\n },\n {\n name: 'userID',\n type: 'text',\n },\n {\n name: 'userCollection',\n type: 'text',\n },\n {\n name: 'batchSize',\n type: 'number',\n },\n {\n name: 'debug',\n type: 'checkbox',\n },\n {\n name: 'defaultVersionStatus',\n type: 'select',\n options: ['draft', 'published'],\n },\n {\n name: 'maxLimit',\n type: 'number',\n },\n ],\n 
}\n}\n"],"names":["FileRetrievalError","getFileFromDoc","createImport","getCreateCollectionImportTask","_config","slug","handler","input","req","batchSize","debug","defaultVersionStatus","importCollection","importId","maxLimit","userCollection","userID","importDoc","payload","findByID","id","collection","Error","collectionConfig","config","collections","find","c","file","doc","filename","mimeType","url","fileMimetype","mimetype","t","result","name","collectionSlug","data","format","importMode","matchField","update","status","errors","length","imported","updated","summary","issueDetails","map","e","error","row","index","undefined","issues","total","output","inputSchema","type","required","options"],"mappings":"AAEA,SAASA,kBAAkB,QAAQ,UAAS;AAE5C,SAASC,cAAc,QAAQ,iCAAgC;AAC/D,SAASC,YAAY,QAAQ,oBAAmB;AAahD,OAAO,MAAMC,gCAAgC,CAC3CC;IAKA,OAAO;QACLC,MAAM;QACNC,SAAS,OAAO,EAAEC,KAAK,EAAEC,GAAG,EAAE;YAC5B,MAAM,EACJC,SAAS,EACTC,KAAK,EACLC,oBAAoB,EACpBC,gBAAgB,EAChBC,QAAQ,EACRC,QAAQ,EACRC,cAAc,EACdC,MAAM,EACP,GAAGT;YAEJ,sDAAsD;YACtD,MAAMU,YAAY,MAAMT,IAAIU,OAAO,CAACC,QAAQ,CAAC;gBAC3CC,IAAIP;gBACJQ,YAAYT;YACd;YAEA,IAAI,CAACK,WAAW;gBACd,MAAM,IAAIK,MAAM,CAAC,2BAA2B,EAAET,UAAU;YAC1D;YAEA,uDAAuD;YACvD,MAAMU,mBAAmBf,IAAIU,OAAO,CAACM,MAAM,CAACC,WAAW,CAACC,IAAI,CAC1D,CAACC,IAAMA,EAAEtB,IAAI,KAAKO;YAGpB,IAAI,CAACW,kBAAkB;gBACrB,MAAM,IAAID,MAAM,CAAC,iCAAiC,EAAEV,kBAAkB;YACxE;YAEA,gFAAgF;YAChF,MAAMgB,OAAO,MAAM3B,eAAe;gBAChCsB;gBACAM,KAAK;oBACHC,UAAUb,UAAUa,QAAQ;oBAC5BC,UAAUd,UAAUc,QAAQ;oBAC5BC,KAAKf,UAAUe,GAAG;gBACpB;gBACAxB;YACF;YAEA,MAAMyB,eAAeL,KAAKM,QAAQ,IAAKjB,UAAUc,QAAQ;YAEzD,IAAI,CAACE,cAAc;gBACjB,MAAM,IAAIjC,mBACRQ,IAAI2B,CAAC,EACL,CAAC,uCAAuC,EAAElB,UAAUa,QAAQ,EAAE;YAElE;YAEA,MAAMM,SAAS,MAAMlC,aAAa;gBAChCmC,MAAM,AAACpB,UAAUa,QAAQ,IAAe;gBACxCrB;gBACA6B,gBAAgBrB,UAAUqB,cAAc;gBACxC5B;gBACAC;gBACAiB,MAAM;oBACJS,MAAMpB,UAAUa,QAAQ;oBACxBS,MAAMX,KAAKW,IAAI;oBACfL,UAAUD;gBACZ;gBACAO,QAAQP,iBAAiB,aAAa,QAAQ;gBAC9CQ,YAAY,AAACxB,UAAUwB,UAAU,IAAuC;gBACxEC,YAAYzB,UAAUyB,UAAU;gBAChC5B;gBACAN;gBACAO
;gBACAC;YACF;YAEA,0CAA0C;YAC1C,MAAMR,IAAIU,OAAO,CAACyB,MAAM,CAAC;gBACvBvB,IAAIP;gBACJQ,YAAYT;gBACZ2B,MAAM;oBACJK,QACER,OAAOS,MAAM,CAACC,MAAM,KAAK,IACrB,cACAV,OAAOW,QAAQ,GAAGX,OAAOY,OAAO,KAAK,IACnC,WACA;oBACRC,SAAS;wBACPF,UAAUX,OAAOW,QAAQ;wBACzBG,cACEd,OAAOS,MAAM,CAACC,MAAM,GAAG,IACnBV,OAAOS,MAAM,CAACM,GAAG,CAAC,CAACC,IAAO,CAAA;gCACxBb,MAAMa,EAAEvB,GAAG;gCACXwB,OAAOD,EAAEC,KAAK;gCACdC,KAAKF,EAAEG,KAAK,GAAG;4BACjB,CAAA,KACAC;wBACNC,QAAQrB,OAAOS,MAAM,CAACC,MAAM;wBAC5BY,OAAOtB,OAAOsB,KAAK;wBACnBV,SAASZ,OAAOY,OAAO;oBACzB;gBACF;YACF;YAEA,OAAO;gBACLW,QAAQvB;YACV;QACF;QACAwB,aAAa;YACX;gBACEvB,MAAM;gBACNwB,MAAM;gBACNC,UAAU;YACZ;YACA;gBACEzB,MAAM;gBACNwB,MAAM;gBACNC,UAAU;YACZ;YACA;gBACEzB,MAAM;gBACNwB,MAAM;YACR;YACA;gBACExB,MAAM;gBACNwB,MAAM;YACR;YACA;gBACExB,MAAM;gBACNwB,MAAM;YACR;YACA;gBACExB,MAAM;gBACNwB,MAAM;YACR;YACA;gBACExB,MAAM;gBACNwB,MAAM;gBACNE,SAAS;oBAAC;oBAAS;iBAAY;YACjC;YACA;gBACE1B,MAAM;gBACNwB,MAAM;YACR;SACD;IACH;AACF,EAAC"}
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"getImportCollection.d.ts","sourceRoot":"","sources":["../../src/import/getImportCollection.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAGV,gBAAgB,EAChB,MAAM,EACP,MAAM,SAAS,CAAA;
|
|
1
|
+
{"version":3,"file":"getImportCollection.d.ts","sourceRoot":"","sources":["../../src/import/getImportCollection.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAGV,gBAAgB,EAChB,MAAM,EACP,MAAM,SAAS,CAAA;AAIhB,OAAO,KAAK,EAAE,YAAY,EAAE,wBAAwB,EAAS,MAAM,aAAa,CAAA;AAShF,eAAO,MAAM,mBAAmB,4CAI7B;IACD,MAAM,EAAE,MAAM,CAAA;IACd,YAAY,CAAC,EAAE,YAAY,CAAA;IAC3B,YAAY,EAAE,wBAAwB,CAAA;CACvC,KAAG,gBA2QH,CAAA"}
|
|
@@ -1,5 +1,5 @@
|
|
|
1
|
-
import
|
|
2
|
-
import
|
|
1
|
+
import { FileRetrievalError } from 'payload';
|
|
2
|
+
import { getFileFromDoc } from '../utilities/getFileFromDoc.js';
|
|
3
3
|
import { resolveLimit } from '../utilities/resolveLimit.js';
|
|
4
4
|
import { createImport } from './createImport.js';
|
|
5
5
|
import { getFields } from './getFields.js';
|
|
@@ -63,25 +63,28 @@ export const getImportCollection = ({ config, importConfig, pluginConfig })=>{
|
|
|
63
63
|
const debug = pluginConfig.debug || false;
|
|
64
64
|
try {
|
|
65
65
|
// Get file data from the uploaded document
|
|
66
|
+
// First try req.file which is available during the same request (especially important for cloud storage)
|
|
67
|
+
// Fall back to getFileFromDoc for cases where req.file isn't available
|
|
66
68
|
let fileData;
|
|
67
69
|
let fileMimetype;
|
|
68
|
-
if (
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
if (!
|
|
72
|
-
throw new
|
|
70
|
+
if (req.file?.data) {
|
|
71
|
+
fileData = req.file.data;
|
|
72
|
+
fileMimetype = req.file.mimetype || doc.mimeType;
|
|
73
|
+
if (!fileMimetype) {
|
|
74
|
+
throw new FileRetrievalError(req.t, `Unable to determine mimetype for file: ${doc.filename}`);
|
|
73
75
|
}
|
|
74
|
-
fileData = Buffer.from(await response.arrayBuffer());
|
|
75
|
-
fileMimetype = doc.mimeType || 'text/csv';
|
|
76
76
|
} else {
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
77
|
+
const fileFromDoc = await getFileFromDoc({
|
|
78
|
+
collectionConfig,
|
|
79
|
+
doc: {
|
|
80
|
+
filename: doc.filename,
|
|
81
|
+
mimeType: doc.mimeType,
|
|
82
|
+
url: doc.url
|
|
83
|
+
},
|
|
84
|
+
req
|
|
85
|
+
});
|
|
86
|
+
fileData = fileFromDoc.data;
|
|
87
|
+
fileMimetype = fileFromDoc.mimetype;
|
|
85
88
|
}
|
|
86
89
|
const targetCollection = req.payload.collections[doc.collectionSlug];
|
|
87
90
|
const importLimitConfig = targetCollection?.config.custom?.['plugin-import-export']?.importLimit;
|
|
@@ -207,50 +210,26 @@ export const getImportCollection = ({ config, importConfig, pluginConfig })=>{
|
|
|
207
210
|
});
|
|
208
211
|
} else {
|
|
209
212
|
// When jobs queue is enabled, queue the import as a job
|
|
213
|
+
// The job handler will fetch the file from storage using getFileFromDoc
|
|
210
214
|
afterChange.push(async ({ collection: collectionConfig, doc, operation, req })=>{
|
|
211
215
|
if (operation !== 'create') {
|
|
212
216
|
return;
|
|
213
217
|
}
|
|
214
218
|
try {
|
|
215
|
-
//
|
|
216
|
-
let fileData;
|
|
217
|
-
if (doc.url && doc.url.startsWith('http')) {
|
|
218
|
-
const response = await fetch(doc.url);
|
|
219
|
-
if (!response.ok) {
|
|
220
|
-
throw new Error(`Failed to fetch file from URL: ${doc.url}`);
|
|
221
|
-
}
|
|
222
|
-
fileData = Buffer.from(await response.arrayBuffer());
|
|
223
|
-
} else {
|
|
224
|
-
const filePath = doc.filename;
|
|
225
|
-
// Get upload config from the actual sanitized collection config
|
|
226
|
-
const uploadConfig = typeof collectionConfig?.upload === 'object' ? collectionConfig.upload : undefined;
|
|
227
|
-
const uploadDir = uploadConfig?.staticDir || './uploads';
|
|
228
|
-
const fullPath = path.resolve(uploadDir, filePath);
|
|
229
|
-
fileData = await fs.promises.readFile(fullPath);
|
|
230
|
-
}
|
|
219
|
+
// Resolve maxLimit ahead of time since it may involve async config resolution
|
|
231
220
|
const targetCollection = req.payload.collections[doc.collectionSlug];
|
|
232
221
|
const importLimitConfig = targetCollection?.config.custom?.['plugin-import-export']?.importLimit;
|
|
233
222
|
const maxLimit = await resolveLimit({
|
|
234
223
|
limit: importLimitConfig,
|
|
235
224
|
req
|
|
236
225
|
});
|
|
226
|
+
// Only pass minimal data to the job - the handler will fetch the file from storage
|
|
237
227
|
const input = {
|
|
238
|
-
name: doc.filename,
|
|
239
228
|
batchSize,
|
|
240
|
-
collectionSlug: doc.collectionSlug,
|
|
241
229
|
debug: pluginConfig.debug,
|
|
242
230
|
defaultVersionStatus,
|
|
243
|
-
|
|
244
|
-
name: doc.filename,
|
|
245
|
-
// Convert to base64 for job serialization - will be converted back to Buffer in task handler
|
|
246
|
-
data: fileData.toString('base64'),
|
|
247
|
-
mimetype: doc.mimeType || 'text/csv'
|
|
248
|
-
},
|
|
249
|
-
format: doc.mimeType === 'text/csv' ? 'csv' : 'json',
|
|
231
|
+
importCollection: collectionConfig.slug,
|
|
250
232
|
importId: doc.id,
|
|
251
|
-
importMode: doc.importMode || 'create',
|
|
252
|
-
importsCollection: collectionConfig.slug,
|
|
253
|
-
matchField: doc.matchField,
|
|
254
233
|
maxLimit,
|
|
255
234
|
userCollection: req.user?.collection || req?.user?.user?.collection,
|
|
256
235
|
userID: req?.user?.id || req?.user?.user?.id
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["../../src/import/getImportCollection.ts"],"sourcesContent":["import type {\n CollectionAfterChangeHook,\n CollectionBeforeOperationHook,\n CollectionConfig,\n Config,\n} from 'payload'\n\nimport fs from 'fs'\nimport path from 'path'\n\nimport type { ImportConfig, ImportExportPluginConfig, Limit } from '../types.js'\nimport type { ImportTaskInput } from './getCreateImportCollectionTask.js'\n\nimport { resolveLimit } from '../utilities/resolveLimit.js'\nimport { createImport } from './createImport.js'\nimport { getFields } from './getFields.js'\nimport { handlePreview } from './handlePreview.js'\n\nexport const getImportCollection = ({\n config,\n importConfig,\n pluginConfig,\n}: {\n config: Config\n importConfig?: ImportConfig\n pluginConfig: ImportExportPluginConfig\n}): CollectionConfig => {\n const beforeOperation: CollectionBeforeOperationHook[] = []\n const afterChange: CollectionAfterChangeHook[] = []\n\n // Extract import-specific settings\n const disableJobsQueue = importConfig?.disableJobsQueue ?? false\n const batchSize = importConfig?.batchSize ?? 100\n const defaultVersionStatus = importConfig?.defaultVersionStatus ?? 
'published'\n\n // Get collection slugs for the dropdown\n const collectionSlugs = pluginConfig.collections?.map((c) => c.slug)\n\n const collection: CollectionConfig = {\n slug: 'imports',\n access: {\n update: () => false,\n },\n admin: {\n components: {\n edit: {\n SaveButton: '@payloadcms/plugin-import-export/rsc#ImportSaveButton',\n },\n },\n disableCopyToLocale: true,\n group: false,\n useAsTitle: 'filename',\n },\n disableDuplicate: true,\n endpoints: [\n {\n handler: handlePreview,\n method: 'post',\n path: '/preview-data',\n },\n ],\n fields: getFields(config, { collectionSlugs }),\n hooks: {\n afterChange,\n beforeOperation,\n },\n lockDocuments: false,\n upload: {\n filesRequiredOnCreate: true,\n hideFileInputOnCreate: false,\n hideRemoveFile: true,\n mimeTypes: ['text/csv', 'application/json'],\n },\n }\n\n if (disableJobsQueue) {\n // Process the import synchronously after the document (with file) has been created\n afterChange.push(async ({ collection: collectionConfig, doc, operation, req }) => {\n if (operation !== 'create' || doc.status !== 'pending') {\n return doc\n }\n\n const debug = pluginConfig.debug || false\n\n try {\n // Get file data from the uploaded document\n let fileData: Buffer\n let fileMimetype: string\n\n if (doc.url && doc.url.startsWith('http')) {\n // File has been uploaded to external storage (S3, etc.) - fetch it\n const response = await fetch(doc.url)\n if (!response.ok) {\n throw new Error(`Failed to fetch file from URL: ${doc.url}`)\n }\n fileData = Buffer.from(await response.arrayBuffer())\n fileMimetype = doc.mimeType || 'text/csv'\n } else {\n // File is stored locally - read from filesystem\n const filePath = doc.filename\n // Get upload config from the actual sanitized collection config\n const uploadConfig =\n typeof collectionConfig?.upload === 'object' ? 
collectionConfig.upload : undefined\n const uploadDir = uploadConfig?.staticDir || './uploads'\n const fullPath = path.resolve(uploadDir, filePath)\n fileData = await fs.promises.readFile(fullPath)\n fileMimetype = doc.mimeType || 'text/csv'\n }\n\n const targetCollection = req.payload.collections[doc.collectionSlug]\n const importLimitConfig: Limit | undefined =\n targetCollection?.config.custom?.['plugin-import-export']?.importLimit\n const maxLimit = await resolveLimit({\n limit: importLimitConfig,\n req,\n })\n\n const result = await createImport({\n id: doc.id,\n name: doc.filename || 'import',\n batchSize,\n collectionSlug: doc.collectionSlug,\n debug,\n defaultVersionStatus,\n file: {\n name: doc.filename,\n data: fileData,\n mimetype: fileMimetype,\n },\n format: fileMimetype === 'text/csv' ? 'csv' : 'json',\n importMode: doc.importMode || 'create',\n matchField: doc.matchField,\n maxLimit,\n req,\n userCollection: req?.user?.collection || req?.user?.user?.collection,\n userID: req?.user?.id || req?.user?.user?.id,\n })\n\n // Determine status\n let status: 'completed' | 'failed' | 'partial'\n if (result.errors.length === 0) {\n status = 'completed'\n } else if (result.imported + result.updated === 0) {\n status = 'failed'\n } else {\n status = 'partial'\n }\n\n const summary = {\n imported: result.imported,\n issueDetails:\n result.errors.length > 0\n ? 
result.errors.map((e) => ({\n data: e.doc,\n error: e.error,\n row: e.index + 1,\n }))\n : undefined,\n issues: result.errors.length,\n total: result.total,\n updated: result.updated,\n }\n\n // Try to update the document with results (may fail due to transaction timing)\n try {\n await req.payload.update({\n id: doc.id,\n collection: collectionConfig.slug,\n data: {\n status,\n summary,\n },\n overrideAccess: true,\n req,\n })\n } catch (updateErr) {\n // Update may fail if document not yet committed, log but continue\n if (debug) {\n req.payload.logger.error({\n err: updateErr,\n msg: `Failed to update import document ${doc.id} with results`,\n })\n }\n }\n\n // Return updated doc for immediate response\n return {\n ...doc,\n status,\n summary,\n }\n } catch (err) {\n const summary = {\n imported: 0,\n issueDetails: [\n {\n data: {},\n error: err instanceof Error ? err.message : String(err),\n row: 0,\n },\n ],\n issues: 1,\n total: 0,\n updated: 0,\n }\n\n // Try to update document with error status\n try {\n await req.payload.update({\n id: doc.id,\n collection: collectionConfig.slug,\n data: {\n status: 'failed',\n summary,\n },\n overrideAccess: true,\n req,\n })\n } catch (updateErr) {\n // Update may fail if document not yet committed, log but continue\n if (debug) {\n req.payload.logger.error({\n err: updateErr,\n msg: `Failed to update import document ${doc.id} with error status`,\n })\n }\n }\n\n if (debug) {\n req.payload.logger.error({\n err,\n msg: 'Import processing failed',\n })\n }\n\n // Return error status for immediate response\n return {\n ...doc,\n status: 'failed',\n summary,\n }\n }\n })\n } else {\n // When jobs queue is enabled, queue the import as a job\n afterChange.push(async ({ collection: collectionConfig, doc, operation, req }) => {\n if (operation !== 'create') {\n return\n }\n\n try {\n // Get file data for job - need to read from disk/URL since req.file is not available in afterChange\n let fileData: Buffer\n if (doc.url && 
doc.url.startsWith('http')) {\n const response = await fetch(doc.url)\n if (!response.ok) {\n throw new Error(`Failed to fetch file from URL: ${doc.url}`)\n }\n fileData = Buffer.from(await response.arrayBuffer())\n } else {\n const filePath = doc.filename\n // Get upload config from the actual sanitized collection config\n const uploadConfig =\n typeof collectionConfig?.upload === 'object' ? collectionConfig.upload : undefined\n const uploadDir = uploadConfig?.staticDir || './uploads'\n const fullPath = path.resolve(uploadDir, filePath)\n fileData = await fs.promises.readFile(fullPath)\n }\n\n const targetCollection = req.payload.collections[doc.collectionSlug]\n const importLimitConfig: Limit | undefined =\n targetCollection?.config.custom?.['plugin-import-export']?.importLimit\n const maxLimit = await resolveLimit({\n limit: importLimitConfig,\n req,\n })\n\n const input: ImportTaskInput = {\n name: doc.filename,\n batchSize,\n collectionSlug: doc.collectionSlug,\n debug: pluginConfig.debug,\n defaultVersionStatus,\n file: {\n name: doc.filename,\n // Convert to base64 for job serialization - will be converted back to Buffer in task handler\n data: fileData.toString('base64') as unknown as Buffer,\n mimetype: doc.mimeType || 'text/csv',\n },\n format: doc.mimeType === 'text/csv' ? 
'csv' : 'json',\n importId: doc.id,\n importMode: doc.importMode || 'create',\n importsCollection: collectionConfig.slug,\n matchField: doc.matchField,\n maxLimit,\n userCollection: req.user?.collection || req?.user?.user?.collection,\n userID: req?.user?.id || req?.user?.user?.id,\n }\n\n await req.payload.jobs.queue({\n input,\n task: 'createCollectionImport',\n })\n } catch (err) {\n req.payload.logger.error({\n err,\n msg: `Failed to queue import job for document ${doc.id}`,\n })\n }\n })\n }\n\n return collection\n}\n"],"names":["fs","path","resolveLimit","createImport","getFields","handlePreview","getImportCollection","config","importConfig","pluginConfig","beforeOperation","afterChange","disableJobsQueue","batchSize","defaultVersionStatus","collectionSlugs","collections","map","c","slug","collection","access","update","admin","components","edit","SaveButton","disableCopyToLocale","group","useAsTitle","disableDuplicate","endpoints","handler","method","fields","hooks","lockDocuments","upload","filesRequiredOnCreate","hideFileInputOnCreate","hideRemoveFile","mimeTypes","push","collectionConfig","doc","operation","req","status","debug","fileData","fileMimetype","url","startsWith","response","fetch","ok","Error","Buffer","from","arrayBuffer","mimeType","filePath","filename","uploadConfig","undefined","uploadDir","staticDir","fullPath","resolve","promises","readFile","targetCollection","payload","collectionSlug","importLimitConfig","custom","importLimit","maxLimit","limit","result","id","name","file","data","mimetype","format","importMode","matchField","userCollection","user","userID","errors","length","imported","updated","summary","issueDetails","e","error","row","index","issues","total","overrideAccess","updateErr","logger","err","msg","message","String","input","toString","importId","importsCollection","jobs","queue","task"],"mappings":"AAOA,OAAOA,QAAQ,KAAI;AACnB,OAAOC,UAAU,OAAM;AAKvB,SAASC,YAAY,QAAQ,+BAA8B;AAC3D,SAASC,YAAY,QAAQ,oBAAmB;AAChD,SAASC,SAAS,QAAQ,iBA
AgB;AAC1C,SAASC,aAAa,QAAQ,qBAAoB;AAElD,OAAO,MAAMC,sBAAsB,CAAC,EAClCC,MAAM,EACNC,YAAY,EACZC,YAAY,EAKb;IACC,MAAMC,kBAAmD,EAAE;IAC3D,MAAMC,cAA2C,EAAE;IAEnD,mCAAmC;IACnC,MAAMC,mBAAmBJ,cAAcI,oBAAoB;IAC3D,MAAMC,YAAYL,cAAcK,aAAa;IAC7C,MAAMC,uBAAuBN,cAAcM,wBAAwB;IAEnE,wCAAwC;IACxC,MAAMC,kBAAkBN,aAAaO,WAAW,EAAEC,IAAI,CAACC,IAAMA,EAAEC,IAAI;IAEnE,MAAMC,aAA+B;QACnCD,MAAM;QACNE,QAAQ;YACNC,QAAQ,IAAM;QAChB;QACAC,OAAO;YACLC,YAAY;gBACVC,MAAM;oBACJC,YAAY;gBACd;YACF;YACAC,qBAAqB;YACrBC,OAAO;YACPC,YAAY;QACd;QACAC,kBAAkB;QAClBC,WAAW;YACT;gBACEC,SAAS3B;gBACT4B,QAAQ;gBACRhC,MAAM;YACR;SACD;QACDiC,QAAQ9B,UAAUG,QAAQ;YAAEQ;QAAgB;QAC5CoB,OAAO;YACLxB;YACAD;QACF;QACA0B,eAAe;QACfC,QAAQ;YACNC,uBAAuB;YACvBC,uBAAuB;YACvBC,gBAAgB;YAChBC,WAAW;gBAAC;gBAAY;aAAmB;QAC7C;IACF;IAEA,IAAI7B,kBAAkB;QACpB,mFAAmF;QACnFD,YAAY+B,IAAI,CAAC,OAAO,EAAEtB,YAAYuB,gBAAgB,EAAEC,GAAG,EAAEC,SAAS,EAAEC,GAAG,EAAE;YAC3E,IAAID,cAAc,YAAYD,IAAIG,MAAM,KAAK,WAAW;gBACtD,OAAOH;YACT;YAEA,MAAMI,QAAQvC,aAAauC,KAAK,IAAI;YAEpC,IAAI;gBACF,2CAA2C;gBAC3C,IAAIC;gBACJ,IAAIC;gBAEJ,IAAIN,IAAIO,GAAG,IAAIP,IAAIO,GAAG,CAACC,UAAU,CAAC,SAAS;oBACzC,mEAAmE;oBACnE,MAAMC,WAAW,MAAMC,MAAMV,IAAIO,GAAG;oBACpC,IAAI,CAACE,SAASE,EAAE,EAAE;wBAChB,MAAM,IAAIC,MAAM,CAAC,+BAA+B,EAAEZ,IAAIO,GAAG,EAAE;oBAC7D;oBACAF,WAAWQ,OAAOC,IAAI,CAAC,MAAML,SAASM,WAAW;oBACjDT,eAAeN,IAAIgB,QAAQ,IAAI;gBACjC,OAAO;oBACL,gDAAgD;oBAChD,MAAMC,WAAWjB,IAAIkB,QAAQ;oBAC7B,gEAAgE;oBAChE,MAAMC,eACJ,OAAOpB,kBAAkBN,WAAW,WAAWM,iBAAiBN,MAAM,GAAG2B;oBAC3E,MAAMC,YAAYF,cAAcG,aAAa;oBAC7C,MAAMC,WAAWlE,KAAKmE,OAAO,CAACH,WAAWJ;oBACzCZ,WAAW,MAAMjD,GAAGqE,QAAQ,CAACC,QAAQ,CAACH;oBACtCjB,eAAeN,IAAIgB,QAAQ,IAAI;gBACjC;gBAEA,MAAMW,mBAAmBzB,IAAI0B,OAAO,CAACxD,WAAW,CAAC4B,IAAI6B,cAAc,CAAC;gBACpE,MAAMC,oBACJH,kBAAkBhE,OAAOoE,QAAQ,CAAC,uBAAuB,EAAEC;gBAC7D,MAAMC,WAAW,MAAM3E,aAAa;oBAClC4E,OAAOJ;oBACP5B;gBACF;gBAEA,MAAMiC,SAAS,MAAM5E,aAAa;oBAChC6E,IAAIpC,IAAIoC,EAAE;oBACVC,MAAMrC,IAAIkB,QAAQ,IAAI;oBACtBjD;oBACA4D,gBAAgB7B,IAAI6B,cAAc;oBAClCzB;oBACAlC;oBACAoE,MAAM;wBACJD,MAAMrC,IAAIkB,QAAQ;wBAClBqB,MAAMlC;wBACNmC,UAAUl
C;oBACZ;oBACAmC,QAAQnC,iBAAiB,aAAa,QAAQ;oBAC9CoC,YAAY1C,IAAI0C,UAAU,IAAI;oBAC9BC,YAAY3C,IAAI2C,UAAU;oBAC1BV;oBACA/B;oBACA0C,gBAAgB1C,KAAK2C,MAAMrE,cAAc0B,KAAK2C,MAAMA,MAAMrE;oBAC1DsE,QAAQ5C,KAAK2C,MAAMT,MAAMlC,KAAK2C,MAAMA,MAAMT;gBAC5C;gBAEA,mBAAmB;gBACnB,IAAIjC;gBACJ,IAAIgC,OAAOY,MAAM,CAACC,MAAM,KAAK,GAAG;oBAC9B7C,SAAS;gBACX,OAAO,IAAIgC,OAAOc,QAAQ,GAAGd,OAAOe,OAAO,KAAK,GAAG;oBACjD/C,SAAS;gBACX,OAAO;oBACLA,SAAS;gBACX;gBAEA,MAAMgD,UAAU;oBACdF,UAAUd,OAAOc,QAAQ;oBACzBG,cACEjB,OAAOY,MAAM,CAACC,MAAM,GAAG,IACnBb,OAAOY,MAAM,CAAC1E,GAAG,CAAC,CAACgF,IAAO,CAAA;4BACxBd,MAAMc,EAAErD,GAAG;4BACXsD,OAAOD,EAAEC,KAAK;4BACdC,KAAKF,EAAEG,KAAK,GAAG;wBACjB,CAAA,KACApC;oBACNqC,QAAQtB,OAAOY,MAAM,CAACC,MAAM;oBAC5BU,OAAOvB,OAAOuB,KAAK;oBACnBR,SAASf,OAAOe,OAAO;gBACzB;gBAEA,+EAA+E;gBAC/E,IAAI;oBACF,MAAMhD,IAAI0B,OAAO,CAAClD,MAAM,CAAC;wBACvB0D,IAAIpC,IAAIoC,EAAE;wBACV5D,YAAYuB,iBAAiBxB,IAAI;wBACjCgE,MAAM;4BACJpC;4BACAgD;wBACF;wBACAQ,gBAAgB;wBAChBzD;oBACF;gBACF,EAAE,OAAO0D,WAAW;oBAClB,kEAAkE;oBAClE,IAAIxD,OAAO;wBACTF,IAAI0B,OAAO,CAACiC,MAAM,CAACP,KAAK,CAAC;4BACvBQ,KAAKF;4BACLG,KAAK,CAAC,iCAAiC,EAAE/D,IAAIoC,EAAE,CAAC,aAAa,CAAC;wBAChE;oBACF;gBACF;gBAEA,4CAA4C;gBAC5C,OAAO;oBACL,GAAGpC,GAAG;oBACNG;oBACAgD;gBACF;YACF,EAAE,OAAOW,KAAK;gBACZ,MAAMX,UAAU;oBACdF,UAAU;oBACVG,cAAc;wBACZ;4BACEb,MAAM,CAAC;4BACPe,OAAOQ,eAAelD,QAAQkD,IAAIE,OAAO,GAAGC,OAAOH;4BACnDP,KAAK;wBACP;qBACD;oBACDE,QAAQ;oBACRC,OAAO;oBACPR,SAAS;gBACX;gBAEA,2CAA2C;gBAC3C,IAAI;oBACF,MAAMhD,IAAI0B,OAAO,CAAClD,MAAM,CAAC;wBACvB0D,IAAIpC,IAAIoC,EAAE;wBACV5D,YAAYuB,iBAAiBxB,IAAI;wBACjCgE,MAAM;4BACJpC,QAAQ;4BACRgD;wBACF;wBACAQ,gBAAgB;wBAChBzD;oBACF;gBACF,EAAE,OAAO0D,WAAW;oBAClB,kEAAkE;oBAClE,IAAIxD,OAAO;wBACTF,IAAI0B,OAAO,CAACiC,MAAM,CAACP,KAAK,CAAC;4BACvBQ,KAAKF;4BACLG,KAAK,CAAC,iCAAiC,EAAE/D,IAAIoC,EAAE,CAAC,kBAAkB,CAAC;wBACrE;oBACF;gBACF;gBAEA,IAAIhC,OAAO;oBACTF,IAAI0B,OAAO,CAACiC,MAAM,CAACP,KAAK,CAAC;wBACvBQ;wBACAC,KAAK;oBACP;gBACF;gBAEA,6CAA6C;gBAC7C,OAAO;oBACL,GAAG/D,GAAG;oBACNG,QAAQ;oBACRgD;gBACF;YACF;QACF;IACF,OAAO;QACL,wDAAwD;QACxDpF,YAA
Y+B,IAAI,CAAC,OAAO,EAAEtB,YAAYuB,gBAAgB,EAAEC,GAAG,EAAEC,SAAS,EAAEC,GAAG,EAAE;YAC3E,IAAID,cAAc,UAAU;gBAC1B;YACF;YAEA,IAAI;gBACF,oGAAoG;gBACpG,IAAII;gBACJ,IAAIL,IAAIO,GAAG,IAAIP,IAAIO,GAAG,CAACC,UAAU,CAAC,SAAS;oBACzC,MAAMC,WAAW,MAAMC,MAAMV,IAAIO,GAAG;oBACpC,IAAI,CAACE,SAASE,EAAE,EAAE;wBAChB,MAAM,IAAIC,MAAM,CAAC,+BAA+B,EAAEZ,IAAIO,GAAG,EAAE;oBAC7D;oBACAF,WAAWQ,OAAOC,IAAI,CAAC,MAAML,SAASM,WAAW;gBACnD,OAAO;oBACL,MAAME,WAAWjB,IAAIkB,QAAQ;oBAC7B,gEAAgE;oBAChE,MAAMC,eACJ,OAAOpB,kBAAkBN,WAAW,WAAWM,iBAAiBN,MAAM,GAAG2B;oBAC3E,MAAMC,YAAYF,cAAcG,aAAa;oBAC7C,MAAMC,WAAWlE,KAAKmE,OAAO,CAACH,WAAWJ;oBACzCZ,WAAW,MAAMjD,GAAGqE,QAAQ,CAACC,QAAQ,CAACH;gBACxC;gBAEA,MAAMI,mBAAmBzB,IAAI0B,OAAO,CAACxD,WAAW,CAAC4B,IAAI6B,cAAc,CAAC;gBACpE,MAAMC,oBACJH,kBAAkBhE,OAAOoE,QAAQ,CAAC,uBAAuB,EAAEC;gBAC7D,MAAMC,WAAW,MAAM3E,aAAa;oBAClC4E,OAAOJ;oBACP5B;gBACF;gBAEA,MAAMgE,QAAyB;oBAC7B7B,MAAMrC,IAAIkB,QAAQ;oBAClBjD;oBACA4D,gBAAgB7B,IAAI6B,cAAc;oBAClCzB,OAAOvC,aAAauC,KAAK;oBACzBlC;oBACAoE,MAAM;wBACJD,MAAMrC,IAAIkB,QAAQ;wBAClB,6FAA6F;wBAC7FqB,MAAMlC,SAAS8D,QAAQ,CAAC;wBACxB3B,UAAUxC,IAAIgB,QAAQ,IAAI;oBAC5B;oBACAyB,QAAQzC,IAAIgB,QAAQ,KAAK,aAAa,QAAQ;oBAC9CoD,UAAUpE,IAAIoC,EAAE;oBAChBM,YAAY1C,IAAI0C,UAAU,IAAI;oBAC9B2B,mBAAmBtE,iBAAiBxB,IAAI;oBACxCoE,YAAY3C,IAAI2C,UAAU;oBAC1BV;oBACAW,gBAAgB1C,IAAI2C,IAAI,EAAErE,cAAc0B,KAAK2C,MAAMA,MAAMrE;oBACzDsE,QAAQ5C,KAAK2C,MAAMT,MAAMlC,KAAK2C,MAAMA,MAAMT;gBAC5C;gBAEA,MAAMlC,IAAI0B,OAAO,CAAC0C,IAAI,CAACC,KAAK,CAAC;oBAC3BL;oBACAM,MAAM;gBACR;YACF,EAAE,OAAOV,KAAK;gBACZ5D,IAAI0B,OAAO,CAACiC,MAAM,CAACP,KAAK,CAAC;oBACvBQ;oBACAC,KAAK,CAAC,wCAAwC,EAAE/D,IAAIoC,EAAE,EAAE;gBAC1D;YACF;QACF;IACF;IAEA,OAAO5D;AACT,EAAC"}
|
|
1
|
+
{"version":3,"sources":["../../src/import/getImportCollection.ts"],"sourcesContent":["import type {\n CollectionAfterChangeHook,\n CollectionBeforeOperationHook,\n CollectionConfig,\n Config,\n} from 'payload'\n\nimport { FileRetrievalError } from 'payload'\n\nimport type { ImportConfig, ImportExportPluginConfig, Limit } from '../types.js'\nimport type { ImportTaskInput } from './getCreateImportCollectionTask.js'\n\nimport { getFileFromDoc } from '../utilities/getFileFromDoc.js'\nimport { resolveLimit } from '../utilities/resolveLimit.js'\nimport { createImport } from './createImport.js'\nimport { getFields } from './getFields.js'\nimport { handlePreview } from './handlePreview.js'\n\nexport const getImportCollection = ({\n config,\n importConfig,\n pluginConfig,\n}: {\n config: Config\n importConfig?: ImportConfig\n pluginConfig: ImportExportPluginConfig\n}): CollectionConfig => {\n const beforeOperation: CollectionBeforeOperationHook[] = []\n const afterChange: CollectionAfterChangeHook[] = []\n\n // Extract import-specific settings\n const disableJobsQueue = importConfig?.disableJobsQueue ?? false\n const batchSize = importConfig?.batchSize ?? 100\n const defaultVersionStatus = importConfig?.defaultVersionStatus ?? 
'published'\n\n // Get collection slugs for the dropdown\n const collectionSlugs = pluginConfig.collections?.map((c) => c.slug)\n\n const collection: CollectionConfig = {\n slug: 'imports',\n access: {\n update: () => false,\n },\n admin: {\n components: {\n edit: {\n SaveButton: '@payloadcms/plugin-import-export/rsc#ImportSaveButton',\n },\n },\n disableCopyToLocale: true,\n group: false,\n useAsTitle: 'filename',\n },\n disableDuplicate: true,\n endpoints: [\n {\n handler: handlePreview,\n method: 'post',\n path: '/preview-data',\n },\n ],\n fields: getFields(config, { collectionSlugs }),\n hooks: {\n afterChange,\n beforeOperation,\n },\n lockDocuments: false,\n upload: {\n filesRequiredOnCreate: true,\n hideFileInputOnCreate: false,\n hideRemoveFile: true,\n mimeTypes: ['text/csv', 'application/json'],\n },\n }\n\n if (disableJobsQueue) {\n // Process the import synchronously after the document (with file) has been created\n afterChange.push(async ({ collection: collectionConfig, doc, operation, req }) => {\n if (operation !== 'create' || doc.status !== 'pending') {\n return doc\n }\n\n const debug = pluginConfig.debug || false\n\n try {\n // Get file data from the uploaded document\n // First try req.file which is available during the same request (especially important for cloud storage)\n // Fall back to getFileFromDoc for cases where req.file isn't available\n let fileData: Buffer\n let fileMimetype: string\n\n if (req.file?.data) {\n fileData = req.file.data\n fileMimetype = req.file.mimetype || doc.mimeType\n\n if (!fileMimetype) {\n throw new FileRetrievalError(\n req.t,\n `Unable to determine mimetype for file: ${doc.filename}`,\n )\n }\n } else {\n const fileFromDoc = await getFileFromDoc({\n collectionConfig,\n doc: {\n filename: doc.filename,\n mimeType: doc.mimeType,\n url: doc.url,\n },\n req,\n })\n fileData = fileFromDoc.data\n fileMimetype = fileFromDoc.mimetype\n }\n\n const targetCollection = req.payload.collections[doc.collectionSlug]\n const 
importLimitConfig: Limit | undefined =\n targetCollection?.config.custom?.['plugin-import-export']?.importLimit\n const maxLimit = await resolveLimit({\n limit: importLimitConfig,\n req,\n })\n\n const result = await createImport({\n id: doc.id,\n name: doc.filename || 'import',\n batchSize,\n collectionSlug: doc.collectionSlug,\n debug,\n defaultVersionStatus,\n file: {\n name: doc.filename,\n data: fileData,\n mimetype: fileMimetype,\n },\n format: fileMimetype === 'text/csv' ? 'csv' : 'json',\n importMode: doc.importMode || 'create',\n matchField: doc.matchField,\n maxLimit,\n req,\n userCollection: req?.user?.collection || req?.user?.user?.collection,\n userID: req?.user?.id || req?.user?.user?.id,\n })\n\n // Determine status\n let status: 'completed' | 'failed' | 'partial'\n if (result.errors.length === 0) {\n status = 'completed'\n } else if (result.imported + result.updated === 0) {\n status = 'failed'\n } else {\n status = 'partial'\n }\n\n const summary = {\n imported: result.imported,\n issueDetails:\n result.errors.length > 0\n ? result.errors.map((e) => ({\n data: e.doc,\n error: e.error,\n row: e.index + 1,\n }))\n : undefined,\n issues: result.errors.length,\n total: result.total,\n updated: result.updated,\n }\n\n // Try to update the document with results (may fail due to transaction timing)\n try {\n await req.payload.update({\n id: doc.id,\n collection: collectionConfig.slug,\n data: {\n status,\n summary,\n },\n overrideAccess: true,\n req,\n })\n } catch (updateErr) {\n // Update may fail if document not yet committed, log but continue\n if (debug) {\n req.payload.logger.error({\n err: updateErr,\n msg: `Failed to update import document ${doc.id} with results`,\n })\n }\n }\n\n // Return updated doc for immediate response\n return {\n ...doc,\n status,\n summary,\n }\n } catch (err) {\n const summary = {\n imported: 0,\n issueDetails: [\n {\n data: {},\n error: err instanceof Error ? 
err.message : String(err),\n row: 0,\n },\n ],\n issues: 1,\n total: 0,\n updated: 0,\n }\n\n // Try to update document with error status\n try {\n await req.payload.update({\n id: doc.id,\n collection: collectionConfig.slug,\n data: {\n status: 'failed',\n summary,\n },\n overrideAccess: true,\n req,\n })\n } catch (updateErr) {\n // Update may fail if document not yet committed, log but continue\n if (debug) {\n req.payload.logger.error({\n err: updateErr,\n msg: `Failed to update import document ${doc.id} with error status`,\n })\n }\n }\n\n if (debug) {\n req.payload.logger.error({\n err,\n msg: 'Import processing failed',\n })\n }\n\n // Return error status for immediate response\n return {\n ...doc,\n status: 'failed',\n summary,\n }\n }\n })\n } else {\n // When jobs queue is enabled, queue the import as a job\n // The job handler will fetch the file from storage using getFileFromDoc\n afterChange.push(async ({ collection: collectionConfig, doc, operation, req }) => {\n if (operation !== 'create') {\n return\n }\n\n try {\n // Resolve maxLimit ahead of time since it may involve async config resolution\n const targetCollection = req.payload.collections[doc.collectionSlug]\n const importLimitConfig: Limit | undefined =\n targetCollection?.config.custom?.['plugin-import-export']?.importLimit\n const maxLimit = await resolveLimit({\n limit: importLimitConfig,\n req,\n })\n\n // Only pass minimal data to the job - the handler will fetch the file from storage\n const input: ImportTaskInput = {\n batchSize,\n debug: pluginConfig.debug,\n defaultVersionStatus,\n importCollection: collectionConfig.slug,\n importId: doc.id,\n maxLimit,\n userCollection: req.user?.collection || req?.user?.user?.collection,\n userID: req?.user?.id || req?.user?.user?.id,\n }\n\n await req.payload.jobs.queue({\n input,\n task: 'createCollectionImport',\n })\n } catch (err) {\n req.payload.logger.error({\n err,\n msg: `Failed to queue import job for document ${doc.id}`,\n })\n }\n })\n 
}\n\n return collection\n}\n"],"names":["FileRetrievalError","getFileFromDoc","resolveLimit","createImport","getFields","handlePreview","getImportCollection","config","importConfig","pluginConfig","beforeOperation","afterChange","disableJobsQueue","batchSize","defaultVersionStatus","collectionSlugs","collections","map","c","slug","collection","access","update","admin","components","edit","SaveButton","disableCopyToLocale","group","useAsTitle","disableDuplicate","endpoints","handler","method","path","fields","hooks","lockDocuments","upload","filesRequiredOnCreate","hideFileInputOnCreate","hideRemoveFile","mimeTypes","push","collectionConfig","doc","operation","req","status","debug","fileData","fileMimetype","file","data","mimetype","mimeType","t","filename","fileFromDoc","url","targetCollection","payload","collectionSlug","importLimitConfig","custom","importLimit","maxLimit","limit","result","id","name","format","importMode","matchField","userCollection","user","userID","errors","length","imported","updated","summary","issueDetails","e","error","row","index","undefined","issues","total","overrideAccess","updateErr","logger","err","msg","Error","message","String","input","importCollection","importId","jobs","queue","task"],"mappings":"AAOA,SAASA,kBAAkB,QAAQ,UAAS;AAK5C,SAASC,cAAc,QAAQ,iCAAgC;AAC/D,SAASC,YAAY,QAAQ,+BAA8B;AAC3D,SAASC,YAAY,QAAQ,oBAAmB;AAChD,SAASC,SAAS,QAAQ,iBAAgB;AAC1C,SAASC,aAAa,QAAQ,qBAAoB;AAElD,OAAO,MAAMC,sBAAsB,CAAC,EAClCC,MAAM,EACNC,YAAY,EACZC,YAAY,EAKb;IACC,MAAMC,kBAAmD,EAAE;IAC3D,MAAMC,cAA2C,EAAE;IAEnD,mCAAmC;IACnC,MAAMC,mBAAmBJ,cAAcI,oBAAoB;IAC3D,MAAMC,YAAYL,cAAcK,aAAa;IAC7C,MAAMC,uBAAuBN,cAAcM,wBAAwB;IAEnE,wCAAwC;IACxC,MAAMC,kBAAkBN,aAAaO,WAAW,EAAEC,IAAI,CAACC,IAAMA,EAAEC,IAAI;IAEnE,MAAMC,aAA+B;QACnCD,MAAM;QACNE,QAAQ;YACNC,QAAQ,IAAM;QAChB;QACAC,OAAO;YACLC,YAAY;gBACVC,MAAM;oBACJC,YAAY;gBACd;YACF;YACAC,qBAAqB;YACrBC,OAAO;YACPC,YAAY;QACd;QACAC,kBAAkB;QAClBC,WAAW;YACT;gBACEC,SAAS3B;gBACT4B,QAAQ;gBACRC,MAAM;YACR;SACD;QACDC,QAAQ/B,UAAUG,QAAQ;YAAEQ;QAAg
B;QAC5CqB,OAAO;YACLzB;YACAD;QACF;QACA2B,eAAe;QACfC,QAAQ;YACNC,uBAAuB;YACvBC,uBAAuB;YACvBC,gBAAgB;YAChBC,WAAW;gBAAC;gBAAY;aAAmB;QAC7C;IACF;IAEA,IAAI9B,kBAAkB;QACpB,mFAAmF;QACnFD,YAAYgC,IAAI,CAAC,OAAO,EAAEvB,YAAYwB,gBAAgB,EAAEC,GAAG,EAAEC,SAAS,EAAEC,GAAG,EAAE;YAC3E,IAAID,cAAc,YAAYD,IAAIG,MAAM,KAAK,WAAW;gBACtD,OAAOH;YACT;YAEA,MAAMI,QAAQxC,aAAawC,KAAK,IAAI;YAEpC,IAAI;gBACF,2CAA2C;gBAC3C,yGAAyG;gBACzG,uEAAuE;gBACvE,IAAIC;gBACJ,IAAIC;gBAEJ,IAAIJ,IAAIK,IAAI,EAAEC,MAAM;oBAClBH,WAAWH,IAAIK,IAAI,CAACC,IAAI;oBACxBF,eAAeJ,IAAIK,IAAI,CAACE,QAAQ,IAAIT,IAAIU,QAAQ;oBAEhD,IAAI,CAACJ,cAAc;wBACjB,MAAM,IAAInD,mBACR+C,IAAIS,CAAC,EACL,CAAC,uCAAuC,EAAEX,IAAIY,QAAQ,EAAE;oBAE5D;gBACF,OAAO;oBACL,MAAMC,cAAc,MAAMzD,eAAe;wBACvC2C;wBACAC,KAAK;4BACHY,UAAUZ,IAAIY,QAAQ;4BACtBF,UAAUV,IAAIU,QAAQ;4BACtBI,KAAKd,IAAIc,GAAG;wBACd;wBACAZ;oBACF;oBACAG,WAAWQ,YAAYL,IAAI;oBAC3BF,eAAeO,YAAYJ,QAAQ;gBACrC;gBAEA,MAAMM,mBAAmBb,IAAIc,OAAO,CAAC7C,WAAW,CAAC6B,IAAIiB,cAAc,CAAC;gBACpE,MAAMC,oBACJH,kBAAkBrD,OAAOyD,QAAQ,CAAC,uBAAuB,EAAEC;gBAC7D,MAAMC,WAAW,MAAMhE,aAAa;oBAClCiE,OAAOJ;oBACPhB;gBACF;gBAEA,MAAMqB,SAAS,MAAMjE,aAAa;oBAChCkE,IAAIxB,IAAIwB,EAAE;oBACVC,MAAMzB,IAAIY,QAAQ,IAAI;oBACtB5C;oBACAiD,gBAAgBjB,IAAIiB,cAAc;oBAClCb;oBACAnC;oBACAsC,MAAM;wBACJkB,MAAMzB,IAAIY,QAAQ;wBAClBJ,MAAMH;wBACNI,UAAUH;oBACZ;oBACAoB,QAAQpB,iBAAiB,aAAa,QAAQ;oBAC9CqB,YAAY3B,IAAI2B,UAAU,IAAI;oBAC9BC,YAAY5B,IAAI4B,UAAU;oBAC1BP;oBACAnB;oBACA2B,gBAAgB3B,KAAK4B,MAAMvD,cAAc2B,KAAK4B,MAAMA,MAAMvD;oBAC1DwD,QAAQ7B,KAAK4B,MAAMN,MAAMtB,KAAK4B,MAAMA,MAAMN;gBAC5C;gBAEA,mBAAmB;gBACnB,IAAIrB;gBACJ,IAAIoB,OAAOS,MAAM,CAACC,MAAM,KAAK,GAAG;oBAC9B9B,SAAS;gBACX,OAAO,IAAIoB,OAAOW,QAAQ,GAAGX,OAAOY,OAAO,KAAK,GAAG;oBACjDhC,SAAS;gBACX,OAAO;oBACLA,SAAS;gBACX;gBAEA,MAAMiC,UAAU;oBACdF,UAAUX,OAAOW,QAAQ;oBACzBG,cACEd,OAAOS,MAAM,CAACC,MAAM,GAAG,IACnBV,OAAOS,MAAM,CAAC5D,GAAG,CAAC,CAACkE,IAAO,CAAA;4BACxB9B,MAAM8B,EAAEtC,GAAG;4BACXuC,OAAOD,EAAEC,KAAK;4BACdC,KAAKF,EAAEG,KAAK,GAAG;wBACjB,CAAA,KACAC;oBACNC,QAAQpB,OAAOS,MAAM,CAACC,MAAM;oBAC5BW,OAAOrB,OAAOqB,KAAK;oBACnBT,SAASZ,OAAO
Y,OAAO;gBACzB;gBAEA,+EAA+E;gBAC/E,IAAI;oBACF,MAAMjC,IAAIc,OAAO,CAACvC,MAAM,CAAC;wBACvB+C,IAAIxB,IAAIwB,EAAE;wBACVjD,YAAYwB,iBAAiBzB,IAAI;wBACjCkC,MAAM;4BACJL;4BACAiC;wBACF;wBACAS,gBAAgB;wBAChB3C;oBACF;gBACF,EAAE,OAAO4C,WAAW;oBAClB,kEAAkE;oBAClE,IAAI1C,OAAO;wBACTF,IAAIc,OAAO,CAAC+B,MAAM,CAACR,KAAK,CAAC;4BACvBS,KAAKF;4BACLG,KAAK,CAAC,iCAAiC,EAAEjD,IAAIwB,EAAE,CAAC,aAAa,CAAC;wBAChE;oBACF;gBACF;gBAEA,4CAA4C;gBAC5C,OAAO;oBACL,GAAGxB,GAAG;oBACNG;oBACAiC;gBACF;YACF,EAAE,OAAOY,KAAK;gBACZ,MAAMZ,UAAU;oBACdF,UAAU;oBACVG,cAAc;wBACZ;4BACE7B,MAAM,CAAC;4BACP+B,OAAOS,eAAeE,QAAQF,IAAIG,OAAO,GAAGC,OAAOJ;4BACnDR,KAAK;wBACP;qBACD;oBACDG,QAAQ;oBACRC,OAAO;oBACPT,SAAS;gBACX;gBAEA,2CAA2C;gBAC3C,IAAI;oBACF,MAAMjC,IAAIc,OAAO,CAACvC,MAAM,CAAC;wBACvB+C,IAAIxB,IAAIwB,EAAE;wBACVjD,YAAYwB,iBAAiBzB,IAAI;wBACjCkC,MAAM;4BACJL,QAAQ;4BACRiC;wBACF;wBACAS,gBAAgB;wBAChB3C;oBACF;gBACF,EAAE,OAAO4C,WAAW;oBAClB,kEAAkE;oBAClE,IAAI1C,OAAO;wBACTF,IAAIc,OAAO,CAAC+B,MAAM,CAACR,KAAK,CAAC;4BACvBS,KAAKF;4BACLG,KAAK,CAAC,iCAAiC,EAAEjD,IAAIwB,EAAE,CAAC,kBAAkB,CAAC;wBACrE;oBACF;gBACF;gBAEA,IAAIpB,OAAO;oBACTF,IAAIc,OAAO,CAAC+B,MAAM,CAACR,KAAK,CAAC;wBACvBS;wBACAC,KAAK;oBACP;gBACF;gBAEA,6CAA6C;gBAC7C,OAAO;oBACL,GAAGjD,GAAG;oBACNG,QAAQ;oBACRiC;gBACF;YACF;QACF;IACF,OAAO;QACL,wDAAwD;QACxD,wEAAwE;QACxEtE,YAAYgC,IAAI,CAAC,OAAO,EAAEvB,YAAYwB,gBAAgB,EAAEC,GAAG,EAAEC,SAAS,EAAEC,GAAG,EAAE;YAC3E,IAAID,cAAc,UAAU;gBAC1B;YACF;YAEA,IAAI;gBACF,8EAA8E;gBAC9E,MAAMc,mBAAmBb,IAAIc,OAAO,CAAC7C,WAAW,CAAC6B,IAAIiB,cAAc,CAAC;gBACpE,MAAMC,oBACJH,kBAAkBrD,OAAOyD,QAAQ,CAAC,uBAAuB,EAAEC;gBAC7D,MAAMC,WAAW,MAAMhE,aAAa;oBAClCiE,OAAOJ;oBACPhB;gBACF;gBAEA,mFAAmF;gBACnF,MAAMmD,QAAyB;oBAC7BrF;oBACAoC,OAAOxC,aAAawC,KAAK;oBACzBnC;oBACAqF,kBAAkBvD,iBAAiBzB,IAAI;oBACvCiF,UAAUvD,IAAIwB,EAAE;oBAChBH;oBACAQ,gBAAgB3B,IAAI4B,IAAI,EAAEvD,cAAc2B,KAAK4B,MAAMA,MAAMvD;oBACzDwD,QAAQ7B,KAAK4B,MAAMN,MAAMtB,KAAK4B,MAAMA,MAAMN;gBAC5C;gBAEA,MAAMtB,IAAIc,OAAO,CAACwC,IAAI,CAACC,KAAK,CAAC;oBAC3BJ;oBACAK,MAAM;gBACR;YACF,EAAE,OAAOV,KAAK;gBACZ9C,IAAIc,OAAO,CAAC+B,MAAM,CAACR,KAAK,C
AAC;oBACvBS;oBACAC,KAAK,CAAC,wCAAwC,EAAEjD,IAAIwB,EAAE,EAAE;gBAC1D;YACF;QACF;IACF;IAEA,OAAOjD;AACT,EAAC"}
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
import type { CollectionConfig, PayloadRequest } from 'payload';
/** Arguments accepted by `getFileFromDoc`. */
type Args = {
    /** Sanitized collection config whose `upload` settings (staticDir, disableLocalStorage) drive retrieval. */
    collectionConfig: CollectionConfig;
    /** Minimal shape of the upload document the file belongs to. */
    doc: {
        filename: string;
        /** Stored mimetype of the upload; used as a fallback when the retrieved file has none. */
        mimeType?: string;
        /** Absolute or server-relative URL of the stored file, when available. */
        url?: string;
    };
    req: PayloadRequest;
};
/** Resolved file bytes together with the mimetype they were served/stored with. */
type Result = {
    data: Buffer;
    mimetype: string;
};
/**
 * Retrieves file data from an uploaded document, handling both local storage
 * and cloud storage (S3, Azure, GCS, etc.) scenarios correctly.
 *
 * This function uses the same pattern as Payload's internal file retrieval:
 * - For local storage: reads directly from disk (efficient, no HTTP roundtrip)
 * - For cloud storage: fetches via Payload's file endpoint, which triggers
 *   the storage adapter's staticHandler to serve the file
 */
export declare const getFileFromDoc: ({ collectionConfig, doc, req }: Args) => Promise<Result>;
export {};
//# sourceMappingURL=getFileFromDoc.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"getFileFromDoc.d.ts","sourceRoot":"","sources":["../../src/utilities/getFileFromDoc.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,gBAAgB,EAAY,cAAc,EAAgB,MAAM,SAAS,CAAA;AAMvF,KAAK,IAAI,GAAG;IACV,gBAAgB,EAAE,gBAAgB,CAAA;IAClC,GAAG,EAAE;QAAE,QAAQ,EAAE,MAAM,CAAC;QAAC,QAAQ,CAAC,EAAE,MAAM,CAAC;QAAC,GAAG,CAAC,EAAE,MAAM,CAAA;KAAE,CAAA;IAC1D,GAAG,EAAE,cAAc,CAAA;CACpB,CAAA;AAED,KAAK,MAAM,GAAG;IACZ,IAAI,EAAE,MAAM,CAAA;IACZ,QAAQ,EAAE,MAAM,CAAA;CACjB,CAAA;AAED;;;;;;;;GAQG;AACH,eAAO,MAAM,cAAc,mCAA0C,IAAI,KAAG,OAAO,CAAC,MAAM,CAkEzF,CAAA"}
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
import { FileRetrievalError, getFileByPath } from 'payload';
|
|
2
|
+
import { getExternalFile } from 'payload/internal';
|
|
3
|
+
import { formatAdminURL } from 'payload/shared';
|
|
4
|
+
/**
 * Retrieves file data from an uploaded document, handling both local storage
 * and cloud storage (S3, Azure, GCS, etc.) scenarios correctly.
 *
 * This function uses the same pattern as Payload's internal file retrieval:
 * - For local storage: reads directly from disk (efficient, no HTTP roundtrip)
 * - For cloud storage: fetches via Payload's file endpoint, which triggers
 *   the storage adapter's staticHandler to serve the file
 */ export const getFileFromDoc = async ({ collectionConfig, doc, req })=>{
    // `upload` on a sanitized collection may be `true` or a settings object; normalize.
    const uploadSettings = typeof collectionConfig.upload === 'object' ? collectionConfig.upload : {};
    const localStorageDisabled = uploadSettings.disableLocalStorage ?? false;
    const directory = uploadSettings.staticDir || collectionConfig.slug;
    const { serverURL } = req.payload.config;
    // A URL on this server (absolute with matching origin, or server-relative)
    // suggests the bytes may be readable straight from disk.
    const pointsAtThisServer = serverURL && doc.url?.startsWith(serverURL) || doc.url?.startsWith('/');
    // Resolves the mimetype for a retrieved file, preferring what the storage
    // layer reports and falling back to the document's stored mimeType.
    const resolveMimetype = (retrievedFile)=>{
        const mimetype = retrievedFile.mimetype || doc.mimeType;
        if (!mimetype) {
            throw new FileRetrievalError(req.t, `Unable to determine mimetype for file: ${doc.filename}`);
        }
        return mimetype;
    };
    if (!localStorageDisabled && pointsAtThisServer && doc.filename) {
        // Local storage enabled - read directly from disk (efficient, no HTTP roundtrip)
        const diskPath = `${directory}/${doc.filename}`;
        const localFile = await getFileByPath(diskPath);
        if (!localFile) {
            throw new Error(`File not found at path: ${diskPath}`);
        }
        const mimetype = resolveMimetype(localFile);
        return {
            data: localFile.data,
            mimetype
        };
    }
    if (doc.filename && doc.url) {
        // Cloud storage or external - fetch via Payload's file endpoint.
        // getExternalFile constructs the full request (including auth cookies) and
        // routes through Payload's handler chain, so the storage adapter's
        // staticHandler serves the bytes. Relative URLs are first made absolute
        // with formatAdminURL, which respects serverURL/basePath config; this
        // matters in job contexts where request headers may be unavailable.
        // serverURL from config wins, with req.origin as the local/job fallback.
        const absoluteURL = doc.url.startsWith('http') ? doc.url : formatAdminURL({
            apiRoute: '',
            path: doc.url,
            serverURL: serverURL || req.origin
        });
        const remoteFile = await getExternalFile({
            data: {
                filename: doc.filename,
                url: absoluteURL
            },
            req,
            uploadConfig: uploadSettings
        });
        const mimetype = resolveMimetype(remoteFile);
        return {
            data: remoteFile.data,
            mimetype
        };
    }
    throw new Error('Unable to retrieve file: missing filename or url');
};
|
|
66
|
+
|
|
67
|
+
//# sourceMappingURL=getFileFromDoc.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../src/utilities/getFileFromDoc.ts"],"sourcesContent":["import type { CollectionConfig, FileData, PayloadRequest, UploadConfig } from 'payload'\n\nimport { FileRetrievalError, getFileByPath } from 'payload'\nimport { getExternalFile } from 'payload/internal'\nimport { formatAdminURL } from 'payload/shared'\n\ntype Args = {\n collectionConfig: CollectionConfig\n doc: { filename: string; mimeType?: string; url?: string }\n req: PayloadRequest\n}\n\ntype Result = {\n data: Buffer\n mimetype: string\n}\n\n/**\n * Retrieves file data from an uploaded document, handling both local storage\n * and cloud storage (S3, Azure, GCS, etc.) scenarios correctly.\n *\n * This function uses the same pattern as Payload's internal file retrieval:\n * - For local storage: reads directly from disk (efficient, no HTTP roundtrip)\n * - For cloud storage: fetches via Payload's file endpoint, which triggers\n * the storage adapter's staticHandler to serve the file\n */\nexport const getFileFromDoc = async ({ collectionConfig, doc, req }: Args): Promise<Result> => {\n const uploadConfig: UploadConfig =\n typeof collectionConfig.upload === 'object' ? collectionConfig.upload : {}\n const disableLocalStorage = uploadConfig.disableLocalStorage ?? 
false\n const staticDir = uploadConfig.staticDir || collectionConfig.slug\n\n const serverURL = req.payload.config.serverURL\n const isLocalFile = (serverURL && doc.url?.startsWith(serverURL)) || doc.url?.startsWith('/')\n\n if (!disableLocalStorage && isLocalFile && doc.filename) {\n // Local storage enabled - read directly from disk (efficient, no HTTP roundtrip)\n const filePath = `${staticDir}/${doc.filename}`\n const file = await getFileByPath(filePath)\n\n if (!file) {\n throw new Error(`File not found at path: ${filePath}`)\n }\n\n const mimetype = file.mimetype || doc.mimeType\n\n if (!mimetype) {\n throw new FileRetrievalError(req.t, `Unable to determine mimetype for file: ${doc.filename}`)\n }\n\n return {\n data: file.data,\n mimetype,\n }\n }\n\n if (doc.filename && doc.url) {\n // Cloud storage or external - fetch via Payload's file endpoint\n // getExternalFile constructs full URL, includes cookies for auth, and\n // the request goes through Payload's handler chain (including storage adapter)\n\n // For relative URLs, construct a full URL using formatAdminURL which properly\n // handles serverURL, basePath, and other config. This is important in job contexts\n // where request headers may not be available for URL construction.\n // Use serverURL from config, falling back to req.origin for local/job requests.\n const fileUrl = doc.url.startsWith('http')\n ? 
doc.url\n : formatAdminURL({\n apiRoute: '',\n path: doc.url as `/${string}`,\n serverURL: serverURL || req.origin,\n })\n\n const file = await getExternalFile({\n data: { filename: doc.filename, url: fileUrl } as FileData,\n req,\n uploadConfig,\n })\n\n const mimetype = file.mimetype || doc.mimeType\n\n if (!mimetype) {\n throw new FileRetrievalError(req.t, `Unable to determine mimetype for file: ${doc.filename}`)\n }\n\n return {\n data: file.data,\n mimetype,\n }\n }\n\n throw new Error('Unable to retrieve file: missing filename or url')\n}\n"],"names":["FileRetrievalError","getFileByPath","getExternalFile","formatAdminURL","getFileFromDoc","collectionConfig","doc","req","uploadConfig","upload","disableLocalStorage","staticDir","slug","serverURL","payload","config","isLocalFile","url","startsWith","filename","filePath","file","Error","mimetype","mimeType","t","data","fileUrl","apiRoute","path","origin"],"mappings":"AAEA,SAASA,kBAAkB,EAAEC,aAAa,QAAQ,UAAS;AAC3D,SAASC,eAAe,QAAQ,mBAAkB;AAClD,SAASC,cAAc,QAAQ,iBAAgB;AAa/C;;;;;;;;CAQC,GACD,OAAO,MAAMC,iBAAiB,OAAO,EAAEC,gBAAgB,EAAEC,GAAG,EAAEC,GAAG,EAAQ;IACvE,MAAMC,eACJ,OAAOH,iBAAiBI,MAAM,KAAK,WAAWJ,iBAAiBI,MAAM,GAAG,CAAC;IAC3E,MAAMC,sBAAsBF,aAAaE,mBAAmB,IAAI;IAChE,MAAMC,YAAYH,aAAaG,SAAS,IAAIN,iBAAiBO,IAAI;IAEjE,MAAMC,YAAYN,IAAIO,OAAO,CAACC,MAAM,CAACF,SAAS;IAC9C,MAAMG,cAAc,AAACH,aAAaP,IAAIW,GAAG,EAAEC,WAAWL,cAAeP,IAAIW,GAAG,EAAEC,WAAW;IAEzF,IAAI,CAACR,uBAAuBM,eAAeV,IAAIa,QAAQ,EAAE;QACvD,iFAAiF;QACjF,MAAMC,WAAW,GAAGT,UAAU,CAAC,EAAEL,IAAIa,QAAQ,EAAE;QAC/C,MAAME,OAAO,MAAMpB,cAAcmB;QAEjC,IAAI,CAACC,MAAM;YACT,MAAM,IAAIC,MAAM,CAAC,wBAAwB,EAAEF,UAAU;QACvD;QAEA,MAAMG,WAAWF,KAAKE,QAAQ,IAAIjB,IAAIkB,QAAQ;QAE9C,IAAI,CAACD,UAAU;YACb,MAAM,IAAIvB,mBAAmBO,IAAIkB,CAAC,EAAE,CAAC,uCAAuC,EAAEnB,IAAIa,QAAQ,EAAE;QAC9F;QAEA,OAAO;YACLO,MAAML,KAAKK,IAAI;YACfH;QACF;IACF;IAEA,IAAIjB,IAAIa,QAAQ,IAAIb,IAAIW,GAAG,EAAE;QAC3B,gEAAgE;QAChE,sEAAsE;QACtE,+EAA+E;QAE/E,8EAA8E;QAC9E,mFAAmF;QACnF,mEAAmE;QACnE,gFAAgF;QAChF,MAAMU,UAAUrB,IAAIW,GAAG,CAACC,UAAU,C
AAC,UAC/BZ,IAAIW,GAAG,GACPd,eAAe;YACbyB,UAAU;YACVC,MAAMvB,IAAIW,GAAG;YACbJ,WAAWA,aAAaN,IAAIuB,MAAM;QACpC;QAEJ,MAAMT,OAAO,MAAMnB,gBAAgB;YACjCwB,MAAM;gBAAEP,UAAUb,IAAIa,QAAQ;gBAAEF,KAAKU;YAAQ;YAC7CpB;YACAC;QACF;QAEA,MAAMe,WAAWF,KAAKE,QAAQ,IAAIjB,IAAIkB,QAAQ;QAE9C,IAAI,CAACD,UAAU;YACb,MAAM,IAAIvB,mBAAmBO,IAAIkB,CAAC,EAAE,CAAC,uCAAuC,EAAEnB,IAAIa,QAAQ,EAAE;QAC9F;QAEA,OAAO;YACLO,MAAML,KAAKK,IAAI;YACfH;QACF;IACF;IAEA,MAAM,IAAID,MAAM;AAClB,EAAC"}
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"getPluginCollections.d.ts","sourceRoot":"","sources":["../../src/utilities/getPluginCollections.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,EAAE,MAAM,SAAS,CAAA;AAEvD,OAAO,KAAK,EAA8B,wBAAwB,EAAE,MAAM,aAAa,CAAA;AAKvF,MAAM,MAAM,uBAAuB,GAAG;IACpC;;;OAGG;IACH,mBAAmB,EAAE,GAAG,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;IACxC;;;OAGG;IACH,mBAAmB,EAAE,GAAG,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;IACxC;;OAEG;IACH,iBAAiB,EAAE,gBAAgB,EAAE,CAAA;IACrC;;OAEG;IACH,iBAAiB,EAAE,gBAAgB,EAAE,CAAA;CACtC,CAAA;AAED;;;;;;;;;;;;GAYG;AACH,eAAO,MAAM,oBAAoB,8BAG9B;IACD,MAAM,EAAE,MAAM,CAAA;IACd,YAAY,EAAE,wBAAwB,CAAA;CACvC,KAAG,OAAO,CAAC,uBAAuB,
|
|
1
|
+
{"version":3,"file":"getPluginCollections.d.ts","sourceRoot":"","sources":["../../src/utilities/getPluginCollections.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,EAAE,MAAM,SAAS,CAAA;AAEvD,OAAO,KAAK,EAA8B,wBAAwB,EAAE,MAAM,aAAa,CAAA;AAKvF,MAAM,MAAM,uBAAuB,GAAG;IACpC;;;OAGG;IACH,mBAAmB,EAAE,GAAG,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;IACxC;;;OAGG;IACH,mBAAmB,EAAE,GAAG,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;IACxC;;OAEG;IACH,iBAAiB,EAAE,gBAAgB,EAAE,CAAA;IACrC;;OAEG;IACH,iBAAiB,EAAE,gBAAgB,EAAE,CAAA;CACtC,CAAA;AAED;;;;;;;;;;;;GAYG;AACH,eAAO,MAAM,oBAAoB,8BAG9B;IACD,MAAM,EAAE,MAAM,CAAA;IACd,YAAY,EAAE,wBAAwB,CAAA;CACvC,KAAG,OAAO,CAAC,uBAAuB,CA8JlC,CAAA"}
|
|
@@ -51,6 +51,14 @@ import { getImportCollection } from '../import/getImportCollection.js';
|
|
|
51
51
|
});
|
|
52
52
|
// If the slug changed, this is a separate collection; otherwise it modifies the base
|
|
53
53
|
if (customExport.slug !== baseExportCollection.slug) {
|
|
54
|
+
// Store the source collection slug so CollectionField can use it as default
|
|
55
|
+
customExport.admin = {
|
|
56
|
+
...customExport.admin,
|
|
57
|
+
custom: {
|
|
58
|
+
...customExport.admin?.custom,
|
|
59
|
+
defaultCollectionSlug: collectionConfig.slug
|
|
60
|
+
}
|
|
61
|
+
};
|
|
54
62
|
exportCollections.push(customExport);
|
|
55
63
|
customExportSlugMap.set(collectionConfig.slug, customExport.slug);
|
|
56
64
|
} else {
|
|
@@ -70,6 +78,14 @@ import { getImportCollection } from '../import/getImportCollection.js';
|
|
|
70
78
|
});
|
|
71
79
|
// If the slug changed, this is a separate collection; otherwise it modifies the base
|
|
72
80
|
if (customImport.slug !== baseImportCollection.slug) {
|
|
81
|
+
// Store the source collection slug so CollectionField can use it as default
|
|
82
|
+
customImport.admin = {
|
|
83
|
+
...customImport.admin,
|
|
84
|
+
custom: {
|
|
85
|
+
...customImport.admin?.custom,
|
|
86
|
+
defaultCollectionSlug: collectionConfig.slug
|
|
87
|
+
}
|
|
88
|
+
};
|
|
73
89
|
importCollections.push(customImport);
|
|
74
90
|
// Map this target collection to its custom import collection
|
|
75
91
|
customImportSlugMap.set(collectionConfig.slug, customImport.slug);
|