@payloadcms/storage-s3 3.82.1 → 3.83.0-canary.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. package/dist/adapter.d.ts +15 -0
  2. package/dist/adapter.d.ts.map +1 -0
  3. package/dist/adapter.js +56 -0
  4. package/dist/adapter.js.map +1 -0
  5. package/dist/client/S3ClientUploadHandler.d.ts.map +1 -1
  6. package/dist/client/S3ClientUploadHandler.js +4 -3
  7. package/dist/client/S3ClientUploadHandler.js.map +1 -1
  8. package/dist/deleteFile.d.ts +12 -0
  9. package/dist/deleteFile.d.ts.map +1 -0
  10. package/dist/deleteFile.js +15 -0
  11. package/dist/deleteFile.js.map +1 -0
  12. package/dist/generateSignedURL.d.ts +2 -1
  13. package/dist/generateSignedURL.d.ts.map +1 -1
  14. package/dist/generateSignedURL.js +11 -5
  15. package/dist/generateSignedURL.js.map +1 -1
  16. package/dist/generateURL.d.ts +7 -4
  17. package/dist/generateURL.d.ts.map +1 -1
  18. package/dist/generateURL.js +11 -5
  19. package/dist/generateURL.js.map +1 -1
  20. package/dist/getFile.d.ts +27 -0
  21. package/dist/getFile.d.ts.map +1 -0
  22. package/dist/getFile.js +174 -0
  23. package/dist/getFile.js.map +1 -0
  24. package/dist/index.d.ts +15 -1
  25. package/dist/index.d.ts.map +1 -1
  26. package/dist/index.js +25 -43
  27. package/dist/index.js.map +1 -1
  28. package/dist/uploadFile.d.ts +16 -0
  29. package/dist/uploadFile.d.ts.map +1 -0
  30. package/dist/uploadFile.js +39 -0
  31. package/dist/uploadFile.js.map +1 -0
  32. package/package.json +4 -4
  33. package/dist/handleDelete.d.ts +0 -9
  34. package/dist/handleDelete.d.ts.map +0 -1
  35. package/dist/handleDelete.js +0 -11
  36. package/dist/handleDelete.js.map +0 -1
  37. package/dist/handleUpload.d.ts +0 -13
  38. package/dist/handleUpload.d.ts.map +0 -1
  39. package/dist/handleUpload.js +0 -37
  40. package/dist/handleUpload.js.map +0 -1
  41. package/dist/staticHandler.d.ts +0 -21
  42. package/dist/staticHandler.d.ts.map +0 -1
  43. package/dist/staticHandler.js +0 -172
  44. package/dist/staticHandler.js.map +0 -1
package/dist/index.js CHANGED
@@ -1,11 +1,8 @@
1
1
  import * as AWS from '@aws-sdk/client-s3';
2
2
  import { cloudStoragePlugin } from '@payloadcms/plugin-cloud-storage';
3
3
  import { initClientUploads } from '@payloadcms/plugin-cloud-storage/utilities';
4
+ import { createS3Adapter } from './adapter.js';
4
5
  import { getGenerateSignedURLHandler } from './generateSignedURL.js';
5
- import { getGenerateURL } from './generateURL.js';
6
- import { getHandleDelete } from './handleDelete.js';
7
- import { getHandleUpload } from './handleUpload.js';
8
- import { getHandler } from './staticHandler.js';
9
6
  const s3Clients = new Map();
10
7
  const defaultRequestHandlerOpts = {
11
8
  httpAgent: {
@@ -40,7 +37,8 @@ export const s3Storage = (s3StorageOptions)=>(incomingConfig)=>{
40
37
  acl: s3StorageOptions.acl,
41
38
  bucket: s3StorageOptions.bucket,
42
39
  collections: s3StorageOptions.collections,
43
- getStorageClient
40
+ getStorageClient,
41
+ useCompositePrefixes: s3StorageOptions.useCompositePrefixes
44
42
  }),
45
43
  serverHandlerPath: '/storage-s3-generate-signed-url'
46
44
  });
@@ -58,18 +56,35 @@ export const s3Storage = (s3StorageOptions)=>(incomingConfig)=>{
58
56
  return cloudStoragePlugin({
59
57
  alwaysInsertFields: true,
60
58
  collections: collectionsWithoutAdapter,
61
- enabled: false
59
+ enabled: false,
60
+ useCompositePrefixes: s3StorageOptions.useCompositePrefixes
62
61
  })(incomingConfig);
63
62
  }
64
63
  return incomingConfig;
65
64
  }
66
- const adapter = s3StorageInternal(getStorageClient, s3StorageOptions);
65
+ // Determine signedDownloads for this collection
66
+ const resolveSignedDownloads = (slug)=>{
67
+ const collectionStorageConfig = s3StorageOptions.collections[slug];
68
+ let signedDownloads = typeof collectionStorageConfig === 'object' ? collectionStorageConfig.signedDownloads ?? false : null;
69
+ if (signedDownloads === null) {
70
+ signedDownloads = s3StorageOptions.signedDownloads ?? false;
71
+ }
72
+ return signedDownloads;
73
+ };
67
74
  // Add adapter to each collection option object
68
75
  const collectionsWithAdapter = Object.entries(s3StorageOptions.collections).reduce((acc, [slug, collOptions])=>({
69
76
  ...acc,
70
77
  [slug]: {
71
78
  ...collOptions === true ? {} : collOptions,
72
- adapter
79
+ adapter: createS3Adapter({
80
+ acl: s3StorageOptions.acl,
81
+ bucket: s3StorageOptions.bucket,
82
+ clientUploads: s3StorageOptions.clientUploads,
83
+ config: s3StorageOptions.config,
84
+ getStorageClient,
85
+ signedDownloads: resolveSignedDownloads(slug),
86
+ useCompositePrefixes: s3StorageOptions.useCompositePrefixes
87
+ })
73
88
  }
74
89
  }), {});
75
90
  // Set disableLocalStorage: true for collections specified in the plugin options
@@ -90,42 +105,9 @@ export const s3Storage = (s3StorageOptions)=>(incomingConfig)=>{
90
105
  };
91
106
  return cloudStoragePlugin({
92
107
  alwaysInsertFields: s3StorageOptions.alwaysInsertFields,
93
- collections: collectionsWithAdapter
108
+ collections: collectionsWithAdapter,
109
+ useCompositePrefixes: s3StorageOptions.useCompositePrefixes
94
110
  })(config);
95
111
  };
96
- function s3StorageInternal(getStorageClient, { acl, bucket, clientUploads, collections, config = {}, signedDownloads: topLevelSignedDownloads }) {
97
- return ({ collection, prefix })=>{
98
- const collectionStorageConfig = collections[collection.slug];
99
- let signedDownloads = typeof collectionStorageConfig === 'object' ? collectionStorageConfig.signedDownloads ?? false : null;
100
- if (signedDownloads === null) {
101
- signedDownloads = topLevelSignedDownloads ?? null;
102
- }
103
- return {
104
- name: 's3',
105
- clientUploads,
106
- generateURL: getGenerateURL({
107
- bucket,
108
- config
109
- }),
110
- handleDelete: getHandleDelete({
111
- bucket,
112
- getStorageClient
113
- }),
114
- handleUpload: getHandleUpload({
115
- acl,
116
- bucket,
117
- collection,
118
- getStorageClient,
119
- prefix
120
- }),
121
- staticHandler: getHandler({
122
- bucket,
123
- collection,
124
- getStorageClient,
125
- signedDownloads: signedDownloads ?? false
126
- })
127
- };
128
- };
129
- }
130
112
 
131
113
  //# sourceMappingURL=index.js.map
package/dist/index.js.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"sources":["../src/index.ts"],"sourcesContent":["import type {\n Adapter,\n ClientUploadsConfig,\n PluginOptions as CloudStoragePluginOptions,\n CollectionOptions,\n GeneratedAdapter,\n} from '@payloadcms/plugin-cloud-storage/types'\nimport type { NodeHttpHandlerOptions } from '@smithy/node-http-handler'\nimport type { Config, Plugin, UploadCollectionSlug } from 'payload'\n\nimport * as AWS from '@aws-sdk/client-s3'\nimport { cloudStoragePlugin } from '@payloadcms/plugin-cloud-storage'\nimport { initClientUploads } from '@payloadcms/plugin-cloud-storage/utilities'\n\nimport type { SignedDownloadsConfig } from './staticHandler.js'\n\nimport { getGenerateSignedURLHandler } from './generateSignedURL.js'\nimport { getGenerateURL } from './generateURL.js'\nimport { getHandleDelete } from './handleDelete.js'\nimport { getHandleUpload } from './handleUpload.js'\nimport { getHandler } from './staticHandler.js'\n\nexport type S3StorageOptions = {\n /**\n * Access control list for uploaded files.\n */\n acl?: 'private' | 'public-read'\n\n /**\n * When enabled, fields (like the prefix field) will always be inserted into\n * the collection schema regardless of whether the plugin is enabled. This\n * ensures a consistent schema across all environments.\n *\n * This will be enabled by default in Payload v4.\n *\n * @default false\n */\n alwaysInsertFields?: boolean\n\n /**\n * Bucket name to upload files to.\n *\n * Must follow [AWS S3 bucket naming conventions](https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html).\n */\n\n bucket: string\n\n /**\n * Optional cache key to identify the S3 storage client instance.\n * If not provided, a default key will be used.\n *\n * @default `s3:containerName`\n */\n clientCacheKey?: string\n\n /**\n * Do uploads directly on the client to bypass limits on Vercel. 
You must allow CORS PUT method for the bucket to your website.\n */\n clientUploads?: ClientUploadsConfig\n /**\n * Collection options to apply the S3 adapter to.\n */\n collections: Partial<\n Record<\n UploadCollectionSlug,\n | ({\n signedDownloads?: SignedDownloadsConfig\n } & Omit<CollectionOptions, 'adapter'>)\n | true\n >\n >\n /**\n * AWS S3 client configuration. Highly dependent on your AWS setup.\n *\n * [AWS.S3ClientConfig Docs](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/clients/client-s3/interfaces/s3clientconfig.html)\n */\n config: AWS.S3ClientConfig\n\n /**\n * Whether or not to disable local storage\n *\n * @default true\n */\n disableLocalStorage?: boolean\n\n /**\n * Whether or not to enable the plugin\n *\n * Default: true\n */\n enabled?: boolean\n /**\n * Use pre-signed URLs for files downloading. Can be overriden per-collection.\n */\n signedDownloads?: SignedDownloadsConfig\n}\n\ntype S3StoragePlugin = (storageS3Args: S3StorageOptions) => Plugin\n\nconst s3Clients = new Map<string, AWS.S3>()\n\nconst defaultRequestHandlerOpts: NodeHttpHandlerOptions = {\n httpAgent: {\n keepAlive: true,\n maxSockets: 100,\n },\n httpsAgent: {\n keepAlive: true,\n maxSockets: 100,\n },\n}\n\nexport const s3Storage: S3StoragePlugin =\n (s3StorageOptions: S3StorageOptions) =>\n (incomingConfig: Config): Config => {\n const cacheKey = s3StorageOptions.clientCacheKey || `s3:${s3StorageOptions.bucket}`\n\n const getStorageClient: () => AWS.S3 = () => {\n if (s3Clients.has(cacheKey)) {\n return s3Clients.get(cacheKey)!\n }\n\n s3Clients.set(\n cacheKey,\n new AWS.S3({\n requestHandler: defaultRequestHandlerOpts,\n ...(s3StorageOptions.config ?? 
{}),\n }),\n )\n\n return s3Clients.get(cacheKey)!\n }\n\n const isPluginDisabled = s3StorageOptions.enabled === false\n\n initClientUploads({\n clientHandler: '@payloadcms/storage-s3/client#S3ClientUploadHandler',\n collections: s3StorageOptions.collections,\n config: incomingConfig,\n enabled: !isPluginDisabled && Boolean(s3StorageOptions.clientUploads),\n serverHandler: getGenerateSignedURLHandler({\n access:\n typeof s3StorageOptions.clientUploads === 'object'\n ? s3StorageOptions.clientUploads.access\n : undefined,\n acl: s3StorageOptions.acl,\n bucket: s3StorageOptions.bucket,\n collections: s3StorageOptions.collections,\n getStorageClient,\n }),\n serverHandlerPath: '/storage-s3-generate-signed-url',\n })\n\n if (isPluginDisabled) {\n // If alwaysInsertFields is true, still call cloudStoragePlugin to insert fields\n if (s3StorageOptions.alwaysInsertFields) {\n // Build collections with adapter: null since plugin is disabled\n const collectionsWithoutAdapter: CloudStoragePluginOptions['collections'] = Object.entries(\n s3StorageOptions.collections,\n ).reduce(\n (acc, [slug, collOptions]) => ({\n ...acc,\n [slug]: {\n ...(collOptions === true ? {} : collOptions),\n adapter: null,\n },\n }),\n {} as Record<string, CollectionOptions>,\n )\n\n return cloudStoragePlugin({\n alwaysInsertFields: true,\n collections: collectionsWithoutAdapter,\n enabled: false,\n })(incomingConfig)\n }\n\n return incomingConfig\n }\n\n const adapter = s3StorageInternal(getStorageClient, s3StorageOptions)\n\n // Add adapter to each collection option object\n const collectionsWithAdapter: CloudStoragePluginOptions['collections'] = Object.entries(\n s3StorageOptions.collections,\n ).reduce(\n (acc, [slug, collOptions]) => ({\n ...acc,\n [slug]: {\n ...(collOptions === true ? 
{} : collOptions),\n adapter,\n },\n }),\n {} as Record<string, CollectionOptions>,\n )\n\n // Set disableLocalStorage: true for collections specified in the plugin options\n const config = {\n ...incomingConfig,\n collections: (incomingConfig.collections || []).map((collection) => {\n if (!collectionsWithAdapter[collection.slug]) {\n return collection\n }\n\n return {\n ...collection,\n upload: {\n ...(typeof collection.upload === 'object' ? collection.upload : {}),\n disableLocalStorage: true,\n },\n }\n }),\n }\n\n return cloudStoragePlugin({\n alwaysInsertFields: s3StorageOptions.alwaysInsertFields,\n collections: collectionsWithAdapter,\n })(config)\n }\n\nfunction s3StorageInternal(\n getStorageClient: () => AWS.S3,\n {\n acl,\n bucket,\n clientUploads,\n collections,\n config = {},\n signedDownloads: topLevelSignedDownloads,\n }: S3StorageOptions,\n): Adapter {\n return ({ collection, prefix }): GeneratedAdapter => {\n const collectionStorageConfig = collections[collection.slug]\n\n let signedDownloads: null | SignedDownloadsConfig =\n typeof collectionStorageConfig === 'object'\n ? (collectionStorageConfig.signedDownloads ?? false)\n : null\n\n if (signedDownloads === null) {\n signedDownloads = topLevelSignedDownloads ?? null\n }\n\n return {\n name: 's3',\n clientUploads,\n generateURL: getGenerateURL({ bucket, config }),\n handleDelete: getHandleDelete({ bucket, getStorageClient }),\n handleUpload: getHandleUpload({\n acl,\n bucket,\n collection,\n getStorageClient,\n prefix,\n }),\n staticHandler: getHandler({\n bucket,\n collection,\n getStorageClient,\n signedDownloads: signedDownloads ?? 
false,\n }),\n }\n }\n}\n"],"names":["AWS","cloudStoragePlugin","initClientUploads","getGenerateSignedURLHandler","getGenerateURL","getHandleDelete","getHandleUpload","getHandler","s3Clients","Map","defaultRequestHandlerOpts","httpAgent","keepAlive","maxSockets","httpsAgent","s3Storage","s3StorageOptions","incomingConfig","cacheKey","clientCacheKey","bucket","getStorageClient","has","get","set","S3","requestHandler","config","isPluginDisabled","enabled","clientHandler","collections","Boolean","clientUploads","serverHandler","access","undefined","acl","serverHandlerPath","alwaysInsertFields","collectionsWithoutAdapter","Object","entries","reduce","acc","slug","collOptions","adapter","s3StorageInternal","collectionsWithAdapter","map","collection","upload","disableLocalStorage","signedDownloads","topLevelSignedDownloads","prefix","collectionStorageConfig","name","generateURL","handleDelete","handleUpload","staticHandler"],"mappings":"AAUA,YAAYA,SAAS,qBAAoB;AACzC,SAASC,kBAAkB,QAAQ,mCAAkC;AACrE,SAASC,iBAAiB,QAAQ,6CAA4C;AAI9E,SAASC,2BAA2B,QAAQ,yBAAwB;AACpE,SAASC,cAAc,QAAQ,mBAAkB;AACjD,SAASC,eAAe,QAAQ,oBAAmB;AACnD,SAASC,eAAe,QAAQ,oBAAmB;AACnD,SAASC,UAAU,QAAQ,qBAAoB;AA+E/C,MAAMC,YAAY,IAAIC;AAEtB,MAAMC,4BAAoD;IACxDC,WAAW;QACTC,WAAW;QACXC,YAAY;IACd;IACAC,YAAY;QACVF,WAAW;QACXC,YAAY;IACd;AACF;AAEA,OAAO,MAAME,YACX,CAACC,mBACD,CAACC;QACC,MAAMC,WAAWF,iBAAiBG,cAAc,IAAI,CAAC,GAAG,EAAEH,iBAAiBI,MAAM,EAAE;QAEnF,MAAMC,mBAAiC;YACrC,IAAIb,UAAUc,GAAG,CAACJ,WAAW;gBAC3B,OAAOV,UAAUe,GAAG,CAACL;YACvB;YAEAV,UAAUgB,GAAG,CACXN,UACA,IAAIlB,IAAIyB,EAAE,CAAC;gBACTC,gBAAgBhB;gBAChB,GAAIM,iBAAiBW,MAAM,IAAI,CAAC,CAAC;YACnC;YAGF,OAAOnB,UAAUe,GAAG,CAACL;QACvB;QAEA,MAAMU,mBAAmBZ,iBAAiBa,OAAO,KAAK;QAEtD3B,kBAAkB;YAChB4B,eAAe;YACfC,aAAaf,iBAAiBe,WAAW;YACzCJ,QAAQV;YACRY,SAAS,CAACD,oBAAoBI,QAAQhB,iBAAiBiB,aAAa;YACpEC,eAAe/B,4BAA4B;gBACzCgC,QACE,OAAOnB,iBAAiBiB,aAAa,KAAK,WACtCjB,iBAAiBiB,aAAa,CAACE,MAAM,GACrCC;gBACNC,KAAKrB,iBAAiBqB,GAAG;gBACzBjB,QAAQJ,iBAAiBI,MAAM;gBAC/BW,aAAaf,iBAAiBe,WAAW;gBACzCV;YACF;YACA
iB,mBAAmB;QACrB;QAEA,IAAIV,kBAAkB;YACpB,gFAAgF;YAChF,IAAIZ,iBAAiBuB,kBAAkB,EAAE;gBACvC,gEAAgE;gBAChE,MAAMC,4BAAsEC,OAAOC,OAAO,CACxF1B,iBAAiBe,WAAW,EAC5BY,MAAM,CACN,CAACC,KAAK,CAACC,MAAMC,YAAY,GAAM,CAAA;wBAC7B,GAAGF,GAAG;wBACN,CAACC,KAAK,EAAE;4BACN,GAAIC,gBAAgB,OAAO,CAAC,IAAIA,WAAW;4BAC3CC,SAAS;wBACX;oBACF,CAAA,GACA,CAAC;gBAGH,OAAO9C,mBAAmB;oBACxBsC,oBAAoB;oBACpBR,aAAaS;oBACbX,SAAS;gBACX,GAAGZ;YACL;YAEA,OAAOA;QACT;QAEA,MAAM8B,UAAUC,kBAAkB3B,kBAAkBL;QAEpD,+CAA+C;QAC/C,MAAMiC,yBAAmER,OAAOC,OAAO,CACrF1B,iBAAiBe,WAAW,EAC5BY,MAAM,CACN,CAACC,KAAK,CAACC,MAAMC,YAAY,GAAM,CAAA;gBAC7B,GAAGF,GAAG;gBACN,CAACC,KAAK,EAAE;oBACN,GAAIC,gBAAgB,OAAO,CAAC,IAAIA,WAAW;oBAC3CC;gBACF;YACF,CAAA,GACA,CAAC;QAGH,gFAAgF;QAChF,MAAMpB,SAAS;YACb,GAAGV,cAAc;YACjBc,aAAa,AAACd,CAAAA,eAAec,WAAW,IAAI,EAAE,AAAD,EAAGmB,GAAG,CAAC,CAACC;gBACnD,IAAI,CAACF,sBAAsB,CAACE,WAAWN,IAAI,CAAC,EAAE;oBAC5C,OAAOM;gBACT;gBAEA,OAAO;oBACL,GAAGA,UAAU;oBACbC,QAAQ;wBACN,GAAI,OAAOD,WAAWC,MAAM,KAAK,WAAWD,WAAWC,MAAM,GAAG,CAAC,CAAC;wBAClEC,qBAAqB;oBACvB;gBACF;YACF;QACF;QAEA,OAAOpD,mBAAmB;YACxBsC,oBAAoBvB,iBAAiBuB,kBAAkB;YACvDR,aAAakB;QACf,GAAGtB;IACL,EAAC;AAEH,SAASqB,kBACP3B,gBAA8B,EAC9B,EACEgB,GAAG,EACHjB,MAAM,EACNa,aAAa,EACbF,WAAW,EACXJ,SAAS,CAAC,CAAC,EACX2B,iBAAiBC,uBAAuB,EACvB;IAEnB,OAAO,CAAC,EAAEJ,UAAU,EAAEK,MAAM,EAAE;QAC5B,MAAMC,0BAA0B1B,WAAW,CAACoB,WAAWN,IAAI,CAAC;QAE5D,IAAIS,kBACF,OAAOG,4BAA4B,WAC9BA,wBAAwBH,eAAe,IAAI,QAC5C;QAEN,IAAIA,oBAAoB,MAAM;YAC5BA,kBAAkBC,2BAA2B;QAC/C;QAEA,OAAO;YACLG,MAAM;YACNzB;YACA0B,aAAavD,eAAe;gBAAEgB;gBAAQO;YAAO;YAC7CiC,cAAcvD,gBAAgB;gBAAEe;gBAAQC;YAAiB;YACzDwC,cAAcvD,gBAAgB;gBAC5B+B;gBACAjB;gBACA+B;gBACA9B;gBACAmC;YACF;YACAM,eAAevD,WAAW;gBACxBa;gBACA+B;gBACA9B;gBACAiC,iBAAiBA,mBAAmB;YACtC;QACF;IACF;AACF"}
1
+ {"version":3,"sources":["../src/index.ts"],"sourcesContent":["import type {\n ClientUploadsConfig,\n PluginOptions as CloudStoragePluginOptions,\n CollectionOptions,\n} from '@payloadcms/plugin-cloud-storage/types'\nimport type { NodeHttpHandlerOptions } from '@smithy/node-http-handler'\nimport type { Config, Plugin, UploadCollectionSlug } from 'payload'\n\nimport * as AWS from '@aws-sdk/client-s3'\nimport { cloudStoragePlugin } from '@payloadcms/plugin-cloud-storage'\nimport { initClientUploads } from '@payloadcms/plugin-cloud-storage/utilities'\n\nimport type { SignedDownloadsConfig } from './getFile.js'\n\nimport { createS3Adapter } from './adapter.js'\nimport { getGenerateSignedURLHandler } from './generateSignedURL.js'\n\nexport type S3StorageOptions = {\n /**\n * Access control list for uploaded files.\n */\n acl?: 'private' | 'public-read'\n\n /**\n * When enabled, fields (like the prefix field) will always be inserted into\n * the collection schema regardless of whether the plugin is enabled. This\n * ensures a consistent schema across all environments.\n *\n * This will be enabled by default in Payload v4.\n *\n * @default false\n */\n alwaysInsertFields?: boolean\n\n /**\n * Bucket name to upload files to.\n *\n * Must follow [AWS S3 bucket naming conventions](https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html).\n */\n\n bucket: string\n\n /**\n * Optional cache key to identify the S3 storage client instance.\n * If not provided, a default key will be used.\n *\n * @default `s3:containerName`\n */\n clientCacheKey?: string\n\n /**\n * Do uploads directly on the client to bypass limits on Vercel. 
You must allow CORS PUT method for the bucket to your website.\n */\n clientUploads?: ClientUploadsConfig\n /**\n * Collection options to apply the S3 adapter to.\n */\n collections: Partial<\n Record<\n UploadCollectionSlug,\n | ({\n signedDownloads?: SignedDownloadsConfig\n } & Omit<CollectionOptions, 'adapter'>)\n | true\n >\n >\n /**\n * AWS S3 client configuration. Highly dependent on your AWS setup.\n *\n * [AWS.S3ClientConfig Docs](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/clients/client-s3/interfaces/s3clientconfig.html)\n */\n config: AWS.S3ClientConfig\n\n /**\n * Whether or not to disable local storage\n *\n * @default true\n */\n disableLocalStorage?: boolean\n\n /**\n * Whether or not to enable the plugin\n *\n * Default: true\n */\n enabled?: boolean\n /**\n * Use pre-signed URLs for files downloading. Can be overriden per-collection.\n */\n signedDownloads?: SignedDownloadsConfig\n /**\n * When true, the collection-level prefix and document-level prefix are combined\n * (compositional). 
When false (default), document prefix overrides collection\n * prefix entirely.\n *\n * Example:\n * - collection prefix: `collection-prefix/`\n * - document prefix: `document-prefix/`\n * - resulting prefix with useCompositePrefixes=true: `collection-prefix/document-prefix/`\n * - resulting prefix with useCompositePrefixes=false: `document-prefix/`\n *\n * @default false\n */\n useCompositePrefixes?: boolean\n}\n\ntype S3StoragePlugin = (storageS3Args: S3StorageOptions) => Plugin\n\nconst s3Clients = new Map<string, AWS.S3>()\n\nconst defaultRequestHandlerOpts: NodeHttpHandlerOptions = {\n httpAgent: {\n keepAlive: true,\n maxSockets: 100,\n },\n httpsAgent: {\n keepAlive: true,\n maxSockets: 100,\n },\n}\n\nexport const s3Storage: S3StoragePlugin =\n (s3StorageOptions: S3StorageOptions) =>\n (incomingConfig: Config): Config => {\n const cacheKey = s3StorageOptions.clientCacheKey || `s3:${s3StorageOptions.bucket}`\n\n const getStorageClient: () => AWS.S3 = () => {\n if (s3Clients.has(cacheKey)) {\n return s3Clients.get(cacheKey)!\n }\n\n s3Clients.set(\n cacheKey,\n new AWS.S3({\n requestHandler: defaultRequestHandlerOpts,\n ...(s3StorageOptions.config ?? {}),\n }),\n )\n\n return s3Clients.get(cacheKey)!\n }\n\n const isPluginDisabled = s3StorageOptions.enabled === false\n\n initClientUploads({\n clientHandler: '@payloadcms/storage-s3/client#S3ClientUploadHandler',\n collections: s3StorageOptions.collections,\n config: incomingConfig,\n enabled: !isPluginDisabled && Boolean(s3StorageOptions.clientUploads),\n serverHandler: getGenerateSignedURLHandler({\n access:\n typeof s3StorageOptions.clientUploads === 'object'\n ? 
s3StorageOptions.clientUploads.access\n : undefined,\n acl: s3StorageOptions.acl,\n bucket: s3StorageOptions.bucket,\n collections: s3StorageOptions.collections,\n getStorageClient,\n useCompositePrefixes: s3StorageOptions.useCompositePrefixes,\n }),\n serverHandlerPath: '/storage-s3-generate-signed-url',\n })\n\n if (isPluginDisabled) {\n // If alwaysInsertFields is true, still call cloudStoragePlugin to insert fields\n if (s3StorageOptions.alwaysInsertFields) {\n // Build collections with adapter: null since plugin is disabled\n const collectionsWithoutAdapter: CloudStoragePluginOptions['collections'] = Object.entries(\n s3StorageOptions.collections,\n ).reduce(\n (acc, [slug, collOptions]) => ({\n ...acc,\n [slug]: {\n ...(collOptions === true ? {} : collOptions),\n adapter: null,\n },\n }),\n {} as Record<string, CollectionOptions>,\n )\n\n return cloudStoragePlugin({\n alwaysInsertFields: true,\n collections: collectionsWithoutAdapter,\n enabled: false,\n useCompositePrefixes: s3StorageOptions.useCompositePrefixes,\n })(incomingConfig)\n }\n\n return incomingConfig\n }\n\n // Determine signedDownloads for this collection\n const resolveSignedDownloads = (slug: string): SignedDownloadsConfig => {\n const collectionStorageConfig = s3StorageOptions.collections[slug]\n\n let signedDownloads: null | SignedDownloadsConfig =\n typeof collectionStorageConfig === 'object'\n ? (collectionStorageConfig.signedDownloads ?? false)\n : null\n\n if (signedDownloads === null) {\n signedDownloads = s3StorageOptions.signedDownloads ?? false\n }\n\n return signedDownloads\n }\n\n // Add adapter to each collection option object\n const collectionsWithAdapter: CloudStoragePluginOptions['collections'] = Object.entries(\n s3StorageOptions.collections,\n ).reduce(\n (acc, [slug, collOptions]) => ({\n ...acc,\n [slug]: {\n ...(collOptions === true ? 
{} : collOptions),\n adapter: createS3Adapter({\n acl: s3StorageOptions.acl,\n bucket: s3StorageOptions.bucket,\n clientUploads: s3StorageOptions.clientUploads,\n config: s3StorageOptions.config,\n getStorageClient,\n signedDownloads: resolveSignedDownloads(slug),\n useCompositePrefixes: s3StorageOptions.useCompositePrefixes,\n }),\n },\n }),\n {} as Record<string, CollectionOptions>,\n )\n\n // Set disableLocalStorage: true for collections specified in the plugin options\n const config = {\n ...incomingConfig,\n collections: (incomingConfig.collections || []).map((collection) => {\n if (!collectionsWithAdapter[collection.slug]) {\n return collection\n }\n\n return {\n ...collection,\n upload: {\n ...(typeof collection.upload === 'object' ? collection.upload : {}),\n disableLocalStorage: true,\n },\n }\n }),\n }\n\n return cloudStoragePlugin({\n alwaysInsertFields: s3StorageOptions.alwaysInsertFields,\n collections: collectionsWithAdapter,\n useCompositePrefixes: s3StorageOptions.useCompositePrefixes,\n })(config)\n 
}\n"],"names":["AWS","cloudStoragePlugin","initClientUploads","createS3Adapter","getGenerateSignedURLHandler","s3Clients","Map","defaultRequestHandlerOpts","httpAgent","keepAlive","maxSockets","httpsAgent","s3Storage","s3StorageOptions","incomingConfig","cacheKey","clientCacheKey","bucket","getStorageClient","has","get","set","S3","requestHandler","config","isPluginDisabled","enabled","clientHandler","collections","Boolean","clientUploads","serverHandler","access","undefined","acl","useCompositePrefixes","serverHandlerPath","alwaysInsertFields","collectionsWithoutAdapter","Object","entries","reduce","acc","slug","collOptions","adapter","resolveSignedDownloads","collectionStorageConfig","signedDownloads","collectionsWithAdapter","map","collection","upload","disableLocalStorage"],"mappings":"AAQA,YAAYA,SAAS,qBAAoB;AACzC,SAASC,kBAAkB,QAAQ,mCAAkC;AACrE,SAASC,iBAAiB,QAAQ,6CAA4C;AAI9E,SAASC,eAAe,QAAQ,eAAc;AAC9C,SAASC,2BAA2B,QAAQ,yBAAwB;AA6FpE,MAAMC,YAAY,IAAIC;AAEtB,MAAMC,4BAAoD;IACxDC,WAAW;QACTC,WAAW;QACXC,YAAY;IACd;IACAC,YAAY;QACVF,WAAW;QACXC,YAAY;IACd;AACF;AAEA,OAAO,MAAME,YACX,CAACC,mBACD,CAACC;QACC,MAAMC,WAAWF,iBAAiBG,cAAc,IAAI,CAAC,GAAG,EAAEH,iBAAiBI,MAAM,EAAE;QAEnF,MAAMC,mBAAiC;YACrC,IAAIb,UAAUc,GAAG,CAACJ,WAAW;gBAC3B,OAAOV,UAAUe,GAAG,CAACL;YACvB;YAEAV,UAAUgB,GAAG,CACXN,UACA,IAAIf,IAAIsB,EAAE,CAAC;gBACTC,gBAAgBhB;gBAChB,GAAIM,iBAAiBW,MAAM,IAAI,CAAC,CAAC;YACnC;YAGF,OAAOnB,UAAUe,GAAG,CAACL;QACvB;QAEA,MAAMU,mBAAmBZ,iBAAiBa,OAAO,KAAK;QAEtDxB,kBAAkB;YAChByB,eAAe;YACfC,aAAaf,iBAAiBe,WAAW;YACzCJ,QAAQV;YACRY,SAAS,CAACD,oBAAoBI,QAAQhB,iBAAiBiB,aAAa;YACpEC,eAAe3B,4BAA4B;gBACzC4B,QACE,OAAOnB,iBAAiBiB,aAAa,KAAK,WACtCjB,iBAAiBiB,aAAa,CAACE,MAAM,GACrCC;gBACNC,KAAKrB,iBAAiBqB,GAAG;gBACzBjB,QAAQJ,iBAAiBI,MAAM;gBAC/BW,aAAaf,iBAAiBe,WAAW;gBACzCV;gBACAiB,sBAAsBtB,iBAAiBsB,oBAAoB;YAC7D;YACAC,mBAAmB;QACrB;QAEA,IAAIX,kBAAkB;YACpB,gFAAgF;YAChF,IAAIZ,iBAAiBwB,kBAAkB,EAAE;gBACvC,gEAAgE;gBAChE,MAAMC,4BAAsEC,OAAOC,OAAO,CACxF3B,iBAAiBe,WAAW,EAC5Ba,MAAM,CACN,CAACC,KAAK,CAACC,MAAMC,YAAY,GAAM,CAAA
;wBAC7B,GAAGF,GAAG;wBACN,CAACC,KAAK,EAAE;4BACN,GAAIC,gBAAgB,OAAO,CAAC,IAAIA,WAAW;4BAC3CC,SAAS;wBACX;oBACF,CAAA,GACA,CAAC;gBAGH,OAAO5C,mBAAmB;oBACxBoC,oBAAoB;oBACpBT,aAAaU;oBACbZ,SAAS;oBACTS,sBAAsBtB,iBAAiBsB,oBAAoB;gBAC7D,GAAGrB;YACL;YAEA,OAAOA;QACT;QAEA,gDAAgD;QAChD,MAAMgC,yBAAyB,CAACH;YAC9B,MAAMI,0BAA0BlC,iBAAiBe,WAAW,CAACe,KAAK;YAElE,IAAIK,kBACF,OAAOD,4BAA4B,WAC9BA,wBAAwBC,eAAe,IAAI,QAC5C;YAEN,IAAIA,oBAAoB,MAAM;gBAC5BA,kBAAkBnC,iBAAiBmC,eAAe,IAAI;YACxD;YAEA,OAAOA;QACT;QAEA,+CAA+C;QAC/C,MAAMC,yBAAmEV,OAAOC,OAAO,CACrF3B,iBAAiBe,WAAW,EAC5Ba,MAAM,CACN,CAACC,KAAK,CAACC,MAAMC,YAAY,GAAM,CAAA;gBAC7B,GAAGF,GAAG;gBACN,CAACC,KAAK,EAAE;oBACN,GAAIC,gBAAgB,OAAO,CAAC,IAAIA,WAAW;oBAC3CC,SAAS1C,gBAAgB;wBACvB+B,KAAKrB,iBAAiBqB,GAAG;wBACzBjB,QAAQJ,iBAAiBI,MAAM;wBAC/Ba,eAAejB,iBAAiBiB,aAAa;wBAC7CN,QAAQX,iBAAiBW,MAAM;wBAC/BN;wBACA8B,iBAAiBF,uBAAuBH;wBACxCR,sBAAsBtB,iBAAiBsB,oBAAoB;oBAC7D;gBACF;YACF,CAAA,GACA,CAAC;QAGH,gFAAgF;QAChF,MAAMX,SAAS;YACb,GAAGV,cAAc;YACjBc,aAAa,AAACd,CAAAA,eAAec,WAAW,IAAI,EAAE,AAAD,EAAGsB,GAAG,CAAC,CAACC;gBACnD,IAAI,CAACF,sBAAsB,CAACE,WAAWR,IAAI,CAAC,EAAE;oBAC5C,OAAOQ;gBACT;gBAEA,OAAO;oBACL,GAAGA,UAAU;oBACbC,QAAQ;wBACN,GAAI,OAAOD,WAAWC,MAAM,KAAK,WAAWD,WAAWC,MAAM,GAAG,CAAC,CAAC;wBAClEC,qBAAqB;oBACvB;gBACF;YACF;QACF;QAEA,OAAOpD,mBAAmB;YACxBoC,oBAAoBxB,iBAAiBwB,kBAAkB;YACvDT,aAAaqB;YACbd,sBAAsBtB,iBAAiBsB,oBAAoB;QAC7D,GAAGX;IACL,EAAC"}
@@ -0,0 +1,16 @@
1
+ import type * as AWS from '@aws-sdk/client-s3';
2
+ interface UploadArgs {
3
+ acl?: 'private' | 'public-read';
4
+ bucket: string;
5
+ buffer: Buffer;
6
+ client: AWS.S3;
7
+ collectionPrefix?: string;
8
+ docPrefix?: string;
9
+ filename: string;
10
+ mimeType: string;
11
+ tempFilePath?: string;
12
+ useCompositePrefixes?: boolean;
13
+ }
14
+ export declare function uploadFile({ acl, bucket, buffer, client, collectionPrefix, docPrefix, filename, mimeType, tempFilePath, useCompositePrefixes, }: UploadArgs): Promise<void>;
15
+ export {};
16
+ //# sourceMappingURL=uploadFile.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"uploadFile.d.ts","sourceRoot":"","sources":["../src/uploadFile.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,KAAK,GAAG,MAAM,oBAAoB,CAAA;AAM9C,UAAU,UAAU;IAClB,GAAG,CAAC,EAAE,SAAS,GAAG,aAAa,CAAA;IAC/B,MAAM,EAAE,MAAM,CAAA;IACd,MAAM,EAAE,MAAM,CAAA;IACd,MAAM,EAAE,GAAG,CAAC,EAAE,CAAA;IACd,gBAAgB,CAAC,EAAE,MAAM,CAAA;IACzB,SAAS,CAAC,EAAE,MAAM,CAAA;IAClB,QAAQ,EAAE,MAAM,CAAA;IAChB,QAAQ,EAAE,MAAM,CAAA;IAChB,YAAY,CAAC,EAAE,MAAM,CAAA;IACrB,oBAAoB,CAAC,EAAE,OAAO,CAAA;CAC/B;AAID,wBAAsB,UAAU,CAAC,EAC/B,GAAG,EACH,MAAM,EACN,MAAM,EACN,MAAM,EACN,gBAAqB,EACrB,SAAS,EACT,QAAQ,EACR,QAAQ,EACR,YAAY,EACZ,oBAA4B,GAC7B,EAAE,UAAU,GAAG,OAAO,CAAC,IAAI,CAAC,CAoC5B"}
@@ -0,0 +1,39 @@
1
+ import { Upload } from '@aws-sdk/lib-storage';
2
+ import { getFileKey } from '@payloadcms/plugin-cloud-storage/utilities';
3
+ import fs from 'fs';
4
+ const multipartThreshold = 1024 * 1024 * 50 // 50MB
5
+ ;
6
+ export async function uploadFile({ acl, bucket, buffer, client, collectionPrefix = '', docPrefix, filename, mimeType, tempFilePath, useCompositePrefixes = false }) {
7
+ const fileKey = getFileKey({
8
+ collectionPrefix,
9
+ docPrefix: docPrefix || '',
10
+ filename,
11
+ useCompositePrefixes
12
+ });
13
+ const fileBufferOrStream = tempFilePath ? fs.createReadStream(tempFilePath) : buffer;
14
+ if (buffer.length > 0 && buffer.length < multipartThreshold) {
15
+ await client.putObject({
16
+ ACL: acl,
17
+ Body: fileBufferOrStream,
18
+ Bucket: bucket,
19
+ ContentType: mimeType,
20
+ Key: fileKey
21
+ });
22
+ return;
23
+ }
24
+ const parallelUploadS3 = new Upload({
25
+ client,
26
+ params: {
27
+ ACL: acl,
28
+ Body: fileBufferOrStream,
29
+ Bucket: bucket,
30
+ ContentType: mimeType,
31
+ Key: fileKey
32
+ },
33
+ partSize: multipartThreshold,
34
+ queueSize: 4
35
+ });
36
+ await parallelUploadS3.done();
37
+ }
38
+
39
+ //# sourceMappingURL=uploadFile.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/uploadFile.ts"],"sourcesContent":["import type * as AWS from '@aws-sdk/client-s3'\n\nimport { Upload } from '@aws-sdk/lib-storage'\nimport { getFileKey } from '@payloadcms/plugin-cloud-storage/utilities'\nimport fs from 'fs'\n\ninterface UploadArgs {\n acl?: 'private' | 'public-read'\n bucket: string\n buffer: Buffer\n client: AWS.S3\n collectionPrefix?: string\n docPrefix?: string\n filename: string\n mimeType: string\n tempFilePath?: string\n useCompositePrefixes?: boolean\n}\n\nconst multipartThreshold = 1024 * 1024 * 50 // 50MB\n\nexport async function uploadFile({\n acl,\n bucket,\n buffer,\n client,\n collectionPrefix = '',\n docPrefix,\n filename,\n mimeType,\n tempFilePath,\n useCompositePrefixes = false,\n}: UploadArgs): Promise<void> {\n const fileKey = getFileKey({\n collectionPrefix,\n docPrefix: docPrefix || '',\n filename,\n useCompositePrefixes,\n })\n\n const fileBufferOrStream = tempFilePath ? fs.createReadStream(tempFilePath) : buffer\n\n if (buffer.length > 0 && buffer.length < multipartThreshold) {\n await client.putObject({\n ACL: acl,\n Body: fileBufferOrStream,\n Bucket: bucket,\n ContentType: mimeType,\n Key: fileKey,\n })\n\n return\n }\n\n const parallelUploadS3 = new Upload({\n client,\n params: {\n ACL: acl,\n Body: fileBufferOrStream,\n Bucket: bucket,\n ContentType: mimeType,\n Key: fileKey,\n },\n partSize: multipartThreshold,\n queueSize: 4,\n })\n\n await 
parallelUploadS3.done()\n}\n"],"names":["Upload","getFileKey","fs","multipartThreshold","uploadFile","acl","bucket","buffer","client","collectionPrefix","docPrefix","filename","mimeType","tempFilePath","useCompositePrefixes","fileKey","fileBufferOrStream","createReadStream","length","putObject","ACL","Body","Bucket","ContentType","Key","parallelUploadS3","params","partSize","queueSize","done"],"mappings":"AAEA,SAASA,MAAM,QAAQ,uBAAsB;AAC7C,SAASC,UAAU,QAAQ,6CAA4C;AACvE,OAAOC,QAAQ,KAAI;AAenB,MAAMC,qBAAqB,OAAO,OAAO,GAAG,OAAO;;AAEnD,OAAO,eAAeC,WAAW,EAC/BC,GAAG,EACHC,MAAM,EACNC,MAAM,EACNC,MAAM,EACNC,mBAAmB,EAAE,EACrBC,SAAS,EACTC,QAAQ,EACRC,QAAQ,EACRC,YAAY,EACZC,uBAAuB,KAAK,EACjB;IACX,MAAMC,UAAUd,WAAW;QACzBQ;QACAC,WAAWA,aAAa;QACxBC;QACAG;IACF;IAEA,MAAME,qBAAqBH,eAAeX,GAAGe,gBAAgB,CAACJ,gBAAgBN;IAE9E,IAAIA,OAAOW,MAAM,GAAG,KAAKX,OAAOW,MAAM,GAAGf,oBAAoB;QAC3D,MAAMK,OAAOW,SAAS,CAAC;YACrBC,KAAKf;YACLgB,MAAML;YACNM,QAAQhB;YACRiB,aAAaX;YACbY,KAAKT;QACP;QAEA;IACF;IAEA,MAAMU,mBAAmB,IAAIzB,OAAO;QAClCQ;QACAkB,QAAQ;YACNN,KAAKf;YACLgB,MAAML;YACNM,QAAQhB;YACRiB,aAAaX;YACbY,KAAKT;QACP;QACAY,UAAUxB;QACVyB,WAAW;IACb;IAEA,MAAMH,iBAAiBI,IAAI;AAC7B"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@payloadcms/storage-s3",
3
- "version": "3.82.1",
3
+ "version": "3.83.0-canary.1",
4
4
  "description": "Payload storage adapter for Amazon S3",
5
5
  "homepage": "https://payloadcms.com",
6
6
  "repository": {
@@ -40,14 +40,14 @@
40
40
  "@aws-sdk/client-s3": "^3.614.0",
41
41
  "@aws-sdk/lib-storage": "^3.614.0",
42
42
  "@aws-sdk/s3-request-presigner": "^3.614.0",
43
- "@payloadcms/plugin-cloud-storage": "3.82.1"
43
+ "@payloadcms/plugin-cloud-storage": "3.83.0-canary.1"
44
44
  },
45
45
  "devDependencies": {
46
46
  "@smithy/node-http-handler": "4.0.3",
47
- "payload": "3.82.1"
47
+ "payload": "3.83.0-canary.1"
48
48
  },
49
49
  "peerDependencies": {
50
- "payload": "3.82.1"
50
+ "payload": "3.83.0-canary.1"
51
51
  },
52
52
  "engines": {
53
53
  "node": "^18.20.2 || >=20.9.0"
@@ -1,9 +0,0 @@
1
- import type * as AWS from '@aws-sdk/client-s3';
2
- import type { HandleDelete } from '@payloadcms/plugin-cloud-storage/types';
3
- interface Args {
4
- bucket: string;
5
- getStorageClient: () => AWS.S3;
6
- }
7
- export declare const getHandleDelete: ({ bucket, getStorageClient }: Args) => HandleDelete;
8
- export {};
9
- //# sourceMappingURL=handleDelete.d.ts.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"handleDelete.d.ts","sourceRoot":"","sources":["../src/handleDelete.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,KAAK,GAAG,MAAM,oBAAoB,CAAA;AAC9C,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,wCAAwC,CAAA;AAI1E,UAAU,IAAI;IACZ,MAAM,EAAE,MAAM,CAAA;IACd,gBAAgB,EAAE,MAAM,GAAG,CAAC,EAAE,CAAA;CAC/B;AAED,eAAO,MAAM,eAAe,iCAAkC,IAAI,KAAG,YAOpE,CAAA"}
@@ -1,11 +0,0 @@
1
- import path from 'path';
2
- export const getHandleDelete = ({ bucket, getStorageClient })=>{
3
- return async ({ doc: { prefix = '' }, filename })=>{
4
- await getStorageClient().deleteObject({
5
- Bucket: bucket,
6
- Key: path.posix.join(prefix, filename)
7
- });
8
- };
9
- };
10
-
11
- //# sourceMappingURL=handleDelete.js.map
@@ -1 +0,0 @@
1
- {"version":3,"sources":["../src/handleDelete.ts"],"sourcesContent":["import type * as AWS from '@aws-sdk/client-s3'\nimport type { HandleDelete } from '@payloadcms/plugin-cloud-storage/types'\n\nimport path from 'path'\n\ninterface Args {\n bucket: string\n getStorageClient: () => AWS.S3\n}\n\nexport const getHandleDelete = ({ bucket, getStorageClient }: Args): HandleDelete => {\n return async ({ doc: { prefix = '' }, filename }) => {\n await getStorageClient().deleteObject({\n Bucket: bucket,\n Key: path.posix.join(prefix, filename),\n })\n }\n}\n"],"names":["path","getHandleDelete","bucket","getStorageClient","doc","prefix","filename","deleteObject","Bucket","Key","posix","join"],"mappings":"AAGA,OAAOA,UAAU,OAAM;AAOvB,OAAO,MAAMC,kBAAkB,CAAC,EAAEC,MAAM,EAAEC,gBAAgB,EAAQ;IAChE,OAAO,OAAO,EAAEC,KAAK,EAAEC,SAAS,EAAE,EAAE,EAAEC,QAAQ,EAAE;QAC9C,MAAMH,mBAAmBI,YAAY,CAAC;YACpCC,QAAQN;YACRO,KAAKT,KAAKU,KAAK,CAACC,IAAI,CAACN,QAAQC;QAC/B;IACF;AACF,EAAC"}
@@ -1,13 +0,0 @@
1
- import type * as AWS from '@aws-sdk/client-s3';
2
- import type { HandleUpload } from '@payloadcms/plugin-cloud-storage/types';
3
- import type { CollectionConfig } from 'payload';
4
- interface Args {
5
- acl?: 'private' | 'public-read';
6
- bucket: string;
7
- collection: CollectionConfig;
8
- getStorageClient: () => AWS.S3;
9
- prefix?: string;
10
- }
11
- export declare const getHandleUpload: ({ acl, bucket, getStorageClient, prefix, }: Args) => HandleUpload;
12
- export {};
13
- //# sourceMappingURL=handleUpload.d.ts.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"handleUpload.d.ts","sourceRoot":"","sources":["../src/handleUpload.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,KAAK,GAAG,MAAM,oBAAoB,CAAA;AAC9C,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,wCAAwC,CAAA;AAC1E,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,SAAS,CAAA;AAM/C,UAAU,IAAI;IACZ,GAAG,CAAC,EAAE,SAAS,GAAG,aAAa,CAAA;IAC/B,MAAM,EAAE,MAAM,CAAA;IACd,UAAU,EAAE,gBAAgB,CAAA;IAC5B,gBAAgB,EAAE,MAAM,GAAG,CAAC,EAAE,CAAA;IAC9B,MAAM,CAAC,EAAE,MAAM,CAAA;CAChB;AAID,eAAO,MAAM,eAAe,+CAKzB,IAAI,KAAG,YAqCT,CAAA"}
@@ -1,37 +0,0 @@
1
- import { Upload } from '@aws-sdk/lib-storage';
2
- import fs from 'fs';
3
- import path from 'path';
4
- const multipartThreshold = 1024 * 1024 * 50 // 50MB
5
- ;
6
- export const getHandleUpload = ({ acl, bucket, getStorageClient, prefix = '' })=>{
7
- return async ({ data, file })=>{
8
- const fileKey = path.posix.join(data.prefix || prefix, file.filename);
9
- const fileBufferOrStream = file.tempFilePath ? fs.createReadStream(file.tempFilePath) : file.buffer;
10
- if (file.buffer.length > 0 && file.buffer.length < multipartThreshold) {
11
- await getStorageClient().putObject({
12
- ACL: acl,
13
- Body: fileBufferOrStream,
14
- Bucket: bucket,
15
- ContentType: file.mimeType,
16
- Key: fileKey
17
- });
18
- return data;
19
- }
20
- const parallelUploadS3 = new Upload({
21
- client: getStorageClient(),
22
- params: {
23
- ACL: acl,
24
- Body: fileBufferOrStream,
25
- Bucket: bucket,
26
- ContentType: file.mimeType,
27
- Key: fileKey
28
- },
29
- partSize: multipartThreshold,
30
- queueSize: 4
31
- });
32
- await parallelUploadS3.done();
33
- return data;
34
- };
35
- };
36
-
37
- //# sourceMappingURL=handleUpload.js.map
@@ -1 +0,0 @@
1
- {"version":3,"sources":["../src/handleUpload.ts"],"sourcesContent":["import type * as AWS from '@aws-sdk/client-s3'\nimport type { HandleUpload } from '@payloadcms/plugin-cloud-storage/types'\nimport type { CollectionConfig } from 'payload'\n\nimport { Upload } from '@aws-sdk/lib-storage'\nimport fs from 'fs'\nimport path from 'path'\n\ninterface Args {\n acl?: 'private' | 'public-read'\n bucket: string\n collection: CollectionConfig\n getStorageClient: () => AWS.S3\n prefix?: string\n}\n\nconst multipartThreshold = 1024 * 1024 * 50 // 50MB\n\nexport const getHandleUpload = ({\n acl,\n bucket,\n getStorageClient,\n prefix = '',\n}: Args): HandleUpload => {\n return async ({ data, file }) => {\n const fileKey = path.posix.join(data.prefix || prefix, file.filename)\n\n const fileBufferOrStream = file.tempFilePath\n ? fs.createReadStream(file.tempFilePath)\n : file.buffer\n\n if (file.buffer.length > 0 && file.buffer.length < multipartThreshold) {\n await getStorageClient().putObject({\n ACL: acl,\n Body: fileBufferOrStream,\n Bucket: bucket,\n ContentType: file.mimeType,\n Key: fileKey,\n })\n\n return data\n }\n\n const parallelUploadS3 = new Upload({\n client: getStorageClient(),\n params: {\n ACL: acl,\n Body: fileBufferOrStream,\n Bucket: bucket,\n ContentType: file.mimeType,\n Key: fileKey,\n },\n partSize: multipartThreshold,\n queueSize: 4,\n })\n\n await parallelUploadS3.done()\n\n return data\n 
}\n}\n"],"names":["Upload","fs","path","multipartThreshold","getHandleUpload","acl","bucket","getStorageClient","prefix","data","file","fileKey","posix","join","filename","fileBufferOrStream","tempFilePath","createReadStream","buffer","length","putObject","ACL","Body","Bucket","ContentType","mimeType","Key","parallelUploadS3","client","params","partSize","queueSize","done"],"mappings":"AAIA,SAASA,MAAM,QAAQ,uBAAsB;AAC7C,OAAOC,QAAQ,KAAI;AACnB,OAAOC,UAAU,OAAM;AAUvB,MAAMC,qBAAqB,OAAO,OAAO,GAAG,OAAO;;AAEnD,OAAO,MAAMC,kBAAkB,CAAC,EAC9BC,GAAG,EACHC,MAAM,EACNC,gBAAgB,EAChBC,SAAS,EAAE,EACN;IACL,OAAO,OAAO,EAAEC,IAAI,EAAEC,IAAI,EAAE;QAC1B,MAAMC,UAAUT,KAAKU,KAAK,CAACC,IAAI,CAACJ,KAAKD,MAAM,IAAIA,QAAQE,KAAKI,QAAQ;QAEpE,MAAMC,qBAAqBL,KAAKM,YAAY,GACxCf,GAAGgB,gBAAgB,CAACP,KAAKM,YAAY,IACrCN,KAAKQ,MAAM;QAEf,IAAIR,KAAKQ,MAAM,CAACC,MAAM,GAAG,KAAKT,KAAKQ,MAAM,CAACC,MAAM,GAAGhB,oBAAoB;YACrE,MAAMI,mBAAmBa,SAAS,CAAC;gBACjCC,KAAKhB;gBACLiB,MAAMP;gBACNQ,QAAQjB;gBACRkB,aAAad,KAAKe,QAAQ;gBAC1BC,KAAKf;YACP;YAEA,OAAOF;QACT;QAEA,MAAMkB,mBAAmB,IAAI3B,OAAO;YAClC4B,QAAQrB;YACRsB,QAAQ;gBACNR,KAAKhB;gBACLiB,MAAMP;gBACNQ,QAAQjB;gBACRkB,aAAad,KAAKe,QAAQ;gBAC1BC,KAAKf;YACP;YACAmB,UAAU3B;YACV4B,WAAW;QACb;QAEA,MAAMJ,iBAAiBK,IAAI;QAE3B,OAAOvB;IACT;AACF,EAAC"}
@@ -1,21 +0,0 @@
1
- import type * as AWS from '@aws-sdk/client-s3';
2
- import type { StaticHandler } from '@payloadcms/plugin-cloud-storage/types';
3
- import type { CollectionConfig, PayloadRequest } from 'payload';
4
- export type SignedDownloadsConfig = {
5
- /** @default 7200 */
6
- expiresIn?: number;
7
- shouldUseSignedURL?(args: {
8
- collection: CollectionConfig;
9
- filename: string;
10
- req: PayloadRequest;
11
- }): boolean | Promise<boolean>;
12
- } | boolean;
13
- interface Args {
14
- bucket: string;
15
- collection: CollectionConfig;
16
- getStorageClient: () => AWS.S3;
17
- signedDownloads?: SignedDownloadsConfig;
18
- }
19
- export declare const getHandler: ({ bucket, collection, getStorageClient, signedDownloads, }: Args) => StaticHandler;
20
- export {};
21
- //# sourceMappingURL=staticHandler.d.ts.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"staticHandler.d.ts","sourceRoot":"","sources":["../src/staticHandler.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,KAAK,GAAG,MAAM,oBAAoB,CAAA;AAC9C,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,wCAAwC,CAAA;AAC3E,OAAO,KAAK,EAAE,gBAAgB,EAAE,cAAc,EAAE,MAAM,SAAS,CAAA;AAU/D,MAAM,MAAM,qBAAqB,GAC7B;IACE,oBAAoB;IACpB,SAAS,CAAC,EAAE,MAAM,CAAA;IAClB,kBAAkB,CAAC,CAAC,IAAI,EAAE;QACxB,UAAU,EAAE,gBAAgB,CAAA;QAC5B,QAAQ,EAAE,MAAM,CAAA;QAChB,GAAG,EAAE,cAAc,CAAA;KACpB,GAAG,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC,CAAA;CAC/B,GACD,OAAO,CAAA;AAEX,UAAU,IAAI;IACZ,MAAM,EAAE,MAAM,CAAA;IACd,UAAU,EAAE,gBAAgB,CAAA;IAC5B,gBAAgB,EAAE,MAAM,GAAG,CAAC,EAAE,CAAA;IAC9B,eAAe,CAAC,EAAE,qBAAqB,CAAA;CACxC;AA8BD,eAAO,MAAM,UAAU,+DAKpB,IAAI,KAAG,aAiKT,CAAA"}
@@ -1,172 +0,0 @@
1
- import { GetObjectCommand } from '@aws-sdk/client-s3';
2
- import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
3
- import { getFilePrefix } from '@payloadcms/plugin-cloud-storage/utilities';
4
- import path from 'path';
5
- import { getRangeRequestInfo } from 'payload/internal';
6
- import { sanitizeFilename } from 'payload/shared';
7
- const isNodeReadableStream = (body)=>{
8
- return typeof body === 'object' && body !== null && 'pipe' in body && typeof body.pipe === 'function' && 'destroy' in body && typeof body.destroy === 'function';
9
- };
10
- const abortRequestAndDestroyStream = ({ abortController, object })=>{
11
- try {
12
- abortController.abort();
13
- } catch {
14
- /* noop */ }
15
- if (object?.Body && isNodeReadableStream(object.Body)) {
16
- object.Body.destroy();
17
- }
18
- };
19
- export const getHandler = ({ bucket, collection, getStorageClient, signedDownloads })=>{
20
- return async (req, { headers: incomingHeaders, params: { clientUploadContext, filename, prefix: prefixQueryParam } })=>{
21
- let object = undefined;
22
- let streamed = false;
23
- const abortController = new AbortController();
24
- if (req.signal) {
25
- req.signal.addEventListener('abort', ()=>{
26
- abortRequestAndDestroyStream({
27
- abortController,
28
- object
29
- });
30
- });
31
- }
32
- try {
33
- const prefix = await getFilePrefix({
34
- clientUploadContext,
35
- collection,
36
- filename,
37
- prefixQueryParam,
38
- req
39
- });
40
- const key = path.posix.join(prefix, sanitizeFilename(filename));
41
- if (signedDownloads && !clientUploadContext) {
42
- let useSignedURL = true;
43
- if (typeof signedDownloads === 'object' && typeof signedDownloads.shouldUseSignedURL === 'function') {
44
- useSignedURL = await signedDownloads.shouldUseSignedURL({
45
- collection,
46
- filename,
47
- req
48
- });
49
- }
50
- if (useSignedURL) {
51
- const command = new GetObjectCommand({
52
- Bucket: bucket,
53
- Key: key
54
- });
55
- const signedUrl = await getSignedUrl(getStorageClient(), command, typeof signedDownloads === 'object' ? signedDownloads : {
56
- expiresIn: 7200
57
- });
58
- return Response.redirect(signedUrl, 302);
59
- }
60
- }
61
- // Get file size first for range validation and to set Content-Length header before streaming
62
- const headObject = await getStorageClient().headObject({
63
- Bucket: bucket,
64
- Key: key
65
- });
66
- const fileSize = headObject.ContentLength;
67
- if (!fileSize) {
68
- return new Response('Internal Server Error', {
69
- status: 500
70
- });
71
- }
72
- // Handle range request
73
- const rangeHeader = req.headers.get('range');
74
- const rangeResult = getRangeRequestInfo({
75
- fileSize,
76
- rangeHeader
77
- });
78
- if (rangeResult.type === 'invalid') {
79
- return new Response(null, {
80
- headers: new Headers(rangeResult.headers),
81
- status: rangeResult.status
82
- });
83
- }
84
- const rangeForS3 = rangeResult.type === 'partial' ? `bytes=${rangeResult.rangeStart}-${rangeResult.rangeEnd}` : undefined;
85
- let headers = new Headers(incomingHeaders);
86
- // Add range-related headers from the result
87
- for (const [key, value] of Object.entries(rangeResult.headers)){
88
- headers.append(key, value);
89
- }
90
- headers.append('Content-Type', String(headObject.ContentType));
91
- if (headObject.ETag) {
92
- headers.append('ETag', headObject.ETag);
93
- }
94
- // Add Content-Security-Policy header for SVG files to prevent executable code
95
- if (headObject.ContentType === 'image/svg+xml') {
96
- headers.append('Content-Security-Policy', "script-src 'none'");
97
- }
98
- const etagFromHeaders = req.headers.get('etag') || req.headers.get('if-none-match');
99
- const objectEtag = headObject.ETag;
100
- if (collection.upload && typeof collection.upload === 'object' && typeof collection.upload.modifyResponseHeaders === 'function') {
101
- headers = collection.upload.modifyResponseHeaders({
102
- headers
103
- }) || headers;
104
- }
105
- if (etagFromHeaders && etagFromHeaders === objectEtag) {
106
- return new Response(null, {
107
- headers,
108
- status: 304
109
- });
110
- }
111
- object = await getStorageClient().getObject({
112
- Bucket: bucket,
113
- Key: key,
114
- Range: rangeForS3
115
- }, {
116
- abortSignal: abortController.signal
117
- });
118
- if (!object.Body) {
119
- return new Response(null, {
120
- status: 404,
121
- statusText: 'Not Found'
122
- });
123
- }
124
- if (!isNodeReadableStream(object.Body)) {
125
- req.payload.logger.error({
126
- key,
127
- msg: 'S3 object body is not a readable stream'
128
- });
129
- return new Response('Internal Server Error', {
130
- status: 500
131
- });
132
- }
133
- const stream = object.Body;
134
- stream.on('error', (err)=>{
135
- req.payload.logger.error({
136
- err,
137
- key,
138
- msg: 'Error while streaming S3 object (aborting)'
139
- });
140
- abortRequestAndDestroyStream({
141
- abortController,
142
- object
143
- });
144
- });
145
- streamed = true;
146
- return new Response(stream, {
147
- headers,
148
- status: rangeResult.status
149
- });
150
- } catch (err) {
151
- if (err && typeof err === 'object' && ('name' in err && (err.name === 'NoSuchKey' || err.name === 'NotFound') || 'httpStatusCode' in err && err.httpStatusCode === 404)) {
152
- return new Response(null, {
153
- status: 404,
154
- statusText: 'Not Found'
155
- });
156
- }
157
- req.payload.logger.error(err);
158
- return new Response('Internal Server Error', {
159
- status: 500
160
- });
161
- } finally{
162
- if (!streamed) {
163
- abortRequestAndDestroyStream({
164
- abortController,
165
- object
166
- });
167
- }
168
- }
169
- };
170
- };
171
-
172
- //# sourceMappingURL=staticHandler.js.map
@@ -1 +0,0 @@
1
- {"version":3,"sources":["../src/staticHandler.ts"],"sourcesContent":["import type * as AWS from '@aws-sdk/client-s3'\nimport type { StaticHandler } from '@payloadcms/plugin-cloud-storage/types'\nimport type { CollectionConfig, PayloadRequest } from 'payload'\nimport type { Readable } from 'stream'\n\nimport { GetObjectCommand } from '@aws-sdk/client-s3'\nimport { getSignedUrl } from '@aws-sdk/s3-request-presigner'\nimport { getFilePrefix } from '@payloadcms/plugin-cloud-storage/utilities'\nimport path from 'path'\nimport { getRangeRequestInfo } from 'payload/internal'\nimport { sanitizeFilename } from 'payload/shared'\n\nexport type SignedDownloadsConfig =\n | {\n /** @default 7200 */\n expiresIn?: number\n shouldUseSignedURL?(args: {\n collection: CollectionConfig\n filename: string\n req: PayloadRequest\n }): boolean | Promise<boolean>\n }\n | boolean\n\ninterface Args {\n bucket: string\n collection: CollectionConfig\n getStorageClient: () => AWS.S3\n signedDownloads?: SignedDownloadsConfig\n}\n\nconst isNodeReadableStream = (body: AWS.GetObjectOutput['Body']): body is Readable => {\n return (\n typeof body === 'object' &&\n body !== null &&\n 'pipe' in body &&\n typeof body.pipe === 'function' &&\n 'destroy' in body &&\n typeof body.destroy === 'function'\n )\n}\n\nconst abortRequestAndDestroyStream = ({\n abortController,\n object,\n}: {\n abortController: AbortController\n object?: AWS.GetObjectOutput\n}) => {\n try {\n abortController.abort()\n } catch {\n /* noop */\n }\n if (object?.Body && isNodeReadableStream(object.Body)) {\n object.Body.destroy()\n }\n}\n\nexport const getHandler = ({\n bucket,\n collection,\n getStorageClient,\n signedDownloads,\n}: Args): StaticHandler => {\n return async (\n req,\n {\n headers: incomingHeaders,\n params: { clientUploadContext, filename, prefix: prefixQueryParam },\n },\n ) => {\n let object: AWS.GetObjectOutput | undefined = undefined\n let streamed = false\n\n const abortController = new AbortController()\n if 
(req.signal) {\n req.signal.addEventListener('abort', () => {\n abortRequestAndDestroyStream({ abortController, object })\n })\n }\n\n try {\n const prefix = await getFilePrefix({\n clientUploadContext,\n collection,\n filename,\n prefixQueryParam,\n req,\n })\n\n const key = path.posix.join(prefix, sanitizeFilename(filename))\n\n if (signedDownloads && !clientUploadContext) {\n let useSignedURL = true\n if (\n typeof signedDownloads === 'object' &&\n typeof signedDownloads.shouldUseSignedURL === 'function'\n ) {\n useSignedURL = await signedDownloads.shouldUseSignedURL({ collection, filename, req })\n }\n\n if (useSignedURL) {\n const command = new GetObjectCommand({ Bucket: bucket, Key: key })\n const signedUrl = await getSignedUrl(\n getStorageClient(),\n command,\n typeof signedDownloads === 'object' ? signedDownloads : { expiresIn: 7200 },\n )\n return Response.redirect(signedUrl, 302)\n }\n }\n\n // Get file size first for range validation and to set Content-Length header before streaming\n const headObject = await getStorageClient().headObject({\n Bucket: bucket,\n Key: key,\n })\n const fileSize = headObject.ContentLength\n\n if (!fileSize) {\n return new Response('Internal Server Error', { status: 500 })\n }\n\n // Handle range request\n const rangeHeader = req.headers.get('range')\n const rangeResult = getRangeRequestInfo({ fileSize, rangeHeader })\n\n if (rangeResult.type === 'invalid') {\n return new Response(null, {\n headers: new Headers(rangeResult.headers),\n status: rangeResult.status,\n })\n }\n\n const rangeForS3 =\n rangeResult.type === 'partial'\n ? 
`bytes=${rangeResult.rangeStart}-${rangeResult.rangeEnd}`\n : undefined\n\n let headers = new Headers(incomingHeaders)\n\n // Add range-related headers from the result\n for (const [key, value] of Object.entries(rangeResult.headers)) {\n headers.append(key, value)\n }\n\n headers.append('Content-Type', String(headObject.ContentType))\n if (headObject.ETag) {\n headers.append('ETag', headObject.ETag)\n }\n\n // Add Content-Security-Policy header for SVG files to prevent executable code\n if (headObject.ContentType === 'image/svg+xml') {\n headers.append('Content-Security-Policy', \"script-src 'none'\")\n }\n\n const etagFromHeaders = req.headers.get('etag') || req.headers.get('if-none-match')\n const objectEtag = headObject.ETag\n\n if (\n collection.upload &&\n typeof collection.upload === 'object' &&\n typeof collection.upload.modifyResponseHeaders === 'function'\n ) {\n headers = collection.upload.modifyResponseHeaders({ headers }) || headers\n }\n\n if (etagFromHeaders && etagFromHeaders === objectEtag) {\n return new Response(null, {\n headers,\n status: 304,\n })\n }\n\n object = await getStorageClient().getObject(\n {\n Bucket: bucket,\n Key: key,\n Range: rangeForS3,\n },\n { abortSignal: abortController.signal },\n )\n\n if (!object.Body) {\n return new Response(null, { status: 404, statusText: 'Not Found' })\n }\n\n if (!isNodeReadableStream(object.Body)) {\n req.payload.logger.error({\n key,\n msg: 'S3 object body is not a readable stream',\n })\n return new Response('Internal Server Error', { status: 500 })\n }\n\n const stream = object.Body\n stream.on('error', (err: Error) => {\n req.payload.logger.error({\n err,\n key,\n msg: 'Error while streaming S3 object (aborting)',\n })\n abortRequestAndDestroyStream({ abortController, object })\n })\n\n streamed = true\n return new Response(stream, { headers, status: rangeResult.status })\n } catch (err) {\n if (\n err &&\n typeof err === 'object' &&\n (('name' in err && (err.name === 'NoSuchKey' || err.name 
=== 'NotFound')) ||\n ('httpStatusCode' in err && err.httpStatusCode === 404))\n ) {\n return new Response(null, { status: 404, statusText: 'Not Found' })\n }\n req.payload.logger.error(err)\n return new Response('Internal Server Error', { status: 500 })\n } finally {\n if (!streamed) {\n abortRequestAndDestroyStream({ abortController, object })\n }\n }\n }\n}\n"],"names":["GetObjectCommand","getSignedUrl","getFilePrefix","path","getRangeRequestInfo","sanitizeFilename","isNodeReadableStream","body","pipe","destroy","abortRequestAndDestroyStream","abortController","object","abort","Body","getHandler","bucket","collection","getStorageClient","signedDownloads","req","headers","incomingHeaders","params","clientUploadContext","filename","prefix","prefixQueryParam","undefined","streamed","AbortController","signal","addEventListener","key","posix","join","useSignedURL","shouldUseSignedURL","command","Bucket","Key","signedUrl","expiresIn","Response","redirect","headObject","fileSize","ContentLength","status","rangeHeader","get","rangeResult","type","Headers","rangeForS3","rangeStart","rangeEnd","value","Object","entries","append","String","ContentType","ETag","etagFromHeaders","objectEtag","upload","modifyResponseHeaders","getObject","Range","abortSignal","statusText","payload","logger","error","msg","stream","on","err","name","httpStatusCode"],"mappings":"AAKA,SAASA,gBAAgB,QAAQ,qBAAoB;AACrD,SAASC,YAAY,QAAQ,gCAA+B;AAC5D,SAASC,aAAa,QAAQ,6CAA4C;AAC1E,OAAOC,UAAU,OAAM;AACvB,SAASC,mBAAmB,QAAQ,mBAAkB;AACtD,SAASC,gBAAgB,QAAQ,iBAAgB;AAqBjD,MAAMC,uBAAuB,CAACC;IAC5B,OACE,OAAOA,SAAS,YAChBA,SAAS,QACT,UAAUA,QACV,OAAOA,KAAKC,IAAI,KAAK,cACrB,aAAaD,QACb,OAAOA,KAAKE,OAAO,KAAK;AAE5B;AAEA,MAAMC,+BAA+B,CAAC,EACpCC,eAAe,EACfC,MAAM,EAIP;IACC,IAAI;QACFD,gBAAgBE,KAAK;IACvB,EAAE,OAAM;IACN,QAAQ,GACV;IACA,IAAID,QAAQE,QAAQR,qBAAqBM,OAAOE,IAAI,GAAG;QACrDF,OAAOE,IAAI,CAACL,OAAO;IACrB;AACF;AAEA,OAAO,MAAMM,aAAa,CAAC,EACzBC,MAAM,EACNC,UAAU,EACVC,gBAAgB,EAChBC,eAAe,EACV;IACL,OAAO,OACLC,KACA,EACEC,SAASC,eAAe
,EACxBC,QAAQ,EAAEC,mBAAmB,EAAEC,QAAQ,EAAEC,QAAQC,gBAAgB,EAAE,EACpE;QAED,IAAIf,SAA0CgB;QAC9C,IAAIC,WAAW;QAEf,MAAMlB,kBAAkB,IAAImB;QAC5B,IAAIV,IAAIW,MAAM,EAAE;YACdX,IAAIW,MAAM,CAACC,gBAAgB,CAAC,SAAS;gBACnCtB,6BAA6B;oBAAEC;oBAAiBC;gBAAO;YACzD;QACF;QAEA,IAAI;YACF,MAAMc,SAAS,MAAMxB,cAAc;gBACjCsB;gBACAP;gBACAQ;gBACAE;gBACAP;YACF;YAEA,MAAMa,MAAM9B,KAAK+B,KAAK,CAACC,IAAI,CAACT,QAAQrB,iBAAiBoB;YAErD,IAAIN,mBAAmB,CAACK,qBAAqB;gBAC3C,IAAIY,eAAe;gBACnB,IACE,OAAOjB,oBAAoB,YAC3B,OAAOA,gBAAgBkB,kBAAkB,KAAK,YAC9C;oBACAD,eAAe,MAAMjB,gBAAgBkB,kBAAkB,CAAC;wBAAEpB;wBAAYQ;wBAAUL;oBAAI;gBACtF;gBAEA,IAAIgB,cAAc;oBAChB,MAAME,UAAU,IAAItC,iBAAiB;wBAAEuC,QAAQvB;wBAAQwB,KAAKP;oBAAI;oBAChE,MAAMQ,YAAY,MAAMxC,aACtBiB,oBACAoB,SACA,OAAOnB,oBAAoB,WAAWA,kBAAkB;wBAAEuB,WAAW;oBAAK;oBAE5E,OAAOC,SAASC,QAAQ,CAACH,WAAW;gBACtC;YACF;YAEA,6FAA6F;YAC7F,MAAMI,aAAa,MAAM3B,mBAAmB2B,UAAU,CAAC;gBACrDN,QAAQvB;gBACRwB,KAAKP;YACP;YACA,MAAMa,WAAWD,WAAWE,aAAa;YAEzC,IAAI,CAACD,UAAU;gBACb,OAAO,IAAIH,SAAS,yBAAyB;oBAAEK,QAAQ;gBAAI;YAC7D;YAEA,uBAAuB;YACvB,MAAMC,cAAc7B,IAAIC,OAAO,CAAC6B,GAAG,CAAC;YACpC,MAAMC,cAAc/C,oBAAoB;gBAAE0C;gBAAUG;YAAY;YAEhE,IAAIE,YAAYC,IAAI,KAAK,WAAW;gBAClC,OAAO,IAAIT,SAAS,MAAM;oBACxBtB,SAAS,IAAIgC,QAAQF,YAAY9B,OAAO;oBACxC2B,QAAQG,YAAYH,MAAM;gBAC5B;YACF;YAEA,MAAMM,aACJH,YAAYC,IAAI,KAAK,YACjB,CAAC,MAAM,EAAED,YAAYI,UAAU,CAAC,CAAC,EAAEJ,YAAYK,QAAQ,EAAE,GACzD5B;YAEN,IAAIP,UAAU,IAAIgC,QAAQ/B;YAE1B,4CAA4C;YAC5C,KAAK,MAAM,CAACW,KAAKwB,MAAM,IAAIC,OAAOC,OAAO,CAACR,YAAY9B,OAAO,EAAG;gBAC9DA,QAAQuC,MAAM,CAAC3B,KAAKwB;YACtB;YAEApC,QAAQuC,MAAM,CAAC,gBAAgBC,OAAOhB,WAAWiB,WAAW;YAC5D,IAAIjB,WAAWkB,IAAI,EAAE;gBACnB1C,QAAQuC,MAAM,CAAC,QAAQf,WAAWkB,IAAI;YACxC;YAEA,8EAA8E;YAC9E,IAAIlB,WAAWiB,WAAW,KAAK,iBAAiB;gBAC9CzC,QAAQuC,MAAM,CAAC,2BAA2B;YAC5C;YAEA,MAAMI,kBAAkB5C,IAAIC,OAAO,CAAC6B,GAAG,CAAC,WAAW9B,IAAIC,OAAO,CAAC6B,GAAG,CAAC;YACnE,MAAMe,aAAapB,WAAWkB,IAAI;YAElC,IACE9C,WAAWiD,MAAM,IACjB,OAAOjD,WAAWiD,MAAM,KAAK,YAC7B,OAAOjD,WAAWiD,MAAM,CAACC,qBAAqB,KAAK,YACnD;gBACA9C,UAAUJ,WAAWiD,MAAM,CAACC,qBAAqB,CAAC;oBAAE9C;gBAAQ,MAAMA
;YACpE;YAEA,IAAI2C,mBAAmBA,oBAAoBC,YAAY;gBACrD,OAAO,IAAItB,SAAS,MAAM;oBACxBtB;oBACA2B,QAAQ;gBACV;YACF;YAEApC,SAAS,MAAMM,mBAAmBkD,SAAS,CACzC;gBACE7B,QAAQvB;gBACRwB,KAAKP;gBACLoC,OAAOf;YACT,GACA;gBAAEgB,aAAa3D,gBAAgBoB,MAAM;YAAC;YAGxC,IAAI,CAACnB,OAAOE,IAAI,EAAE;gBAChB,OAAO,IAAI6B,SAAS,MAAM;oBAAEK,QAAQ;oBAAKuB,YAAY;gBAAY;YACnE;YAEA,IAAI,CAACjE,qBAAqBM,OAAOE,IAAI,GAAG;gBACtCM,IAAIoD,OAAO,CAACC,MAAM,CAACC,KAAK,CAAC;oBACvBzC;oBACA0C,KAAK;gBACP;gBACA,OAAO,IAAIhC,SAAS,yBAAyB;oBAAEK,QAAQ;gBAAI;YAC7D;YAEA,MAAM4B,SAAShE,OAAOE,IAAI;YAC1B8D,OAAOC,EAAE,CAAC,SAAS,CAACC;gBAClB1D,IAAIoD,OAAO,CAACC,MAAM,CAACC,KAAK,CAAC;oBACvBI;oBACA7C;oBACA0C,KAAK;gBACP;gBACAjE,6BAA6B;oBAAEC;oBAAiBC;gBAAO;YACzD;YAEAiB,WAAW;YACX,OAAO,IAAIc,SAASiC,QAAQ;gBAAEvD;gBAAS2B,QAAQG,YAAYH,MAAM;YAAC;QACpE,EAAE,OAAO8B,KAAK;YACZ,IACEA,OACA,OAAOA,QAAQ,YACd,CAAA,AAAC,UAAUA,OAAQA,CAAAA,IAAIC,IAAI,KAAK,eAAeD,IAAIC,IAAI,KAAK,UAAS,KACnE,oBAAoBD,OAAOA,IAAIE,cAAc,KAAK,GAAG,GACxD;gBACA,OAAO,IAAIrC,SAAS,MAAM;oBAAEK,QAAQ;oBAAKuB,YAAY;gBAAY;YACnE;YACAnD,IAAIoD,OAAO,CAACC,MAAM,CAACC,KAAK,CAACI;YACzB,OAAO,IAAInC,SAAS,yBAAyB;gBAAEK,QAAQ;YAAI;QAC7D,SAAU;YACR,IAAI,CAACnB,UAAU;gBACbnB,6BAA6B;oBAAEC;oBAAiBC;gBAAO;YACzD;QACF;IACF;AACF,EAAC"}