@vercel/blob 2.1.0-371428d-20260119152220 → 2.1.0-454414f-20260204161813

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
package/dist/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/del.ts","../src/head.ts","../src/list.ts","../src/copy.ts","../src/index.ts"],"sourcesContent":["import { requestApi } from './api';\nimport type { BlobCommandOptions } from './helpers';\n\n/**\n * Deletes one or multiple blobs from your store.\n * Detailed documentation can be found here: https://vercel.com/docs/vercel-blob/using-blob-sdk#delete-a-blob\n *\n * @param urlOrPathname - Blob url (or pathname) to delete. You can pass either a single value or an array of values. You can only delete blobs that are located in a store, that your 'BLOB_READ_WRITE_TOKEN' has access to.\n * @param options - Additional options for the request.\n */\nexport async function del(\n urlOrPathname: string[] | string,\n options?: BlobCommandOptions,\n): Promise<void> {\n await requestApi(\n '/delete',\n {\n method: 'POST',\n headers: { 'content-type': 'application/json' },\n body: JSON.stringify({\n urls: Array.isArray(urlOrPathname) ? urlOrPathname : [urlOrPathname],\n }),\n signal: options?.abortSignal,\n },\n options,\n );\n}\n","import { requestApi } from './api';\nimport type { BlobCommandOptions } from './helpers';\n\n/**\n * Result of the head method containing metadata about a blob.\n */\nexport interface HeadBlobResult {\n /**\n * The size of the blob in bytes.\n */\n size: number;\n\n /**\n * The date when the blob was uploaded.\n */\n uploadedAt: Date;\n\n /**\n * The pathname of the blob within the store.\n */\n pathname: string;\n\n /**\n * The content type of the blob.\n */\n contentType: string;\n\n /**\n * The content disposition header value.\n */\n contentDisposition: string;\n\n /**\n * The URL of the blob.\n */\n url: string;\n\n /**\n * A URL that will cause browsers to download the file instead of displaying it inline.\n */\n downloadUrl: string;\n\n /**\n * The cache control header value.\n */\n cacheControl: string;\n}\n\ninterface HeadBlobApiResponse extends Omit<HeadBlobResult, 'uploadedAt'> {\n uploadedAt: string; // when receiving data from our API, uploadedAt is a string\n}\n\n/**\n * Fetches metadata of a blob object.\n * Detailed documentation can be found here: https://vercel.com/docs/vercel-blob/using-blob-sdk#get-blob-metadata\n *\n * @param urlOrPathname - Blob url or pathname to lookup.\n * @param options - Additional options for the request.\n */\nexport async function head(\n urlOrPathname: string,\n options?: BlobCommandOptions,\n): Promise<HeadBlobResult> {\n const searchParams = new URLSearchParams({ url: urlOrPathname });\n\n const response = await requestApi<HeadBlobApiResponse>(\n `?${searchParams.toString()}`,\n // HEAD can't have body as a response, so we use GET\n {\n method: 'GET',\n signal: options?.abortSignal,\n },\n options,\n );\n\n return {\n url: response.url,\n downloadUrl: response.downloadUrl,\n pathname: response.pathname,\n size: response.size,\n contentType: response.contentType,\n contentDisposition: response.contentDisposition,\n cacheControl: response.cacheControl,\n uploadedAt: new Date(response.uploadedAt),\n };\n}\n","import { requestApi } from './api';\nimport type { BlobCommandOptions } from './helpers';\n\n/**\n * Basic blob object information returned by the list method.\n */\nexport interface ListBlobResultBlob {\n /**\n * The URL of the blob.\n */\n url: string;\n\n /**\n * A URL that will cause browsers to download the file instead of displaying it inline.\n */\n downloadUrl: string;\n\n /**\n * The pathname of the blob within the store.\n */\n pathname: string;\n\n /**\n * The size of the blob in 
bytes.\n */\n size: number;\n\n /**\n * The date when the blob was uploaded.\n */\n uploadedAt: Date;\n}\n\n/**\n * Result of the list method in expanded mode (default).\n */\nexport interface ListBlobResult {\n /**\n * Array of blob objects in the store.\n */\n blobs: ListBlobResultBlob[];\n\n /**\n * Pagination cursor for the next set of results, if hasMore is true.\n */\n cursor?: string;\n\n /**\n * Indicates if there are more results available.\n */\n hasMore: boolean;\n}\n\n/**\n * Result of the list method in folded mode.\n */\nexport interface ListFoldedBlobResult extends ListBlobResult {\n /**\n * Array of folder paths in the store.\n */\n folders: string[];\n}\n\n/**\n * @internal Internal interface for the API response blob structure.\n * Maps the API response format where uploadedAt is a string, not a Date.\n */\ninterface ListBlobApiResponseBlob\n extends Omit<ListBlobResultBlob, 'uploadedAt'> {\n uploadedAt: string;\n}\n\n/**\n * @internal Internal interface for the API response structure.\n */\ninterface ListBlobApiResponse extends Omit<ListBlobResult, 'blobs'> {\n blobs: ListBlobApiResponseBlob[];\n folders?: string[];\n}\n\n/**\n * Options for the list method.\n */\nexport interface ListCommandOptions<\n M extends 'expanded' | 'folded' | undefined = undefined,\n> extends BlobCommandOptions {\n /**\n * The maximum number of blobs to return.\n * @defaultvalue 1000\n */\n limit?: number;\n\n /**\n * Filters the result to only include blobs that start with this prefix.\n * If used together with `mode: 'folded'`, make sure to include a trailing slash after the foldername.\n */\n prefix?: string;\n\n /**\n * The cursor to use for pagination. Can be obtained from the response of a previous `list` request.\n */\n cursor?: string;\n\n /**\n * Defines how the blobs are listed\n * - `expanded` the blobs property contains all blobs.\n * - `folded` the blobs property contains only the blobs at the root level of your store. Blobs that are located inside a folder get merged into a single entry in the folder response property.\n * @defaultvalue 'expanded'\n */\n mode?: M;\n}\n\n/**\n * @internal Type helper to determine the return type based on the mode parameter.\n */\ntype ListCommandResult<\n M extends 'expanded' | 'folded' | undefined = undefined,\n> = M extends 'folded' ? ListFoldedBlobResult : ListBlobResult;\n\n/**\n * Fetches a paginated list of blob objects from your store.\n *\n * @param options - Configuration options including:\n * - token - (Optional) A string specifying the read-write token to use when making requests. It defaults to process.env.BLOB_READ_WRITE_TOKEN when deployed on Vercel.\n * - limit - (Optional) The maximum number of blobs to return. Defaults to 1000.\n * - prefix - (Optional) Filters the result to only include blobs that start with this prefix. If used with mode: 'folded', include a trailing slash after the folder name.\n * - cursor - (Optional) The cursor to use for pagination. Can be obtained from the response of a previous list request.\n * - mode - (Optional) Defines how the blobs are listed. Can be 'expanded' (default) or 'folded'. 
In folded mode, blobs located inside a folder are merged into a single entry in the folders response property.\n * - abortSignal - (Optional) AbortSignal to cancel the operation.\n * @returns A promise that resolves to an object containing:\n * - blobs: An array of blob objects with size, uploadedAt, pathname, url, and downloadUrl properties\n * - cursor: A string for pagination (if hasMore is true)\n * - hasMore: A boolean indicating if there are more results available\n * - folders: (Only in 'folded' mode) An array of folder paths\n */\nexport async function list<\n M extends 'expanded' | 'folded' | undefined = undefined,\n>(options?: ListCommandOptions<M>): Promise<ListCommandResult<M>> {\n const searchParams = new URLSearchParams();\n\n if (options?.limit) {\n searchParams.set('limit', options.limit.toString());\n }\n if (options?.prefix) {\n searchParams.set('prefix', options.prefix);\n }\n if (options?.cursor) {\n searchParams.set('cursor', options.cursor);\n }\n if (options?.mode) {\n searchParams.set('mode', options.mode);\n }\n\n const response = await requestApi<ListBlobApiResponse>(\n `?${searchParams.toString()}`,\n {\n method: 'GET',\n signal: options?.abortSignal,\n },\n options,\n );\n\n if (options?.mode === 'folded') {\n return {\n folders: response.folders ?? [],\n cursor: response.cursor,\n hasMore: response.hasMore,\n blobs: response.blobs.map(mapBlobResult),\n } as ListCommandResult<M>;\n }\n\n return {\n cursor: response.cursor,\n hasMore: response.hasMore,\n blobs: response.blobs.map(mapBlobResult),\n } as ListCommandResult<M>;\n}\n\n/**\n * @internal Helper function to map API response blob format to the expected return type.\n * Converts the uploadedAt string into a Date object.\n */\nfunction mapBlobResult(\n blobResult: ListBlobApiResponseBlob,\n): ListBlobResultBlob {\n return {\n url: blobResult.url,\n downloadUrl: blobResult.downloadUrl,\n pathname: blobResult.pathname,\n size: blobResult.size,\n uploadedAt: new Date(blobResult.uploadedAt),\n };\n}\n","import { MAXIMUM_PATHNAME_LENGTH, requestApi } from './api';\nimport type { CommonCreateBlobOptions } from './helpers';\nimport { BlobError, disallowedPathnameCharacters } from './helpers';\n\nexport type CopyCommandOptions = CommonCreateBlobOptions;\n\nexport interface CopyBlobResult {\n url: string;\n downloadUrl: string;\n pathname: string;\n contentType: string;\n contentDisposition: string;\n}\n\n/**\n * Copies a blob to another location in your store.\n * Detailed documentation can be found here: https://vercel.com/docs/vercel-blob/using-blob-sdk#copy-a-blob\n *\n * @param fromUrlOrPathname - The blob URL (or pathname) to copy. You can only copy blobs that are in the store, that your 'BLOB_READ_WRITE_TOKEN' has access to.\n * @param toPathname - The pathname to copy the blob to. This includes the filename.\n * @param options - Additional options. The copy method will not preserve any metadata configuration (e.g.: 'cacheControlMaxAge') of the source blob. 
If you want to copy the metadata, you need to define it here again.\n */\nexport async function copy(\n fromUrlOrPathname: string,\n toPathname: string,\n options: CopyCommandOptions,\n): Promise<CopyBlobResult> {\n if (!options) {\n throw new BlobError('missing options, see usage');\n }\n\n if (options.access !== 'public' && options.access !== 'private') {\n throw new BlobError('access must be \"public\" or \"private\"');\n }\n\n if (toPathname.length > MAXIMUM_PATHNAME_LENGTH) {\n throw new BlobError(\n `pathname is too long, maximum length is ${MAXIMUM_PATHNAME_LENGTH}`,\n );\n }\n\n for (const invalidCharacter of disallowedPathnameCharacters) {\n if (toPathname.includes(invalidCharacter)) {\n throw new BlobError(\n `pathname cannot contain \"${invalidCharacter}\", please encode it if needed`,\n );\n }\n }\n\n const headers: Record<string, string> = {};\n\n // access is always required, so always add it to headers\n headers['x-vercel-blob-access'] = options.access;\n\n if (options.addRandomSuffix !== undefined) {\n headers['x-add-random-suffix'] = options.addRandomSuffix ? '1' : '0';\n }\n\n if (options.allowOverwrite !== undefined) {\n headers['x-allow-overwrite'] = options.allowOverwrite ? '1' : '0';\n }\n\n if (options.contentType) {\n headers['x-content-type'] = options.contentType;\n }\n\n if (options.cacheControlMaxAge !== undefined) {\n headers['x-cache-control-max-age'] = options.cacheControlMaxAge.toString();\n }\n\n const params = new URLSearchParams({\n pathname: toPathname,\n fromUrl: fromUrlOrPathname,\n });\n\n const response = await requestApi<CopyBlobResult>(\n `?${params.toString()}`,\n {\n method: 'PUT',\n headers,\n signal: options.abortSignal,\n },\n options,\n );\n\n return {\n url: response.url,\n downloadUrl: response.downloadUrl,\n pathname: response.pathname,\n contentType: response.contentType,\n contentDisposition: response.contentDisposition,\n };\n}\n","import type { CommonCreateBlobOptions } from './helpers';\nimport type { CompleteMultipartUploadCommandOptions } from './multipart/complete';\nimport { createCompleteMultipartUploadMethod } from './multipart/complete';\nimport { createCreateMultipartUploadMethod } from './multipart/create';\nimport { createCreateMultipartUploaderMethod } from './multipart/create-uploader';\nimport type { UploadPartCommandOptions } from './multipart/upload';\nimport { createUploadPartMethod } from './multipart/upload';\nimport type { PutCommandOptions } from './put';\nimport { createPutMethod } from './put';\n\n// expose api BlobErrors\nexport {\n BlobAccessError,\n BlobClientTokenExpiredError,\n BlobContentTypeNotAllowedError,\n BlobFileTooLargeError,\n BlobNotFoundError,\n BlobPathnameMismatchError,\n BlobRequestAbortedError,\n BlobServiceNotAvailable,\n BlobServiceRateLimited,\n BlobStoreNotFoundError,\n BlobStoreSuspendedError,\n BlobUnknownError,\n} from './api';\n// expose generic BlobError and download url util\nexport {\n BlobError,\n getDownloadUrl,\n type OnUploadProgressCallback,\n type UploadProgressEvent,\n} from './helpers';\n\n// vercelBlob.put()\n\nexport type { PutBlobResult } from './put-helpers';\nexport type { PutCommandOptions };\n\n/**\n * Uploads a blob into your store from your server.\n * Detailed documentation can be found here: https://vercel.com/docs/vercel-blob/using-blob-sdk#upload-a-blob\n *\n * If you want to upload from the browser directly, or if you're hitting Vercel upload limits, check out the documentation for client uploads: 
https://vercel.com/docs/vercel-blob/using-blob-sdk#client-uploads\n *\n * @param pathname - The pathname to upload the blob to, including the extension. This will influence the URL of your blob like https://$storeId.public.blob.vercel-storage.com/$pathname.\n * @param body - The content of your blob, can be a: string, File, Blob, Buffer or Stream. We support almost everything fetch supports: https://developer.mozilla.org/en-US/docs/Web/API/RequestInit#body.\n * @param options - Configuration options including:\n * - access - (Required) Must be 'public' or 'private'. Public blobs are accessible via URL, private blobs require authentication.\n * - addRandomSuffix - (Optional) A boolean specifying whether to add a random suffix to the pathname. It defaults to false. We recommend using this option to ensure there are no conflicts in your blob filenames.\n * - allowOverwrite - (Optional) A boolean to allow overwriting blobs. By default an error will be thrown if you try to overwrite a blob by using the same pathname for multiple blobs.\n * - contentType - (Optional) A string indicating the media type. By default, it's extracted from the pathname's extension.\n * - cacheControlMaxAge - (Optional) A number in seconds to configure how long Blobs are cached. Defaults to one month. Cannot be set to a value lower than 1 minute.\n * - token - (Optional) A string specifying the token to use when making requests. It defaults to process.env.BLOB_READ_WRITE_TOKEN when deployed on Vercel.\n * - multipart - (Optional) Whether to use multipart upload for large files. It will split the file into multiple parts, upload them in parallel and retry failed parts.\n * - abortSignal - (Optional) AbortSignal to cancel the operation.\n * - onUploadProgress - (Optional) Callback to track upload progress: onUploadProgress(\\{loaded: number, total: number, percentage: number\\})\n * @returns A promise that resolves to the blob information, including pathname, contentType, contentDisposition, url, and downloadUrl.\n */\nexport const put = createPutMethod<PutCommandOptions>({\n allowedOptions: [\n 'cacheControlMaxAge',\n 'addRandomSuffix',\n 'allowOverwrite',\n 'contentType',\n ],\n});\n\n// vercelBlob.del()\n\nexport { del } from './del';\n\n// vercelBlob.head()\n\nexport type { HeadBlobResult } from './head';\nexport { head } from './head';\n\n// vercelBlob.list()\n\nexport type {\n ListBlobResult,\n ListBlobResultBlob,\n ListCommandOptions,\n ListFoldedBlobResult,\n} from './list';\nexport { list } from './list';\n\n// vercelBlob.copy()\n\nexport type { CopyBlobResult, CopyCommandOptions } from './copy';\nexport { copy } from './copy';\n\n// vercelBlob. createMultipartUpload()\n// vercelBlob. uploadPart()\n// vercelBlob. completeMultipartUpload()\n// vercelBlob. createMultipartUploader()\n\n/**\n * Creates a multipart upload. This is the first step in the manual multipart upload process.\n *\n * @param pathname - A string specifying the path inside the blob store. This will be the base value of the return URL and includes the filename and extension.\n * @param options - Configuration options including:\n * - access - (Required) Must be 'public' or 'private'. Public blobs are accessible via URL, private blobs require authentication.\n * - addRandomSuffix - (Optional) A boolean specifying whether to add a random suffix to the pathname. It defaults to true.\n * - allowOverwrite - (Optional) A boolean to allow overwriting blobs. 
By default an error will be thrown if you try to overwrite a blob by using the same pathname for multiple blobs.\n * - contentType - (Optional) The media type for the file. If not specified, it's derived from the file extension. Falls back to application/octet-stream when no extension exists or can't be matched.\n * - cacheControlMaxAge - (Optional) A number in seconds to configure the edge and browser cache. Defaults to one year.\n * - token - (Optional) A string specifying the token to use when making requests. It defaults to process.env.BLOB_READ_WRITE_TOKEN when deployed on Vercel.\n * - abortSignal - (Optional) AbortSignal to cancel the operation.\n * @returns A promise that resolves to an object containing:\n * - key: A string that identifies the blob object.\n * - uploadId: A string that identifies the multipart upload. Both are needed for subsequent uploadPart calls.\n */\nexport const createMultipartUpload =\n createCreateMultipartUploadMethod<CommonCreateBlobOptions>({\n allowedOptions: [\n 'cacheControlMaxAge',\n 'addRandomSuffix',\n 'allowOverwrite',\n 'contentType',\n ],\n });\n\n/**\n * Creates a multipart uploader that simplifies the multipart upload process.\n * This is a wrapper around the manual multipart upload process that provides a more convenient API.\n *\n * @param pathname - A string specifying the path inside the blob store. This will be the base value of the return URL and includes the filename and extension.\n * @param options - Configuration options including:\n * - access - (Required) Must be 'public' or 'private'. Public blobs are accessible via URL, private blobs require authentication.\n * - addRandomSuffix - (Optional) A boolean specifying whether to add a random suffix to the pathname. It defaults to true.\n * - allowOverwrite - (Optional) A boolean to allow overwriting blobs. By default an error will be thrown if you try to overwrite a blob by using the same pathname for multiple blobs.\n * - contentType - (Optional) The media type for the file. If not specified, it's derived from the file extension. Falls back to application/octet-stream when no extension exists or can't be matched.\n * - cacheControlMaxAge - (Optional) A number in seconds to configure the edge and browser cache. Defaults to one year.\n * - token - (Optional) A string specifying the token to use when making requests. It defaults to process.env.BLOB_READ_WRITE_TOKEN when deployed on Vercel.\n * - abortSignal - (Optional) AbortSignal to cancel the operation.\n * @returns A promise that resolves to an uploader object with the following properties and methods:\n * - key: A string that identifies the blob object.\n * - uploadId: A string that identifies the multipart upload.\n * - uploadPart: A method to upload a part of the file.\n * - complete: A method to complete the multipart upload process.\n */\nexport const createMultipartUploader =\n createCreateMultipartUploaderMethod<CommonCreateBlobOptions>({\n allowedOptions: [\n 'cacheControlMaxAge',\n 'addRandomSuffix',\n 'allowOverwrite',\n 'contentType',\n ],\n });\n\nexport type { UploadPartCommandOptions };\n\n/**\n * Uploads a part of a multipart upload.\n * Used as part of the manual multipart upload process.\n *\n * @param pathname - Same value as the pathname parameter passed to createMultipartUpload. This will influence the final URL of your blob.\n * @param body - A blob object as ReadableStream, String, ArrayBuffer or Blob based on these supported body types. 
Each part must be a minimum of 5MB, except the last one which can be smaller.\n * @param options - Configuration options including:\n * - access - (Required) Must be 'public' or 'private'. Public blobs are accessible via URL, private blobs require authentication.\n * - uploadId - (Required) A string returned from createMultipartUpload which identifies the multipart upload.\n * - key - (Required) A string returned from createMultipartUpload which identifies the blob object.\n * - partNumber - (Required) A number identifying which part is uploaded (1-based index).\n * - contentType - (Optional) The media type for the blob. By default, it's derived from the pathname.\n * - token - (Optional) A string specifying the token to use when making requests. It defaults to process.env.BLOB_READ_WRITE_TOKEN when deployed on Vercel.\n * - addRandomSuffix - (Optional) A boolean specifying whether to add a random suffix to the pathname.\n * - allowOverwrite - (Optional) A boolean to allow overwriting blobs.\n * - cacheControlMaxAge - (Optional) A number in seconds to configure how long Blobs are cached.\n * - abortSignal - (Optional) AbortSignal to cancel the running request.\n * - onUploadProgress - (Optional) Callback to track upload progress: onUploadProgress(\\{loaded: number, total: number, percentage: number\\})\n * @returns A promise that resolves to the uploaded part information containing etag and partNumber, which will be needed for the completeMultipartUpload call.\n */\nexport const uploadPart = createUploadPartMethod<UploadPartCommandOptions>({\n allowedOptions: [\n 'cacheControlMaxAge',\n 'addRandomSuffix',\n 'allowOverwrite',\n 'contentType',\n ],\n});\n\nexport type { CompleteMultipartUploadCommandOptions };\n\n/**\n * Completes a multipart upload by combining all uploaded parts.\n * This is the final step in the manual multipart upload process.\n *\n * @param pathname - Same value as the pathname parameter passed to createMultipartUpload.\n * @param parts - An array containing all the uploaded parts information from previous uploadPart calls. Each part must have properties etag and partNumber.\n * @param options - Configuration options including:\n * - access - (Required) Must be 'public' or 'private'. Public blobs are accessible via URL, private blobs require authentication.\n * - uploadId - (Required) A string returned from createMultipartUpload which identifies the multipart upload.\n * - key - (Required) A string returned from createMultipartUpload which identifies the blob object.\n * - contentType - (Optional) The media type for the file. If not specified, it's derived from the file extension.\n * - token - (Optional) A string specifying the token to use when making requests. It defaults to process.env.BLOB_READ_WRITE_TOKEN when deployed on Vercel.\n * - addRandomSuffix - (Optional) A boolean specifying whether to add a random suffix to the pathname. It defaults to true.\n * - allowOverwrite - (Optional) A boolean to allow overwriting blobs.\n * - cacheControlMaxAge - (Optional) A number in seconds to configure the edge and browser cache. 
Defaults to one year.\n * - abortSignal - (Optional) AbortSignal to cancel the operation.\n * @returns A promise that resolves to the finalized blob information, including pathname, contentType, contentDisposition, url, and downloadUrl.\n */\nexport const completeMultipartUpload =\n createCompleteMultipartUploadMethod<CompleteMultipartUploadCommandOptions>({\n allowedOptions: [\n 'cacheControlMaxAge',\n 'addRandomSuffix',\n 'allowOverwrite',\n 'contentType',\n ],\n });\n\nexport { createFolder } from './create-folder';\nexport type { Part, PartInput } from './multipart/helpers';\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;AAUA,eAAsB,IACpB,eACA,SACe;AACf,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,MACE,QAAQ;AAAA,MACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,MAC9C,MAAM,KAAK,UAAU;AAAA,QACnB,MAAM,MAAM,QAAQ,aAAa,IAAI,gBAAgB,CAAC,aAAa;AAAA,MACrE,CAAC;AAAA,MACD,QAAQ,mCAAS;AAAA,IACnB;AAAA,IACA;AAAA,EACF;AACF;;;ACiCA,eAAsB,KACpB,eACA,SACyB;AACzB,QAAM,eAAe,IAAI,gBAAgB,EAAE,KAAK,cAAc,CAAC;AAE/D,QAAM,WAAW,MAAM;AAAA,IACrB,IAAI,aAAa,SAAS,CAAC;AAAA;AAAA,IAE3B;AAAA,MACE,QAAQ;AAAA,MACR,QAAQ,mCAAS;AAAA,IACnB;AAAA,IACA;AAAA,EACF;AAEA,SAAO;AAAA,IACL,KAAK,SAAS;AAAA,IACd,aAAa,SAAS;AAAA,IACtB,UAAU,SAAS;AAAA,IACnB,MAAM,SAAS;AAAA,IACf,aAAa,SAAS;AAAA,IACtB,oBAAoB,SAAS;AAAA,IAC7B,cAAc,SAAS;AAAA,IACvB,YAAY,IAAI,KAAK,SAAS,UAAU;AAAA,EAC1C;AACF;;;ACkDA,eAAsB,KAEpB,SAAgE;AAzIlE;AA0IE,QAAM,eAAe,IAAI,gBAAgB;AAEzC,MAAI,mCAAS,OAAO;AAClB,iBAAa,IAAI,SAAS,QAAQ,MAAM,SAAS,CAAC;AAAA,EACpD;AACA,MAAI,mCAAS,QAAQ;AACnB,iBAAa,IAAI,UAAU,QAAQ,MAAM;AAAA,EAC3C;AACA,MAAI,mCAAS,QAAQ;AACnB,iBAAa,IAAI,UAAU,QAAQ,MAAM;AAAA,EAC3C;AACA,MAAI,mCAAS,MAAM;AACjB,iBAAa,IAAI,QAAQ,QAAQ,IAAI;AAAA,EACvC;AAEA,QAAM,WAAW,MAAM;AAAA,IACrB,IAAI,aAAa,SAAS,CAAC;AAAA,IAC3B;AAAA,MACE,QAAQ;AAAA,MACR,QAAQ,mCAAS;AAAA,IACnB;AAAA,IACA;AAAA,EACF;AAEA,OAAI,mCAAS,UAAS,UAAU;AAC9B,WAAO;AAAA,MACL,UAAS,cAAS,YAAT,YAAoB,CAAC;AAAA,MAC9B,QAAQ,SAAS;AAAA,MACjB,SAAS,SAAS;AAAA,MAClB,OAAO,SAAS,MAAM,IAAI,aAAa;AAAA,IACzC;AAAA,EACF;AAEA,SAAO;AAAA,IACL,QAAQ,SAAS;AAAA,IACjB,SAAS,SAAS;AAAA,IAClB,OAAO,SAAS,MAAM,IAAI,aAAa;AAAA,EACzC;AACF;AAMA,SAAS,cACP,YACoB;AACpB,SAAO;AAAA,IACL,KAAK,WAAW;AAAA,IAChB,aAAa,WAAW;AAAA,IACxB,UAAU,WAAW;AAAA,IACrB,MAAM,WAAW;AAAA,IACjB,YAAY,IAAI,KAAK,WAAW,UAAU;AAAA,EAC5C;AACF;;;AC1KA,eAAsB,KACpB,mBACA,YACA,SACyB;AACzB,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI,UAAU,4BAA4B;AAAA,EAClD;AAEA,MAAI,QAAQ,WAAW,YAAY,QAAQ,WAAW,WAAW;AAC/D,UAAM,IAAI,UAAU,sCAAsC;AAAA,EAC5D;AAEA,MAAI,WAAW,SAAS,yBAAyB;AAC/C,UAAM,IAAI;AAAA,MACR,2CAA2C,uBAAuB;AAAA,IACpE;AAAA,EACF;AAEA,aAAW,oBAAoB,8BAA8B;AAC3D,QAAI,WAAW,SAAS,gBAAgB,GAAG;AACzC,YAAM,IAAI;AAAA,QACR,4BAA4B,gBAAgB;AAAA,MAC9C;AAAA,IACF;AAAA,EACF;AAEA,QAAM,UAAkC,CAAC;AAGzC,UAAQ,sBAAsB,IAAI,QAAQ;AAE1C,MAAI,QAAQ,oBAAoB,QAAW;AACzC,YAAQ,qBAAqB,IAAI,QAAQ,kBAAkB,MAAM;AAAA,EACnE;AAEA,MAAI,QAAQ,mBAAmB,QAAW;AACxC,YAAQ,mBAAmB,IAAI,QAAQ,iBAAiB,MAAM;AAAA,EAChE;AAEA,MAAI,QAAQ,aAAa;AACvB,YAAQ,gBAAgB,IAAI,QAAQ;AAAA,EACtC;AAEA,MAAI,QAAQ,uBAAuB,QAAW;AAC5C,YAAQ,yBAAyB,IAAI,QAAQ,mBAAmB,SAAS;AAAA,EAC3E;AAEA,QAAM,SAAS,IAAI,gBAAgB;AAAA,IACjC,UAAU;AAAA,IACV,SAAS;AAAA,EACX,CAAC;AAED,QAAM,WAAW,MAAM;AAAA,IACrB,IAAI,OAAO,SAAS,CAAC;AAAA,IACrB;AAAA,MACE,QAAQ;AAAA,MACR;AAAA,MACA,QAAQ,QAAQ;AAAA,IAClB;AAAA,IACA;AAAA,EACF;AAEA,SAAO;AAAA,IACL,KAAK,SAAS;AAAA,IACd,aAAa,SAAS;AAAA,IACtB,UAAU,SAAS;AAAA,IACnB,aAAa,SAAS;AAAA,IACtB,oBAAoB,SAAS;AAAA,EAC/B;AACF;;;AClCO,IAAM,MAAM,gBAAmC;AAAA,EACpD,gBAAgB;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF,CAAC;AA+CM,IAAM,wBACX,kCAA2D;AAAA,EACzD,gBAAgB;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF,CAAC;AAqBI,IAAM,0BACX,oCAA6D;AAAA,EAC3D,gBAAgB;AAAA,IACd;AAAA,IACA;AAAA
,IACA;AAAA,IACA;AAAA,EACF;AACF,CAAC;AAwBI,IAAM,aAAa,uBAAiD;AAAA,EACzE,gBAAgB;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF,CAAC;AAsBM,IAAM,0BACX,oCAA2E;AAAA,EACzE,gBAAgB;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF,CAAC;","names":[]}
+ {"version":3,"sources":["../src/del.ts","../src/head.ts","../src/list.ts","../src/copy.ts","../src/index.ts"],"sourcesContent":["import { requestApi } from './api';\nimport type { BlobCommandOptions } from './helpers';\n\n/**\n * Deletes one or multiple blobs from your store.\n * Detailed documentation can be found here: https://vercel.com/docs/vercel-blob/using-blob-sdk#delete-a-blob\n *\n * @param urlOrPathname - Blob url (or pathname) to delete. You can pass either a single value or an array of values. You can only delete blobs that are located in a store, that your 'BLOB_READ_WRITE_TOKEN' has access to.\n * @param options - Additional options for the request.\n */\nexport async function del(\n urlOrPathname: string[] | string,\n options?: BlobCommandOptions,\n): Promise<void> {\n await requestApi(\n '/delete',\n {\n method: 'POST',\n headers: { 'content-type': 'application/json' },\n body: JSON.stringify({\n urls: Array.isArray(urlOrPathname) ? urlOrPathname : [urlOrPathname],\n }),\n signal: options?.abortSignal,\n },\n options,\n );\n}\n","import { requestApi } from './api';\nimport type { BlobCommandOptions } from './helpers';\n\n/**\n * Result of the head method containing metadata about a blob.\n */\nexport interface HeadBlobResult {\n /**\n * The size of the blob in bytes.\n */\n size: number;\n\n /**\n * The date when the blob was uploaded.\n */\n uploadedAt: Date;\n\n /**\n * The pathname of the blob within the store.\n */\n pathname: string;\n\n /**\n * The content type of the blob.\n */\n contentType: string;\n\n /**\n * The content disposition header value.\n */\n contentDisposition: string;\n\n /**\n * The URL of the blob.\n */\n url: string;\n\n /**\n * A URL that will cause browsers to download the file instead of displaying it inline.\n */\n downloadUrl: string;\n\n /**\n * The cache control header value.\n */\n cacheControl: string;\n\n /**\n * The ETag of the blob. 
Can be used with `ifMatch` for conditional writes.\n */\n etag: string;\n}\n\ninterface HeadBlobApiResponse extends Omit<HeadBlobResult, 'uploadedAt'> {\n uploadedAt: string; // when receiving data from our API, uploadedAt is a string\n}\n\n/**\n * Fetches metadata of a blob object.\n * Detailed documentation can be found here: https://vercel.com/docs/vercel-blob/using-blob-sdk#get-blob-metadata\n *\n * @param urlOrPathname - Blob url or pathname to lookup.\n * @param options - Additional options for the request.\n */\nexport async function head(\n urlOrPathname: string,\n options?: BlobCommandOptions,\n): Promise<HeadBlobResult> {\n const searchParams = new URLSearchParams({ url: urlOrPathname });\n\n const response = await requestApi<HeadBlobApiResponse>(\n `?${searchParams.toString()}`,\n // HEAD can't have body as a response, so we use GET\n {\n method: 'GET',\n signal: options?.abortSignal,\n },\n options,\n );\n\n return {\n url: response.url,\n downloadUrl: response.downloadUrl,\n pathname: response.pathname,\n size: response.size,\n contentType: response.contentType,\n contentDisposition: response.contentDisposition,\n cacheControl: response.cacheControl,\n uploadedAt: new Date(response.uploadedAt),\n etag: response.etag,\n };\n}\n","import { requestApi } from './api';\nimport type { BlobCommandOptions } from './helpers';\n\n/**\n * Basic blob object information returned by the list method.\n */\nexport interface ListBlobResultBlob {\n /**\n * The URL of the blob.\n */\n url: string;\n\n /**\n * A URL that will cause browsers to download the file instead of displaying it inline.\n */\n downloadUrl: string;\n\n /**\n * The pathname of the blob within the store.\n */\n pathname: string;\n\n /**\n * The size of the blob in bytes.\n */\n size: number;\n\n /**\n * The date when the blob was uploaded.\n */\n uploadedAt: Date;\n\n /**\n * The ETag of the blob. Can be used with `ifMatch` for conditional writes.\n */\n etag: string;\n}\n\n/**\n * Result of the list method in expanded mode (default).\n */\nexport interface ListBlobResult {\n /**\n * Array of blob objects in the store.\n */\n blobs: ListBlobResultBlob[];\n\n /**\n * Pagination cursor for the next set of results, if hasMore is true.\n */\n cursor?: string;\n\n /**\n * Indicates if there are more results available.\n */\n hasMore: boolean;\n}\n\n/**\n * Result of the list method in folded mode.\n */\nexport interface ListFoldedBlobResult extends ListBlobResult {\n /**\n * Array of folder paths in the store.\n */\n folders: string[];\n}\n\n/**\n * @internal Internal interface for the API response blob structure.\n * Maps the API response format where uploadedAt is a string, not a Date.\n */\ninterface ListBlobApiResponseBlob\n extends Omit<ListBlobResultBlob, 'uploadedAt'> {\n uploadedAt: string;\n}\n\n/**\n * @internal Internal interface for the API response structure.\n */\ninterface ListBlobApiResponse extends Omit<ListBlobResult, 'blobs'> {\n blobs: ListBlobApiResponseBlob[];\n folders?: string[];\n}\n\n/**\n * Options for the list method.\n */\nexport interface ListCommandOptions<\n M extends 'expanded' | 'folded' | undefined = undefined,\n> extends BlobCommandOptions {\n /**\n * The maximum number of blobs to return.\n * @defaultvalue 1000\n */\n limit?: number;\n\n /**\n * Filters the result to only include blobs that start with this prefix.\n * If used together with `mode: 'folded'`, make sure to include a trailing slash after the foldername.\n */\n prefix?: string;\n\n /**\n * The cursor to use for pagination. 
Can be obtained from the response of a previous `list` request.\n */\n cursor?: string;\n\n /**\n * Defines how the blobs are listed\n * - `expanded` the blobs property contains all blobs.\n * - `folded` the blobs property contains only the blobs at the root level of your store. Blobs that are located inside a folder get merged into a single entry in the folder response property.\n * @defaultvalue 'expanded'\n */\n mode?: M;\n}\n\n/**\n * @internal Type helper to determine the return type based on the mode parameter.\n */\ntype ListCommandResult<\n M extends 'expanded' | 'folded' | undefined = undefined,\n> = M extends 'folded' ? ListFoldedBlobResult : ListBlobResult;\n\n/**\n * Fetches a paginated list of blob objects from your store.\n *\n * @param options - Configuration options including:\n * - token - (Optional) A string specifying the read-write token to use when making requests. It defaults to process.env.BLOB_READ_WRITE_TOKEN when deployed on Vercel.\n * - limit - (Optional) The maximum number of blobs to return. Defaults to 1000.\n * - prefix - (Optional) Filters the result to only include blobs that start with this prefix. If used with mode: 'folded', include a trailing slash after the folder name.\n * - cursor - (Optional) The cursor to use for pagination. Can be obtained from the response of a previous list request.\n * - mode - (Optional) Defines how the blobs are listed. Can be 'expanded' (default) or 'folded'. In folded mode, blobs located inside a folder are merged into a single entry in the folders response property.\n * - abortSignal - (Optional) AbortSignal to cancel the operation.\n * @returns A promise that resolves to an object containing:\n * - blobs: An array of blob objects with size, uploadedAt, pathname, url, and downloadUrl properties\n * - cursor: A string for pagination (if hasMore is true)\n * - hasMore: A boolean indicating if there are more results available\n * - folders: (Only in 'folded' mode) An array of folder paths\n */\nexport async function list<\n M extends 'expanded' | 'folded' | undefined = undefined,\n>(options?: ListCommandOptions<M>): Promise<ListCommandResult<M>> {\n const searchParams = new URLSearchParams();\n\n if (options?.limit) {\n searchParams.set('limit', options.limit.toString());\n }\n if (options?.prefix) {\n searchParams.set('prefix', options.prefix);\n }\n if (options?.cursor) {\n searchParams.set('cursor', options.cursor);\n }\n if (options?.mode) {\n searchParams.set('mode', options.mode);\n }\n\n const response = await requestApi<ListBlobApiResponse>(\n `?${searchParams.toString()}`,\n {\n method: 'GET',\n signal: options?.abortSignal,\n },\n options,\n );\n\n if (options?.mode === 'folded') {\n return {\n folders: response.folders ?? 
[],\n cursor: response.cursor,\n hasMore: response.hasMore,\n blobs: response.blobs.map(mapBlobResult),\n } as ListCommandResult<M>;\n }\n\n return {\n cursor: response.cursor,\n hasMore: response.hasMore,\n blobs: response.blobs.map(mapBlobResult),\n } as ListCommandResult<M>;\n}\n\n/**\n * @internal Helper function to map API response blob format to the expected return type.\n * Converts the uploadedAt string into a Date object.\n */\nfunction mapBlobResult(\n blobResult: ListBlobApiResponseBlob,\n): ListBlobResultBlob {\n return {\n url: blobResult.url,\n downloadUrl: blobResult.downloadUrl,\n pathname: blobResult.pathname,\n size: blobResult.size,\n uploadedAt: new Date(blobResult.uploadedAt),\n etag: blobResult.etag,\n };\n}\n","import { MAXIMUM_PATHNAME_LENGTH, requestApi } from './api';\nimport type { CommonCreateBlobOptions } from './helpers';\nimport { BlobError, disallowedPathnameCharacters } from './helpers';\n\nexport type CopyCommandOptions = CommonCreateBlobOptions;\n\nexport interface CopyBlobResult {\n url: string;\n downloadUrl: string;\n pathname: string;\n contentType: string;\n contentDisposition: string;\n /**\n * The ETag of the blob. Can be used with `ifMatch` for conditional writes.\n */\n etag: string;\n}\n\n/**\n * Copies a blob to another location in your store.\n * Detailed documentation can be found here: https://vercel.com/docs/vercel-blob/using-blob-sdk#copy-a-blob\n *\n * @param fromUrlOrPathname - The blob URL (or pathname) to copy. You can only copy blobs that are in the store, that your 'BLOB_READ_WRITE_TOKEN' has access to.\n * @param toPathname - The pathname to copy the blob to. This includes the filename.\n * @param options - Additional options. The copy method will not preserve any metadata configuration (e.g.: 'cacheControlMaxAge') of the source blob. If you want to copy the metadata, you need to define it here again.\n */\nexport async function copy(\n fromUrlOrPathname: string,\n toPathname: string,\n options: CopyCommandOptions,\n): Promise<CopyBlobResult> {\n if (!options) {\n throw new BlobError('missing options, see usage');\n }\n\n if (options.access !== 'public') {\n throw new BlobError('access must be \"public\"');\n }\n\n if (toPathname.length > MAXIMUM_PATHNAME_LENGTH) {\n throw new BlobError(\n `pathname is too long, maximum length is ${MAXIMUM_PATHNAME_LENGTH}`,\n );\n }\n\n for (const invalidCharacter of disallowedPathnameCharacters) {\n if (toPathname.includes(invalidCharacter)) {\n throw new BlobError(\n `pathname cannot contain \"${invalidCharacter}\", please encode it if needed`,\n );\n }\n }\n\n const headers: Record<string, string> = {};\n\n if (options.addRandomSuffix !== undefined) {\n headers['x-add-random-suffix'] = options.addRandomSuffix ? '1' : '0';\n }\n\n if (options.allowOverwrite !== undefined) {\n headers['x-allow-overwrite'] = options.allowOverwrite ? 
'1' : '0';\n }\n\n if (options.contentType) {\n headers['x-content-type'] = options.contentType;\n }\n\n if (options.cacheControlMaxAge !== undefined) {\n headers['x-cache-control-max-age'] = options.cacheControlMaxAge.toString();\n }\n\n if (options.ifMatch) {\n headers['x-if-match'] = options.ifMatch;\n }\n\n const params = new URLSearchParams({\n pathname: toPathname,\n fromUrl: fromUrlOrPathname,\n });\n\n const response = await requestApi<CopyBlobResult>(\n `?${params.toString()}`,\n {\n method: 'PUT',\n headers,\n signal: options.abortSignal,\n },\n options,\n );\n\n return {\n url: response.url,\n downloadUrl: response.downloadUrl,\n pathname: response.pathname,\n contentType: response.contentType,\n contentDisposition: response.contentDisposition,\n etag: response.etag,\n };\n}\n","import type { CommonCreateBlobOptions } from './helpers';\nimport type { CompleteMultipartUploadCommandOptions } from './multipart/complete';\nimport { createCompleteMultipartUploadMethod } from './multipart/complete';\nimport { createCreateMultipartUploadMethod } from './multipart/create';\nimport { createCreateMultipartUploaderMethod } from './multipart/create-uploader';\nimport type { UploadPartCommandOptions } from './multipart/upload';\nimport { createUploadPartMethod } from './multipart/upload';\nimport type { PutCommandOptions } from './put';\nimport { createPutMethod } from './put';\n\n// expose api BlobErrors\nexport {\n BlobAccessError,\n BlobClientTokenExpiredError,\n BlobContentTypeNotAllowedError,\n BlobFileTooLargeError,\n BlobNotFoundError,\n BlobPathnameMismatchError,\n BlobPreconditionFailedError,\n BlobRequestAbortedError,\n BlobServiceNotAvailable,\n BlobServiceRateLimited,\n BlobStoreNotFoundError,\n BlobStoreSuspendedError,\n BlobUnknownError,\n} from './api';\n// expose generic BlobError and download url util\nexport {\n BlobError,\n getDownloadUrl,\n type OnUploadProgressCallback,\n type UploadProgressEvent,\n} from './helpers';\n\n// vercelBlob.put()\n\nexport type { PutBlobResult } from './put-helpers';\nexport type { PutCommandOptions };\n\n/**\n * Uploads a blob into your store from your server.\n * Detailed documentation can be found here: https://vercel.com/docs/vercel-blob/using-blob-sdk#upload-a-blob\n *\n * If you want to upload from the browser directly, or if you're hitting Vercel upload limits, check out the documentation for client uploads: https://vercel.com/docs/vercel-blob/using-blob-sdk#client-uploads\n *\n * @param pathname - The pathname to upload the blob to, including the extension. This will influence the URL of your blob like https://$storeId.public.blob.vercel-storage.com/$pathname.\n * @param body - The content of your blob, can be a: string, File, Blob, Buffer or Stream. We support almost everything fetch supports: https://developer.mozilla.org/en-US/docs/Web/API/RequestInit#body.\n * @param options - Configuration options including:\n * - access - (Required) Must be 'public' as blobs are publicly accessible.\n * - addRandomSuffix - (Optional) A boolean specifying whether to add a random suffix to the pathname. It defaults to false. We recommend using this option to ensure there are no conflicts in your blob filenames.\n * - allowOverwrite - (Optional) A boolean to allow overwriting blobs. By default an error will be thrown if you try to overwrite a blob by using the same pathname for multiple blobs.\n * - contentType - (Optional) A string indicating the media type. 
By default, it's extracted from the pathname's extension.\n * - cacheControlMaxAge - (Optional) A number in seconds to configure how long Blobs are cached. Defaults to one month. Cannot be set to a value lower than 1 minute.\n * - token - (Optional) A string specifying the token to use when making requests. It defaults to process.env.BLOB_READ_WRITE_TOKEN when deployed on Vercel.\n * - multipart - (Optional) Whether to use multipart upload for large files. It will split the file into multiple parts, upload them in parallel and retry failed parts.\n * - abortSignal - (Optional) AbortSignal to cancel the operation.\n * - onUploadProgress - (Optional) Callback to track upload progress: onUploadProgress(\\{loaded: number, total: number, percentage: number\\})\n * @returns A promise that resolves to the blob information, including pathname, contentType, contentDisposition, url, and downloadUrl.\n */\nexport const put = createPutMethod<PutCommandOptions>({\n allowedOptions: [\n 'cacheControlMaxAge',\n 'addRandomSuffix',\n 'allowOverwrite',\n 'contentType',\n 'ifMatch',\n ],\n});\n\n// vercelBlob.del()\n\nexport { del } from './del';\n\n// vercelBlob.head()\n\nexport type { HeadBlobResult } from './head';\nexport { head } from './head';\n\n// vercelBlob.list()\n\nexport type {\n ListBlobResult,\n ListBlobResultBlob,\n ListCommandOptions,\n ListFoldedBlobResult,\n} from './list';\nexport { list } from './list';\n\n// vercelBlob.copy()\n\nexport type { CopyBlobResult, CopyCommandOptions } from './copy';\nexport { copy } from './copy';\n\n// vercelBlob. createMultipartUpload()\n// vercelBlob. uploadPart()\n// vercelBlob. completeMultipartUpload()\n// vercelBlob. createMultipartUploader()\n\n/**\n * Creates a multipart upload. This is the first step in the manual multipart upload process.\n *\n * @param pathname - A string specifying the path inside the blob store. This will be the base value of the return URL and includes the filename and extension.\n * @param options - Configuration options including:\n * - access - (Required) Must be 'public' as blobs are publicly accessible.\n * - addRandomSuffix - (Optional) A boolean specifying whether to add a random suffix to the pathname. It defaults to true.\n * - allowOverwrite - (Optional) A boolean to allow overwriting blobs. By default an error will be thrown if you try to overwrite a blob by using the same pathname for multiple blobs.\n * - contentType - (Optional) The media type for the file. If not specified, it's derived from the file extension. Falls back to application/octet-stream when no extension exists or can't be matched.\n * - cacheControlMaxAge - (Optional) A number in seconds to configure the edge and browser cache. Defaults to one year.\n * - token - (Optional) A string specifying the token to use when making requests. It defaults to process.env.BLOB_READ_WRITE_TOKEN when deployed on Vercel.\n * - abortSignal - (Optional) AbortSignal to cancel the operation.\n * @returns A promise that resolves to an object containing:\n * - key: A string that identifies the blob object.\n * - uploadId: A string that identifies the multipart upload. 
Both are needed for subsequent uploadPart calls.\n */\nexport const createMultipartUpload =\n createCreateMultipartUploadMethod<CommonCreateBlobOptions>({\n allowedOptions: [\n 'cacheControlMaxAge',\n 'addRandomSuffix',\n 'allowOverwrite',\n 'contentType',\n 'ifMatch',\n ],\n });\n\n/**\n * Creates a multipart uploader that simplifies the multipart upload process.\n * This is a wrapper around the manual multipart upload process that provides a more convenient API.\n *\n * @param pathname - A string specifying the path inside the blob store. This will be the base value of the return URL and includes the filename and extension.\n * @param options - Configuration options including:\n * - access - (Required) Must be 'public' as blobs are publicly accessible.\n * - addRandomSuffix - (Optional) A boolean specifying whether to add a random suffix to the pathname. It defaults to true.\n * - allowOverwrite - (Optional) A boolean to allow overwriting blobs. By default an error will be thrown if you try to overwrite a blob by using the same pathname for multiple blobs.\n * - contentType - (Optional) The media type for the file. If not specified, it's derived from the file extension. Falls back to application/octet-stream when no extension exists or can't be matched.\n * - cacheControlMaxAge - (Optional) A number in seconds to configure the edge and browser cache. Defaults to one year.\n * - token - (Optional) A string specifying the token to use when making requests. It defaults to process.env.BLOB_READ_WRITE_TOKEN when deployed on Vercel.\n * - abortSignal - (Optional) AbortSignal to cancel the operation.\n * @returns A promise that resolves to an uploader object with the following properties and methods:\n * - key: A string that identifies the blob object.\n * - uploadId: A string that identifies the multipart upload.\n * - uploadPart: A method to upload a part of the file.\n * - complete: A method to complete the multipart upload process.\n */\nexport const createMultipartUploader =\n createCreateMultipartUploaderMethod<CommonCreateBlobOptions>({\n allowedOptions: [\n 'cacheControlMaxAge',\n 'addRandomSuffix',\n 'allowOverwrite',\n 'contentType',\n 'ifMatch',\n ],\n });\n\nexport type { UploadPartCommandOptions };\n\n/**\n * Uploads a part of a multipart upload.\n * Used as part of the manual multipart upload process.\n *\n * @param pathname - Same value as the pathname parameter passed to createMultipartUpload. This will influence the final URL of your blob.\n * @param body - A blob object as ReadableStream, String, ArrayBuffer or Blob based on these supported body types. Each part must be a minimum of 5MB, except the last one which can be smaller.\n * @param options - Configuration options including:\n * - access - (Required) Must be 'public' as blobs are publicly accessible.\n * - uploadId - (Required) A string returned from createMultipartUpload which identifies the multipart upload.\n * - key - (Required) A string returned from createMultipartUpload which identifies the blob object.\n * - partNumber - (Required) A number identifying which part is uploaded (1-based index).\n * - contentType - (Optional) The media type for the blob. By default, it's derived from the pathname.\n * - token - (Optional) A string specifying the token to use when making requests. 
It defaults to process.env.BLOB_READ_WRITE_TOKEN when deployed on Vercel.\n * - addRandomSuffix - (Optional) A boolean specifying whether to add a random suffix to the pathname.\n * - allowOverwrite - (Optional) A boolean to allow overwriting blobs.\n * - cacheControlMaxAge - (Optional) A number in seconds to configure how long Blobs are cached.\n * - abortSignal - (Optional) AbortSignal to cancel the running request.\n * - onUploadProgress - (Optional) Callback to track upload progress: onUploadProgress(\\{loaded: number, total: number, percentage: number\\})\n * @returns A promise that resolves to the uploaded part information containing etag and partNumber, which will be needed for the completeMultipartUpload call.\n */\nexport const uploadPart = createUploadPartMethod<UploadPartCommandOptions>({\n allowedOptions: [\n 'cacheControlMaxAge',\n 'addRandomSuffix',\n 'allowOverwrite',\n 'contentType',\n ],\n});\n\nexport type { CompleteMultipartUploadCommandOptions };\n\n/**\n * Completes a multipart upload by combining all uploaded parts.\n * This is the final step in the manual multipart upload process.\n *\n * @param pathname - Same value as the pathname parameter passed to createMultipartUpload.\n * @param parts - An array containing all the uploaded parts information from previous uploadPart calls. Each part must have properties etag and partNumber.\n * @param options - Configuration options including:\n * - access - (Required) Must be 'public' as blobs are publicly accessible.\n * - uploadId - (Required) A string returned from createMultipartUpload which identifies the multipart upload.\n * - key - (Required) A string returned from createMultipartUpload which identifies the blob object.\n * - contentType - (Optional) The media type for the file. If not specified, it's derived from the file extension.\n * - token - (Optional) A string specifying the token to use when making requests. It defaults to process.env.BLOB_READ_WRITE_TOKEN when deployed on Vercel.\n * - addRandomSuffix - (Optional) A boolean specifying whether to add a random suffix to the pathname. It defaults to true.\n * - allowOverwrite - (Optional) A boolean to allow overwriting blobs.\n * - cacheControlMaxAge - (Optional) A number in seconds to configure the edge and browser cache. 
Defaults to one year.\n * - abortSignal - (Optional) AbortSignal to cancel the operation.\n * @returns A promise that resolves to the finalized blob information, including pathname, contentType, contentDisposition, url, and downloadUrl.\n */\nexport const completeMultipartUpload =\n createCompleteMultipartUploadMethod<CompleteMultipartUploadCommandOptions>({\n allowedOptions: [\n 'cacheControlMaxAge',\n 'addRandomSuffix',\n 'allowOverwrite',\n 'contentType',\n ],\n });\n\nexport { createFolder } from './create-folder';\nexport type { Part, PartInput } from './multipart/helpers';\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAUA,eAAsB,IACpB,eACA,SACe;AACf,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,MACE,QAAQ;AAAA,MACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,MAC9C,MAAM,KAAK,UAAU;AAAA,QACnB,MAAM,MAAM,QAAQ,aAAa,IAAI,gBAAgB,CAAC,aAAa;AAAA,MACrE,CAAC;AAAA,MACD,QAAQ,mCAAS;AAAA,IACnB;AAAA,IACA;AAAA,EACF;AACF;;;ACsCA,eAAsB,KACpB,eACA,SACyB;AACzB,QAAM,eAAe,IAAI,gBAAgB,EAAE,KAAK,cAAc,CAAC;AAE/D,QAAM,WAAW,MAAM;AAAA,IACrB,IAAI,aAAa,SAAS,CAAC;AAAA;AAAA,IAE3B;AAAA,MACE,QAAQ;AAAA,MACR,QAAQ,mCAAS;AAAA,IACnB;AAAA,IACA;AAAA,EACF;AAEA,SAAO;AAAA,IACL,KAAK,SAAS;AAAA,IACd,aAAa,SAAS;AAAA,IACtB,UAAU,SAAS;AAAA,IACnB,MAAM,SAAS;AAAA,IACf,aAAa,SAAS;AAAA,IACtB,oBAAoB,SAAS;AAAA,IAC7B,cAAc,SAAS;AAAA,IACvB,YAAY,IAAI,KAAK,SAAS,UAAU;AAAA,IACxC,MAAM,SAAS;AAAA,EACjB;AACF;;;ACiDA,eAAsB,KAEpB,SAAgE;AA9IlE;AA+IE,QAAM,eAAe,IAAI,gBAAgB;AAEzC,MAAI,mCAAS,OAAO;AAClB,iBAAa,IAAI,SAAS,QAAQ,MAAM,SAAS,CAAC;AAAA,EACpD;AACA,MAAI,mCAAS,QAAQ;AACnB,iBAAa,IAAI,UAAU,QAAQ,MAAM;AAAA,EAC3C;AACA,MAAI,mCAAS,QAAQ;AACnB,iBAAa,IAAI,UAAU,QAAQ,MAAM;AAAA,EAC3C;AACA,MAAI,mCAAS,MAAM;AACjB,iBAAa,IAAI,QAAQ,QAAQ,IAAI;AAAA,EACvC;AAEA,QAAM,WAAW,MAAM;AAAA,IACrB,IAAI,aAAa,SAAS,CAAC;AAAA,IAC3B;AAAA,MACE,QAAQ;AAAA,MACR,QAAQ,mCAAS;AAAA,IACnB;AAAA,IACA;AAAA,EACF;AAEA,OAAI,mCAAS,UAAS,UAAU;AAC9B,WAAO;AAAA,MACL,UAAS,cAAS,YAAT,YAAoB,CAAC;AAAA,MAC9B,QAAQ,SAAS;AAAA,MACjB,SAAS,SAAS;AAAA,MAClB,OAAO,SAAS,MAAM,IAAI,aAAa;AAAA,IACzC;AAAA,EACF;AAEA,SAAO;AAAA,IACL,QAAQ,SAAS;AAAA,IACjB,SAAS,SAAS;AAAA,IAClB,OAAO,SAAS,MAAM,IAAI,aAAa;AAAA,EACzC;AACF;AAMA,SAAS,cACP,YACoB;AACpB,SAAO;AAAA,IACL,KAAK,WAAW;AAAA,IAChB,aAAa,WAAW;AAAA,IACxB,UAAU,WAAW;AAAA,IACrB,MAAM,WAAW;AAAA,IACjB,YAAY,IAAI,KAAK,WAAW,UAAU;AAAA,IAC1C,MAAM,WAAW;AAAA,EACnB;AACF;;;AC5KA,eAAsB,KACpB,mBACA,YACA,SACyB;AACzB,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI,UAAU,4BAA4B;AAAA,EAClD;AAEA,MAAI,QAAQ,WAAW,UAAU;AAC/B,UAAM,IAAI,UAAU,yBAAyB;AAAA,EAC/C;AAEA,MAAI,WAAW,SAAS,yBAAyB;AAC/C,UAAM,IAAI;AAAA,MACR,2CAA2C,uBAAuB;AAAA,IACpE;AAAA,EACF;AAEA,aAAW,oBAAoB,8BAA8B;AAC3D,QAAI,WAAW,SAAS,gBAAgB,GAAG;AACzC,YAAM,IAAI;AAAA,QACR,4BAA4B,gBAAgB;AAAA,MAC9C;AAAA,IACF;AAAA,EACF;AAEA,QAAM,UAAkC,CAAC;AAEzC,MAAI,QAAQ,oBAAoB,QAAW;AACzC,YAAQ,qBAAqB,IAAI,QAAQ,kBAAkB,MAAM;AAAA,EACnE;AAEA,MAAI,QAAQ,mBAAmB,QAAW;AACxC,YAAQ,mBAAmB,IAAI,QAAQ,iBAAiB,MAAM;AAAA,EAChE;AAEA,MAAI,QAAQ,aAAa;AACvB,YAAQ,gBAAgB,IAAI,QAAQ;AAAA,EACtC;AAEA,MAAI,QAAQ,uBAAuB,QAAW;AAC5C,YAAQ,yBAAyB,IAAI,QAAQ,mBAAmB,SAAS;AAAA,EAC3E;AAEA,MAAI,QAAQ,SAAS;AACnB,YAAQ,YAAY,IAAI,QAAQ;AAAA,EAClC;AAEA,QAAM,SAAS,IAAI,gBAAgB;AAAA,IACjC,UAAU;AAAA,IACV,SAAS;AAAA,EACX,CAAC;AAED,QAAM,WAAW,MAAM;AAAA,IACrB,IAAI,OAAO,SAAS,CAAC;AAAA,IACrB;AAAA,MACE,QAAQ;AAAA,MACR;AAAA,MACA,QAAQ,QAAQ;AAAA,IAClB;AAAA,IACA;AAAA,EACF;AAEA,SAAO;AAAA,IACL,KAAK,SAAS;AAAA,IACd,aAAa,SAAS;AAAA,IACtB,UAAU,SAAS;AAAA,IACnB,aAAa,SAAS;AAAA,IACtB,oBAAoB,SAAS;AAAA,IAC7B,MAAM,SAAS;AAAA,EACjB;AACF;;;ACvCO,IAAM,MAAM,gBAAmC;AAAA,EACpD,gBAAgB;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF,CAAC;AA+CM,IAAM,wBACX,kCAA2D;AAAA,EACzD,gBAAgB;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IAC
A;AAAA,IACA;AAAA,EACF;AACF,CAAC;AAqBI,IAAM,0BACX,oCAA6D;AAAA,EAC3D,gBAAgB;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF,CAAC;AAwBI,IAAM,aAAa,uBAAiD;AAAA,EACzE,gBAAgB;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF,CAAC;AAsBM,IAAM,0BACX,oCAA2E;AAAA,EACzE,gBAAgB;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF,CAAC;","names":[]}
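The substantive change in this bundle is conditional-write support: head, list, and copy now return an etag field, BlobPreconditionFailedError joins the exported errors, ifMatch is added to the allowed options of put, copy, and the multipart create methods (the bundled copy() sends it as an x-if-match header), and copy now rejects any access other than 'public'. A minimal compare-and-swap sketch in TypeScript using only names visible in the bundle above; treating ifMatch as an ETag precondition is an inference from the diff, and the pathname is a placeholder:

import { head, put, BlobPreconditionFailedError } from '@vercel/blob';

// Overwrite a blob only if it has not changed since we read it.
// `etag` and `ifMatch` are new in this version; interpreting ifMatch as a
// compare-and-swap precondition is an assumption based on the x-if-match
// header the bundled copy() implementation sets.
async function updateSettings(nextValue: unknown): Promise<boolean> {
  const meta = await head('config/settings.json'); // meta.etag is new in this version

  try {
    await put('config/settings.json', JSON.stringify(nextValue), {
      access: 'public',
      allowOverwrite: true,
      ifMatch: meta.etag, // newly allowed option for put()
    });
    return true;
  } catch (error) {
    if (error instanceof BlobPreconditionFailedError) {
      return false; // another writer won the race; caller can re-read and retry
    }
    throw error;
  }
}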
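Separately, the JSDoc carried in both versions walks through the manual multipart flow: createMultipartUpload, then uploadPart per chunk, then completeMultipartUpload. A sketch of that three-step sequence as documented, with a placeholder pathname and chunks assumed to be pre-split Buffers of at least 5MB each (except the last):

import {
  completeMultipartUpload,
  createMultipartUpload,
  uploadPart,
} from '@vercel/blob';

// Manual multipart upload: the three steps described in the docs above.
async function uploadLargeFile(pathname: string, chunks: Buffer[]) {
  // Step 1: open the upload; key and uploadId identify it from here on.
  const { key, uploadId } = await createMultipartUpload(pathname, {
    access: 'public',
  });

  // Step 2: upload each part with a 1-based partNumber; every part except
  // the last must be at least 5MB. Parts could also be sent in parallel.
  const parts = [];
  for (let i = 0; i < chunks.length; i++) {
    parts.push(
      await uploadPart(pathname, chunks[i], {
        access: 'public',
        key,
        uploadId,
        partNumber: i + 1,
      }),
    );
  }

  // Step 3: combine the uploaded parts into the final blob.
  return completeMultipartUpload(pathname, parts, {
    access: 'public',
    key,
    uploadId,
  });
}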
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@vercel/blob",
- "version": "2.1.0-371428d-20260119152220",
+ "version": "2.1.0-454414f-20260204161813",
  "description": "The Vercel Blob JavaScript API client",
  "homepage": "https://vercel.com/storage/blob",
  "repository": {
@@ -50,18 +50,18 @@
  "is-buffer": "^2.0.5",
  "is-node-process": "^1.2.0",
  "throttleit": "^2.1.0",
- "undici": "^5.28.4"
+ "undici": "^6.23.0"
  },
  "devDependencies": {
- "@edge-runtime/jest-environment": "2.3.10",
- "@edge-runtime/types": "2.2.9",
+ "@edge-runtime/jest-environment": "4.0.0",
+ "@edge-runtime/types": "4.0.0",
  "@types/async-retry": "1.4.9",
- "@types/jest": "29.5.14",
- "@types/node": "22.19.1",
- "jest": "29.7.0",
- "jest-environment-jsdom": "29.7.0",
- "ts-jest": "29.2.6",
- "tsup": "8.4.0",
+ "@types/jest": "30.0.0",
+ "@types/node": "24.10.10",
+ "jest": "30.2.0",
+ "jest-environment-jsdom": "30.2.0",
+ "ts-jest": "29.4.6",
+ "tsup": "8.5.1",
  "tsconfig": "0.0.0"
  },
  "engines": {
@@ -1 +0,0 @@
- {"version":3,"sources":["/home/runner/work/storage/storage/packages/blob/dist/chunk-4ACRCP3X.cjs","../src/helpers.ts","../src/multipart/helpers.ts","../src/bytes.ts","../src/api.ts","../src/debug.ts","../src/dom-exception.ts","../src/is-network-error.ts","../src/fetch.ts","../src/xhr.ts","../src/request.ts","../src/put-helpers.ts","../src/multipart/complete.ts","../src/multipart/create.ts","../src/multipart/upload.ts","../src/multipart/create-uploader.ts","../src/put.ts","../src/multipart/uncontrolled.ts","../src/create-folder.ts"],"names":["_a","DOMException","error"],"mappings":"AAAA;ACIA,gDAA8B;ADF9B;AACA;AEFA,yFAAqB;AACrB,gCAAyB;AAsCzB,IAAM,+BAAA,EAAiC,IAAI,OAAA,CAAiB,CAAC,OAAA,EAAA,GAAY;AAEvE,EAAA,IAAI;AACF,IAAA,MAAM,mBAAA,EAAqB,IAAI,UAAA,CAAW,CAAC,GAAA,EAAK,GAAA,EAAK,GAAA,EAAK,GAAA,EAAK,GAAG,CAAC,CAAA;AACnE,IAAA,MAAM,KAAA,EAAO,IAAI,IAAA,CAAK,CAAC,kBAAkB,CAAC,CAAA;AAC1C,IAAA,IAAA,CACG,IAAA,CAAK,CAAA,CACL,IAAA,CAAK,CAAC,IAAA,EAAA,GAAS;AACd,MAAA,OAAA,CAAQ,KAAA,IAAS,OAAO,CAAA;AAAA,IAC1B,CAAC,CAAA,CACA,KAAA,CAAM,CAAA,EAAA,GAAM;AACX,MAAA,OAAA,CAAQ,KAAK,CAAA;AAAA,IACf,CAAC,CAAA;AAAA,EACL,EAAA,UAAQ;AACN,IAAA,OAAA,CAAQ,KAAK,CAAA;AAAA,EACf;AACF,CAAC,CAAA;AAED,MAAA,SAAsB,gBAAA,CACpB,KAAA,EACmD;AAEnD,EAAA,GAAA,CAAI,MAAA,WAAiB,cAAA,EAAgB;AACnC,IAAA,OAAO,KAAA;AAAA,EACT;AAKA,EAAA,GAAA,CAAI,MAAA,WAAiB,IAAA,EAAM;AACzB,IAAA,OAAO,KAAA,CAAM,MAAA,CAAO,CAAA;AAAA,EACtB;AAEA,EAAA,GAAA,CAAI,sBAAA,CAAuB,KAAK,CAAA,EAAG;AACjC,IAAA,OAAO,gBAAA,CAAS,KAAA,CAAM,KAAK,CAAA;AAAA,EAC7B;AAEA,EAAA,IAAI,WAAA;AAIJ,EAAA,GAAA,CAAI,MAAA,WAAiB,WAAA,EAAa;AAChC,IAAA,YAAA,EAAc,IAAI,UAAA,CAAW,KAAK,CAAA;AAAA,EACpC,EAAA,KAAA,GAAA,CAAW,cAAA,CAAe,KAAK,CAAA,EAAG;AAChC,IAAA,YAAA,EAAc,KAAA;AAAA,EAChB,EAAA,KAAO;AAEL,IAAA,YAAA,EAAc,kBAAA,CAAmB,KAAe,CAAA;AAAA,EAClD;AAIA,EAAA,GAAA,CAAI,MAAM,8BAAA,EAAgC;AACxC,IAAA,OAAO,IAAI,IAAA,CAAK,CAAC,WAAW,CAAC,CAAA,CAAE,MAAA,CAAO,CAAA;AAAA,EACxC;AAGA,EAAA,OAAO,IAAI,cAAA,CAAyC;AAAA,IAClD,KAAA,CAAM,UAAA,EAAY;AAChB,MAAA,UAAA,CAAW,OAAA,CAAQ,WAAW,CAAA;AAC9B,MAAA,UAAA,CAAW,KAAA,CAAM,CAAA;AAAA,IACnB;AAAA,EACF,CAAC,CAAA;AACH;AAGO,SAAS,sBAAA,CAAuB,KAAA,EAAmC;AACxE,EAAA,OACE,OAAO,MAAA,IAAU,SAAA,GACjB,OAAQ,KAAA,CAAmB,KAAA,IAAS,WAAA,GACnC,KAAA,CAAmB,SAAA,GACpB,OAAQ,KAAA,CAAmB,MAAA,IAAU,WAAA;AAAA,EAErC,OAAO,KAAA,CAAM,eAAA,IAAmB,QAAA;AAEpC;AAEA,SAAS,kBAAA,CAAmB,CAAA,EAAuB;AACjD,EAAA,MAAM,IAAA,EAAM,IAAI,WAAA,CAAY,CAAA;AAC5B,EAAA,OAAO,GAAA,CAAI,MAAA,CAAO,CAAC,CAAA;AACrB;AAEA,SAAS,cAAA,CAAe,KAAA,EAAiC;AACvD,EAAA,OAAO,gCAAA,KAAc,CAAA;AACvB;AFlEA;AACA;AG5CA,IAAM,YAAA,EAAc,+CAAA;AAEpB,IAAM,IAAA,EAAmB;AAAA,EACvB,CAAA,EAAG,CAAA;AAAA,EAEH,EAAA,EAAI,EAAA,GAAK,EAAA;AAAA,EAET,EAAA,EAAI,EAAA,GAAK,EAAA;AAAA,EAET,EAAA,EAAI,EAAA,GAAK,EAAA;AAAA,EACT,EAAA,EAAI,KAAA,GAAQ,CAAA;AAAA,EACZ,EAAA,EAAI,KAAA,GAAQ;AACd,CAAA;AAEO,SAAS,KAAA,CAAM,GAAA,EAAqC;AACzD,EAAA,GAAA,CAAI,OAAO,IAAA,IAAQ,SAAA,GAAY,CAAC,MAAA,CAAO,KAAA,CAAM,GAAG,CAAA,EAAG;AACjD,IAAA,OAAO,GAAA;AAAA,EACT;AACA,EAAA,GAAA,CAAI,OAAO,IAAA,IAAQ,QAAA,EAAU;AAC3B,IAAA,OAAO,IAAA;AAAA,EACT;AAEA,EAAA,MAAM,QAAA,EAAU,WAAA,CAAY,IAAA,CAAK,GAAG,CAAA;AACpC,EAAA,IAAI,UAAA;AACJ,EAAA,IAAI,KAAA,EAAiB,GAAA;AAErB,EAAA,GAAA,CAAI,CAAC,OAAA,EAAS;AACZ,IAAA,WAAA,EAAa,QAAA,CAAS,GAAA,EAAK,EAAE,CAAA;AAAA,EAC/B,EAAA,KAAO;AACL,IAAA,MAAM,CAAC,EAAE,GAAA,EAAK,EAAE,EAAE,SAAS,EAAA,EAAI,OAAA;AAC/B,IAAA,GAAA,CAAI,CAAC,GAAA,EAAK;AACR,MAAA,OAAO,IAAA;AAAA,IACT;AACA,IAAA,WAAA,EAAa,UAAA,CAAW,GAAG,CAAA;AAC3B,IAAA,GAAA,CAAI,SAAA,EAAW;AACb,MAAA,KAAA,EAAO,SAAA,CAAU,WAAA,CAAY,CAAA;AAAA,IAC/B;AAAA,EACF;AAEA,EAAA,GAAA,CAAI,MAAA,CAAO,KAAA,CAAM,UAAU,CAAA,EAAG;AAC5B,IAAA,OAAO,IAAA;AAAA,EACT;AAEA,EAAA,OAAO,IAAA,CAAK,KAAA,CAAM,GAAA,CAAI,
IAAI,EAAA,EAAI,UAAU,CAAA;AAC1C;AHqCA;AACA;ACtFA,IAAM,wBAAA,EAA0B,6BAAA;AAkGzB,SAAS,wBAAA,CAAyB,OAAA,EAAsC;AAC7E,EAAA,GAAA,CAAI,QAAA,GAAA,KAAA,EAAA,KAAA,EAAA,EAAA,OAAA,CAAS,KAAA,EAAO;AAClB,IAAA,OAAO,OAAA,CAAQ,KAAA;AAAA,EACjB;AAEA,EAAA,GAAA,CAAI,OAAA,CAAQ,GAAA,CAAI,qBAAA,EAAuB;AACrC,IAAA,OAAO,OAAA,CAAQ,GAAA,CAAI,qBAAA;AAAA,EACrB;AAEA,EAAA,MAAM,IAAI,SAAA;AAAA,IACR;AAAA,EACF,CAAA;AACF;AAEO,IAAM,UAAA,EAAN,MAAA,QAAwB,MAAM;AAAA,EACnC,WAAA,CAAY,OAAA,EAAiB;AAC3B,IAAA,KAAA,CAAM,CAAA,aAAA,EAAgB,OAAO,CAAA,CAAA;AAC/B,EAAA;AACF;AAUwD;AAC3B,EAAA;AAEN,EAAA;AAED,EAAA;AACtB;AAIuD;AAChC,EAAA;AACZ,IAAA;AACT,EAAA;AAEyB,EAAA;AAGrB,EAAA;AAKN;AAEa;AAM0B;AAGhB,EAAA;AACZ,IAAA;AACT,EAAA;AAEyB,EAAA;AAGH,EAAA;AACb,IAAA;AACT,EAAA;AAEqB,EAAA;AAEM,EAAA;AACA,IAAA;AACjB,IAAA;AAAA;AAEK,IAAA;AACM,MAAA;AACV,MAAA;AACT,IAAA;AAC2B,EAAA;AAEH,EAAA;AACzB;AAE8C;AACjC,EAAA;AACV,EAAA;AAIY,IAAA;AAER,EAAA;AAER,EAAA;AAEqB,EAAA;AACvB;AAGS;AAEgD;AAC5C,EAAA;AACF,IAAA;AACT,EAAA;AAE8B,EAAA;AACV,IAAA;AACW,MAAA;AAC7B,IAAA;AAGwB,IAAA;AAC1B,EAAA;AAE4B,EAAA;AAEd,IAAA;AACd,EAAA;AAE6B,EAAA;AAEf,IAAA;AACd,EAAA;AAEO,EAAA;AACT;AAGE;AAG6B,EAAA;AAEuB,EAAA;AACrB,IAAA;AACN,MAAA;AAEG,QAAA;AACF,QAAA;AACF,QAAA;AACT,QAAA;AAGe,QAAA;AACE,UAAA;AACL,UAAA;AACnB,UAAA;AACsB,UAAA;AACxB,QAAA;AACD,MAAA;AACH,IAAA;AAEkB,IAAA;AACK,MAAA;AAEI,QAAA;AACF,UAAA;AACnB,UAAA;AACF,QAAA;AACD,MAAA;AACH,IAAA;AACD,EAAA;AACH;AAE0E;AAE3D,EAAA;AACM,EAAA;AAErB;AAE6E;AAC9C,EAAA;AACpB,IAAA;AACT,EAAA;AAEgC,EAAA;AACvB,IAAA;AACT,EAAA;AAEO,EAAA;AACT;ADjFkC;AACA;AIxNhB;AJ0NgB;AACA;AK3Nd;AAApB;AAGI;AAEY,EAAA;AAGI,IAAA;AAClB,EAAA;AACM;AAER;AAGiE;AAC5C,EAAA;AACa,IAAA;AAChC,EAAA;AACF;ALsNkC;AACA;AM1OlCA;AAEE;AAIM,EAAA;AACM,IAAA;AACI,EAAA;AACiB,IAAA;AAC/B,EAAA;AACC;ANwO6B;AACA;AOnOJ;AAEH;AAEL;AACpB,EAAA;AAAA;AACA,EAAA;AAAA;AACA,EAAA;AAAA;AACA,EAAA;AAAA;AACA,EAAA;AAAA;AACA,EAAA;AAAA;AACA,EAAA;AAAA;AACA,EAAA;AAAA;AACD;AAE6C;AAG1C,EAAA;AAIY,EAAA;AACL,IAAA;AACT,EAAA;AAIsB,EAAA;AACG,IAAA;AACzB,EAAA;AAE+B,EAAA;AACjC;APiOkC;AACA;AQnRZ;AAWmB;AAEC;AAElB;AAEqB;AAC3C,EAAA;AACA,EAAA;AACA,EAAA;AACI;AACe,EAAA;AACf,EAAA;AAEW,EAAA;AACS,IAAA;AAGC,MAAA;AAER,MAAA;AAEP,MAAA;AACJ,QAAA;AACuB,QAAA;AACX,UAAA;AACa,UAAA;AACzB,QAAA;AACF,MAAA;AAE0B,MAAA;AACrB,IAAA;AACO,MAAA;AACd,IAAA;AACF,EAAA;AAIE,EAAA;AAIK,EAAA;AACL,IAAA;AAAA;AAEA,IAAA;AACK,MAAA;AACyB,MAAA;AAC5B,MAAA;AACF,IAAA;AACF,EAAA;AACF;AR4PkC;AACA;ASxTL;AAEc;AACzC,EAAA;AACA,EAAA;AACA,EAAA;AACI;AACa,EAAA;AACyB,EAAA;AAS3B,EAAA;AACiB,IAAA;AACF,MAAA;AACrB,IAAA;AAOO,MAAA;AACd,IAAA;AACF,EAAA;AAE6B,EAAA;AACX,IAAA;AACQ,IAAA;AAGF,IAAA;AACQ,MAAA;AAChB,QAAA;AACe,UAAA;AACzB,QAAA;AACD,MAAA;AACH,IAAA;AAGmB,IAAA;AAjDvBA,MAAAA;AAkDU,MAAA;AACsB,QAAA;AACxB,QAAA;AACF,MAAA;AAE4B,MAAA;AAEzB,MAAA;AAKiB,MAAA;AACO,QAAA;AACD,QAAA;AACC,QAAA;AACA,QAAA;AAC1B,MAAA;AAGoB,MAAA;AACP,QAAA;AACI,QAAA;AAChB,QAAA;AACD,MAAA;AAEe,MAAA;AAClB,IAAA;AAGoB,IAAA;AACG,MAAA;AACvB,IAAA;AAGsB,IAAA;AACC,MAAA;AACvB,IAAA;AAGoB,IAAA;AACM,MAAA;AAC1B,IAAA;AAGkB,IAAA;AACY,MAAA;AACJ,MAAA;AACI,QAAA;AAC3B,MAAA;AACH,IAAA;AAGiB,IAAA;AACH,MAAA;AACA,QAAA;AACX,MAAA;AAGwB,MAAA;AACb,QAAA;AACV,QAAA;AACF,MAAA;AACF,IAAA;AAIa,IAAA;AACd,EAAA;AACH;AT+QkC;AACA;AUlYa;AAC7C,EAAA;AACA,EAAA;AACA,EAAA;AACuB;AACD,EAAA;AAChB,IAAA;AACwB,MAAA;AAC5B,IAAA;AAEY,IAAA;AACc,MAAA;AAC1B,IAAA;AACF,EAAA;AAEc,EAAA;AACc,IAAA;AAC5B,EAAA;AAEY,EAAA;AACoB,IAAA;AAChC,EAAA;AAEgB,EAAA;AAClB;AVgYkC;AACA;AIxYK;AAEhC;AACS,EAAA;AACN,IAAA;AACR,EAAA;AACF;AAEa;AACkB,EAAA;AACrB,IAAA;AACR,EAAA;AACF;AAEO;AACwB,EAAA;AAC3B,IAAA;AACwB,MAAA;AACxB,IAAA;AACF,EAAA;AACF;AAEO;AACS,EAAA;AACN,IAAA;AACR,EAAA;AACF;AAEO;AACwB,EAAA;AACC,IAAA;AAC9B,EAAA;AACF;AAEO;AACS,EAAA;AACN,IAAA;AACR,EAAA;AACF
;AAEO;AACS,EAAA;AACN,IAAA;AACR,EAAA;AACF;AAEO;AACS,EAAA;AACN,IAAA;AACR,EAAA;AACF;AAEO;AACS,EAAA;AACN,IAAA;AACR,EAAA;AACF;AAEO;AACS,EAAA;AACN,IAAA;AACR,EAAA;AACF;AAEO;AAGyB,EAAA;AAC5B,IAAA;AACE,MAAA;AAGF,IAAA;AAEkB,IAAA;AACpB,EAAA;AACF;AAEO;AACS,EAAA;AACN,IAAA;AACR,EAAA;AACF;AAwByB;AAEQ;AACT,EAAA;AAClB,EAAA;AAIY,IAAA;AAER,EAAA;AAER,EAAA;AAEU,EAAA;AACZ;AAE8B;AACxB,EAAA;AAC0B,IAAA;AAED,IAAA;AACrB,EAAA;AACC,IAAA;AACT,EAAA;AACF;AAES;AAGqB,EAAA;AAEjB,EAAA;AACa,IAAA;AACxB,EAAA;AACF;AAIE;AAvKF,EAAA;AAyKM,EAAA;AACA,EAAA;AAEA,EAAA;AAC2B,IAAA;AACjB,IAAA;AACG,IAAA;AACT,EAAA;AACC,IAAA;AACT,EAAA;AAKI,EAAA;AACK,IAAA;AACT,EAAA;AAGE,EAAA;AAGO,IAAA;AACT,EAAA;AAEgB,EAAA;AACP,IAAA;AACT,EAAA;AAEI,EAAA;AACK,IAAA;AACT,EAAA;AAEI,EAAA;AACU,EAAA;AACP,IAAA;AACS,MAAA;AACZ,MAAA;AACG,IAAA;AACyB,MAAA;AAC5B,MAAA;AACG,IAAA;AACS,MAAA;AACZ,MAAA;AACG,IAAA;AACS,MAAA;AACZ,MAAA;AACG,IAAA;AACS,MAAA;AACZ,MAAA;AACG,IAAA;AACS,MAAA;AACZ,MAAA;AACG,IAAA;AACS,MAAA;AACZ,MAAA;AACG,IAAA;AACS,MAAA;AACZ,MAAA;AACG,IAAA;AACmB,MAAA;AACtB,MAAA;AACG,IAAA;AACS,MAAA;AACZ,MAAA;AACG,IAAA;AACK,MAAA;AACR,MAAA;AACG,IAAA;AACA,IAAA;AACL,IAAA;AACc,MAAA;AACZ,MAAA;AACJ,EAAA;AAEqB,EAAA;AACvB;AAGE;AAImB,EAAA;AACL,EAAA;AACO,EAAA;AAEQ,EAAA;AACG,EAAA;AACf,EAAA;AACA,EAAA;AACC,EAAA;AAEhB,EAAA;AAGK,EAAA;AAAA;AAIL,EAAA;AACa,IAAA;AACf,EAAA;AAEI,EAAA;AACa,IAAA;AACL,MAAA;AACD,MAAA;AACK,MAAA;AACb,IAAA;AACH,EAAA;AAE0B,EAAA;AACR,IAAA;AACV,MAAA;AAGA,MAAA;AACsB,QAAA;AACL,UAAA;AACX,UAAA;AACD,YAAA;AACM,YAAA;AACP,cAAA;AACA,cAAA;AACiB,cAAA;AAEb,cAAA;AAEW,cAAA;AACZ,cAAA;AACK,cAAA;AACV,YAAA;AACF,UAAA;AACkB,UAAA;AA/S5BA,YAAAA;AAiT8B,YAAA;AACA,YAAA;AAEZ,YAAA;AAKiB,YAAA;AACjB,cAAA;AACF,YAAA;AAEe,YAAA;AACb,cAAA;AAAA;AAAA;AAAA;AAAA;AAKA,cAAA;AACA,cAAA;AACF,YAAA;AAEF,UAAA;AACL,QAAA;AACa,MAAA;AAEOC,QAAAA;AACV,UAAA;AACT,UAAA;AACF,QAAA;AAIwB,QAAA;AAChBC,UAAAA;AACR,QAAA;AAGqB,QAAA;AACT,UAAA;AACV,UAAA;AACF,QAAA;AAGMA,QAAAA;AACR,MAAA;AAEY,MAAA;AACH,QAAA;AACT,MAAA;AAEwB,MAAA;AAIb,MAAA;AAIH,QAAA;AACR,MAAA;AAGU,MAAA;AACZ,IAAA;AACA,IAAA;AACsB,MAAA;AACA,MAAA;AACG,QAAA;AACb,UAAA;AACR,QAAA;AAE0B,QAAA;AAC5B,MAAA;AACF,IAAA;AACF,EAAA;AAEkB,EAAA;AACW,IAAA;AAC7B,EAAA;AAQI,EAAA;AACa,IAAA;AACL,MAAA;AACD,MAAA;AACK,MAAA;AACb,IAAA;AACH,EAAA;AAE+B,EAAA;AACjC;AAES;AAGuC,EAAA;AAE1C,EAAA;AAEA,IAAA;AAGa,MAAA;AAGb,IAAA;AAIa,MAAA;AAEf,IAAA;AACM,EAAA;AAER,EAAA;AAEO,EAAA;AACT;AAES;AACH,EAAA;AACiB,IAAA;AACb,EAAA;AACC,IAAA;AACT,EAAA;AACF;AJiQkC;AACA;AW5qBA;AACZ,EAAA;AACH,EAAA;AACD,EAAA;AACH,EAAA;AACL,EAAA;AACV;AAqDE;AAGyC,EAAA;AAGd,EAAA;AAEC,EAAA;AACC,IAAA;AAC7B,EAAA;AAG0B,EAAA;AAGG,IAAA;AAG7B,EAAA;AAG0B,EAAA;AAGG,IAAA;AAG7B,EAAA;AAG0B,EAAA;AAGG,IAAA;AAE7B,EAAA;AAEO,EAAA;AACT;AAIE;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AAMoB;AACL,EAAA;AACO,IAAA;AACtB,EAAA;AAEsB,EAAA;AACV,IAAA;AACR,MAAA;AACF,IAAA;AACF,EAAA;AAE+B,EAAA;AACP,IAAA;AACV,MAAA;AACR,QAAA;AACF,MAAA;AACF,IAAA;AACF,EAAA;AAEc,EAAA;AACQ,IAAA;AACtB,EAAA;AAEuB,EAAA;AACD,IAAA;AACtB,EAAA;AAEiB,EAAA;AACI,IAAA;AACrB,EAAA;AAEc,EAAA;AACU,IAAA;AACxB,EAAA;AAEO,EAAA;AACT;AXolBkC;AACA;AYntBlB;AAGiC,EAAA;AACvB,IAAA;AACpB,MAAA;AACS,MAAA;AACT,MAAA;AACA,MAAA;AACD,IAAA;AAEe,IAAA;AAET,IAAA;AACa,MAAA;AACL,MAAA;AACb,MAAA;AACA,MAAA;AACA,MAAA;AACA,MAAA;AACD,IAAA;AACH,EAAA;AACF;AAEsB;AACpB,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AAQyB;AACN,EAAA;AAEf,EAAA;AACqB,IAAA;AACI,MAAA;AACzB,MAAA;AACU,QAAA;AACC,QAAA;AACJ,UAAA;AACa,UAAA;AACA,UAAA;AACG,UAAA;AAAA;AAAA;AAGN,UAAA;AACf,QAAA;AAC0B,QAAA;AACV,QAAA;AAClB,MAAA;AACA,MAAA;AACF,IAAA;AAEuB,IAAA;AAEhB,IAAA;AACgB,EAAA;AAEJ,IAAA;AAGP,MAAA;AACL,IAAA;AACC,MAAA;AACR,IAAA;AACF,EAAA;AACF;AZmsBkC;AACA;AaxyBlB;AAGkB,EAAA;AACR,IAAA;AACpB,MAAA;
AACS,MAAA;AACT,MAAA;AACA,MAAA;AACD,IAAA;AAEe,IAAA;AAEV,IAAA;AACJ,MAAA;AACA,MAAA;AACA,MAAA;AACF,IAAA;AAEO,IAAA;AACA,MAAA;AACK,MAAA;AACZ,IAAA;AACF,EAAA;AACF;AAOsB;AAKC,EAAA;AAEF,EAAA;AAEf,EAAA;AACqB,IAAA;AACI,MAAA;AACzB,MAAA;AACU,QAAA;AACC,QAAA;AACJ,UAAA;AACa,UAAA;AAClB,QAAA;AACgB,QAAA;AAClB,MAAA;AACA,MAAA;AACF,IAAA;AAE6B,IAAA;AAEtB,IAAA;AACgB,EAAA;AAEJ,IAAA;AAGP,MAAA;AACZ,IAAA;AAEM,IAAA;AACR,EAAA;AACF;AbmxBkC;AACA;Ac71Bb;AA2CjB;AAIA,EAAA;AAEsB,IAAA;AACpB,MAAA;AACS,MAAA;AACT,MAAA;AACA,MAAA;AACD,IAAA;AAEe,IAAA;AAES,IAAA;AACb,MAAA;AACR,QAAA;AACF,MAAA;AACF,IAAA;AAEqB,IAAA;AACD,MAAA;AACL,MAAA;AACb,MAAA;AACoB,MAAA;AACpB,MAAA;AACA,MAAA;AACD,IAAA;AAEM,IAAA;AACQ,MAAA;AACO,MAAA;AACtB,IAAA;AACF,EAAA;AACF;AAEiC;AAC/B,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AAC8B,EAAA;AAC9B,EAAA;AASiC;AAhGnC,EAAA;AAiGqB,EAAA;AAEK,EAAA;AACG,IAAA;AACzB,IAAA;AACU,MAAA;AACA,MAAA;AACC,MAAA;AACJ,QAAA;AACa,QAAA;AACH,QAAA;AACM,QAAA;AACO,QAAA;AAC5B,MAAA;AAAA;AAEW,MAAA;AACb,IAAA;AACA,IAAA;AACF,EAAA;AAE6B,EAAA;AACG,IAAA;AAChC,EAAA;AAEY,EAAA;AAEE,IAAA;AACP,EAAA;AAEG,IAAA;AACV,EAAA;AAEuB,EAAA;AAEvB,EAAA;AAEO,EAAA;AACT;AAIoC;AAGD;AAEV;AAYM;AAC7B,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AASkB;AACQ,EAAA;AACM,EAAA;AAEH,EAAA;AACc,IAAA;AACT,IAAA;AACV,IAAA;AACF,IAAA;AACN,IAAA;AACU,IAAA;AAET,IAAA;AACY,IAAA;AACT,IAAA;AACF,IAAA;AAImB,IAAA;AACR,IAAA;AAEvB,IAAA;AACE,IAAA;AAEwB,IAAA;AACA,MAAA;AArMlCF,QAAAA;AAsM8B,QAAA;AACN,UAAA;AACC,YAAA;AACf,UAAA;AACA,UAAA;AACF,QAAA;AACc,QAAA;AAEZ,QAAA;AAKM,QAAA;AACJ,MAAA;AACR,IAAA;AAEmB,IAAA;AAEkB,IAAA;AACnC,MAAA;AACE,QAAA;AACA,QAAA;AACA,QAAA;AACA,QAAA;AACS,QAAA;AACT,QAAA;AACe,QAAA;AACjB,MAAA;AAEU,MAAA;AAEH,MAAA;AACD,QAAA;AACsB,UAAA;AAEd,UAAA;AACM,YAAA;AACR,YAAA;AAEW,YAAA;AACI,cAAA;AACL,gBAAA;AACG,gBAAA;AACP,kBAAA;AACP,gBAAA;AACF,cAAA;AAES,cAAA;AACZ,YAAA;AACU,YAAA;AACV,YAAA;AACF,UAAA;AAEwB,UAAA;AAIN,UAAA;AACG,UAAA;AACb,YAAA;AACY,YAAA;AACF,cAAA;AACR,cAAA;AACR,YAAA;AAEoB,YAAA;AAEF,YAAA;AAClB,YAAA;AACc,YAAA;AAEV,YAAA;AACiB,cAAA;AACL,gBAAA;AACG,gBAAA;AACP,kBAAA;AACP,gBAAA;AACF,cAAA;AAEe,cAAA;AAChB,cAAA;AACU,cAAA;AACZ,YAAA;AACF,UAAA;AACc,QAAA;AACF,UAAA;AACd,QAAA;AACF,MAAA;AAEA,MAAA;AACE,QAAA;AACA,QAAA;AACA,QAAA;AACA,QAAA;AACS,QAAA;AACT,QAAA;AACe,QAAA;AACjB,MAAA;AAEU,MAAA;AACZ,IAAA;AAE6D,IAAA;AAC3D,MAAA;AAEA,MAAA;AACE,QAAA;AACA,QAAA;AACK,QAAA;AACL,QAAA;AACU,QAAA;AACV,QAAA;AACA,QAAA;AACA,QAAA;AACS,QAAA;AACT,QAAA;AACe,QAAA;AACjB,MAAA;AAEI,MAAA;AACI,QAAA;AAGE,UAAA;AACsB,UAAA;AACH,YAAA;AACnB,UAAA;AAEF,QAAA;AAEgB,QAAA;AACpB,UAAA;AACA,UAAA;AACA,UAAA;AACA,UAAA;AACS,UAAA;AACJ,YAAA;AACe,YAAA;AACpB,UAAA;AACA,UAAA;AACA,UAAA;AACD,QAAA;AAED,QAAA;AACE,UAAA;AACA,UAAA;AACK,UAAA;AACL,UAAA;AACA,UAAA;AACA,UAAA;AACS,UAAA;AACT,UAAA;AACe,UAAA;AACjB,QAAA;AAEc,QAAA;AACZ,UAAA;AACF,QAAA;AAEoB,QAAA;AACD,UAAA;AACG,UAAA;AACrB,QAAA;AAEuB,QAAA;AACxB,QAAA;AACuB,QAAA;AAEL,QAAA;AACN,UAAA;AACZ,QAAA;AAEiB,QAAA;AACO,UAAA;AACD,YAAA;AACG,YAAA;AACxB,UAAA;AACA,UAAA;AACF,QAAA;AAEc,QAAA;AACO,UAAA;AACrB,QAAA;AACc,MAAA;AAEF,QAAA;AACd,MAAA;AACF,IAAA;AAE2B,IAAA;AACX,MAAA;AACZ,QAAA;AACF,MAAA;AAEA,MAAA;AACE,QAAA;AACA,QAAA;AACA,QAAA;AACA,QAAA;AACc,QAAA;AAChB,MAAA;AAEuB,MAAA;AACF,QAAA;AACH,QAAA;AACU,UAAA;AAC1B,QAAA;AACF,MAAA;AACF,IAAA;AAEsC,IAAA;AAEtB,MAAA;AACZ,QAAA;AACF,MAAA;AACW,MAAA;AACa,MAAA;AACL,MAAA;AAEA,MAAA;AAIN,QAAA;AACN,MAAA;AACgB,QAAA;AACvB,MAAA;AACF,IAAA;AACD,EAAA;AACH;AdgtBkC;AACA;AejnClB;AAGkB,EAAA;AACR,IAAA;AACpB,MAAA;AACS,MAAA;AACT,MAAA;AACA,MAAA;AACD,IAAA;AAEe,IAAA;AAEV,IAAA;AACJ,MAAA;AACA,MAAA;AACA,MAAA;AACF,IAAA;AAEO,IAAA;AACA,MAAA;AACK,MAAA;AAEO,MAAA;AACU,QAAA;AACb,UAAA;AACR,YAAA;AACF,UAAA;AACF,QAAA;AAEqB,QAAA;AACT,UAA
A;AACL,UAAA;AACL,UAAA;AACoB,UAAA;AACpB,UAAA;AACA,UAAA;AACD,QAAA;AAEM,QAAA;AACQ,UAAA;AACb,UAAA;AACF,QAAA;AACF,MAAA;AAE8B,MAAA;AACrB,QAAA;AACK,UAAA;AACL,UAAA;AACL,UAAA;AACA,UAAA;AACA,UAAA;AACA,UAAA;AACD,QAAA;AACH,MAAA;AACF,IAAA;AACF,EAAA;AACF;Af0mCkC;AACA;AgBhrCb;AhBkrCa;AACA;AiBzqCZ;AAMY,EAAA;AAE1B,EAAA;AACD,IAAA;AACe,IAAA;AACpB,EAAA;AAGM,EAAA;AACJ,IAAA;AACA,IAAA;AACA,IAAA;AACF,EAAA;AAEoB,EAAA;AACC,EAAA;AAGD,EAAA;AACR,IAAA;AACL,IAAA;AACL,IAAA;AACA,IAAA;AACA,IAAA;AACA,IAAA;AACA,IAAA;AACD,EAAA;AAGkB,EAAA;AACP,IAAA;AACL,IAAA;AACL,IAAA;AACA,IAAA;AACA,IAAA;AACS,IAAA;AACV,EAAA;AAMM,EAAA;AACT;AjBypCkC;AACA;AgB9rCkC;AAClE,EAAA;AACA,EAAA;AACA,EAAA;AACmC;AAEjC,EAAA;AAIW,IAAA;AACW,MAAA;AACtB,IAAA;AAEyB,IAAA;AACb,MAAA;AACR,QAAA;AACF,MAAA;AACF,IAAA;AAEsB,IAAA;AACpB,MAAA;AACS,MAAA;AACT,MAAA;AACA,MAAA;AACD,IAAA;AAEe,IAAA;AAEU,IAAA;AACjB,MAAA;AACT,IAAA;AAEyB,IAAA;AAIN,IAAA;AAEI,IAAA;AACC,MAAA;AACtB,MAAA;AACU,QAAA;AACR,QAAA;AACA,QAAA;AACgB,QAAA;AAClB,MAAA;AACA,MAAA;AACK,QAAA;AACH,QAAA;AACF,MAAA;AACF,IAAA;AAEO,IAAA;AACS,MAAA;AACQ,MAAA;AACH,MAAA;AACG,MAAA;AACF,MAAA;AACtB,IAAA;AACF,EAAA;AACF;AhBkrCkC;AACA;AkBtvChC;AAGgC,EAAA;AAES,EAAA;AAEd,EAAA;AAER,EAAA;AACI,EAAA;AACC,IAAA;AACtB,IAAA;AACU,MAAA;AACR,MAAA;AACgB,MAAA;AAClB,IAAA;AACA,IAAA;AACF,EAAA;AAEO,EAAA;AACS,IAAA;AACK,IAAA;AACrB,EAAA;AACF;AlBkvCkC;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA","file":"/home/runner/work/storage/storage/packages/blob/dist/chunk-4ACRCP3X.cjs","sourcesContent":[null,"// common util interface for blob raw commands, not meant to be used directly\n// this is why it's not exported from index/client\n\nimport type { Readable } from 'node:stream';\nimport { isNodeProcess } from 'is-node-process';\nimport type { RequestInit, Response } from 'undici';\nimport { isNodeJsReadableStream } from './multipart/helpers';\nimport type { PutBody } from './put-helpers';\n\nexport { bytes } from './bytes';\n\nconst defaultVercelBlobApiUrl = 'https://vercel.com/api/blob';\n\nexport interface BlobCommandOptions {\n /**\n * Define your blob API token.\n * @defaultvalue process.env.BLOB_READ_WRITE_TOKEN\n */\n token?: string;\n /**\n * `AbortSignal` to cancel the running request. See https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal\n */\n abortSignal?: AbortSignal;\n}\n\n// shared interface for put, copy and multipart upload\nexport interface CommonCreateBlobOptions extends BlobCommandOptions {\n /**\n * Whether the blob should be publicly accessible.\n * - 'public': The blob will be publicly accessible via its URL.\n * - 'private': The blob will require authentication to access.\n */\n access: 'public' | 'private';\n /**\n * Adds a random suffix to the filename.\n * @defaultvalue false\n */\n addRandomSuffix?: boolean;\n /**\n * Allow overwriting an existing blob. By default this is set to false and will throw an error if the blob already exists.\n * @defaultvalue false\n */\n allowOverwrite?: boolean;\n /**\n * Defines the content type of the blob. By default, this value is inferred from the pathname. Sent as the 'content-type' header when downloading a blob.\n */\n contentType?: string;\n /**\n * Number in seconds to configure the edge and browser cache. The minimum is 1 minute. 
There's no maximum but keep in mind that browser and edge caches will do a best effort to respect this value.\n * Detailed documentation can be found here: https://vercel.com/docs/storage/vercel-blob#caching\n * @defaultvalue 30 * 24 * 60 * 60 (1 Month)\n */\n cacheControlMaxAge?: number;\n}\n\n/**\n * Event object passed to the onUploadProgress callback.\n */\nexport interface UploadProgressEvent {\n /**\n * The number of bytes uploaded.\n */\n loaded: number;\n\n /**\n * The total number of bytes to upload.\n */\n total: number;\n\n /**\n * The percentage of the upload that has been completed.\n */\n percentage: number;\n}\n\n/**\n * Callback type for tracking upload progress.\n */\nexport type OnUploadProgressCallback = (\n progressEvent: UploadProgressEvent,\n) => void;\n\nexport type InternalOnUploadProgressCallback = (loaded: number) => void;\n\nexport type BlobRequestInit = Omit<RequestInit, 'body'> & { body?: PutBody };\n\nexport type BlobRequest = ({\n input,\n init,\n onUploadProgress,\n}: {\n input: string | URL;\n init: BlobRequestInit;\n onUploadProgress?: InternalOnUploadProgressCallback;\n}) => Promise<Response>;\n\n/**\n * Interface for including upload progress tracking capabilities.\n */\nexport interface WithUploadProgress {\n /**\n * Callback to track the upload progress. You will receive an object with the following properties:\n * - `loaded`: The number of bytes uploaded\n * - `total`: The total number of bytes to upload\n * - `percentage`: The percentage of the upload that has been completed\n */\n onUploadProgress?: OnUploadProgressCallback;\n}\n\nexport function getTokenFromOptionsOrEnv(options?: BlobCommandOptions): string {\n if (options?.token) {\n return options.token;\n }\n\n if (process.env.BLOB_READ_WRITE_TOKEN) {\n return process.env.BLOB_READ_WRITE_TOKEN;\n }\n\n throw new BlobError(\n 'No token found. Either configure the `BLOB_READ_WRITE_TOKEN` environment variable, or pass a `token` option to your calls.',\n );\n}\n\nexport class BlobError extends Error {\n constructor(message: string) {\n super(`Vercel Blob: ${message}`);\n }\n}\n\n/**\n * Generates a download URL for a blob.\n * The download URL includes a ?download=1 parameter which causes browsers to download\n * the file instead of displaying it inline.\n *\n * @param blobUrl - The URL of the blob to generate a download URL for\n * @returns A string containing the download URL with the download parameter appended\n */\nexport function getDownloadUrl(blobUrl: string): string {\n const url = new URL(blobUrl);\n\n url.searchParams.set('download', '1');\n\n return url.toString();\n}\n\n// Extracted from https://github.com/sindresorhus/is-plain-obj/blob/main/index.js\n// It's just nearly impossible to use ESM modules with our current setup\nexport function isPlainObject(value: unknown): boolean {\n if (typeof value !== 'object' || value === null) {\n return false;\n }\n\n const prototype = Object.getPrototypeOf(value);\n return (\n (prototype === null ||\n prototype === Object.prototype ||\n Object.getPrototypeOf(prototype) === null) &&\n !(Symbol.toStringTag in value) &&\n !(Symbol.iterator in value)\n );\n}\n\nexport const disallowedPathnameCharacters = ['//'];\n\n// Chrome: implemented https://developer.chrome.com/docs/capabilities/web-apis/fetch-streaming-requests\n// Microsoft Edge: implemented (Chromium)\n// Firefox: not implemented, BOO!! https://bugzilla.mozilla.org/show_bug.cgi?id=1469359\n// Safari: not implemented, BOO!! 
https://github.com/WebKit/standards-positions/issues/24\nexport const supportsRequestStreams = (() => {\n // The next line is mostly for Node.js 16 to avoid trying to do new Request() as it's not supported\n // TODO: Can be removed when Node.js 16 is no more required internally\n if (isNodeProcess()) {\n return true;\n }\n\n const apiUrl = getApiUrl();\n\n // Localhost generally doesn't work with HTTP 2 so we can stop here\n if (apiUrl.startsWith('http://localhost')) {\n return false;\n }\n\n let duplexAccessed = false;\n\n const hasContentType = new Request(getApiUrl(), {\n body: new ReadableStream(),\n method: 'POST',\n // @ts-expect-error -- TypeScript doesn't yet have duplex but it's in the spec: https://github.com/microsoft/TypeScript-DOM-lib-generator/pull/1729\n get duplex() {\n duplexAccessed = true;\n return 'half';\n },\n }).headers.has('Content-Type');\n\n return duplexAccessed && !hasContentType;\n})();\n\nexport function getApiUrl(pathname = ''): string {\n let baseUrl = null;\n try {\n // wrapping this code in a try/catch as this function is used in the browser and Vite doesn't define the process.env.\n // As this varaible is NOT used in production, it will always default to production endpoint\n baseUrl =\n process.env.VERCEL_BLOB_API_URL ||\n process.env.NEXT_PUBLIC_VERCEL_BLOB_API_URL;\n } catch {\n // noop\n }\n\n return `${baseUrl || defaultVercelBlobApiUrl}${pathname}`;\n}\n\nconst TEXT_ENCODER =\n typeof TextEncoder === 'function' ? new TextEncoder() : null;\n\nexport function computeBodyLength(body: PutBody): number {\n if (!body) {\n return 0;\n }\n\n if (typeof body === 'string') {\n if (TEXT_ENCODER) {\n return TEXT_ENCODER.encode(body).byteLength;\n }\n\n // React Native doesn't have TextEncoder\n return new Blob([body]).size;\n }\n\n if ('byteLength' in body && typeof body.byteLength === 'number') {\n // handles Uint8Array, ArrayBuffer, Buffer, and ArrayBufferView\n return body.byteLength;\n }\n\n if ('size' in body && typeof body.size === 'number') {\n // handles Blob and File\n return body.size;\n }\n\n return 0;\n}\n\nexport const createChunkTransformStream = (\n chunkSize: number,\n onProgress?: (bytes: number) => void,\n): TransformStream<ArrayBuffer | Uint8Array> => {\n let buffer = new Uint8Array(0);\n\n return new TransformStream<ArrayBuffer, Uint8Array>({\n transform(chunk, controller) {\n queueMicrotask(() => {\n // Combine the new chunk with any leftover data\n const newBuffer = new Uint8Array(buffer.length + chunk.byteLength);\n newBuffer.set(buffer);\n newBuffer.set(new Uint8Array(chunk), buffer.length);\n buffer = newBuffer;\n\n // Output complete chunks\n while (buffer.length >= chunkSize) {\n const newChunk = buffer.slice(0, chunkSize);\n controller.enqueue(newChunk);\n onProgress?.(newChunk.byteLength);\n buffer = buffer.slice(chunkSize);\n }\n });\n },\n\n flush(controller) {\n queueMicrotask(() => {\n // Send any remaining data\n if (buffer.length > 0) {\n controller.enqueue(buffer);\n onProgress?.(buffer.byteLength);\n }\n });\n },\n });\n};\n\nexport function isReadableStream(value: PutBody): value is ReadableStream {\n return (\n globalThis.ReadableStream && // TODO: Can be removed once Node.js 16 is no more required internally\n value instanceof ReadableStream\n );\n}\n\nexport function isStream(value: PutBody): value is ReadableStream | Readable {\n if (isReadableStream(value)) {\n return true;\n }\n\n if (isNodeJsReadableStream(value)) {\n return true;\n }\n\n return false;\n}\n","import type { Buffer } from 'buffer';\nimport isBuffer 
from 'is-buffer';\nimport { Readable } from 'stream';\nimport type { PutBody } from '../put-helpers';\n\n/**\n * Input format for a multipart upload part.\n * Used internally for processing multipart uploads.\n */\nexport interface PartInput {\n /**\n * The part number (1-based index).\n */\n partNumber: number;\n\n /**\n * The content of the part.\n */\n blob: PutBody;\n}\n\n/**\n * Represents a single part of a multipart upload.\n * This structure is used when completing a multipart upload to specify the\n * uploaded parts and their order.\n */\nexport interface Part {\n /**\n * The ETag value returned when the part was uploaded.\n * This value is used to verify the integrity of the uploaded part.\n */\n etag: string;\n\n /**\n * The part number of this part (1-based).\n * This number is used to order the parts when completing the multipart upload.\n */\n partNumber: number;\n}\n\nconst supportsNewBlobFromArrayBuffer = new Promise<boolean>((resolve) => {\n // React Native doesn't support creating a Blob from an ArrayBuffer, so we feature detect it\n try {\n const helloAsArrayBuffer = new Uint8Array([104, 101, 108, 108, 111]);\n const blob = new Blob([helloAsArrayBuffer]);\n blob\n .text()\n .then((text) => {\n resolve(text === 'hello');\n })\n .catch(() => {\n resolve(false);\n });\n } catch {\n resolve(false);\n }\n});\n\nexport async function toReadableStream(\n value: PutBody,\n): Promise<ReadableStream<ArrayBuffer | Uint8Array>> {\n // Already a ReadableStream, nothing to do\n if (value instanceof ReadableStream) {\n return value as ReadableStream<ArrayBuffer>;\n }\n\n // In the case of a Blob or File (which inherits from Blob), we could use .slice() to create pointers\n // to the original data instead of loading data in memory gradually.\n // Here's an explanation on this subject: https://stackoverflow.com/a/24834417\n if (value instanceof Blob) {\n return value.stream();\n }\n\n if (isNodeJsReadableStream(value)) {\n return Readable.toWeb(value) as ReadableStream<ArrayBuffer>;\n }\n\n let streamValue: Uint8Array;\n\n // While ArrayBuffer is valid as a fetch body, when used in a ReadableStream it will fail in Node.js with\n // The \"chunk\" argument must be of type string or an instance of Buffer or Uint8Array. 
Received an instance of ArrayBuffer\n if (value instanceof ArrayBuffer) {\n streamValue = new Uint8Array(value);\n } else if (isNodeJsBuffer(value)) {\n streamValue = value;\n } else {\n // value is a string, we need to convert it to a Uint8Array to get create a stream from it\n streamValue = stringToUint8Array(value as string);\n }\n\n // This line ensures that even when we get a buffer of 70MB, we'll create a stream out of it so we can have\n // better progress indication during uploads\n if (await supportsNewBlobFromArrayBuffer) {\n return new Blob([streamValue]).stream();\n }\n\n // from https://github.com/sindresorhus/to-readable-stream/blob/main/index.js\n return new ReadableStream<ArrayBuffer | Uint8Array>({\n start(controller) {\n controller.enqueue(streamValue);\n controller.close();\n },\n });\n}\n\n// From https://github.com/sindresorhus/is-stream/\nexport function isNodeJsReadableStream(value: PutBody): value is Readable {\n return (\n typeof value === 'object' &&\n typeof (value as Readable).pipe === 'function' &&\n (value as Readable).readable &&\n typeof (value as Readable)._read === 'function' &&\n // @ts-expect-error _readableState does exists on Readable\n typeof value._readableState === 'object'\n );\n}\n\nfunction stringToUint8Array(s: string): Uint8Array {\n const enc = new TextEncoder();\n return enc.encode(s);\n}\n\nfunction isNodeJsBuffer(value: PutBody): value is Buffer {\n return isBuffer(value);\n}\n","/*!\n * bytes\n * Copyright(c) 2012-2014 TJ Holowaychuk\n * Copyright(c) 2015 Jed Watson\n * MIT Licensed\n */\n\n// from https://github.com/visionmedia/bytes.js/blob/master/index.js\n// had too many issues with bundling: https://github.com/vercel/storage/issues/818\ntype ByteUnit = 'b' | 'kb' | 'mb' | 'gb' | 'tb' | 'pb';\n\ntype ByteUnitMap = {\n readonly [_K in ByteUnit]: number;\n};\n\nconst parseRegExp = /^((-|\\+)?(\\d+(?:\\.\\d+)?)) *(kb|mb|gb|tb|pb)$/i;\n\nconst map: ByteUnitMap = {\n b: 1,\n\n kb: 1 << 10,\n\n mb: 1 << 20,\n\n gb: 1 << 30,\n tb: 1024 ** 4,\n pb: 1024 ** 5,\n};\n\nexport function bytes(val: string | number): number | null {\n if (typeof val === 'number' && !Number.isNaN(val)) {\n return val;\n }\n if (typeof val !== 'string') {\n return null;\n }\n\n const results = parseRegExp.exec(val);\n let floatValue: number;\n let unit: ByteUnit = 'b';\n\n if (!results) {\n floatValue = parseInt(val, 10);\n } else {\n const [, res, , , unitMatch] = results;\n if (!res) {\n return null;\n }\n floatValue = parseFloat(res);\n if (unitMatch) {\n unit = unitMatch.toLowerCase() as ByteUnit;\n }\n }\n\n if (Number.isNaN(floatValue)) {\n return null;\n }\n\n return Math.floor(map[unit] * floatValue);\n}\n","import retry from 'async-retry';\nimport type { Response } from 'undici';\nimport { debug } from './debug';\nimport { DOMException } from './dom-exception';\nimport type {\n BlobCommandOptions,\n BlobRequestInit,\n WithUploadProgress,\n} from './helpers';\nimport {\n BlobError,\n computeBodyLength,\n getApiUrl,\n getTokenFromOptionsOrEnv,\n} from './helpers';\nimport isNetworkError from './is-network-error';\nimport { blobRequest } from './request';\n\n// maximum pathname length is:\n// 1024 (provider limit) - 26 chars (vercel internal suffixes) - 31 chars (blob `-randomId` suffix) = 967\n// we round it to 950 to make it more human friendly, and we apply the limit whatever the value of\n// addRandomSuffix is, to make it consistent\nexport const MAXIMUM_PATHNAME_LENGTH = 950;\n\nexport class BlobAccessError extends BlobError {\n constructor() {\n 
super('Access denied, please provide a valid token for this resource.');\n }\n}\n\nexport class BlobContentTypeNotAllowedError extends BlobError {\n constructor(message: string) {\n super(`Content type mismatch, ${message}.`);\n }\n}\n\nexport class BlobPathnameMismatchError extends BlobError {\n constructor(message: string) {\n super(\n `Pathname mismatch, ${message}. Check the pathname used in upload() or put() matches the one from the client token.`,\n );\n }\n}\n\nexport class BlobClientTokenExpiredError extends BlobError {\n constructor() {\n super('Client token has expired.');\n }\n}\n\nexport class BlobFileTooLargeError extends BlobError {\n constructor(message: string) {\n super(`File is too large, ${message}.`);\n }\n}\n\nexport class BlobStoreNotFoundError extends BlobError {\n constructor() {\n super('This store does not exist.');\n }\n}\n\nexport class BlobStoreSuspendedError extends BlobError {\n constructor() {\n super('This store has been suspended.');\n }\n}\n\nexport class BlobUnknownError extends BlobError {\n constructor() {\n super('Unknown error, please visit https://vercel.com/help.');\n }\n}\n\nexport class BlobNotFoundError extends BlobError {\n constructor() {\n super('The requested blob does not exist');\n }\n}\n\nexport class BlobServiceNotAvailable extends BlobError {\n constructor() {\n super('The blob service is currently not available. Please try again.');\n }\n}\n\nexport class BlobServiceRateLimited extends BlobError {\n public readonly retryAfter: number;\n\n constructor(seconds?: number) {\n super(\n `Too many requests please lower the number of concurrent requests ${\n seconds ? ` - try again in ${seconds} seconds` : ''\n }.`,\n );\n\n this.retryAfter = seconds ?? 0;\n }\n}\n\nexport class BlobRequestAbortedError extends BlobError {\n constructor() {\n super('The request was aborted.');\n }\n}\n\ntype BlobApiErrorCodes =\n | 'store_suspended'\n | 'forbidden'\n | 'not_found'\n | 'unknown_error'\n | 'bad_request'\n | 'store_not_found'\n | 'not_allowed'\n | 'service_unavailable'\n | 'rate_limited'\n | 'content_type_not_allowed'\n | 'client_token_pathname_mismatch'\n | 'client_token_expired'\n | 'file_too_large';\n\nexport interface BlobApiError {\n error?: { code?: BlobApiErrorCodes; message?: string };\n}\n\n// This version is used to ensure that the client and server are compatible\n// The server (Vercel Blob API) uses this information to change its behavior like the\n// response format\nconst BLOB_API_VERSION = 11;\n\nfunction getApiVersion(): string {\n let versionOverride = null;\n try {\n // wrapping this code in a try/catch as this function is used in the browser and Vite doesn't define the process.env.\n // As this varaible is NOT used in production, it will always default to the BLOB_API_VERSION\n versionOverride =\n process.env.VERCEL_BLOB_API_VERSION_OVERRIDE ||\n process.env.NEXT_PUBLIC_VERCEL_BLOB_API_VERSION_OVERRIDE;\n } catch {\n // noop\n }\n\n return `${versionOverride ?? BLOB_API_VERSION}`;\n}\n\nfunction getRetries(): number {\n try {\n const retries = process.env.VERCEL_BLOB_RETRIES || '10';\n\n return parseInt(retries, 10);\n } catch {\n return 10;\n }\n}\n\nfunction createBlobServiceRateLimited(\n response: Response,\n): BlobServiceRateLimited {\n const retryAfter = response.headers.get('retry-after');\n\n return new BlobServiceRateLimited(\n retryAfter ? 
parseInt(retryAfter, 10) : undefined,\n );\n}\n\n// reads the body of a error response\nasync function getBlobError(\n response: Response,\n): Promise<{ code: string; error: BlobError }> {\n let code: BlobApiErrorCodes;\n let message: string | undefined;\n\n try {\n const data = (await response.json()) as BlobApiError;\n code = data.error?.code ?? 'unknown_error';\n message = data.error?.message;\n } catch {\n code = 'unknown_error';\n }\n\n // Now that we have multiple API clients out in the wild handling errors, we can't just send a different\n // error code for this type of error. We need to add a new field in the API response to handle this correctly,\n // but for now, we can just check the message.\n if (message?.includes('contentType') && message.includes('is not allowed')) {\n code = 'content_type_not_allowed';\n }\n\n if (\n message?.includes('\"pathname\"') &&\n message.includes('does not match the token payload')\n ) {\n code = 'client_token_pathname_mismatch';\n }\n\n if (message === 'Token expired') {\n code = 'client_token_expired';\n }\n\n if (message?.includes('the file length cannot be greater than')) {\n code = 'file_too_large';\n }\n\n let error: BlobError;\n switch (code) {\n case 'store_suspended':\n error = new BlobStoreSuspendedError();\n break;\n case 'forbidden':\n error = new BlobAccessError();\n break;\n case 'content_type_not_allowed':\n error = new BlobContentTypeNotAllowedError(message!);\n break;\n case 'client_token_pathname_mismatch':\n error = new BlobPathnameMismatchError(message!);\n break;\n case 'client_token_expired':\n error = new BlobClientTokenExpiredError();\n break;\n case 'file_too_large':\n error = new BlobFileTooLargeError(message!);\n break;\n case 'not_found':\n error = new BlobNotFoundError();\n break;\n case 'store_not_found':\n error = new BlobStoreNotFoundError();\n break;\n case 'bad_request':\n error = new BlobError(message ?? 'Bad request');\n break;\n case 'service_unavailable':\n error = new BlobServiceNotAvailable();\n break;\n case 'rate_limited':\n error = createBlobServiceRateLimited(response);\n break;\n case 'unknown_error':\n case 'not_allowed':\n default:\n error = new BlobUnknownError();\n break;\n }\n\n return { code, error };\n}\n\nexport async function requestApi<TResponse>(\n pathname: string,\n init: BlobRequestInit,\n commandOptions: (BlobCommandOptions & WithUploadProgress) | undefined,\n): Promise<TResponse> {\n const apiVersion = getApiVersion();\n const token = getTokenFromOptionsOrEnv(commandOptions);\n const extraHeaders = getProxyThroughAlternativeApiHeaderFromEnv();\n\n const [, , , storeId = ''] = token.split('_');\n const requestId = `${storeId}:${Date.now()}:${Math.random().toString(16).slice(2)}`;\n let retryCount = 0;\n let bodyLength = 0;\n let totalLoaded = 0;\n const sendBodyLength =\n commandOptions?.onUploadProgress || shouldUseXContentLength();\n\n if (\n init.body &&\n // 1. For upload progress we always need to know the total size of the body\n // 2. 
In development we need the header for put() to work correctly when passing a stream\n sendBodyLength\n ) {\n bodyLength = computeBodyLength(init.body);\n }\n\n if (commandOptions?.onUploadProgress) {\n commandOptions.onUploadProgress({\n loaded: 0,\n total: bodyLength,\n percentage: 0,\n });\n }\n\n const apiResponse = await retry(\n async (bail) => {\n let res: Response;\n\n // try/catch here to treat certain errors as not-retryable\n try {\n res = await blobRequest({\n input: getApiUrl(pathname),\n init: {\n ...init,\n headers: {\n 'x-api-blob-request-id': requestId,\n 'x-api-blob-request-attempt': String(retryCount),\n 'x-api-version': apiVersion,\n ...(sendBodyLength\n ? { 'x-content-length': String(bodyLength) }\n : {}),\n authorization: `Bearer ${token}`,\n ...extraHeaders,\n ...init.headers,\n },\n },\n onUploadProgress: commandOptions?.onUploadProgress\n ? (loaded) => {\n const total = bodyLength !== 0 ? bodyLength : loaded;\n totalLoaded = loaded;\n const percentage =\n bodyLength > 0\n ? Number(((loaded / total) * 100).toFixed(2))\n : 0;\n\n // Leave percentage 100 for the end of request\n if (percentage === 100 && bodyLength > 0) {\n return;\n }\n\n commandOptions.onUploadProgress?.({\n loaded,\n // When passing a stream to put(), we have no way to know the total size of the body.\n // Instead of defining total as total?: number we decided to set the total to the currently\n // loaded number. This is not inaccurate and way more practical for DX.\n // Passing down a stream to put() is very rare\n total,\n percentage,\n });\n }\n : undefined,\n });\n } catch (error) {\n // if the request was aborted, don't retry\n if (error instanceof DOMException && error.name === 'AbortError') {\n bail(new BlobRequestAbortedError());\n return;\n }\n\n // We specifically target network errors because fetch network errors are regular TypeErrors\n // We want to retry for network errors, but not for other TypeErrors\n if (isNetworkError(error)) {\n throw error;\n }\n\n // If we messed up the request part, don't even retry\n if (error instanceof TypeError) {\n bail(error);\n return;\n }\n\n // retry for any other erros thrown by fetch\n throw error;\n }\n\n if (res.ok) {\n return res;\n }\n\n const { code, error } = await getBlobError(res);\n\n // only retry for certain errors\n if (\n code === 'unknown_error' ||\n code === 'service_unavailable' ||\n code === 'internal_server_error'\n ) {\n throw error;\n }\n\n // don't retry for e.g. suspended stores\n bail(error);\n },\n {\n retries: getRetries(),\n onRetry: (error) => {\n if (error instanceof Error) {\n debug(`retrying API request to ${pathname}`, error.message);\n }\n\n retryCount = retryCount + 1;\n },\n },\n );\n\n if (!apiResponse) {\n throw new BlobUnknownError();\n }\n\n // Calling onUploadProgress here has two benefits:\n // 1. It ensures 100% is only reached at the end of the request. While otherwise you can reach 100%\n // before the request is fully done, as we only really measure what gets sent over the wire, not what\n // has been processed by the server.\n // 2. 
It makes the uploadProgress \"work\" even in rare cases where fetch/xhr onprogress is not working\n // And in the case of multipart uploads it actually provides a simple progress indication (per part)\n if (commandOptions?.onUploadProgress) {\n commandOptions.onUploadProgress({\n loaded: totalLoaded,\n total: totalLoaded,\n percentage: 100,\n });\n }\n\n return (await apiResponse.json()) as TResponse;\n}\n\nfunction getProxyThroughAlternativeApiHeaderFromEnv(): {\n 'x-proxy-through-alternative-api'?: string;\n} {\n const extraHeaders: Record<string, string> = {};\n\n try {\n if (\n 'VERCEL_BLOB_PROXY_THROUGH_ALTERNATIVE_API' in process.env &&\n process.env.VERCEL_BLOB_PROXY_THROUGH_ALTERNATIVE_API !== undefined\n ) {\n extraHeaders['x-proxy-through-alternative-api'] =\n process.env.VERCEL_BLOB_PROXY_THROUGH_ALTERNATIVE_API;\n } else if (\n 'NEXT_PUBLIC_VERCEL_BLOB_PROXY_THROUGH_ALTERNATIVE_API' in process.env &&\n process.env.NEXT_PUBLIC_VERCEL_BLOB_PROXY_THROUGH_ALTERNATIVE_API !==\n undefined\n ) {\n extraHeaders['x-proxy-through-alternative-api'] =\n process.env.NEXT_PUBLIC_VERCEL_BLOB_PROXY_THROUGH_ALTERNATIVE_API;\n }\n } catch {\n // noop\n }\n\n return extraHeaders;\n}\n\nfunction shouldUseXContentLength(): boolean {\n try {\n return process.env.VERCEL_BLOB_USE_X_CONTENT_LENGTH === '1';\n } catch {\n return false;\n }\n}\n","let debugIsActive = false;\n\n// wrapping this code in a try/catch in case some env doesn't support process.env (vite by default)\ntry {\n if (\n process.env.DEBUG?.includes('blob') ||\n process.env.NEXT_PUBLIC_DEBUG?.includes('blob')\n ) {\n debugIsActive = true;\n }\n} catch {\n // noop\n}\n\n// Set process.env.DEBUG = 'blob' to enable debug logging\nexport function debug(message: string, ...args: unknown[]): void {\n if (debugIsActive) {\n console.debug(`vercel-blob: ${message}`, ...args);\n }\n}\n","// TODO: Once Node 16 is no more needed internally, we can remove this file and use the native DOMException type.\nexport const DOMException =\n globalThis.DOMException ??\n (() => {\n // DOMException was only made a global in Node v17.0.0,\n // but fetch supports >= v16.8.\n try {\n atob('~');\n } catch (err) {\n return Object.getPrototypeOf(err).constructor;\n }\n })();\n","// @ts-nocheck -- This file is copy pasted\n\n// Source: https://github.com/sindresorhus/is-network-error/blob/main/index.js\n// Why: Jest + ES6 modules = harder than maintaining a nuclear plant\n\n/**\n * MIT License\n\nCopyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the \"Software\"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n */\n\nconst objectToString = Object.prototype.toString;\n\nconst isError = (value) => objectToString.call(value) === '[object Error]';\n\nconst errorMessages = new Set([\n 'network error', // Chrome\n 'Failed to fetch', // Chrome\n 'NetworkError when attempting to fetch resource.', // Firefox\n 'The Internet connection appears to be offline.', // Safari 16\n 'Load failed', // Safari 17+\n 'Network request failed', // `cross-fetch`\n 'fetch failed', // Undici (Node.js)\n 'terminated', // Undici (Node.js)\n]);\n\nexport default function isNetworkError(error) {\n const isValid =\n error &&\n isError(error) &&\n error.name === 'TypeError' &&\n typeof error.message === 'string';\n\n if (!isValid) {\n return false;\n }\n\n // We do an extra check for Safari 17+ as it has a very generic error message.\n // Network errors in Safari have no stack.\n if (error.message === 'Load failed') {\n return error.stack === undefined;\n }\n\n return errorMessages.has(error.message);\n}\n","import type { BodyInit } from 'undici';\nimport { fetch } from 'undici';\nimport { debug } from './debug';\nimport type { BlobRequest } from './helpers';\nimport {\n createChunkTransformStream,\n isStream,\n supportsRequestStreams,\n} from './helpers';\nimport { toReadableStream } from './multipart/helpers';\nimport type { PutBody } from './put-helpers';\n\nexport const hasFetch = typeof fetch === 'function';\n\nexport const hasFetchWithUploadProgress = hasFetch && supportsRequestStreams;\n\nconst CHUNK_SIZE = 64 * 1024;\n\nexport const blobFetch: BlobRequest = async ({\n input,\n init,\n onUploadProgress,\n}) => {\n debug('using fetch');\n let body: BodyInit | undefined;\n\n if (init.body) {\n if (onUploadProgress) {\n // We transform the body to a stream here instead of at the call site\n // So that on retries we can reuse the original body, otherwise we would not be able to reuse it\n const stream = await toReadableStream(init.body);\n\n let loaded = 0;\n\n const chunkTransformStream = createChunkTransformStream(\n CHUNK_SIZE,\n (newLoaded: number) => {\n loaded += newLoaded;\n onUploadProgress(loaded);\n },\n );\n\n body = stream.pipeThrough(chunkTransformStream);\n } else {\n body = init.body as BodyInit;\n }\n }\n\n // Only set duplex option when supported and dealing with a stream body\n const duplex =\n supportsRequestStreams && body && isStream(body as PutBody)\n ? 'half'\n : undefined;\n\n return fetch(\n input,\n // @ts-expect-error -- Blob and Nodejs Blob are triggering type errors, fine with it\n {\n ...init,\n ...(init.body ? 
{ body } : {}),\n duplex,\n },\n );\n};\n","import type { Response as UndiciResponse } from 'undici';\nimport { debug } from './debug';\nimport { type BlobRequest, isReadableStream } from './helpers';\n\nexport const hasXhr = typeof XMLHttpRequest !== 'undefined';\n\nexport const blobXhr: BlobRequest = async ({\n input,\n init,\n onUploadProgress,\n}) => {\n debug('using xhr');\n let body: XMLHttpRequestBodyInit | null = null;\n\n // xhr.send only support XMLHttpRequestBodyInit types, excluding ReadableStream (web)\n // and Readable (node)\n // We do have to support ReadableStream being sent to xhr as our library allows\n // for Safari to use put(path, ReadableStream, { onUploadProgress }) which would\n // end up here.\n // We do not have to support Readable being sent to xhr as using Node.js you would\n // endup in the fetch implementation by default.\n if (init.body) {\n if (isReadableStream(init.body)) {\n body = await new Response(init.body).blob();\n } else {\n // We \"type lie\" here, what we should do instead:\n // Exclude ReadableStream:\n // body = init.body as Exclude<PutBody, ReadableStream | Readable>;\n // We can't do this because init.body (PutBody) relies on Blob (node:buffer)\n // while XMLHttpRequestBodyInit relies on native Blob type.\n // If we get rid of undici we can remove this trick.\n body = init.body as XMLHttpRequestBodyInit;\n }\n }\n\n return new Promise((resolve, reject) => {\n const xhr = new XMLHttpRequest();\n xhr.open(init.method || 'GET', input.toString(), true);\n\n // Handle upload progress\n if (onUploadProgress) {\n xhr.upload.addEventListener('progress', (event) => {\n if (event.lengthComputable) {\n onUploadProgress(event.loaded);\n }\n });\n }\n\n // Handle response\n xhr.onload = () => {\n if (init.signal?.aborted) {\n reject(new DOMException('The user aborted the request.', 'AbortError'));\n return;\n }\n\n const headers = new Headers();\n const rawHeaders = xhr\n .getAllResponseHeaders()\n .trim()\n .split(/[\\r\\n]+/);\n\n // Parse headers\n rawHeaders.forEach((line) => {\n const parts = line.split(': ');\n const key = parts.shift();\n const value = parts.join(': ');\n if (key) headers.set(key.toLowerCase(), value);\n });\n\n // Create response object, api-blob sends back text and api.ts will turn it into json if necessary\n const response = new Response(xhr.response as string, {\n status: xhr.status,\n statusText: xhr.statusText,\n headers,\n }) as unknown as UndiciResponse;\n\n resolve(response);\n };\n\n // Handle network errors\n xhr.onerror = () => {\n reject(new TypeError('Network request failed'));\n };\n\n // Handle timeouts\n xhr.ontimeout = () => {\n reject(new TypeError('Network request timed out'));\n };\n\n // Handle aborts\n xhr.onabort = () => {\n reject(new DOMException('The user aborted a request.', 'AbortError'));\n };\n\n // Set headers\n if (init.headers) {\n const headers = new Headers(init.headers as HeadersInit);\n headers.forEach((value, key) => {\n xhr.setRequestHeader(key, value);\n });\n }\n\n // Handle abort signal\n if (init.signal) {\n init.signal.addEventListener('abort', () => {\n xhr.abort();\n });\n\n // If already aborted, abort xhr immediately\n if (init.signal.aborted) {\n xhr.abort();\n return;\n }\n }\n\n // We're cheating and saying that nobody is gonna use put() with a stream in an environment not supporting\n // fetch with streams. 
If this ever happens please open an issue and we'll figure it out.\n xhr.send(body);\n });\n};\n","import type { Response } from 'undici';\nimport { blobFetch, hasFetch, hasFetchWithUploadProgress } from './fetch';\nimport type { BlobRequest } from './helpers';\nimport { blobXhr, hasXhr } from './xhr';\n\nexport const blobRequest: BlobRequest = async ({\n input,\n init,\n onUploadProgress,\n}): Promise<Response> => {\n if (onUploadProgress) {\n if (hasFetchWithUploadProgress) {\n return blobFetch({ input, init, onUploadProgress });\n }\n\n if (hasXhr) {\n return blobXhr({ input, init, onUploadProgress });\n }\n }\n\n if (hasFetch) {\n return blobFetch({ input, init });\n }\n\n if (hasXhr) {\n return blobXhr({ input, init });\n }\n\n throw new Error('No request implementation available');\n};\n","import type { Readable } from 'stream';\n// We use the undici types to ensure TS doesn't complain about native types (like ReadableStream) vs\n// undici types fetch expects (like Blob is from node:buffer..)\n// import type { Blob } from 'node:buffer';\nimport type { File } from 'undici';\nimport { MAXIMUM_PATHNAME_LENGTH } from './api';\nimport type { ClientCommonCreateBlobOptions } from './client';\nimport type { CommonCreateBlobOptions } from './helpers';\nimport { BlobError, disallowedPathnameCharacters } from './helpers';\n\nexport const putOptionHeaderMap = {\n cacheControlMaxAge: 'x-cache-control-max-age',\n addRandomSuffix: 'x-add-random-suffix',\n allowOverwrite: 'x-allow-overwrite',\n contentType: 'x-content-type',\n access: 'x-vercel-blob-access',\n};\n\n/**\n * Result of a successful put or copy operation.\n */\nexport interface PutBlobResult {\n /**\n * The URL of the blob.\n */\n url: string;\n /**\n * A URL that will cause browsers to download the file instead of displaying it inline.\n */\n downloadUrl: string;\n /**\n * The pathname of the blob within the store.\n */\n pathname: string;\n /**\n * The content-type of the blob.\n */\n contentType: string;\n /**\n * The content disposition header value.\n */\n contentDisposition: string;\n}\n\nexport type PutBlobApiResponse = PutBlobResult;\n\n/**\n * Represents the body content for a put operation.\n * Can be one of several supported types.\n */\nexport type PutBody =\n | string\n | Readable // Node.js streams\n | Buffer // Node.js buffers\n | Blob\n | ArrayBuffer\n | ReadableStream // Streams API (= Web streams in Node.js)\n | File;\n\nexport type CommonPutCommandOptions = CommonCreateBlobOptions &\n ClientCommonCreateBlobOptions;\n\nexport interface CreatePutMethodOptions<TOptions> {\n allowedOptions: (keyof typeof putOptionHeaderMap)[];\n getToken?: (pathname: string, options: TOptions) => Promise<string>;\n extraChecks?: (options: TOptions) => void;\n}\n\nexport function createPutHeaders<TOptions extends CommonPutCommandOptions>(\n allowedOptions: CreatePutMethodOptions<TOptions>['allowedOptions'],\n options: TOptions,\n): Record<string, string> {\n const headers: Record<string, string> = {};\n\n // access is always required, so always add it to headers\n headers[putOptionHeaderMap.access] = options.access;\n\n if (allowedOptions.includes('contentType') && options.contentType) {\n headers[putOptionHeaderMap.contentType] = options.contentType;\n }\n\n if (\n allowedOptions.includes('addRandomSuffix') &&\n options.addRandomSuffix !== undefined\n ) {\n headers[putOptionHeaderMap.addRandomSuffix] = options.addRandomSuffix\n ? 
'1'\n : '0';\n }\n\n if (\n allowedOptions.includes('allowOverwrite') &&\n options.allowOverwrite !== undefined\n ) {\n headers[putOptionHeaderMap.allowOverwrite] = options.allowOverwrite\n ? '1'\n : '0';\n }\n\n if (\n allowedOptions.includes('cacheControlMaxAge') &&\n options.cacheControlMaxAge !== undefined\n ) {\n headers[putOptionHeaderMap.cacheControlMaxAge] =\n options.cacheControlMaxAge.toString();\n }\n\n return headers;\n}\n\nexport async function createPutOptions<\n TOptions extends CommonPutCommandOptions,\n>({\n pathname,\n options,\n extraChecks,\n getToken,\n}: {\n pathname: string;\n options?: TOptions;\n extraChecks?: CreatePutMethodOptions<TOptions>['extraChecks'];\n getToken?: CreatePutMethodOptions<TOptions>['getToken'];\n}): Promise<TOptions> {\n if (!pathname) {\n throw new BlobError('pathname is required');\n }\n\n if (pathname.length > MAXIMUM_PATHNAME_LENGTH) {\n throw new BlobError(\n `pathname is too long, maximum length is ${MAXIMUM_PATHNAME_LENGTH}`,\n );\n }\n\n for (const invalidCharacter of disallowedPathnameCharacters) {\n if (pathname.includes(invalidCharacter)) {\n throw new BlobError(\n `pathname cannot contain \"${invalidCharacter}\", please encode it if needed`,\n );\n }\n }\n\n if (!options) {\n throw new BlobError('missing options, see usage');\n }\n\n if (options.access !== 'public' && options.access !== 'private') {\n throw new BlobError('access must be \"public\" or \"private\"');\n }\n\n if (extraChecks) {\n extraChecks(options);\n }\n\n if (getToken) {\n options.token = await getToken(pathname, options);\n }\n\n return options;\n}\n","import { BlobServiceNotAvailable, requestApi } from '../api';\nimport { debug } from '../debug';\nimport type { BlobCommandOptions, CommonCreateBlobOptions } from '../helpers';\nimport type {\n CreatePutMethodOptions,\n PutBlobApiResponse,\n PutBlobResult,\n} from '../put-helpers';\nimport { createPutHeaders, createPutOptions } from '../put-helpers';\nimport type { Part } from './helpers';\n\n/**\n * Options for completing a multipart upload.\n * Used with the completeMultipartUpload method.\n */\nexport interface CommonCompleteMultipartUploadOptions {\n /**\n * Unique upload identifier for the multipart upload, received from createMultipartUpload.\n * This ID is used to identify which multipart upload is being completed.\n */\n uploadId: string;\n\n /**\n * Unique key identifying the blob object, received from createMultipartUpload.\n * This key is used to identify which blob object the parts belong to.\n */\n key: string;\n}\n\nexport type CompleteMultipartUploadCommandOptions =\n CommonCompleteMultipartUploadOptions & CommonCreateBlobOptions;\n\nexport function createCompleteMultipartUploadMethod<\n TOptions extends CompleteMultipartUploadCommandOptions,\n>({ allowedOptions, getToken, extraChecks }: CreatePutMethodOptions<TOptions>) {\n return async (pathname: string, parts: Part[], optionsInput: TOptions) => {\n const options = await createPutOptions({\n pathname,\n options: optionsInput,\n extraChecks,\n getToken,\n });\n\n const headers = createPutHeaders(allowedOptions, options);\n\n return completeMultipartUpload({\n uploadId: options.uploadId,\n key: options.key,\n pathname,\n headers,\n options,\n parts,\n });\n };\n}\n\nexport async function completeMultipartUpload({\n uploadId,\n key,\n pathname,\n parts,\n headers,\n options,\n}: {\n uploadId: string;\n key: string;\n pathname: string;\n parts: Part[];\n headers: Record<string, string>;\n options: BlobCommandOptions;\n}): Promise<PutBlobResult> {\n 
const params = new URLSearchParams({ pathname });\n\n try {\n const response = await requestApi<PutBlobApiResponse>(\n `/mpu?${params.toString()}`,\n {\n method: 'POST',\n headers: {\n ...headers,\n 'content-type': 'application/json',\n 'x-mpu-action': 'complete',\n 'x-mpu-upload-id': uploadId,\n // key can be any utf8 character so we need to encode it as HTTP headers can only be us-ascii\n // https://www.rfc-editor.org/rfc/rfc7230#swection-3.2.4\n 'x-mpu-key': encodeURIComponent(key),\n },\n body: JSON.stringify(parts),\n signal: options.abortSignal,\n },\n options,\n );\n\n debug('mpu: complete', response);\n\n return response;\n } catch (error: unknown) {\n if (\n error instanceof TypeError &&\n (error.message === 'Failed to fetch' || error.message === 'fetch failed')\n ) {\n throw new BlobServiceNotAvailable();\n } else {\n throw error;\n }\n }\n}\n","import { BlobServiceNotAvailable, requestApi } from '../api';\nimport { debug } from '../debug';\nimport type { BlobCommandOptions, CommonCreateBlobOptions } from '../helpers';\nimport type { CreatePutMethodOptions } from '../put-helpers';\nimport { createPutHeaders, createPutOptions } from '../put-helpers';\n\nexport function createCreateMultipartUploadMethod<\n TOptions extends CommonCreateBlobOptions,\n>({ allowedOptions, getToken, extraChecks }: CreatePutMethodOptions<TOptions>) {\n return async (pathname: string, optionsInput: TOptions) => {\n const options = await createPutOptions({\n pathname,\n options: optionsInput,\n extraChecks,\n getToken,\n });\n\n const headers = createPutHeaders(allowedOptions, options);\n\n const createMultipartUploadResponse = await createMultipartUpload(\n pathname,\n headers,\n options,\n );\n\n return {\n key: createMultipartUploadResponse.key,\n uploadId: createMultipartUploadResponse.uploadId,\n };\n };\n}\n\ninterface CreateMultipartUploadApiResponse {\n uploadId: string;\n key: string;\n}\n\nexport async function createMultipartUpload(\n pathname: string,\n headers: Record<string, string>,\n options: BlobCommandOptions,\n): Promise<CreateMultipartUploadApiResponse> {\n debug('mpu: create', 'pathname:', pathname);\n\n const params = new URLSearchParams({ pathname });\n\n try {\n const response = await requestApi<CreateMultipartUploadApiResponse>(\n `/mpu?${params.toString()}`,\n {\n method: 'POST',\n headers: {\n ...headers,\n 'x-mpu-action': 'create',\n },\n signal: options.abortSignal,\n },\n options,\n );\n\n debug('mpu: create', response);\n\n return response;\n } catch (error: unknown) {\n if (\n error instanceof TypeError &&\n (error.message === 'Failed to fetch' || error.message === 'fetch failed')\n ) {\n throw new BlobServiceNotAvailable();\n }\n\n throw error;\n }\n}\n","import throttle from 'throttleit';\nimport { BlobServiceNotAvailable, requestApi } from '../api';\nimport { debug } from '../debug';\nimport type {\n BlobCommandOptions,\n CommonCreateBlobOptions,\n WithUploadProgress,\n} from '../helpers';\nimport { BlobError, bytes, isPlainObject } from '../helpers';\nimport type { CreatePutMethodOptions, PutBody } from '../put-helpers';\nimport { createPutHeaders, createPutOptions } from '../put-helpers';\nimport type { Part, PartInput } from './helpers';\n\n/**\n * Options for uploading a part in a multipart upload process.\n * Used with the uploadPart method.\n */\nexport interface CommonMultipartUploadOptions {\n /**\n * Unique upload identifier for the multipart upload, received from createMultipartUpload.\n * This ID is used to associate all uploaded parts with the same multipart 
upload.\n */\n uploadId: string;\n\n /**\n * Unique key identifying the blob object, received from createMultipartUpload.\n * This key is used to identify which blob object the parts belong to.\n */\n key: string;\n\n /**\n * A number identifying which part is being uploaded (1-based).\n * This number is used to order the parts when completing the multipart upload.\n * Parts must be uploaded with consecutive part numbers starting from 1.\n */\n partNumber: number;\n}\n\nexport type UploadPartCommandOptions = CommonMultipartUploadOptions &\n CommonCreateBlobOptions;\n\nexport function createUploadPartMethod<\n TOptions extends UploadPartCommandOptions,\n>({ allowedOptions, getToken, extraChecks }: CreatePutMethodOptions<TOptions>) {\n return async (\n pathname: string,\n body: PutBody,\n optionsInput: TOptions,\n ): Promise<Part> => {\n const options = await createPutOptions({\n pathname,\n options: optionsInput,\n extraChecks,\n getToken,\n });\n\n const headers = createPutHeaders(allowedOptions, options);\n\n if (isPlainObject(body)) {\n throw new BlobError(\n \"Body must be a string, buffer or stream. You sent a plain JavaScript object, double check what you're trying to upload.\",\n );\n }\n\n const result = await uploadPart({\n uploadId: options.uploadId,\n key: options.key,\n pathname,\n part: { blob: body, partNumber: options.partNumber },\n headers,\n options,\n });\n\n return {\n etag: result.etag,\n partNumber: options.partNumber,\n };\n };\n}\n\nexport async function uploadPart({\n uploadId,\n key,\n pathname,\n headers,\n options,\n internalAbortController = new AbortController(),\n part,\n}: {\n uploadId: string;\n key: string;\n pathname: string;\n headers: Record<string, string>;\n options: BlobCommandOptions & WithUploadProgress;\n internalAbortController?: AbortController;\n part: PartInput;\n}): Promise<UploadPartApiResponse> {\n const params = new URLSearchParams({ pathname });\n\n const responsePromise = requestApi<UploadPartApiResponse>(\n `/mpu?${params.toString()}`,\n {\n signal: internalAbortController.signal,\n method: 'POST',\n headers: {\n ...headers,\n 'x-mpu-action': 'upload',\n 'x-mpu-key': encodeURIComponent(key),\n 'x-mpu-upload-id': uploadId,\n 'x-mpu-part-number': part.partNumber.toString(),\n },\n // weird things between undici types and native fetch types\n body: part.blob,\n },\n options,\n );\n\n function handleAbort(): void {\n internalAbortController.abort();\n }\n\n if (options.abortSignal?.aborted) {\n // abort if the signal is already aborted\n handleAbort();\n } else {\n // we connect the internal abort controller to the external abortSignal to allow the user to cancel the upload\n options.abortSignal?.addEventListener('abort', handleAbort);\n }\n\n const response = await responsePromise;\n\n options.abortSignal?.removeEventListener('abort', handleAbort);\n\n return response;\n}\n\n// Most browsers will cap requests at 6 concurrent uploads per domain (Vercel Blob API domain)\n// In other environments, we can afford to be more aggressive\nconst maxConcurrentUploads = typeof window !== 'undefined' ? 
6 : 8;\n\n// 5MB is the minimum part size accepted by Vercel Blob, but we set our default part size to 8mb like the aws cli\nconst partSizeInBytes = 8 * 1024 * 1024;\n\nconst maxBytesInMemory = maxConcurrentUploads * partSizeInBytes * 2;\n\ninterface UploadPartApiResponse {\n etag: string;\n}\n\nexport interface BlobUploadPart {\n partNumber: number;\n blob: Blob;\n}\n\n// Can we rewrite this function without new Promise?\nexport function uploadAllParts({\n uploadId,\n key,\n pathname,\n stream,\n headers,\n options,\n totalToLoad,\n}: {\n uploadId: string;\n key: string;\n pathname: string;\n stream: ReadableStream<ArrayBuffer>;\n headers: Record<string, string>;\n options: BlobCommandOptions & WithUploadProgress;\n totalToLoad: number;\n}): Promise<Part[]> {\n debug('mpu: upload init', 'key:', key);\n const internalAbortController = new AbortController();\n\n return new Promise((resolve, reject) => {\n const partsToUpload: BlobUploadPart[] = [];\n const completedParts: Part[] = [];\n const reader = stream.getReader();\n let activeUploads = 0;\n let reading = false;\n let currentPartNumber = 1;\n // this next variable is used to escape the read loop when an error occurs\n let rejected = false;\n let currentBytesInMemory = 0;\n let doneReading = false;\n let bytesSent = 0;\n\n // This must be outside the read loop, in case we reach the maxBytesInMemory and\n // we exit the loop but some bytes are still to be sent on the next read invocation.\n let arrayBuffers: ArrayBuffer[] = [];\n let currentPartBytesRead = 0;\n\n let onUploadProgress: (() => void) | undefined;\n const totalLoadedPerPartNumber: Record<string, number> = {};\n\n if (options.onUploadProgress) {\n onUploadProgress = throttle(() => {\n const loaded = Object.values(totalLoadedPerPartNumber).reduce(\n (acc, cur) => {\n return acc + cur;\n },\n 0,\n );\n const total = totalToLoad || loaded;\n const percentage =\n totalToLoad > 0\n ? Number(((loaded / totalToLoad || loaded) * 100).toFixed(2))\n : 0;\n\n // we call the user's onUploadProgress callback\n options.onUploadProgress?.({ loaded, total, percentage });\n }, 150);\n }\n\n read().catch(cancel);\n\n async function read(): Promise<void> {\n debug(\n 'mpu: upload read start',\n 'activeUploads:',\n activeUploads,\n 'currentBytesInMemory:',\n `${bytes(currentBytesInMemory)}/${bytes(maxBytesInMemory)}`,\n 'bytesSent:',\n bytes(bytesSent),\n );\n\n reading = true;\n\n while (currentBytesInMemory < maxBytesInMemory && !rejected) {\n try {\n const { value, done } = await reader.read();\n\n if (done) {\n doneReading = true;\n debug('mpu: upload read consumed the whole stream');\n // done is sent when the stream is fully consumed. That's why we're not using the value here.\n if (arrayBuffers.length > 0) {\n partsToUpload.push({\n partNumber: currentPartNumber++,\n blob: new Blob(arrayBuffers, {\n type: 'application/octet-stream',\n }),\n });\n\n sendParts();\n }\n reading = false;\n return;\n }\n\n currentBytesInMemory += value.byteLength;\n\n // This code ensures that each part will be exactly of `partSizeInBytes` size\n // Otherwise R2 will refuse it. 
AWS S3 is fine with parts of different sizes.\n let valueOffset = 0;\n while (valueOffset < value.byteLength) {\n const remainingPartSize = partSizeInBytes - currentPartBytesRead;\n const endOffset = Math.min(\n valueOffset + remainingPartSize,\n value.byteLength,\n );\n\n const chunk = value.slice(valueOffset, endOffset);\n\n arrayBuffers.push(chunk);\n currentPartBytesRead += chunk.byteLength;\n valueOffset = endOffset;\n\n if (currentPartBytesRead === partSizeInBytes) {\n partsToUpload.push({\n partNumber: currentPartNumber++,\n blob: new Blob(arrayBuffers, {\n type: 'application/octet-stream',\n }),\n });\n\n arrayBuffers = [];\n currentPartBytesRead = 0;\n sendParts();\n }\n }\n } catch (error) {\n cancel(error);\n }\n }\n\n debug(\n 'mpu: upload read end',\n 'activeUploads:',\n activeUploads,\n 'currentBytesInMemory:',\n `${bytes(currentBytesInMemory)}/${bytes(maxBytesInMemory)}`,\n 'bytesSent:',\n bytes(bytesSent),\n );\n\n reading = false;\n }\n\n async function sendPart(part: BlobUploadPart): Promise<void> {\n activeUploads++;\n\n debug(\n 'mpu: upload send part start',\n 'partNumber:',\n part.partNumber,\n 'size:',\n part.blob.size,\n 'activeUploads:',\n activeUploads,\n 'currentBytesInMemory:',\n `${bytes(currentBytesInMemory)}/${bytes(maxBytesInMemory)}`,\n 'bytesSent:',\n bytes(bytesSent),\n );\n\n try {\n const uploadProgressForPart: WithUploadProgress['onUploadProgress'] =\n options.onUploadProgress\n ? (event) => {\n totalLoadedPerPartNumber[part.partNumber] = event.loaded;\n if (onUploadProgress) {\n onUploadProgress();\n }\n }\n : undefined;\n\n const completedPart = await uploadPart({\n uploadId,\n key,\n pathname,\n headers,\n options: {\n ...options,\n onUploadProgress: uploadProgressForPart,\n },\n internalAbortController,\n part,\n });\n\n debug(\n 'mpu: upload send part end',\n 'partNumber:',\n part.partNumber,\n 'activeUploads',\n activeUploads,\n 'currentBytesInMemory:',\n `${bytes(currentBytesInMemory)}/${bytes(maxBytesInMemory)}`,\n 'bytesSent:',\n bytes(bytesSent),\n );\n\n if (rejected) {\n return;\n }\n\n completedParts.push({\n partNumber: part.partNumber,\n etag: completedPart.etag,\n });\n\n currentBytesInMemory -= part.blob.size;\n activeUploads--;\n bytesSent += part.blob.size;\n\n if (partsToUpload.length > 0) {\n sendParts();\n }\n\n if (doneReading) {\n if (activeUploads === 0) {\n reader.releaseLock();\n resolve(completedParts);\n }\n return;\n }\n\n if (!reading) {\n read().catch(cancel);\n }\n } catch (error) {\n // cancel if fetch throws an error\n cancel(error);\n }\n }\n\n function sendParts(): void {\n if (rejected) {\n return;\n }\n\n debug(\n 'send parts',\n 'activeUploads',\n activeUploads,\n 'partsToUpload',\n partsToUpload.length,\n );\n\n while (activeUploads < maxConcurrentUploads && partsToUpload.length > 0) {\n const partToSend = partsToUpload.shift();\n if (partToSend) {\n void sendPart(partToSend);\n }\n }\n }\n\n function cancel(error: unknown): void {\n // a previous call already rejected the whole call, ignore\n if (rejected) {\n return;\n }\n rejected = true;\n internalAbortController.abort();\n reader.releaseLock();\n if (\n error instanceof TypeError &&\n (error.message === 'Failed to fetch' ||\n error.message === 'fetch failed')\n ) {\n reject(new BlobServiceNotAvailable());\n } else {\n reject(error as Error);\n }\n }\n });\n}\n","import {\n BlobError,\n type CommonCreateBlobOptions,\n isPlainObject,\n} from '../helpers';\nimport type { CreatePutMethodOptions, PutBody } from '../put-helpers';\nimport { createPutHeaders, 
createPutOptions } from '../put-helpers';\nimport { completeMultipartUpload } from './complete';\nimport { createMultipartUpload } from './create';\nimport type { Part } from './helpers';\nimport { uploadPart as rawUploadPart } from './upload';\n\nexport function createCreateMultipartUploaderMethod<\n TOptions extends CommonCreateBlobOptions,\n>({ allowedOptions, getToken, extraChecks }: CreatePutMethodOptions<TOptions>) {\n return async (pathname: string, optionsInput: TOptions) => {\n const options = await createPutOptions({\n pathname,\n options: optionsInput,\n extraChecks,\n getToken,\n });\n\n const headers = createPutHeaders(allowedOptions, options);\n\n const createMultipartUploadResponse = await createMultipartUpload(\n pathname,\n headers,\n options,\n );\n\n return {\n key: createMultipartUploadResponse.key,\n uploadId: createMultipartUploadResponse.uploadId,\n\n async uploadPart(partNumber: number, body: PutBody) {\n if (isPlainObject(body)) {\n throw new BlobError(\n \"Body must be a string, buffer or stream. You sent a plain JavaScript object, double check what you're trying to upload.\",\n );\n }\n\n const result = await rawUploadPart({\n uploadId: createMultipartUploadResponse.uploadId,\n key: createMultipartUploadResponse.key,\n pathname,\n part: { partNumber, blob: body },\n headers,\n options,\n });\n\n return {\n etag: result.etag,\n partNumber,\n };\n },\n\n async complete(parts: Part[]) {\n return completeMultipartUpload({\n uploadId: createMultipartUploadResponse.uploadId,\n key: createMultipartUploadResponse.key,\n pathname,\n parts,\n headers,\n options,\n });\n },\n };\n };\n}\n","import throttle from 'throttleit';\nimport { requestApi } from './api';\nimport type { CommonCreateBlobOptions, WithUploadProgress } from './helpers';\nimport { BlobError, isPlainObject } from './helpers';\nimport { uncontrolledMultipartUpload } from './multipart/uncontrolled';\nimport type {\n CreatePutMethodOptions,\n PutBlobApiResponse,\n PutBlobResult,\n PutBody,\n} from './put-helpers';\nimport { createPutHeaders, createPutOptions } from './put-helpers';\n\nexport interface PutCommandOptions\n extends CommonCreateBlobOptions,\n WithUploadProgress {\n /**\n * Whether to use multipart upload. Use this when uploading large files. It will split the file into multiple parts, upload them in parallel and retry failed parts.\n * @defaultvalue false\n */\n multipart?: boolean;\n}\n\nexport function createPutMethod<TOptions extends PutCommandOptions>({\n allowedOptions,\n getToken,\n extraChecks,\n}: CreatePutMethodOptions<TOptions>) {\n return async function put(\n pathname: string,\n body: PutBody,\n optionsInput: TOptions,\n ): Promise<PutBlobResult> {\n if (!body) {\n throw new BlobError('body is required');\n }\n\n if (isPlainObject(body)) {\n throw new BlobError(\n \"Body must be a string, buffer or stream. You sent a plain JavaScript object, double check what you're trying to upload.\",\n );\n }\n\n const options = await createPutOptions({\n pathname,\n options: optionsInput,\n extraChecks,\n getToken,\n });\n\n const headers = createPutHeaders(allowedOptions, options);\n\n if (options.multipart === true) {\n return uncontrolledMultipartUpload(pathname, body, headers, options);\n }\n\n const onUploadProgress = options.onUploadProgress\n ? 
throttle(options.onUploadProgress, 100)\n : undefined;\n\n const params = new URLSearchParams({ pathname });\n\n const response = await requestApi<PutBlobApiResponse>(\n `/?${params.toString()}`,\n {\n method: 'PUT',\n body,\n headers,\n signal: options.abortSignal,\n },\n {\n ...options,\n onUploadProgress,\n },\n );\n\n return {\n url: response.url,\n downloadUrl: response.downloadUrl,\n pathname: response.pathname,\n contentType: response.contentType,\n contentDisposition: response.contentDisposition,\n };\n };\n}\n","import { debug } from '../debug';\nimport type { BlobCommandOptions, WithUploadProgress } from '../helpers';\nimport { computeBodyLength } from '../helpers';\nimport type { PutBlobResult, PutBody } from '../put-helpers';\nimport { completeMultipartUpload } from './complete';\nimport { createMultipartUpload } from './create';\nimport { toReadableStream } from './helpers';\nimport { uploadAllParts } from './upload';\n\n// this automatically slices the body into parts and uploads all of them as multiple parts\nexport async function uncontrolledMultipartUpload(\n pathname: string,\n body: PutBody,\n headers: Record<string, string>,\n options: BlobCommandOptions & WithUploadProgress,\n): Promise<PutBlobResult> {\n debug('mpu: init', 'pathname:', pathname, 'headers:', headers);\n\n const optionsWithoutOnUploadProgress = {\n ...options,\n onUploadProgress: undefined,\n };\n\n // Step 1: Start multipart upload\n const createMultipartUploadResponse = await createMultipartUpload(\n pathname,\n headers,\n optionsWithoutOnUploadProgress,\n );\n\n const totalToLoad = computeBodyLength(body);\n const stream = await toReadableStream(body);\n\n // Step 2: Upload parts one by one\n const parts = await uploadAllParts({\n uploadId: createMultipartUploadResponse.uploadId,\n key: createMultipartUploadResponse.key,\n pathname,\n stream,\n headers,\n options,\n totalToLoad,\n });\n\n // Step 3: Complete multipart upload\n const blob = await completeMultipartUpload({\n uploadId: createMultipartUploadResponse.uploadId,\n key: createMultipartUploadResponse.key,\n pathname,\n parts,\n headers,\n options: optionsWithoutOnUploadProgress,\n });\n\n // changes:\n // stream => set percentage to 0% even if loaded/total is valid\n // stream => send onUploadProgress 100% at the end of the request here\n\n return blob;\n}\n","import { requestApi } from './api';\nimport type { BlobCommandOptions } from './helpers';\nimport { type PutBlobApiResponse, putOptionHeaderMap } from './put-helpers';\n\nexport interface CreateFolderResult {\n pathname: string;\n url: string;\n}\n\n/**\n * Creates a folder in your store. Vercel Blob has no real concept of folders, our file browser on Vercel.com displays folders based on the presence of trailing slashes in the pathname. Unless you are building a file browser system, you probably don't need to use this method.\n *\n * Use the resulting `url` to delete the folder, just like you would delete a blob.\n * @param pathname - Can be user1/ or user1/avatars/\n * @param options - Additional options like `token`\n */\nexport async function createFolder(\n pathname: string,\n options: BlobCommandOptions = {},\n): Promise<CreateFolderResult> {\n const folderPathname = pathname.endsWith('/') ? 
pathname : `${pathname}/`;\n\n const headers: Record<string, string> = {};\n\n headers[putOptionHeaderMap.addRandomSuffix] = '0';\n\n const params = new URLSearchParams({ pathname: folderPathname });\n const response = await requestApi<PutBlobApiResponse>(\n `/?${params.toString()}`,\n {\n method: 'PUT',\n headers,\n signal: options.abortSignal,\n },\n options,\n );\n\n return {\n url: response.url,\n pathname: response.pathname,\n };\n}\n"]}
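The multipart source embedded above drives a single `/mpu` endpoint through three `x-mpu-action` values: `create` returns a `key` and `uploadId`, `upload` sends one numbered part, and `complete` posts the collected `{ etag, partNumber }` pairs. A minimal sketch of the manual flow, assuming the uploader factory built by `createCreateMultipartUploaderMethod` is exported as `createMultipartUploader` (the import path and the `access` value are assumptions, not confirmed by this diff):

// Hedged usage sketch; the uploader shape mirrors the factory output above:
// { key, uploadId, uploadPart(partNumber, body), complete(parts) }.
import { createMultipartUploader } from '@vercel/blob';

async function uploadInParts(pathname: string, chunks: Blob[]) {
  const uploader = await createMultipartUploader(pathname, {
    access: 'public', // assumed store configuration
  });

  // Part numbers are 1-based and must be consecutive.
  const parts: { etag: string; partNumber: number }[] = [];
  for (let i = 0; i < chunks.length; i++) {
    parts.push(await uploader.uploadPart(i + 1, chunks[i]));
  }

  // Sends the collected { etag, partNumber } pairs with 'x-mpu-action': 'complete'.
  return uploader.complete(parts);
}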
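`uploadAllParts` slices the incoming stream into parts of exactly `partSizeInBytes` (8 MiB); per the inline comment, R2 refuses unequal part sizes, so only the trailing part may be smaller. Back-pressure is enforced by `maxBytesInMemory = maxConcurrentUploads * partSizeInBytes * 2`, which works out to 6 × 8 MiB × 2 = 96 MiB in browsers and 8 × 8 MiB × 2 = 128 MiB elsewhere. A self-contained sketch of just the slicing step (the generator name and shape are illustrative, not part of the package):

// Standalone illustration of the exact-size slicing used by uploadAllParts.
const PART_SIZE = 8 * 1024 * 1024; // mirrors partSizeInBytes in the source

function* sliceIntoParts(buffers: Iterable<Uint8Array>): Generator<Blob> {
  let pending: Uint8Array[] = [];
  let pendingBytes = 0;

  for (const value of buffers) {
    let offset = 0;
    while (offset < value.byteLength) {
      // Take only as many bytes as the current part still needs.
      const end = Math.min(offset + (PART_SIZE - pendingBytes), value.byteLength);
      const chunk = value.slice(offset, end);
      pending.push(chunk);
      pendingBytes += chunk.byteLength;
      offset = end;

      if (pendingBytes === PART_SIZE) {
        yield new Blob(pending, { type: 'application/octet-stream' });
        pending = [];
        pendingBytes = 0;
      }
    }
  }

  // The trailing part is allowed to be smaller than PART_SIZE.
  if (pending.length > 0) {
    yield new Blob(pending, { type: 'application/octet-stream' });
  }
}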
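At the top level, `createPutMethod` rejects empty bodies and plain objects, then either issues a single `PUT` or, when `multipart: true`, hands off to `uncontrolledMultipartUpload`, which retries failed parts and funnels per-part byte counts through a throttled `onUploadProgress`. A sketch of that high-level call, assuming the method is exported as `put` (the pathname and `fileStream` are placeholders):

import { put } from '@vercel/blob';

declare const fileStream: ReadableStream; // placeholder for your data source

const blob = await put('videos/conference-talk.mp4', fileStream, {
  access: 'public',
  multipart: true, // split into 8 MiB parts, uploaded in parallel with retries
  onUploadProgress({ loaded, total, percentage }) {
    console.log(`upload: ${percentage}% (${loaded}/${total} bytes)`);
  },
});
console.log(blob.url, blob.downloadUrl);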
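Finally, `createFolder` makes the folder convention explicit: a pathname is a "folder" only by virtue of its trailing slash, which the helper appends when missing, and `addRandomSuffix` is forced off so the pathname stays stable. Per its JSDoc, the returned `url` is deleted like any other blob. A short sketch, assuming the default `BLOB_READ_WRITE_TOKEN` resolution:

import { createFolder, del } from '@vercel/blob';

const folder = await createFolder('user1/avatars'); // stored as 'user1/avatars/'
console.log(folder.pathname, folder.url);

// A folder is removed the same way as a blob, via its URL.
await del(folder.url);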