@junobuild/storage 0.1.1 → 0.1.2

This diff shows the contents of two publicly released package versions as they appear in their public registries; it is provided for informational purposes only.
package/README.md CHANGED
@@ -1,14 +1,14 @@
  [![npm][npm-badge]][npm-badge-url]
  [![license][npm-license]][npm-license-url]
 
- [npm-badge]: https://img.shields.io/npm/v/@junobuild/utils
- [npm-badge-url]: https://www.npmjs.com/package/@junobuild/utils
- [npm-license]: https://img.shields.io/npm/l/@junobuild/utils
+ [npm-badge]: https://img.shields.io/npm/v/@junobuild/storage
+ [npm-badge-url]: https://www.npmjs.com/package/@junobuild/storage
+ [npm-license]: https://img.shields.io/npm/l/@junobuild/storage
  [npm-license-url]: https://github.com/junobuild/juno-js/blob/main/LICENSE
 
- # Juno JavaScript utils
+ # Juno JavaScript Storage
 
- A collection of utilities and constants for Juno JS libs.
+ A library for interfacing with Juno's Storage features.
 
  ## License
 
@@ -1,2 +1,2 @@
- import{Principal as R}from"@dfinity/principal";var U=t=>t==null,k=t=>!U(t);var n=t=>k(t)?[t]:[];var _=()=>typeof window<"u";var B=async({asset:{data:t,filename:s,collection:o,headers:a,token:l,fullPath:i,encoding:b,description:A},actor:c,init_asset_upload:C})=>{let{batch_id:p}=await C({collection:o,full_path:i,name:s,token:n(l),encoding_type:n(b),description:n(A)}),u=19e5,d=[],y=_()?new Blob([await t.arrayBuffer()]):t,f=0n;for(let e=0;e<y.size;e+=u){let m=y.slice(e,e+u);d.push({batchId:p,chunk:m,actor:c,orderId:f}),f++}let r=[];for await(let e of I({uploadChunks:d}))r=[...r,...e];let h=a.find(([e,m])=>e.toLowerCase()==="content-type")===void 0&&t.type!==void 0&&t.type!==""?[["Content-Type",t.type]]:void 0;await c.commit_asset_upload({batch_id:p,chunk_ids:r.map(({chunk_id:e})=>e),headers:[...a,...h||[]]})};async function*I({uploadChunks:t,limit:s=12}){for(let o=0;o<t.length;o=o+s){let a=t.slice(o,o+s);yield await Promise.all(a.map(i=>w(i)))}}var w=async({batchId:t,chunk:s,actor:o,orderId:a})=>o.upload_asset_chunk({batch_id:t,content:new Uint8Array(await s.arrayBuffer()),order_id:n(a)});export{B as uploadAsset};
+ import{toNullable as a}from"@dfinity/utils";import{isBrowser as U}from"@junobuild/utils";var R=async({asset:{data:t,filename:s,collection:o,headers:n,token:r,fullPath:i,encoding:f,description:k},actor:d,init_asset_upload:I})=>{let{batch_id:c}=await I({collection:o,full_path:i,name:s,token:a(r),encoding_type:a(f),description:a(k)}),p=19e5,u=[],y=U()?new Blob([await t.arrayBuffer()]):t,h=0n;for(let e=0;e<y.size;e+=p){let C=y.slice(e,e+p);u.push({batchId:c,chunk:C,actor:d,orderId:h}),h++}let l=[];for await(let e of _({uploadChunks:u}))l=[...l,...e];let m=n.find(([e,C])=>e.toLowerCase()==="content-type")===void 0&&t.type!==void 0&&t.type!==""?[["Content-Type",t.type]]:void 0;await d.commit_asset_upload({batch_id:c,chunk_ids:l.map(({chunk_id:e})=>e),headers:[...n,...m||[]]})};async function*_({uploadChunks:t,limit:s=12}){for(let o=0;o<t.length;o=o+s){let n=t.slice(o,o+s);yield await Promise.all(n.map(i=>A(i)))}}var A=async({batchId:t,chunk:s,actor:o,orderId:n})=>o.upload_asset_chunk({batch_id:t,content:new Uint8Array(await s.arrayBuffer()),order_id:a(n)});export{R as uploadAsset};
  //# sourceMappingURL=index.js.map
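The substantive change in this bundle: 0.1.1 inlined private copies of the nullish and environment helpers, while 0.1.2 imports `toNullable` from `@dfinity/utils` and `isBrowser` from `@junobuild/utils` instead. Both helpers are one-liners; a minimal sketch, matching the sources embedded in the 0.1.1 source map below (not the libraries' actual files):

```ts
// Sketch of the two helpers now imported rather than bundled.

// toNullable encodes an optional value as a Candid-style option: [] or [value].
const toNullable = <T>(value?: T): [] | [T] =>
  value === null || value === undefined ? [] : [value];

// isBrowser distinguishes browser from Node environments.
const isBrowser = (): boolean => typeof window !== 'undefined';

toNullable('secret'); // ['secret'] -> an opt with a value
toNullable(undefined); // [] -> an opt without a value
```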
@@ -1,7 +1,7 @@
  {
  "version": 3,
- "sources": ["../../../utils/src/utils/debounce.utils.ts", "../../../utils/src/utils/json.utils.ts", "../../../utils/src/utils/null.utils.ts", "../../../utils/src/utils/did.utils.ts", "../../../utils/src/utils/env.utils.ts", "../../src/api/storage.api.ts"],
- "sourcesContent": ["/**\n * Creates a debounced function that delays invoking the provided function until after the specified timeout.\n * @param {Function} func - The function to debounce.\n * @param {number} [timeout=300] - The number of milliseconds to delay. Defaults to 300ms if not specified or invalid.\n * @returns {Function} A debounced function.\n */\n// eslint-disable-next-line @typescript-eslint/ban-types, local-rules/prefer-object-params\nexport const debounce = (func: Function, timeout?: number): Function => {\n let timer: ReturnType<typeof setTimeout> | undefined;\n\n return (...args: unknown[]) => {\n const next = () => func(...args);\n\n if (timer) {\n clearTimeout(timer);\n }\n\n timer = setTimeout(next, timeout !== undefined && timeout > 0 ? timeout : 300);\n };\n};\n", "import {Principal} from '@dfinity/principal';\nimport {nonNullish} from './null.utils';\n\nconst JSON_KEY_BIGINT = '__bigint__';\nconst JSON_KEY_PRINCIPAL = '__principal__';\nconst JSON_KEY_UINT8ARRAY = '__uint8array__';\n\n/**\n * A function that alters the behavior of the stringification process for BigInt, Principal, and Uint8Array.\n * @param {string} _key - The key of the value being stringified.\n * @param {unknown} value - The value being stringified.\n * @returns {unknown} The altered value for stringification.\n */\n// eslint-disable-next-line local-rules/prefer-object-params\nexport const jsonReplacer = (_key: string, value: unknown): unknown => {\n if (typeof value === 'bigint') {\n return {[JSON_KEY_BIGINT]: `${value}`};\n }\n\n if (nonNullish(value) && value instanceof Principal) {\n return {[JSON_KEY_PRINCIPAL]: value.toText()};\n }\n\n if (nonNullish(value) && value instanceof Uint8Array) {\n return {[JSON_KEY_UINT8ARRAY]: Array.from(value)};\n }\n\n return value;\n};\n\n/**\n * A parser that interprets revived BigInt, Principal, and Uint8Array when constructing JavaScript values or objects.\n * @param {string} _key - The key of the value being parsed.\n * @param {unknown} value - The value being parsed.\n * @returns {unknown} The parsed value.\n */\n// eslint-disable-next-line local-rules/prefer-object-params\nexport const jsonReviver = (_key: string, value: unknown): unknown => {\n const mapValue = <T>(key: string): T => (value as Record<string, T>)[key];\n\n if (nonNullish(value) && typeof value === 'object' && JSON_KEY_BIGINT in value) {\n return BigInt(mapValue(JSON_KEY_BIGINT));\n }\n\n if (nonNullish(value) && typeof value === 'object' && JSON_KEY_PRINCIPAL in value) {\n return Principal.fromText(mapValue(JSON_KEY_PRINCIPAL));\n }\n\n if (nonNullish(value) && typeof value === 'object' && JSON_KEY_UINT8ARRAY in value) {\n return Uint8Array.from(mapValue(JSON_KEY_UINT8ARRAY));\n }\n\n return value;\n};\n", "/**\n * Checks if the provided argument is null or undefined.\n * @template T\n * @param {T | undefined | null} argument - The argument to check.\n * @returns {boolean} True if the argument is null or undefined, false otherwise.\n */\n// eslint-disable-next-line local-rules/use-option-type-wrapper\nexport const isNullish = <T>(argument: T | undefined | null): argument is undefined | null =>\n argument === null || argument === undefined;\n\n/**\n * Checks if the provided argument is neither null nor undefined.\n * @template T\n * @param {T | undefined | null} argument - The argument to check.\n * @returns {boolean} True if the argument is neither null nor undefined, false otherwise.\n */\n// eslint-disable-next-line local-rules/use-option-type-wrapper\nexport const nonNullish = 
<T>(argument: T | undefined | null): argument is NonNullable<T> =>\n !isNullish(argument);\n\n/**\n * Represents an error thrown when a value is null or undefined.\n * @class\n * @extends {Error}\n */\nexport class NullishError extends Error {}\n\n/**\n * Asserts that a value is neither null nor undefined.\n * @template T\n * @param {T} value - The value to check.\n * @param {string} [message] - The optional error message to use if the assertion fails.\n * @throws {NullishError} If the value is null or undefined.\n * @returns {asserts value is NonNullable<T>} Asserts that the value is neither null nor undefined.\n */\nexport const assertNonNullish: <T>(\n value: T,\n message?: string\n // eslint-disable-next-line local-rules/prefer-object-params\n) => asserts value is NonNullable<T> = <T>(value: T, message?: string): void => {\n if (isNullish(value)) {\n throw new NullishError(message);\n }\n};\n", "import {jsonReplacer, jsonReviver} from './json.utils';\nimport {nonNullish} from './null.utils';\n\n/**\n * Converts a value to a nullable array.\n * @template T\n * @param {T} [value] - The value to convert.\n * @returns {([] | [T])} A nullable array containing the value if non-nullish, or an empty array if nullish.\n */\nexport const toNullable = <T>(value?: T): [] | [T] => (nonNullish(value) ? [value] : []);\n\n/**\n * Extracts a value from a nullable array.\n * @template T\n * @param {([] | [T])} value - The nullable array.\n * @returns {(T | undefined)} The value if present, or undefined if the array is empty.\n */\nexport const fromNullable = <T>(value: [] | [T]): T | undefined => value?.[0];\n\n/**\n * Converts data to a Uint8Array for transmission or storage.\n * @template T\n * @param {T} data - The data to convert.\n * @returns {Promise<Uint8Array>} A promise that resolves to a Uint8Array representation of the data.\n */\nexport const toArray = async <T>(data: T): Promise<Uint8Array> => {\n const blob: Blob = new Blob([JSON.stringify(data, jsonReplacer)], {\n type: 'application/json; charset=utf-8'\n });\n return new Uint8Array(await blob.arrayBuffer());\n};\n\n/**\n * Converts a Uint8Array or number array back to the original data type.\n * @template T\n * @param {(Uint8Array | number[])} data - The array to convert.\n * @returns {Promise<T>} A promise that resolves to the original data.\n */\nexport const fromArray = async <T>(data: Uint8Array | number[]): Promise<T> => {\n const blob: Blob = new Blob([data instanceof Uint8Array ? 
data : new Uint8Array(data)], {\n type: 'application/json; charset=utf-8'\n });\n return JSON.parse(await blob.text(), jsonReviver);\n};\n", "/**\n * Checks if the current environment is a browser.\n * @returns {boolean} True if the current environment is a browser, false otherwise.\n */\nexport const isBrowser = (): boolean => typeof window !== `undefined`;\n", "import {isBrowser, toNullable} from '@junobuild/utils';\nimport type {\n _SERVICE as ConsoleActor,\n InitAssetKey as ConsoleInitAssetKey,\n InitUploadResult as ConsoleInitUploadResult\n} from '../../declarations/console/console.did';\nimport type {\n _SERVICE as SatelliteActor,\n InitAssetKey as SatelliteInitAssetKey,\n InitUploadResult as SatelliteInitUploadResult\n} from '../../declarations/satellite/satellite.did';\nimport type {ENCODING_TYPE, Storage} from '../types/storage.types';\n\nexport type UploadAsset = Required<Omit<Storage, 'token' | 'encoding' | 'description'>> &\n Pick<Storage, 'token' | 'encoding' | 'description'>;\n\nexport const uploadAsset = async ({\n asset: {data, filename, collection, headers, token, fullPath, encoding, description},\n actor,\n init_asset_upload\n}: {\n asset: UploadAsset;\n actor: SatelliteActor | ConsoleActor;\n init_asset_upload: (\n initAssetKey: SatelliteInitAssetKey | ConsoleInitAssetKey\n ) => Promise<SatelliteInitUploadResult | ConsoleInitUploadResult>;\n}): Promise<void> => {\n const {batch_id: batchId} = await init_asset_upload({\n collection,\n full_path: fullPath,\n name: filename,\n token: toNullable<string>(token),\n encoding_type: toNullable<ENCODING_TYPE>(encoding),\n description: toNullable(description)\n });\n\n // https://forum.dfinity.org/t/optimal-upload-chunk-size/20444/23?u=peterparker\n const chunkSize = 1900000;\n\n const uploadChunks: UploadChunkParams[] = [];\n\n // Prevent transforming chunk to arrayBuffer error: The requested file could not be read, typically due to permission problems that have occurred after a reference to a file was acquired.\n const clone: Blob = isBrowser() ? new Blob([await data.arrayBuffer()]) : data;\n\n // Split data into chunks\n let orderId = 0n;\n for (let start = 0; start < clone.size; start += chunkSize) {\n const chunk: Blob = clone.slice(start, start + chunkSize);\n\n uploadChunks.push({\n batchId,\n chunk,\n actor,\n orderId\n });\n\n orderId++;\n }\n\n // Upload chunks to the IC in batch - i.e. 12 chunks uploaded at a time.\n let chunkIds: UploadChunkResult[] = [];\n for await (const results of batchUploadChunks({uploadChunks})) {\n chunkIds = [...chunkIds, ...results];\n }\n\n const contentType: [[string, string]] | undefined =\n headers.find(([type, _]) => type.toLowerCase() === 'content-type') === undefined &&\n data.type !== undefined &&\n data.type !== ''\n ? [['Content-Type', data.type]]\n : undefined;\n\n await actor.commit_asset_upload({\n batch_id: batchId,\n chunk_ids: chunkIds.map(({chunk_id}: UploadChunkResult) => chunk_id),\n headers: [...headers, ...(contentType ? 
contentType : [])]\n });\n};\n\nasync function* batchUploadChunks({\n uploadChunks,\n limit = 12\n}: {\n uploadChunks: UploadChunkParams[];\n limit?: number;\n}): AsyncGenerator<UploadChunkResult[], void> {\n for (let i = 0; i < uploadChunks.length; i = i + limit) {\n const batch = uploadChunks.slice(i, i + limit);\n const result = await Promise.all(batch.map((params) => uploadChunk(params)));\n yield result;\n }\n}\n\ninterface UploadChunkResult {\n chunk_id: bigint;\n}\n\ninterface UploadChunkParams {\n batchId: bigint;\n chunk: Blob;\n actor: SatelliteActor | ConsoleActor;\n orderId: bigint;\n}\n\nconst uploadChunk = async ({\n batchId,\n chunk,\n actor,\n orderId\n}: UploadChunkParams): Promise<UploadChunkResult> =>\n actor.upload_asset_chunk({\n batch_id: batchId,\n content: new Uint8Array(await chunk.arrayBuffer()),\n order_id: toNullable(orderId)\n });\n"],
- "mappings": "ACAA,OAAQ,aAAAA,MAAgB,qBCOjB,IAAMC,EAAgBC,GAC3BA,GAAa,KASFC,EAAiBD,GAC5B,CAACD,EAAUC,CAAQ,ECTd,IAAME,EAAiBC,GAAyBC,EAAWD,CAAK,EAAI,CAACA,CAAK,EAAI,CAAC,ECL/E,IAAME,EAAY,IAAe,OAAO,OAAW,ICYnD,IAAMC,EAAc,MAAO,CAChC,MAAO,CAAC,KAAAC,EAAM,SAAAC,EAAU,WAAAC,EAAY,QAAAC,EAAS,MAAAC,EAAO,SAAAC,EAAU,SAAAC,EAAU,YAAAC,CAAW,EACnF,MAAAC,EACA,kBAAAC,CACF,IAMqB,CACnB,GAAM,CAAC,SAAUC,CAAO,EAAI,MAAMD,EAAkB,CAClD,WAAAP,EACA,UAAWG,EACX,KAAMJ,EACN,MAAOU,EAAmBP,CAAK,EAC/B,cAAeO,EAA0BL,CAAQ,EACjD,YAAaK,EAAWJ,CAAW,CACrC,CAAC,EAGKK,EAAY,KAEZC,EAAoC,CAAC,EAGrCC,EAAcC,EAAU,EAAI,IAAI,KAAK,CAAC,MAAMf,EAAK,YAAY,CAAC,CAAC,EAAIA,EAGrEgB,EAAU,GACd,QAASC,EAAQ,EAAGA,EAAQH,EAAM,KAAMG,GAASL,EAAW,CAC1D,IAAMM,EAAcJ,EAAM,MAAMG,EAAOA,EAAQL,CAAS,EAExDC,EAAa,KAAK,CAChB,QAAAH,EACA,MAAAQ,EACA,MAAAV,EACA,QAAAQ,CACF,CAAC,EAEDA,GACF,CAGA,IAAIG,EAAgC,CAAC,EACrC,cAAiBC,KAAWC,EAAkB,CAAC,aAAAR,CAAY,CAAC,EAC1DM,EAAW,CAAC,GAAGA,EAAU,GAAGC,CAAO,EAGrC,IAAME,EACJnB,EAAQ,KAAK,CAAC,CAACoB,EAAMC,CAAC,IAAMD,EAAK,YAAY,IAAM,cAAc,IAAM,QACvEvB,EAAK,OAAS,QACdA,EAAK,OAAS,GACV,CAAC,CAAC,eAAgBA,EAAK,IAAI,CAAC,EAC5B,OAEN,MAAMQ,EAAM,oBAAoB,CAC9B,SAAUE,EACV,UAAWS,EAAS,IAAI,CAAC,CAAC,SAAAM,CAAQ,IAAyBA,CAAQ,EACnE,QAAS,CAAC,GAAGtB,EAAS,GAAImB,GAA4B,CAAC,CAAE,CAC3D,CAAC,CACH,EAEA,eAAgBD,EAAkB,CAChC,aAAAR,EACA,MAAAa,EAAQ,EACV,EAG8C,CAC5C,QAASC,EAAI,EAAGA,EAAId,EAAa,OAAQc,EAAIA,EAAID,EAAO,CACtD,IAAME,EAAQf,EAAa,MAAMc,EAAGA,EAAID,CAAK,EAE7C,MADe,MAAM,QAAQ,IAAIE,EAAM,IAAKC,GAAWC,EAAYD,CAAM,CAAC,CAAC,CAE7E,CACF,CAaA,IAAMC,EAAc,MAAO,CACzB,QAAApB,EACA,MAAAQ,EACA,MAAAV,EACA,QAAAQ,CACF,IACER,EAAM,mBAAmB,CACvB,SAAUE,EACV,QAAS,IAAI,WAAW,MAAMQ,EAAM,YAAY,CAAC,EACjD,SAAUP,EAAWK,CAAO,CAC9B,CAAC",
- "names": ["Principal", "isNullish", "argument", "nonNullish", "toNullable", "value", "nonNullish", "isBrowser", "uploadAsset", "data", "filename", "collection", "headers", "token", "fullPath", "encoding", "description", "actor", "init_asset_upload", "batchId", "g", "chunkSize", "uploadChunks", "clone", "h", "orderId", "start", "chunk", "chunkIds", "results", "batchUploadChunks", "contentType", "type", "_", "chunk_id", "limit", "i", "batch", "params", "uploadChunk"]
+ "sources": ["../../src/api/storage.api.ts"],
+ "sourcesContent": ["import {toNullable} from '@dfinity/utils';\nimport {isBrowser} from '@junobuild/utils';\nimport type {\n _SERVICE as ConsoleActor,\n InitAssetKey as ConsoleInitAssetKey,\n InitUploadResult as ConsoleInitUploadResult\n} from '../../declarations/console/console.did';\nimport type {\n _SERVICE as SatelliteActor,\n InitAssetKey as SatelliteInitAssetKey,\n InitUploadResult as SatelliteInitUploadResult\n} from '../../declarations/satellite/satellite.did';\nimport type {ENCODING_TYPE, Storage} from '../types/storage.types';\n\nexport type UploadAsset = Required<Omit<Storage, 'token' | 'encoding' | 'description'>> &\n Pick<Storage, 'token' | 'encoding' | 'description'>;\n\nexport const uploadAsset = async ({\n asset: {data, filename, collection, headers, token, fullPath, encoding, description},\n actor,\n init_asset_upload\n}: {\n asset: UploadAsset;\n actor: SatelliteActor | ConsoleActor;\n init_asset_upload: (\n initAssetKey: SatelliteInitAssetKey | ConsoleInitAssetKey\n ) => Promise<SatelliteInitUploadResult | ConsoleInitUploadResult>;\n}): Promise<void> => {\n const {batch_id: batchId} = await init_asset_upload({\n collection,\n full_path: fullPath,\n name: filename,\n token: toNullable<string>(token),\n encoding_type: toNullable<ENCODING_TYPE>(encoding),\n description: toNullable(description)\n });\n\n // https://forum.dfinity.org/t/optimal-upload-chunk-size/20444/23?u=peterparker\n const chunkSize = 1900000;\n\n const uploadChunks: UploadChunkParams[] = [];\n\n // Prevent transforming chunk to arrayBuffer error: The requested file could not be read, typically due to permission problems that have occurred after a reference to a file was acquired.\n const clone: Blob = isBrowser() ? new Blob([await data.arrayBuffer()]) : data;\n\n // Split data into chunks\n let orderId = 0n;\n for (let start = 0; start < clone.size; start += chunkSize) {\n const chunk: Blob = clone.slice(start, start + chunkSize);\n\n uploadChunks.push({\n batchId,\n chunk,\n actor,\n orderId\n });\n\n orderId++;\n }\n\n // Upload chunks to the IC in batch - i.e. 12 chunks uploaded at a time.\n let chunkIds: UploadChunkResult[] = [];\n for await (const results of batchUploadChunks({uploadChunks})) {\n chunkIds = [...chunkIds, ...results];\n }\n\n const contentType: [[string, string]] | undefined =\n headers.find(([type, _]) => type.toLowerCase() === 'content-type') === undefined &&\n data.type !== undefined &&\n data.type !== ''\n ? [['Content-Type', data.type]]\n : undefined;\n\n await actor.commit_asset_upload({\n batch_id: batchId,\n chunk_ids: chunkIds.map(({chunk_id}: UploadChunkResult) => chunk_id),\n headers: [...headers, ...(contentType ? 
contentType : [])]\n });\n};\n\nasync function* batchUploadChunks({\n uploadChunks,\n limit = 12\n}: {\n uploadChunks: UploadChunkParams[];\n limit?: number;\n}): AsyncGenerator<UploadChunkResult[], void> {\n for (let i = 0; i < uploadChunks.length; i = i + limit) {\n const batch = uploadChunks.slice(i, i + limit);\n const result = await Promise.all(batch.map((params) => uploadChunk(params)));\n yield result;\n }\n}\n\ninterface UploadChunkResult {\n chunk_id: bigint;\n}\n\ninterface UploadChunkParams {\n batchId: bigint;\n chunk: Blob;\n actor: SatelliteActor | ConsoleActor;\n orderId: bigint;\n}\n\nconst uploadChunk = async ({\n batchId,\n chunk,\n actor,\n orderId\n}: UploadChunkParams): Promise<UploadChunkResult> =>\n actor.upload_asset_chunk({\n batch_id: batchId,\n content: new Uint8Array(await chunk.arrayBuffer()),\n order_id: toNullable(orderId)\n });\n"],
+ "mappings": "AAAA,OAAQ,cAAAA,MAAiB,iBACzB,OAAQ,aAAAC,MAAgB,mBAgBjB,IAAMC,EAAc,MAAO,CAChC,MAAO,CAAC,KAAAC,EAAM,SAAAC,EAAU,WAAAC,EAAY,QAAAC,EAAS,MAAAC,EAAO,SAAAC,EAAU,SAAAC,EAAU,YAAAC,CAAW,EACnF,MAAAC,EACA,kBAAAC,CACF,IAMqB,CACnB,GAAM,CAAC,SAAUC,CAAO,EAAI,MAAMD,EAAkB,CAClD,WAAAP,EACA,UAAWG,EACX,KAAMJ,EACN,MAAOJ,EAAmBO,CAAK,EAC/B,cAAeP,EAA0BS,CAAQ,EACjD,YAAaT,EAAWU,CAAW,CACrC,CAAC,EAGKI,EAAY,KAEZC,EAAoC,CAAC,EAGrCC,EAAcf,EAAU,EAAI,IAAI,KAAK,CAAC,MAAME,EAAK,YAAY,CAAC,CAAC,EAAIA,EAGrEc,EAAU,GACd,QAASC,EAAQ,EAAGA,EAAQF,EAAM,KAAME,GAASJ,EAAW,CAC1D,IAAMK,EAAcH,EAAM,MAAME,EAAOA,EAAQJ,CAAS,EAExDC,EAAa,KAAK,CAChB,QAAAF,EACA,MAAAM,EACA,MAAAR,EACA,QAAAM,CACF,CAAC,EAEDA,GACF,CAGA,IAAIG,EAAgC,CAAC,EACrC,cAAiBC,KAAWC,EAAkB,CAAC,aAAAP,CAAY,CAAC,EAC1DK,EAAW,CAAC,GAAGA,EAAU,GAAGC,CAAO,EAGrC,IAAME,EACJjB,EAAQ,KAAK,CAAC,CAACkB,EAAMC,CAAC,IAAMD,EAAK,YAAY,IAAM,cAAc,IAAM,QACvErB,EAAK,OAAS,QACdA,EAAK,OAAS,GACV,CAAC,CAAC,eAAgBA,EAAK,IAAI,CAAC,EAC5B,OAEN,MAAMQ,EAAM,oBAAoB,CAC9B,SAAUE,EACV,UAAWO,EAAS,IAAI,CAAC,CAAC,SAAAM,CAAQ,IAAyBA,CAAQ,EACnE,QAAS,CAAC,GAAGpB,EAAS,GAAIiB,GAA4B,CAAC,CAAE,CAC3D,CAAC,CACH,EAEA,eAAgBD,EAAkB,CAChC,aAAAP,EACA,MAAAY,EAAQ,EACV,EAG8C,CAC5C,QAASC,EAAI,EAAGA,EAAIb,EAAa,OAAQa,EAAIA,EAAID,EAAO,CACtD,IAAME,EAAQd,EAAa,MAAMa,EAAGA,EAAID,CAAK,EAE7C,MADe,MAAM,QAAQ,IAAIE,EAAM,IAAKC,GAAWC,EAAYD,CAAM,CAAC,CAAC,CAE7E,CACF,CAaA,IAAMC,EAAc,MAAO,CACzB,QAAAlB,EACA,MAAAM,EACA,MAAAR,EACA,QAAAM,CACF,IACEN,EAAM,mBAAmB,CACvB,SAAUE,EACV,QAAS,IAAI,WAAW,MAAMM,EAAM,YAAY,CAAC,EACjD,SAAUnB,EAAWiB,CAAO,CAC9B,CAAC",
+ "names": ["toNullable", "isBrowser", "uploadAsset", "data", "filename", "collection", "headers", "token", "fullPath", "encoding", "description", "actor", "init_asset_upload", "batchId", "chunkSize", "uploadChunks", "clone", "orderId", "start", "chunk", "chunkIds", "results", "batchUploadChunks", "contentType", "type", "_", "chunk_id", "limit", "i", "batch", "params", "uploadChunk"]
  }
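The TypeScript embedded in the source map spells out the upload flow both versions share: `init_asset_upload` opens a batch, the Blob is cloned (in browsers) and sliced into ~1.9 MB chunks, the chunks are uploaded twelve at a time through an async generator, and `commit_asset_upload` seals the batch with the collected chunk ids. A simplified, self-contained sketch of that batching pattern (generic names are mine; the real code is `batchUploadChunks`/`uploadChunk` above):

```ts
// Generic version of the batchUploadChunks pattern: run `worker` over `items`,
// at most `limit` at a time, yielding each batch's results in order.
async function* inBatches<T, R>(
  items: T[],
  worker: (item: T) => Promise<R>,
  limit = 12
): AsyncGenerator<R[], void> {
  for (let i = 0; i < items.length; i += limit) {
    // Items within a batch run concurrently; batches run sequentially.
    yield await Promise.all(items.slice(i, i + limit).map(worker));
  }
}

// Example: slice a Blob into chunks and "upload" them (here: just measure).
const chunkAndProcess = async (data: Blob): Promise<number[]> => {
  const chunkSize = 1_900_000; // ~1.9 MB, per the forum thread linked in the source
  const chunks: Blob[] = [];
  for (let start = 0; start < data.size; start += chunkSize) {
    chunks.push(data.slice(start, start + chunkSize));
  }

  let sizes: number[] = [];
  for await (const batch of inBatches(chunks, async (c) => c.size)) {
    sizes = [...sizes, ...batch];
  }
  return sizes;
};
```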
@@ -1,4 +1,4 @@
  import { createRequire as topLevelCreateRequire } from 'module';
  const require = topLevelCreateRequire(import.meta.url);
- import{Principal as R}from"@dfinity/principal";var U=t=>t==null,k=t=>!U(t);var n=t=>k(t)?[t]:[];var _=()=>typeof window<"u";var B=async({asset:{data:t,filename:s,collection:o,headers:a,token:l,fullPath:i,encoding:b,description:A},actor:c,init_asset_upload:C})=>{let{batch_id:p}=await C({collection:o,full_path:i,name:s,token:n(l),encoding_type:n(b),description:n(A)}),u=19e5,d=[],y=_()?new Blob([await t.arrayBuffer()]):t,f=0n;for(let e=0;e<y.size;e+=u){let m=y.slice(e,e+u);d.push({batchId:p,chunk:m,actor:c,orderId:f}),f++}let r=[];for await(let e of I({uploadChunks:d}))r=[...r,...e];let h=a.find(([e,m])=>e.toLowerCase()==="content-type")===void 0&&t.type!==void 0&&t.type!==""?[["Content-Type",t.type]]:void 0;await c.commit_asset_upload({batch_id:p,chunk_ids:r.map(({chunk_id:e})=>e),headers:[...a,...h||[]]})};async function*I({uploadChunks:t,limit:s=12}){for(let o=0;o<t.length;o=o+s){let a=t.slice(o,o+s);yield await Promise.all(a.map(i=>w(i)))}}var w=async({batchId:t,chunk:s,actor:o,orderId:a})=>o.upload_asset_chunk({batch_id:t,content:new Uint8Array(await s.arrayBuffer()),order_id:n(a)});export{B as uploadAsset};
+ import{toNullable as a}from"@dfinity/utils";import{isBrowser as U}from"@junobuild/utils";var R=async({asset:{data:t,filename:s,collection:o,headers:n,token:r,fullPath:i,encoding:f,description:k},actor:d,init_asset_upload:I})=>{let{batch_id:c}=await I({collection:o,full_path:i,name:s,token:a(r),encoding_type:a(f),description:a(k)}),p=19e5,u=[],y=U()?new Blob([await t.arrayBuffer()]):t,h=0n;for(let e=0;e<y.size;e+=p){let C=y.slice(e,e+p);u.push({batchId:c,chunk:C,actor:d,orderId:h}),h++}let l=[];for await(let e of _({uploadChunks:u}))l=[...l,...e];let m=n.find(([e,C])=>e.toLowerCase()==="content-type")===void 0&&t.type!==void 0&&t.type!==""?[["Content-Type",t.type]]:void 0;await d.commit_asset_upload({batch_id:c,chunk_ids:l.map(({chunk_id:e})=>e),headers:[...n,...m||[]]})};async function*_({uploadChunks:t,limit:s=12}){for(let o=0;o<t.length;o=o+s){let n=t.slice(o,o+s);yield await Promise.all(n.map(i=>A(i)))}}var A=async({batchId:t,chunk:s,actor:o,orderId:n})=>o.upload_asset_chunk({batch_id:t,content:new Uint8Array(await s.arrayBuffer()),order_id:a(n)});export{R as uploadAsset};
  //# sourceMappingURL=index.mjs.map
@@ -1,7 +1,7 @@
  {
  "version": 3,
- "sources": ["../../../utils/src/utils/debounce.utils.ts", "../../../utils/src/utils/json.utils.ts", "../../../utils/src/utils/null.utils.ts", "../../../utils/src/utils/did.utils.ts", "../../../utils/src/utils/env.utils.ts", "../../src/api/storage.api.ts"],
- "sourcesContent": ["/**\n * Creates a debounced function that delays invoking the provided function until after the specified timeout.\n * @param {Function} func - The function to debounce.\n * @param {number} [timeout=300] - The number of milliseconds to delay. Defaults to 300ms if not specified or invalid.\n * @returns {Function} A debounced function.\n */\n// eslint-disable-next-line @typescript-eslint/ban-types, local-rules/prefer-object-params\nexport const debounce = (func: Function, timeout?: number): Function => {\n let timer: ReturnType<typeof setTimeout> | undefined;\n\n return (...args: unknown[]) => {\n const next = () => func(...args);\n\n if (timer) {\n clearTimeout(timer);\n }\n\n timer = setTimeout(next, timeout !== undefined && timeout > 0 ? timeout : 300);\n };\n};\n", "import {Principal} from '@dfinity/principal';\nimport {nonNullish} from './null.utils';\n\nconst JSON_KEY_BIGINT = '__bigint__';\nconst JSON_KEY_PRINCIPAL = '__principal__';\nconst JSON_KEY_UINT8ARRAY = '__uint8array__';\n\n/**\n * A function that alters the behavior of the stringification process for BigInt, Principal, and Uint8Array.\n * @param {string} _key - The key of the value being stringified.\n * @param {unknown} value - The value being stringified.\n * @returns {unknown} The altered value for stringification.\n */\n// eslint-disable-next-line local-rules/prefer-object-params\nexport const jsonReplacer = (_key: string, value: unknown): unknown => {\n if (typeof value === 'bigint') {\n return {[JSON_KEY_BIGINT]: `${value}`};\n }\n\n if (nonNullish(value) && value instanceof Principal) {\n return {[JSON_KEY_PRINCIPAL]: value.toText()};\n }\n\n if (nonNullish(value) && value instanceof Uint8Array) {\n return {[JSON_KEY_UINT8ARRAY]: Array.from(value)};\n }\n\n return value;\n};\n\n/**\n * A parser that interprets revived BigInt, Principal, and Uint8Array when constructing JavaScript values or objects.\n * @param {string} _key - The key of the value being parsed.\n * @param {unknown} value - The value being parsed.\n * @returns {unknown} The parsed value.\n */\n// eslint-disable-next-line local-rules/prefer-object-params\nexport const jsonReviver = (_key: string, value: unknown): unknown => {\n const mapValue = <T>(key: string): T => (value as Record<string, T>)[key];\n\n if (nonNullish(value) && typeof value === 'object' && JSON_KEY_BIGINT in value) {\n return BigInt(mapValue(JSON_KEY_BIGINT));\n }\n\n if (nonNullish(value) && typeof value === 'object' && JSON_KEY_PRINCIPAL in value) {\n return Principal.fromText(mapValue(JSON_KEY_PRINCIPAL));\n }\n\n if (nonNullish(value) && typeof value === 'object' && JSON_KEY_UINT8ARRAY in value) {\n return Uint8Array.from(mapValue(JSON_KEY_UINT8ARRAY));\n }\n\n return value;\n};\n", "/**\n * Checks if the provided argument is null or undefined.\n * @template T\n * @param {T | undefined | null} argument - The argument to check.\n * @returns {boolean} True if the argument is null or undefined, false otherwise.\n */\n// eslint-disable-next-line local-rules/use-option-type-wrapper\nexport const isNullish = <T>(argument: T | undefined | null): argument is undefined | null =>\n argument === null || argument === undefined;\n\n/**\n * Checks if the provided argument is neither null nor undefined.\n * @template T\n * @param {T | undefined | null} argument - The argument to check.\n * @returns {boolean} True if the argument is neither null nor undefined, false otherwise.\n */\n// eslint-disable-next-line local-rules/use-option-type-wrapper\nexport const nonNullish = 
<T>(argument: T | undefined | null): argument is NonNullable<T> =>\n !isNullish(argument);\n\n/**\n * Represents an error thrown when a value is null or undefined.\n * @class\n * @extends {Error}\n */\nexport class NullishError extends Error {}\n\n/**\n * Asserts that a value is neither null nor undefined.\n * @template T\n * @param {T} value - The value to check.\n * @param {string} [message] - The optional error message to use if the assertion fails.\n * @throws {NullishError} If the value is null or undefined.\n * @returns {asserts value is NonNullable<T>} Asserts that the value is neither null nor undefined.\n */\nexport const assertNonNullish: <T>(\n value: T,\n message?: string\n // eslint-disable-next-line local-rules/prefer-object-params\n) => asserts value is NonNullable<T> = <T>(value: T, message?: string): void => {\n if (isNullish(value)) {\n throw new NullishError(message);\n }\n};\n", "import {jsonReplacer, jsonReviver} from './json.utils';\nimport {nonNullish} from './null.utils';\n\n/**\n * Converts a value to a nullable array.\n * @template T\n * @param {T} [value] - The value to convert.\n * @returns {([] | [T])} A nullable array containing the value if non-nullish, or an empty array if nullish.\n */\nexport const toNullable = <T>(value?: T): [] | [T] => (nonNullish(value) ? [value] : []);\n\n/**\n * Extracts a value from a nullable array.\n * @template T\n * @param {([] | [T])} value - The nullable array.\n * @returns {(T | undefined)} The value if present, or undefined if the array is empty.\n */\nexport const fromNullable = <T>(value: [] | [T]): T | undefined => value?.[0];\n\n/**\n * Converts data to a Uint8Array for transmission or storage.\n * @template T\n * @param {T} data - The data to convert.\n * @returns {Promise<Uint8Array>} A promise that resolves to a Uint8Array representation of the data.\n */\nexport const toArray = async <T>(data: T): Promise<Uint8Array> => {\n const blob: Blob = new Blob([JSON.stringify(data, jsonReplacer)], {\n type: 'application/json; charset=utf-8'\n });\n return new Uint8Array(await blob.arrayBuffer());\n};\n\n/**\n * Converts a Uint8Array or number array back to the original data type.\n * @template T\n * @param {(Uint8Array | number[])} data - The array to convert.\n * @returns {Promise<T>} A promise that resolves to the original data.\n */\nexport const fromArray = async <T>(data: Uint8Array | number[]): Promise<T> => {\n const blob: Blob = new Blob([data instanceof Uint8Array ? 
data : new Uint8Array(data)], {\n type: 'application/json; charset=utf-8'\n });\n return JSON.parse(await blob.text(), jsonReviver);\n};\n", "/**\n * Checks if the current environment is a browser.\n * @returns {boolean} True if the current environment is a browser, false otherwise.\n */\nexport const isBrowser = (): boolean => typeof window !== `undefined`;\n", "import {isBrowser, toNullable} from '@junobuild/utils';\nimport type {\n _SERVICE as ConsoleActor,\n InitAssetKey as ConsoleInitAssetKey,\n InitUploadResult as ConsoleInitUploadResult\n} from '../../declarations/console/console.did';\nimport type {\n _SERVICE as SatelliteActor,\n InitAssetKey as SatelliteInitAssetKey,\n InitUploadResult as SatelliteInitUploadResult\n} from '../../declarations/satellite/satellite.did';\nimport type {ENCODING_TYPE, Storage} from '../types/storage.types';\n\nexport type UploadAsset = Required<Omit<Storage, 'token' | 'encoding' | 'description'>> &\n Pick<Storage, 'token' | 'encoding' | 'description'>;\n\nexport const uploadAsset = async ({\n asset: {data, filename, collection, headers, token, fullPath, encoding, description},\n actor,\n init_asset_upload\n}: {\n asset: UploadAsset;\n actor: SatelliteActor | ConsoleActor;\n init_asset_upload: (\n initAssetKey: SatelliteInitAssetKey | ConsoleInitAssetKey\n ) => Promise<SatelliteInitUploadResult | ConsoleInitUploadResult>;\n}): Promise<void> => {\n const {batch_id: batchId} = await init_asset_upload({\n collection,\n full_path: fullPath,\n name: filename,\n token: toNullable<string>(token),\n encoding_type: toNullable<ENCODING_TYPE>(encoding),\n description: toNullable(description)\n });\n\n // https://forum.dfinity.org/t/optimal-upload-chunk-size/20444/23?u=peterparker\n const chunkSize = 1900000;\n\n const uploadChunks: UploadChunkParams[] = [];\n\n // Prevent transforming chunk to arrayBuffer error: The requested file could not be read, typically due to permission problems that have occurred after a reference to a file was acquired.\n const clone: Blob = isBrowser() ? new Blob([await data.arrayBuffer()]) : data;\n\n // Split data into chunks\n let orderId = 0n;\n for (let start = 0; start < clone.size; start += chunkSize) {\n const chunk: Blob = clone.slice(start, start + chunkSize);\n\n uploadChunks.push({\n batchId,\n chunk,\n actor,\n orderId\n });\n\n orderId++;\n }\n\n // Upload chunks to the IC in batch - i.e. 12 chunks uploaded at a time.\n let chunkIds: UploadChunkResult[] = [];\n for await (const results of batchUploadChunks({uploadChunks})) {\n chunkIds = [...chunkIds, ...results];\n }\n\n const contentType: [[string, string]] | undefined =\n headers.find(([type, _]) => type.toLowerCase() === 'content-type') === undefined &&\n data.type !== undefined &&\n data.type !== ''\n ? [['Content-Type', data.type]]\n : undefined;\n\n await actor.commit_asset_upload({\n batch_id: batchId,\n chunk_ids: chunkIds.map(({chunk_id}: UploadChunkResult) => chunk_id),\n headers: [...headers, ...(contentType ? 
contentType : [])]\n });\n};\n\nasync function* batchUploadChunks({\n uploadChunks,\n limit = 12\n}: {\n uploadChunks: UploadChunkParams[];\n limit?: number;\n}): AsyncGenerator<UploadChunkResult[], void> {\n for (let i = 0; i < uploadChunks.length; i = i + limit) {\n const batch = uploadChunks.slice(i, i + limit);\n const result = await Promise.all(batch.map((params) => uploadChunk(params)));\n yield result;\n }\n}\n\ninterface UploadChunkResult {\n chunk_id: bigint;\n}\n\ninterface UploadChunkParams {\n batchId: bigint;\n chunk: Blob;\n actor: SatelliteActor | ConsoleActor;\n orderId: bigint;\n}\n\nconst uploadChunk = async ({\n batchId,\n chunk,\n actor,\n orderId\n}: UploadChunkParams): Promise<UploadChunkResult> =>\n actor.upload_asset_chunk({\n batch_id: batchId,\n content: new Uint8Array(await chunk.arrayBuffer()),\n order_id: toNullable(orderId)\n });\n"],
- "mappings": ";;ACAA,OAAQ,aAAAA,MAAgB,qBCOjB,IAAMC,EAAgBC,GAC3BA,GAAa,KASFC,EAAiBD,GAC5B,CAACD,EAAUC,CAAQ,ECTd,IAAME,EAAiBC,GAAyBC,EAAWD,CAAK,EAAI,CAACA,CAAK,EAAI,CAAC,ECL/E,IAAME,EAAY,IAAe,OAAO,OAAW,ICYnD,IAAMC,EAAc,MAAO,CAChC,MAAO,CAAC,KAAAC,EAAM,SAAAC,EAAU,WAAAC,EAAY,QAAAC,EAAS,MAAAC,EAAO,SAAAC,EAAU,SAAAC,EAAU,YAAAC,CAAW,EACnF,MAAAC,EACA,kBAAAC,CACF,IAMqB,CACnB,GAAM,CAAC,SAAUC,CAAO,EAAI,MAAMD,EAAkB,CAClD,WAAAP,EACA,UAAWG,EACX,KAAMJ,EACN,MAAOU,EAAmBP,CAAK,EAC/B,cAAeO,EAA0BL,CAAQ,EACjD,YAAaK,EAAWJ,CAAW,CACrC,CAAC,EAGKK,EAAY,KAEZC,EAAoC,CAAC,EAGrCC,EAAcC,EAAU,EAAI,IAAI,KAAK,CAAC,MAAMf,EAAK,YAAY,CAAC,CAAC,EAAIA,EAGrEgB,EAAU,GACd,QAASC,EAAQ,EAAGA,EAAQH,EAAM,KAAMG,GAASL,EAAW,CAC1D,IAAMM,EAAcJ,EAAM,MAAMG,EAAOA,EAAQL,CAAS,EAExDC,EAAa,KAAK,CAChB,QAAAH,EACA,MAAAQ,EACA,MAAAV,EACA,QAAAQ,CACF,CAAC,EAEDA,GACF,CAGA,IAAIG,EAAgC,CAAC,EACrC,cAAiBC,KAAWC,EAAkB,CAAC,aAAAR,CAAY,CAAC,EAC1DM,EAAW,CAAC,GAAGA,EAAU,GAAGC,CAAO,EAGrC,IAAME,EACJnB,EAAQ,KAAK,CAAC,CAACoB,EAAMC,CAAC,IAAMD,EAAK,YAAY,IAAM,cAAc,IAAM,QACvEvB,EAAK,OAAS,QACdA,EAAK,OAAS,GACV,CAAC,CAAC,eAAgBA,EAAK,IAAI,CAAC,EAC5B,OAEN,MAAMQ,EAAM,oBAAoB,CAC9B,SAAUE,EACV,UAAWS,EAAS,IAAI,CAAC,CAAC,SAAAM,CAAQ,IAAyBA,CAAQ,EACnE,QAAS,CAAC,GAAGtB,EAAS,GAAImB,GAA4B,CAAC,CAAE,CAC3D,CAAC,CACH,EAEA,eAAgBD,EAAkB,CAChC,aAAAR,EACA,MAAAa,EAAQ,EACV,EAG8C,CAC5C,QAASC,EAAI,EAAGA,EAAId,EAAa,OAAQc,EAAIA,EAAID,EAAO,CACtD,IAAME,EAAQf,EAAa,MAAMc,EAAGA,EAAID,CAAK,EAE7C,MADe,MAAM,QAAQ,IAAIE,EAAM,IAAKC,GAAWC,EAAYD,CAAM,CAAC,CAAC,CAE7E,CACF,CAaA,IAAMC,EAAc,MAAO,CACzB,QAAApB,EACA,MAAAQ,EACA,MAAAV,EACA,QAAAQ,CACF,IACER,EAAM,mBAAmB,CACvB,SAAUE,EACV,QAAS,IAAI,WAAW,MAAMQ,EAAM,YAAY,CAAC,EACjD,SAAUP,EAAWK,CAAO,CAC9B,CAAC",
- "names": ["Principal", "isNullish", "argument", "nonNullish", "toNullable", "value", "nonNullish", "isBrowser", "uploadAsset", "data", "filename", "collection", "headers", "token", "fullPath", "encoding", "description", "actor", "init_asset_upload", "batchId", "g", "chunkSize", "uploadChunks", "clone", "h", "orderId", "start", "chunk", "chunkIds", "results", "batchUploadChunks", "contentType", "type", "_", "chunk_id", "limit", "i", "batch", "params", "uploadChunk"]
+ "sources": ["../../src/api/storage.api.ts"],
+ "sourcesContent": ["import {toNullable} from '@dfinity/utils';\nimport {isBrowser} from '@junobuild/utils';\nimport type {\n _SERVICE as ConsoleActor,\n InitAssetKey as ConsoleInitAssetKey,\n InitUploadResult as ConsoleInitUploadResult\n} from '../../declarations/console/console.did';\nimport type {\n _SERVICE as SatelliteActor,\n InitAssetKey as SatelliteInitAssetKey,\n InitUploadResult as SatelliteInitUploadResult\n} from '../../declarations/satellite/satellite.did';\nimport type {ENCODING_TYPE, Storage} from '../types/storage.types';\n\nexport type UploadAsset = Required<Omit<Storage, 'token' | 'encoding' | 'description'>> &\n Pick<Storage, 'token' | 'encoding' | 'description'>;\n\nexport const uploadAsset = async ({\n asset: {data, filename, collection, headers, token, fullPath, encoding, description},\n actor,\n init_asset_upload\n}: {\n asset: UploadAsset;\n actor: SatelliteActor | ConsoleActor;\n init_asset_upload: (\n initAssetKey: SatelliteInitAssetKey | ConsoleInitAssetKey\n ) => Promise<SatelliteInitUploadResult | ConsoleInitUploadResult>;\n}): Promise<void> => {\n const {batch_id: batchId} = await init_asset_upload({\n collection,\n full_path: fullPath,\n name: filename,\n token: toNullable<string>(token),\n encoding_type: toNullable<ENCODING_TYPE>(encoding),\n description: toNullable(description)\n });\n\n // https://forum.dfinity.org/t/optimal-upload-chunk-size/20444/23?u=peterparker\n const chunkSize = 1900000;\n\n const uploadChunks: UploadChunkParams[] = [];\n\n // Prevent transforming chunk to arrayBuffer error: The requested file could not be read, typically due to permission problems that have occurred after a reference to a file was acquired.\n const clone: Blob = isBrowser() ? new Blob([await data.arrayBuffer()]) : data;\n\n // Split data into chunks\n let orderId = 0n;\n for (let start = 0; start < clone.size; start += chunkSize) {\n const chunk: Blob = clone.slice(start, start + chunkSize);\n\n uploadChunks.push({\n batchId,\n chunk,\n actor,\n orderId\n });\n\n orderId++;\n }\n\n // Upload chunks to the IC in batch - i.e. 12 chunks uploaded at a time.\n let chunkIds: UploadChunkResult[] = [];\n for await (const results of batchUploadChunks({uploadChunks})) {\n chunkIds = [...chunkIds, ...results];\n }\n\n const contentType: [[string, string]] | undefined =\n headers.find(([type, _]) => type.toLowerCase() === 'content-type') === undefined &&\n data.type !== undefined &&\n data.type !== ''\n ? [['Content-Type', data.type]]\n : undefined;\n\n await actor.commit_asset_upload({\n batch_id: batchId,\n chunk_ids: chunkIds.map(({chunk_id}: UploadChunkResult) => chunk_id),\n headers: [...headers, ...(contentType ? 
contentType : [])]\n });\n};\n\nasync function* batchUploadChunks({\n uploadChunks,\n limit = 12\n}: {\n uploadChunks: UploadChunkParams[];\n limit?: number;\n}): AsyncGenerator<UploadChunkResult[], void> {\n for (let i = 0; i < uploadChunks.length; i = i + limit) {\n const batch = uploadChunks.slice(i, i + limit);\n const result = await Promise.all(batch.map((params) => uploadChunk(params)));\n yield result;\n }\n}\n\ninterface UploadChunkResult {\n chunk_id: bigint;\n}\n\ninterface UploadChunkParams {\n batchId: bigint;\n chunk: Blob;\n actor: SatelliteActor | ConsoleActor;\n orderId: bigint;\n}\n\nconst uploadChunk = async ({\n batchId,\n chunk,\n actor,\n orderId\n}: UploadChunkParams): Promise<UploadChunkResult> =>\n actor.upload_asset_chunk({\n batch_id: batchId,\n content: new Uint8Array(await chunk.arrayBuffer()),\n order_id: toNullable(orderId)\n });\n"],
+ "mappings": ";;AAAA,OAAQ,cAAAA,MAAiB,iBACzB,OAAQ,aAAAC,MAAgB,mBAgBjB,IAAMC,EAAc,MAAO,CAChC,MAAO,CAAC,KAAAC,EAAM,SAAAC,EAAU,WAAAC,EAAY,QAAAC,EAAS,MAAAC,EAAO,SAAAC,EAAU,SAAAC,EAAU,YAAAC,CAAW,EACnF,MAAAC,EACA,kBAAAC,CACF,IAMqB,CACnB,GAAM,CAAC,SAAUC,CAAO,EAAI,MAAMD,EAAkB,CAClD,WAAAP,EACA,UAAWG,EACX,KAAMJ,EACN,MAAOJ,EAAmBO,CAAK,EAC/B,cAAeP,EAA0BS,CAAQ,EACjD,YAAaT,EAAWU,CAAW,CACrC,CAAC,EAGKI,EAAY,KAEZC,EAAoC,CAAC,EAGrCC,EAAcf,EAAU,EAAI,IAAI,KAAK,CAAC,MAAME,EAAK,YAAY,CAAC,CAAC,EAAIA,EAGrEc,EAAU,GACd,QAASC,EAAQ,EAAGA,EAAQF,EAAM,KAAME,GAASJ,EAAW,CAC1D,IAAMK,EAAcH,EAAM,MAAME,EAAOA,EAAQJ,CAAS,EAExDC,EAAa,KAAK,CAChB,QAAAF,EACA,MAAAM,EACA,MAAAR,EACA,QAAAM,CACF,CAAC,EAEDA,GACF,CAGA,IAAIG,EAAgC,CAAC,EACrC,cAAiBC,KAAWC,EAAkB,CAAC,aAAAP,CAAY,CAAC,EAC1DK,EAAW,CAAC,GAAGA,EAAU,GAAGC,CAAO,EAGrC,IAAME,EACJjB,EAAQ,KAAK,CAAC,CAACkB,EAAMC,CAAC,IAAMD,EAAK,YAAY,IAAM,cAAc,IAAM,QACvErB,EAAK,OAAS,QACdA,EAAK,OAAS,GACV,CAAC,CAAC,eAAgBA,EAAK,IAAI,CAAC,EAC5B,OAEN,MAAMQ,EAAM,oBAAoB,CAC9B,SAAUE,EACV,UAAWO,EAAS,IAAI,CAAC,CAAC,SAAAM,CAAQ,IAAyBA,CAAQ,EACnE,QAAS,CAAC,GAAGpB,EAAS,GAAIiB,GAA4B,CAAC,CAAE,CAC3D,CAAC,CACH,EAEA,eAAgBD,EAAkB,CAChC,aAAAP,EACA,MAAAY,EAAQ,EACV,EAG8C,CAC5C,QAASC,EAAI,EAAGA,EAAIb,EAAa,OAAQa,EAAIA,EAAID,EAAO,CACtD,IAAME,EAAQd,EAAa,MAAMa,EAAGA,EAAID,CAAK,EAE7C,MADe,MAAM,QAAQ,IAAIE,EAAM,IAAKC,GAAWC,EAAYD,CAAM,CAAC,CAAC,CAE7E,CACF,CAaA,IAAMC,EAAc,MAAO,CACzB,QAAAlB,EACA,MAAAM,EACA,MAAAR,EACA,QAAAM,CACF,IACEN,EAAM,mBAAmB,CACvB,SAAUE,EACV,QAAS,IAAI,WAAW,MAAMM,EAAM,YAAY,CAAC,EACjD,SAAUnB,EAAWiB,CAAO,CAC9B,CAAC",
+ "names": ["toNullable", "isBrowser", "uploadAsset", "data", "filename", "collection", "headers", "token", "fullPath", "encoding", "description", "actor", "init_asset_upload", "batchId", "chunkSize", "uploadChunks", "clone", "orderId", "start", "chunk", "chunkIds", "results", "batchUploadChunks", "contentType", "type", "_", "chunk_id", "limit", "i", "batch", "params", "uploadChunk"]
  }
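One detail worth noting in the committed headers of both bundles: when the caller supplies no `Content-Type` header and the Blob reports a non-empty `type`, a `Content-Type` derived from `data.type` is appended before `commit_asset_upload`. A standalone sketch of that fallback:

```ts
// Content-Type fallback, as in the commit step of uploadAsset: prefer the
// caller's explicit header, otherwise fall back to the Blob's own MIME type.
const withContentType = (
  headers: [string, string][],
  data: Blob
): [string, string][] => {
  const hasContentType = headers.some(
    ([name]) => name.toLowerCase() === 'content-type'
  );
  return hasContentType || data.type === ''
    ? headers
    : [...headers, ['Content-Type', data.type]];
};

withContentType([], new Blob(['{}'], {type: 'application/json'}));
// -> [['Content-Type', 'application/json']]
```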
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@junobuild/storage",
- "version": "0.1.1",
+ "version": "0.1.2",
  "description": "A library for interfacing with Juno's Storage features.",
  "author": "David Dal Busco (https://daviddalbusco.com)",
  "license": "MIT",
@@ -50,6 +50,8 @@
  "@dfinity/agent": "^2.1.3",
  "@dfinity/candid": "^2.1.3",
  "@dfinity/identity": "^2.1.3",
- "@dfinity/principal": "^2.1.3"
+ "@dfinity/principal": "^2.1.3",
+ "@dfinity/utils": "^2",
+ "@junobuild/utils": "*"
  }
  }
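For orientation, the package's sole export is `uploadAsset` (its full TypeScript signature appears in the source maps above). A hypothetical usage sketch, assuming an already-constructed satellite actor that exposes `init_asset_upload` alongside the `upload_asset_chunk`/`commit_asset_upload` methods the code relies on:

```ts
// Hypothetical wiring; the actor construction and collection name are
// assumptions for illustration, not part of this package.
import {uploadAsset} from '@junobuild/storage';

const uploadHello = async (actor: any /* SatelliteActor */): Promise<void> => {
  const data = new Blob([JSON.stringify({hello: 'world'})], {
    type: 'application/json'
  });

  await uploadAsset({
    asset: {
      data,
      filename: 'hello.json',
      collection: 'assets',
      fullPath: '/assets/hello.json',
      headers: []
    },
    actor,
    // Forward the batch-initialization call to the actor.
    init_asset_upload: (initAssetKey) => actor.init_asset_upload(initAssetKey)
  });
};
```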