@junobuild/storage 0.1.3 → 0.1.5
This diff shows the content of publicly available package versions as published to their public registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.
- package/LICENSE +1 -1
- package/dist/browser/index.js +1 -1
- package/dist/browser/index.js.map +2 -2
- package/dist/declarations/satellite/satellite.did.d.ts +8 -5
- package/dist/declarations/satellite/satellite.factory.did.js +11 -8
- package/dist/declarations/satellite/satellite.factory.did.mjs +11 -8
- package/dist/node/index.mjs +1 -1
- package/dist/node/index.mjs.map +2 -2
- package/package.json +1 -1
package/LICENSE
CHANGED
package/dist/browser/index.js
CHANGED

@@ -1,2 +1,2 @@
-
import{toNullable as a}from"@dfinity/utils";import{isBrowser as U}from"@junobuild/utils";var R=async({asset:{data:t,filename:s,collection:o,headers:n,token:r,fullPath:i,encoding:
+
import{toNullable as a}from"@dfinity/utils";import{isBrowser as U}from"@junobuild/utils";var R=async({asset:{data:t,filename:s,collection:o,headers:n,token:r,fullPath:i,encoding:C,description:f},actor:d,init_asset_upload:k})=>{let{batch_id:c}=await k({collection:o,full_path:i,name:s,token:a(r),encoding_type:a(C),description:a(f)}),p=19e5,u=[],y=U()?new Blob([await t.arrayBuffer()]):t,h=0n;for(let e=0;e<y.size;e+=p){let m=y.slice(e,e+p);u.push({batchId:c,chunk:m,actor:d,orderId:h}),h++}let l=[];for await(let e of _({uploadChunks:u}))l=[...l,...e];let I=n.find(([e,m])=>e.toLowerCase()==="content-type")===void 0&&t.type!==void 0&&t.type!==""?[["Content-Type",t.type]]:void 0;await d.commit_asset_upload({batch_id:c,chunk_ids:l.map(({chunk_id:e})=>e),headers:[...n,...I??[]]})};async function*_({uploadChunks:t,limit:s=12}){for(let o=0;o<t.length;o=o+s){let n=t.slice(o,o+s);yield await Promise.all(n.map(i=>A(i)))}}var A=async({batchId:t,chunk:s,actor:o,orderId:n})=>o.upload_asset_chunk({batch_id:t,content:new Uint8Array(await s.arrayBuffer()),order_id:a(n)});export{R as uploadAsset};
//# sourceMappingURL=index.js.map

package/dist/browser/index.js.map
CHANGED

@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../src/api/storage.api.ts"],
-
"sourcesContent": ["import {toNullable} from '@dfinity/utils';\nimport {isBrowser} from '@junobuild/utils';\nimport type {\n _SERVICE as ConsoleActor,\n InitAssetKey as ConsoleInitAssetKey,\n InitUploadResult as ConsoleInitUploadResult\n} from '../../declarations/console/console.did';\nimport type {\n _SERVICE as SatelliteActor,\n InitAssetKey as SatelliteInitAssetKey,\n InitUploadResult as SatelliteInitUploadResult\n} from '../../declarations/satellite/satellite.did';\nimport type {ENCODING_TYPE, Storage} from '../types/storage.types';\n\nexport type UploadAsset = Required<Omit<Storage, 'token' | 'encoding' | 'description'>> &\n Pick<Storage, 'token' | 'encoding' | 'description'>;\n\nexport const uploadAsset = async ({\n asset: {data, filename, collection, headers, token, fullPath, encoding, description},\n actor,\n init_asset_upload\n}: {\n asset: UploadAsset;\n actor: SatelliteActor | ConsoleActor;\n init_asset_upload: (\n initAssetKey: SatelliteInitAssetKey | ConsoleInitAssetKey\n ) => Promise<SatelliteInitUploadResult | ConsoleInitUploadResult>;\n}): Promise<void> => {\n const {batch_id: batchId} = await init_asset_upload({\n collection,\n full_path: fullPath,\n name: filename,\n token: toNullable<string>(token),\n encoding_type: toNullable<ENCODING_TYPE>(encoding),\n description: toNullable(description)\n });\n\n // https://forum.dfinity.org/t/optimal-upload-chunk-size/20444/23?u=peterparker\n const chunkSize = 1900000;\n\n const uploadChunks: UploadChunkParams[] = [];\n\n // Prevent transforming chunk to arrayBuffer error: The requested file could not be read, typically due to permission problems that have occurred after a reference to a file was acquired.\n const clone: Blob = isBrowser() ? new Blob([await data.arrayBuffer()]) : data;\n\n // Split data into chunks\n let orderId = 0n;\n for (let start = 0; start < clone.size; start += chunkSize) {\n const chunk: Blob = clone.slice(start, start + chunkSize);\n\n uploadChunks.push({\n batchId,\n chunk,\n actor,\n orderId\n });\n\n orderId++;\n }\n\n // Upload chunks to the IC in batch - i.e. 12 chunks uploaded at a time.\n let chunkIds: UploadChunkResult[] = [];\n for await (const results of batchUploadChunks({uploadChunks})) {\n chunkIds = [...chunkIds, ...results];\n }\n\n const contentType: [[string, string]] | undefined =\n headers.find(([type, _]) => type.toLowerCase() === 'content-type') === undefined &&\n data.type !== undefined &&\n data.type !== ''\n ? [['Content-Type', data.type]]\n : undefined;\n\n await actor.commit_asset_upload({\n batch_id: batchId,\n chunk_ids: chunkIds.map(({chunk_id}: UploadChunkResult) => chunk_id),\n headers: [...headers, ...(contentType
-
"mappings": "AAAA,OAAQ,cAAAA,MAAiB,iBACzB,OAAQ,aAAAC,MAAgB,mBAgBjB,IAAMC,EAAc,MAAO,CAChC,MAAO,CAAC,KAAAC,EAAM,SAAAC,EAAU,WAAAC,EAAY,QAAAC,EAAS,MAAAC,EAAO,SAAAC,EAAU,SAAAC,EAAU,YAAAC,CAAW,EACnF,MAAAC,EACA,kBAAAC,CACF,IAMqB,CACnB,GAAM,CAAC,SAAUC,CAAO,EAAI,MAAMD,EAAkB,CAClD,WAAAP,EACA,UAAWG,EACX,KAAMJ,EACN,MAAOJ,EAAmBO,CAAK,EAC/B,cAAeP,EAA0BS,CAAQ,EACjD,YAAaT,EAAWU,CAAW,CACrC,CAAC,EAGKI,EAAY,KAEZC,EAAoC,CAAC,EAGrCC,EAAcf,EAAU,EAAI,IAAI,KAAK,CAAC,MAAME,EAAK,YAAY,CAAC,CAAC,EAAIA,EAGrEc,EAAU,GACd,QAASC,EAAQ,EAAGA,EAAQF,EAAM,KAAME,GAASJ,EAAW,CAC1D,IAAMK,EAAcH,EAAM,MAAME,EAAOA,EAAQJ,CAAS,EAExDC,EAAa,KAAK,CAChB,QAAAF,EACA,MAAAM,EACA,MAAAR,EACA,QAAAM,CACF,CAAC,EAEDA,GACF,CAGA,IAAIG,EAAgC,CAAC,EACrC,cAAiBC,KAAWC,EAAkB,CAAC,aAAAP,CAAY,CAAC,EAC1DK,EAAW,CAAC,GAAGA,EAAU,GAAGC,CAAO,EAGrC,IAAME,EACJjB,EAAQ,KAAK,CAAC,CAACkB,EAAMC,CAAC,IAAMD,EAAK,YAAY,IAAM,cAAc,IAAM,QACvErB,EAAK,OAAS,QACdA,EAAK,OAAS,GACV,CAAC,CAAC,eAAgBA,EAAK,IAAI,CAAC,EAC5B,OAEN,MAAMQ,EAAM,oBAAoB,CAC9B,SAAUE,EACV,UAAWO,EAAS,IAAI,CAAC,CAAC,SAAAM,CAAQ,IAAyBA,CAAQ,EACnE,QAAS,CAAC,GAAGpB,EAAS,GAAIiB,
+
"sourcesContent": ["import {toNullable} from '@dfinity/utils';\nimport {isBrowser} from '@junobuild/utils';\nimport type {\n _SERVICE as ConsoleActor,\n InitAssetKey as ConsoleInitAssetKey,\n InitUploadResult as ConsoleInitUploadResult\n} from '../../declarations/console/console.did';\nimport type {\n _SERVICE as SatelliteActor,\n InitAssetKey as SatelliteInitAssetKey,\n InitUploadResult as SatelliteInitUploadResult\n} from '../../declarations/satellite/satellite.did';\nimport type {ENCODING_TYPE, Storage} from '../types/storage.types';\n\nexport type UploadAsset = Required<Omit<Storage, 'token' | 'encoding' | 'description'>> &\n Pick<Storage, 'token' | 'encoding' | 'description'>;\n\nexport const uploadAsset = async ({\n asset: {data, filename, collection, headers, token, fullPath, encoding, description},\n actor,\n init_asset_upload\n}: {\n asset: UploadAsset;\n actor: SatelliteActor | ConsoleActor;\n init_asset_upload: (\n initAssetKey: SatelliteInitAssetKey | ConsoleInitAssetKey\n ) => Promise<SatelliteInitUploadResult | ConsoleInitUploadResult>;\n}): Promise<void> => {\n const {batch_id: batchId} = await init_asset_upload({\n collection,\n full_path: fullPath,\n name: filename,\n token: toNullable<string>(token),\n encoding_type: toNullable<ENCODING_TYPE>(encoding),\n description: toNullable(description)\n });\n\n // https://forum.dfinity.org/t/optimal-upload-chunk-size/20444/23?u=peterparker\n const chunkSize = 1900000;\n\n const uploadChunks: UploadChunkParams[] = [];\n\n // Prevent transforming chunk to arrayBuffer error: The requested file could not be read, typically due to permission problems that have occurred after a reference to a file was acquired.\n const clone: Blob = isBrowser() ? new Blob([await data.arrayBuffer()]) : data;\n\n // Split data into chunks\n let orderId = 0n;\n for (let start = 0; start < clone.size; start += chunkSize) {\n const chunk: Blob = clone.slice(start, start + chunkSize);\n\n uploadChunks.push({\n batchId,\n chunk,\n actor,\n orderId\n });\n\n orderId++;\n }\n\n // Upload chunks to the IC in batch - i.e. 12 chunks uploaded at a time.\n let chunkIds: UploadChunkResult[] = [];\n for await (const results of batchUploadChunks({uploadChunks})) {\n chunkIds = [...chunkIds, ...results];\n }\n\n const contentType: [[string, string]] | undefined =\n headers.find(([type, _]) => type.toLowerCase() === 'content-type') === undefined &&\n data.type !== undefined &&\n data.type !== ''\n ? [['Content-Type', data.type]]\n : undefined;\n\n await actor.commit_asset_upload({\n batch_id: batchId,\n chunk_ids: chunkIds.map(({chunk_id}: UploadChunkResult) => chunk_id),\n headers: [...headers, ...(contentType ?? [])]\n });\n};\n\nasync function* batchUploadChunks({\n uploadChunks,\n limit = 12\n}: {\n uploadChunks: UploadChunkParams[];\n limit?: number;\n}): AsyncGenerator<UploadChunkResult[], void> {\n for (let i = 0; i < uploadChunks.length; i = i + limit) {\n const batch = uploadChunks.slice(i, i + limit);\n const result = await Promise.all(batch.map((params) => uploadChunk(params)));\n yield result;\n }\n}\n\ninterface UploadChunkResult {\n chunk_id: bigint;\n}\n\ninterface UploadChunkParams {\n batchId: bigint;\n chunk: Blob;\n actor: SatelliteActor | ConsoleActor;\n orderId: bigint;\n}\n\nconst uploadChunk = async ({\n batchId,\n chunk,\n actor,\n orderId\n}: UploadChunkParams): Promise<UploadChunkResult> =>\n actor.upload_asset_chunk({\n batch_id: batchId,\n content: new Uint8Array(await chunk.arrayBuffer()),\n order_id: toNullable(orderId)\n });\n"],
+
"mappings": "AAAA,OAAQ,cAAAA,MAAiB,iBACzB,OAAQ,aAAAC,MAAgB,mBAgBjB,IAAMC,EAAc,MAAO,CAChC,MAAO,CAAC,KAAAC,EAAM,SAAAC,EAAU,WAAAC,EAAY,QAAAC,EAAS,MAAAC,EAAO,SAAAC,EAAU,SAAAC,EAAU,YAAAC,CAAW,EACnF,MAAAC,EACA,kBAAAC,CACF,IAMqB,CACnB,GAAM,CAAC,SAAUC,CAAO,EAAI,MAAMD,EAAkB,CAClD,WAAAP,EACA,UAAWG,EACX,KAAMJ,EACN,MAAOJ,EAAmBO,CAAK,EAC/B,cAAeP,EAA0BS,CAAQ,EACjD,YAAaT,EAAWU,CAAW,CACrC,CAAC,EAGKI,EAAY,KAEZC,EAAoC,CAAC,EAGrCC,EAAcf,EAAU,EAAI,IAAI,KAAK,CAAC,MAAME,EAAK,YAAY,CAAC,CAAC,EAAIA,EAGrEc,EAAU,GACd,QAASC,EAAQ,EAAGA,EAAQF,EAAM,KAAME,GAASJ,EAAW,CAC1D,IAAMK,EAAcH,EAAM,MAAME,EAAOA,EAAQJ,CAAS,EAExDC,EAAa,KAAK,CAChB,QAAAF,EACA,MAAAM,EACA,MAAAR,EACA,QAAAM,CACF,CAAC,EAEDA,GACF,CAGA,IAAIG,EAAgC,CAAC,EACrC,cAAiBC,KAAWC,EAAkB,CAAC,aAAAP,CAAY,CAAC,EAC1DK,EAAW,CAAC,GAAGA,EAAU,GAAGC,CAAO,EAGrC,IAAME,EACJjB,EAAQ,KAAK,CAAC,CAACkB,EAAMC,CAAC,IAAMD,EAAK,YAAY,IAAM,cAAc,IAAM,QACvErB,EAAK,OAAS,QACdA,EAAK,OAAS,GACV,CAAC,CAAC,eAAgBA,EAAK,IAAI,CAAC,EAC5B,OAEN,MAAMQ,EAAM,oBAAoB,CAC9B,SAAUE,EACV,UAAWO,EAAS,IAAI,CAAC,CAAC,SAAAM,CAAQ,IAAyBA,CAAQ,EACnE,QAAS,CAAC,GAAGpB,EAAS,GAAIiB,GAAe,CAAC,CAAE,CAC9C,CAAC,CACH,EAEA,eAAgBD,EAAkB,CAChC,aAAAP,EACA,MAAAY,EAAQ,EACV,EAG8C,CAC5C,QAASC,EAAI,EAAGA,EAAIb,EAAa,OAAQa,EAAIA,EAAID,EAAO,CACtD,IAAME,EAAQd,EAAa,MAAMa,EAAGA,EAAID,CAAK,EAE7C,MADe,MAAM,QAAQ,IAAIE,EAAM,IAAKC,GAAWC,EAAYD,CAAM,CAAC,CAAC,CAE7E,CACF,CAaA,IAAMC,EAAc,MAAO,CACzB,QAAAlB,EACA,MAAAM,EACA,MAAAR,EACA,QAAAM,CACF,IACEN,EAAM,mBAAmB,CACvB,SAAUE,EACV,QAAS,IAAI,WAAW,MAAMM,EAAM,YAAY,CAAC,EACjD,SAAUnB,EAAWiB,CAAO,CAC9B,CAAC",
"names": ["toNullable", "isBrowser", "uploadAsset", "data", "filename", "collection", "headers", "token", "fullPath", "encoding", "description", "actor", "init_asset_upload", "batchId", "chunkSize", "uploadChunks", "clone", "orderId", "start", "chunk", "chunkIds", "results", "batchUploadChunks", "contentType", "type", "_", "chunk_id", "limit", "i", "batch", "params", "uploadChunk"]
}
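
The substantive change in this release is easiest to read in the sourcesContent entry above: uploadAsset in src/api/storage.api.ts now carries the complete chunked-upload pipeline, splitting each asset into 1,900,000-byte slices (the chunk size recommended in the linked forum thread) and feeding them through a batchUploadChunks generator that keeps at most 12 uploads in flight per batch. A condensed TypeScript sketch of that pattern follows; uploadOne is a hypothetical stand-in for the actor's upload_asset_chunk call.

const CHUNK_SIZE = 1_900_000; // slice size used by uploadAsset above
const LIMIT = 12;             // max chunks uploaded concurrently per batch

// uploadOne is a hypothetical stand-in for actor.upload_asset_chunk.
async function uploadInBatches(
  data: Blob,
  uploadOne: (chunk: Blob, orderId: bigint) => Promise<void>
): Promise<void> {
  // Split the blob into slices of at most CHUNK_SIZE bytes, tagged with their order.
  const chunks: {chunk: Blob; orderId: bigint}[] = [];
  let orderId = 0n;
  for (let start = 0; start < data.size; start += CHUNK_SIZE) {
    chunks.push({chunk: data.slice(start, start + CHUNK_SIZE), orderId});
    orderId++;
  }

  // Upload at most LIMIT chunks at a time, awaiting each batch before starting the next.
  for (let i = 0; i < chunks.length; i += LIMIT) {
    const batch = chunks.slice(i, i + LIMIT);
    await Promise.all(batch.map(({chunk, orderId}) => uploadOne(chunk, orderId)));
  }
}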

package/dist/declarations/satellite/satellite.did.d.ts
CHANGED

@@ -28,7 +28,9 @@ export interface AuthenticationConfig {
 }
 export interface AuthenticationConfigInternetIdentity {
   derivation_origin: [] | [string];
+  external_alternative_origins: [] | [Array<string>];
 }
+export type CollectionType = {Db: null} | {Storage: null};
 export interface CommitBatch {
   batch_id: bigint;
   headers: Array<[string, string]>;

@@ -161,8 +163,8 @@ export interface Rule {
   mutable_permissions: [] | [boolean];
   rate_config: [] | [RateConfig];
   write: Permission;
+  max_changes_per_user: [] | [number];
 }
-export type RulesType = {Db: null} | {Storage: null};
 export interface SetController {
   metadata: Array<[string, string]>;
   scope: ControllerScope;

@@ -186,6 +188,7 @@ export interface SetRule {
   mutable_permissions: [] | [boolean];
   rate_config: [] | [RateConfig];
   write: Permission;
+  max_changes_per_user: [] | [number];
 }
 export interface StorageConfig {
   iframe: [] | [StorageConfigIFrame];

@@ -250,7 +253,7 @@ export interface _SERVICE {
   del_filtered_docs: ActorMethod<[string, ListParams], undefined>;
   del_many_assets: ActorMethod<[Array<[string, string]>], undefined>;
   del_many_docs: ActorMethod<[Array<[string, string, DelDoc]>], undefined>;
-  del_rule: ActorMethod<[
+  del_rule: ActorMethod<[CollectionType, string, DelRule], undefined>;
   deposit_cycles: ActorMethod<[DepositCyclesArgs], undefined>;
   get_asset: ActorMethod<[string, string], [] | [AssetNoContent]>;
   get_auth_config: ActorMethod<[], [] | [AuthenticationConfig]>;

@@ -259,7 +262,7 @@ export interface _SERVICE {
   get_doc: ActorMethod<[string, string], [] | [Doc]>;
   get_many_assets: ActorMethod<[Array<[string, string]>], Array<[string, [] | [AssetNoContent]]>>;
   get_many_docs: ActorMethod<[Array<[string, string]>], Array<[string, [] | [Doc]]>>;
-  get_rule: ActorMethod<[
+  get_rule: ActorMethod<[CollectionType, string], [] | [Rule]>;
   get_storage_config: ActorMethod<[], StorageConfig>;
   http_request: ActorMethod<[HttpRequest], HttpResponse>;
   http_request_streaming_callback: ActorMethod<

@@ -271,7 +274,7 @@ export interface _SERVICE {
   list_controllers: ActorMethod<[], Array<[Principal, Controller]>>;
   list_custom_domains: ActorMethod<[], Array<[string, CustomDomain]>>;
   list_docs: ActorMethod<[string, ListParams], ListResults_1>;
-  list_rules: ActorMethod<[
+  list_rules: ActorMethod<[CollectionType], Array<[string, Rule]>>;
   memory_size: ActorMethod<[], MemorySize>;
   set_auth_config: ActorMethod<[AuthenticationConfig], undefined>;
   set_controllers: ActorMethod<[SetControllersArgs], Array<[Principal, Controller]>>;

@@ -279,7 +282,7 @@ export interface _SERVICE {
   set_db_config: ActorMethod<[DbConfig], undefined>;
   set_doc: ActorMethod<[string, string, SetDoc], Doc>;
   set_many_docs: ActorMethod<[Array<[string, string, SetDoc]>], Array<[string, Doc]>>;
-  set_rule: ActorMethod<[
+  set_rule: ActorMethod<[CollectionType, string, SetRule], Rule>;
   set_storage_config: ActorMethod<[StorageConfig], undefined>;
   upload_asset_chunk: ActorMethod<[UploadChunk], UploadChunkResult>;
   version: ActorMethod<[], string>;
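
The declaration changes above rename the 0.1.3 RulesType variant to CollectionType, add max_changes_per_user to Rule and SetRule, add external_alternative_origins to AuthenticationConfigInternetIdentity, and give del_rule, get_rule, list_rules, and set_rule fully spelled-out signatures (the removed 0.1.3 lines are truncated by the diff viewer). A usage sketch against the new signatures; the import path and the pre-built actor are assumptions, not part of this diff.

import type {_SERVICE, CollectionType, Rule} from './satellite.did';

// List the rules of the Db collections; {Storage: null} would select storage rules.
const listDbRules = async (actor: _SERVICE): Promise<Array<[string, Rule]>> => {
  const collectionType: CollectionType = {Db: null};
  return actor.list_rules(collectionType);
};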

package/dist/declarations/satellite/satellite.factory.did.js
CHANGED

@@ -48,7 +48,7 @@ export const idlFactory = ({IDL}) => {
     expires_at: IDL.Opt(IDL.Nat64)
   });
   const DelDoc = IDL.Record({version: IDL.Opt(IDL.Nat64)});
-  const
+  const CollectionType = IDL.Variant({Db: IDL.Null, Storage: IDL.Null});
   const DelRule = IDL.Record({version: IDL.Opt(IDL.Nat64)});
   const DepositCyclesArgs = IDL.Record({
     cycles: IDL.Nat,

@@ -76,7 +76,8 @@ export const idlFactory = ({IDL}) => {
     version: IDL.Opt(IDL.Nat64)
   });
   const AuthenticationConfigInternetIdentity = IDL.Record({
-    derivation_origin: IDL.Opt(IDL.Text)
+    derivation_origin: IDL.Opt(IDL.Text),
+    external_alternative_origins: IDL.Opt(IDL.Vec(IDL.Text))
   });
   const AuthenticationConfig = IDL.Record({
     internet_identity: IDL.Opt(AuthenticationConfigInternetIdentity)

@@ -143,7 +144,8 @@ export const idlFactory = ({IDL}) => {
     version: IDL.Opt(IDL.Nat64),
     mutable_permissions: IDL.Opt(IDL.Bool),
     rate_config: IDL.Opt(RateConfig),
-    write: Permission
+    write: Permission,
+    max_changes_per_user: IDL.Opt(IDL.Nat32)
   });
   const HttpRequest = IDL.Record({
     url: IDL.Text,

@@ -229,7 +231,8 @@ export const idlFactory = ({IDL}) => {
     version: IDL.Opt(IDL.Nat64),
     mutable_permissions: IDL.Opt(IDL.Bool),
     rate_config: IDL.Opt(RateConfig),
-    write: Permission
+    write: Permission,
+    max_changes_per_user: IDL.Opt(IDL.Nat32)
   });
   const UploadChunk = IDL.Record({
     content: IDL.Vec(IDL.Nat8),

@@ -258,7 +261,7 @@ export const idlFactory = ({IDL}) => {
     del_filtered_docs: IDL.Func([IDL.Text, ListParams], [], []),
     del_many_assets: IDL.Func([IDL.Vec(IDL.Tuple(IDL.Text, IDL.Text))], [], []),
     del_many_docs: IDL.Func([IDL.Vec(IDL.Tuple(IDL.Text, IDL.Text, DelDoc))], [], []),
-    del_rule: IDL.Func([
+    del_rule: IDL.Func([CollectionType, IDL.Text, DelRule], [], []),
     deposit_cycles: IDL.Func([DepositCyclesArgs], [], []),
     get_asset: IDL.Func([IDL.Text, IDL.Text], [IDL.Opt(AssetNoContent)], ['query']),
     get_auth_config: IDL.Func([], [IDL.Opt(AuthenticationConfig)], ['query']),

@@ -275,7 +278,7 @@ export const idlFactory = ({IDL}) => {
       [IDL.Vec(IDL.Tuple(IDL.Text, IDL.Opt(Doc)))],
       ['query']
     ),
-    get_rule: IDL.Func([
+    get_rule: IDL.Func([CollectionType, IDL.Text], [IDL.Opt(Rule)], ['query']),
     get_storage_config: IDL.Func([], [StorageConfig], ['query']),
     http_request: IDL.Func([HttpRequest], [HttpResponse], ['query']),
     http_request_streaming_callback: IDL.Func(

@@ -288,7 +291,7 @@ export const idlFactory = ({IDL}) => {
     list_controllers: IDL.Func([], [IDL.Vec(IDL.Tuple(IDL.Principal, Controller))], ['query']),
     list_custom_domains: IDL.Func([], [IDL.Vec(IDL.Tuple(IDL.Text, CustomDomain))], ['query']),
     list_docs: IDL.Func([IDL.Text, ListParams], [ListResults_1], ['query']),
-    list_rules: IDL.Func([
+    list_rules: IDL.Func([CollectionType], [IDL.Vec(IDL.Tuple(IDL.Text, Rule))], ['query']),
     memory_size: IDL.Func([], [MemorySize], ['query']),
     set_auth_config: IDL.Func([AuthenticationConfig], [], []),
     set_controllers: IDL.Func(

@@ -304,7 +307,7 @@ export const idlFactory = ({IDL}) => {
       [IDL.Vec(IDL.Tuple(IDL.Text, Doc))],
       []
     ),
-    set_rule: IDL.Func([
+    set_rule: IDL.Func([CollectionType, IDL.Text, SetRule], [Rule], []),
     set_storage_config: IDL.Func([StorageConfig], [], []),
     upload_asset_chunk: IDL.Func([UploadChunk], [UploadChunkResult], []),
     version: IDL.Func([], [IDL.Text], ['query'])
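
In the factory, the truncated removed line (const) presumably held the old RulesType variant; 0.1.5 defines CollectionType in its place and threads it through del_rule, get_rule, list_rules, and set_rule. For orientation, this is the typical way such a factory is consumed with @dfinity/agent; a sketch with placeholder host and canister id, not code from this package.

import {Actor, HttpAgent} from '@dfinity/agent';
import {idlFactory} from './satellite.factory.did.mjs';

// Placeholders: point the agent at a boundary node and a real satellite id.
const agent = new HttpAgent({host: 'https://icp-api.io'});
const actor = Actor.createActor(idlFactory, {
  agent,
  canisterId: 'aaaaa-aa'
});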

package/dist/declarations/satellite/satellite.factory.did.mjs
CHANGED

@@ -48,7 +48,7 @@ export const idlFactory = ({IDL}) => {
     expires_at: IDL.Opt(IDL.Nat64)
   });
   const DelDoc = IDL.Record({version: IDL.Opt(IDL.Nat64)});
-  const
+  const CollectionType = IDL.Variant({Db: IDL.Null, Storage: IDL.Null});
   const DelRule = IDL.Record({version: IDL.Opt(IDL.Nat64)});
   const DepositCyclesArgs = IDL.Record({
     cycles: IDL.Nat,

@@ -76,7 +76,8 @@ export const idlFactory = ({IDL}) => {
     version: IDL.Opt(IDL.Nat64)
   });
   const AuthenticationConfigInternetIdentity = IDL.Record({
-    derivation_origin: IDL.Opt(IDL.Text)
+    derivation_origin: IDL.Opt(IDL.Text),
+    external_alternative_origins: IDL.Opt(IDL.Vec(IDL.Text))
   });
   const AuthenticationConfig = IDL.Record({
     internet_identity: IDL.Opt(AuthenticationConfigInternetIdentity)

@@ -143,7 +144,8 @@ export const idlFactory = ({IDL}) => {
     version: IDL.Opt(IDL.Nat64),
     mutable_permissions: IDL.Opt(IDL.Bool),
     rate_config: IDL.Opt(RateConfig),
-    write: Permission
+    write: Permission,
+    max_changes_per_user: IDL.Opt(IDL.Nat32)
   });
   const HttpRequest = IDL.Record({
     url: IDL.Text,

@@ -229,7 +231,8 @@ export const idlFactory = ({IDL}) => {
     version: IDL.Opt(IDL.Nat64),
     mutable_permissions: IDL.Opt(IDL.Bool),
     rate_config: IDL.Opt(RateConfig),
-    write: Permission
+    write: Permission,
+    max_changes_per_user: IDL.Opt(IDL.Nat32)
   });
   const UploadChunk = IDL.Record({
     content: IDL.Vec(IDL.Nat8),

@@ -258,7 +261,7 @@ export const idlFactory = ({IDL}) => {
     del_filtered_docs: IDL.Func([IDL.Text, ListParams], [], []),
     del_many_assets: IDL.Func([IDL.Vec(IDL.Tuple(IDL.Text, IDL.Text))], [], []),
     del_many_docs: IDL.Func([IDL.Vec(IDL.Tuple(IDL.Text, IDL.Text, DelDoc))], [], []),
-    del_rule: IDL.Func([
+    del_rule: IDL.Func([CollectionType, IDL.Text, DelRule], [], []),
     deposit_cycles: IDL.Func([DepositCyclesArgs], [], []),
     get_asset: IDL.Func([IDL.Text, IDL.Text], [IDL.Opt(AssetNoContent)], ['query']),
     get_auth_config: IDL.Func([], [IDL.Opt(AuthenticationConfig)], ['query']),

@@ -275,7 +278,7 @@ export const idlFactory = ({IDL}) => {
       [IDL.Vec(IDL.Tuple(IDL.Text, IDL.Opt(Doc)))],
       ['query']
     ),
-    get_rule: IDL.Func([
+    get_rule: IDL.Func([CollectionType, IDL.Text], [IDL.Opt(Rule)], ['query']),
     get_storage_config: IDL.Func([], [StorageConfig], ['query']),
     http_request: IDL.Func([HttpRequest], [HttpResponse], ['query']),
     http_request_streaming_callback: IDL.Func(

@@ -288,7 +291,7 @@ export const idlFactory = ({IDL}) => {
     list_controllers: IDL.Func([], [IDL.Vec(IDL.Tuple(IDL.Principal, Controller))], ['query']),
     list_custom_domains: IDL.Func([], [IDL.Vec(IDL.Tuple(IDL.Text, CustomDomain))], ['query']),
     list_docs: IDL.Func([IDL.Text, ListParams], [ListResults_1], ['query']),
-    list_rules: IDL.Func([
+    list_rules: IDL.Func([CollectionType], [IDL.Vec(IDL.Tuple(IDL.Text, Rule))], ['query']),
     memory_size: IDL.Func([], [MemorySize], ['query']),
     set_auth_config: IDL.Func([AuthenticationConfig], [], []),
     set_controllers: IDL.Func(

@@ -304,7 +307,7 @@ export const idlFactory = ({IDL}) => {
       [IDL.Vec(IDL.Tuple(IDL.Text, Doc))],
       []
     ),
-    set_rule: IDL.Func([
+    set_rule: IDL.Func([CollectionType, IDL.Text, SetRule], [Rule], []),
     set_storage_config: IDL.Func([StorageConfig], [], []),
     upload_asset_chunk: IDL.Func([UploadChunk], [UploadChunkResult], []),
     version: IDL.Func([], [IDL.Text], ['query'])
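
This file receives the same edits as satellite.factory.did.js above; the two builds differ only in module format. In the generated JS bindings, Candid opt values are plain arrays: [] for absent, [value] for present. A sketch of an AuthenticationConfig exercising the new external_alternative_origins field (the origins are placeholders):

import type {AuthenticationConfig} from './satellite.did';

const config: AuthenticationConfig = {
  internet_identity: [
    {
      derivation_origin: ['https://example.com'],
      // New in 0.1.5: [] | [Array<string>], an optional list of alternative origins.
      external_alternative_origins: [['https://alt.example.com']]
    }
  ]
};
// Applied with: await actor.set_auth_config(config);
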
package/dist/node/index.mjs
CHANGED

@@ -1,4 +1,4 @@
import { createRequire as topLevelCreateRequire } from 'module';
const require = topLevelCreateRequire(import.meta.url);
-
import{toNullable as a}from"@dfinity/utils";import{isBrowser as U}from"@junobuild/utils";var R=async({asset:{data:t,filename:s,collection:o,headers:n,token:r,fullPath:i,encoding:
+
import{toNullable as a}from"@dfinity/utils";import{isBrowser as U}from"@junobuild/utils";var R=async({asset:{data:t,filename:s,collection:o,headers:n,token:r,fullPath:i,encoding:C,description:f},actor:d,init_asset_upload:k})=>{let{batch_id:c}=await k({collection:o,full_path:i,name:s,token:a(r),encoding_type:a(C),description:a(f)}),p=19e5,u=[],y=U()?new Blob([await t.arrayBuffer()]):t,h=0n;for(let e=0;e<y.size;e+=p){let m=y.slice(e,e+p);u.push({batchId:c,chunk:m,actor:d,orderId:h}),h++}let l=[];for await(let e of _({uploadChunks:u}))l=[...l,...e];let I=n.find(([e,m])=>e.toLowerCase()==="content-type")===void 0&&t.type!==void 0&&t.type!==""?[["Content-Type",t.type]]:void 0;await d.commit_asset_upload({batch_id:c,chunk_ids:l.map(({chunk_id:e})=>e),headers:[...n,...I??[]]})};async function*_({uploadChunks:t,limit:s=12}){for(let o=0;o<t.length;o=o+s){let n=t.slice(o,o+s);yield await Promise.all(n.map(i=>A(i)))}}var A=async({batchId:t,chunk:s,actor:o,orderId:n})=>o.upload_asset_chunk({batch_id:t,content:new Uint8Array(await s.arrayBuffer()),order_id:a(n)});export{R as uploadAsset};
//# sourceMappingURL=index.mjs.map
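
The two unchanged lines at the top of this bundle are the standard Node pattern for regaining require inside an ES module, so the bundle's CommonJS dependencies stay loadable:

import {createRequire} from 'module';

// Re-creates a require() function scoped to this module's URL.
const require = createRequire(import.meta.url);
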
package/dist/node/index.mjs.map
CHANGED

@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../src/api/storage.api.ts"],
-
"sourcesContent": ["import {toNullable} from '@dfinity/utils';\nimport {isBrowser} from '@junobuild/utils';\nimport type {\n _SERVICE as ConsoleActor,\n InitAssetKey as ConsoleInitAssetKey,\n InitUploadResult as ConsoleInitUploadResult\n} from '../../declarations/console/console.did';\nimport type {\n _SERVICE as SatelliteActor,\n InitAssetKey as SatelliteInitAssetKey,\n InitUploadResult as SatelliteInitUploadResult\n} from '../../declarations/satellite/satellite.did';\nimport type {ENCODING_TYPE, Storage} from '../types/storage.types';\n\nexport type UploadAsset = Required<Omit<Storage, 'token' | 'encoding' | 'description'>> &\n Pick<Storage, 'token' | 'encoding' | 'description'>;\n\nexport const uploadAsset = async ({\n asset: {data, filename, collection, headers, token, fullPath, encoding, description},\n actor,\n init_asset_upload\n}: {\n asset: UploadAsset;\n actor: SatelliteActor | ConsoleActor;\n init_asset_upload: (\n initAssetKey: SatelliteInitAssetKey | ConsoleInitAssetKey\n ) => Promise<SatelliteInitUploadResult | ConsoleInitUploadResult>;\n}): Promise<void> => {\n const {batch_id: batchId} = await init_asset_upload({\n collection,\n full_path: fullPath,\n name: filename,\n token: toNullable<string>(token),\n encoding_type: toNullable<ENCODING_TYPE>(encoding),\n description: toNullable(description)\n });\n\n // https://forum.dfinity.org/t/optimal-upload-chunk-size/20444/23?u=peterparker\n const chunkSize = 1900000;\n\n const uploadChunks: UploadChunkParams[] = [];\n\n // Prevent transforming chunk to arrayBuffer error: The requested file could not be read, typically due to permission problems that have occurred after a reference to a file was acquired.\n const clone: Blob = isBrowser() ? new Blob([await data.arrayBuffer()]) : data;\n\n // Split data into chunks\n let orderId = 0n;\n for (let start = 0; start < clone.size; start += chunkSize) {\n const chunk: Blob = clone.slice(start, start + chunkSize);\n\n uploadChunks.push({\n batchId,\n chunk,\n actor,\n orderId\n });\n\n orderId++;\n }\n\n // Upload chunks to the IC in batch - i.e. 12 chunks uploaded at a time.\n let chunkIds: UploadChunkResult[] = [];\n for await (const results of batchUploadChunks({uploadChunks})) {\n chunkIds = [...chunkIds, ...results];\n }\n\n const contentType: [[string, string]] | undefined =\n headers.find(([type, _]) => type.toLowerCase() === 'content-type') === undefined &&\n data.type !== undefined &&\n data.type !== ''\n ? [['Content-Type', data.type]]\n : undefined;\n\n await actor.commit_asset_upload({\n batch_id: batchId,\n chunk_ids: chunkIds.map(({chunk_id}: UploadChunkResult) => chunk_id),\n headers: [...headers, ...(contentType
-
"mappings": ";;AAAA,OAAQ,cAAAA,MAAiB,iBACzB,OAAQ,aAAAC,MAAgB,mBAgBjB,IAAMC,EAAc,MAAO,CAChC,MAAO,CAAC,KAAAC,EAAM,SAAAC,EAAU,WAAAC,EAAY,QAAAC,EAAS,MAAAC,EAAO,SAAAC,EAAU,SAAAC,EAAU,YAAAC,CAAW,EACnF,MAAAC,EACA,kBAAAC,CACF,IAMqB,CACnB,GAAM,CAAC,SAAUC,CAAO,EAAI,MAAMD,EAAkB,CAClD,WAAAP,EACA,UAAWG,EACX,KAAMJ,EACN,MAAOJ,EAAmBO,CAAK,EAC/B,cAAeP,EAA0BS,CAAQ,EACjD,YAAaT,EAAWU,CAAW,CACrC,CAAC,EAGKI,EAAY,KAEZC,EAAoC,CAAC,EAGrCC,EAAcf,EAAU,EAAI,IAAI,KAAK,CAAC,MAAME,EAAK,YAAY,CAAC,CAAC,EAAIA,EAGrEc,EAAU,GACd,QAASC,EAAQ,EAAGA,EAAQF,EAAM,KAAME,GAASJ,EAAW,CAC1D,IAAMK,EAAcH,EAAM,MAAME,EAAOA,EAAQJ,CAAS,EAExDC,EAAa,KAAK,CAChB,QAAAF,EACA,MAAAM,EACA,MAAAR,EACA,QAAAM,CACF,CAAC,EAEDA,GACF,CAGA,IAAIG,EAAgC,CAAC,EACrC,cAAiBC,KAAWC,EAAkB,CAAC,aAAAP,CAAY,CAAC,EAC1DK,EAAW,CAAC,GAAGA,EAAU,GAAGC,CAAO,EAGrC,IAAME,EACJjB,EAAQ,KAAK,CAAC,CAACkB,EAAMC,CAAC,IAAMD,EAAK,YAAY,IAAM,cAAc,IAAM,QACvErB,EAAK,OAAS,QACdA,EAAK,OAAS,GACV,CAAC,CAAC,eAAgBA,EAAK,IAAI,CAAC,EAC5B,OAEN,MAAMQ,EAAM,oBAAoB,CAC9B,SAAUE,EACV,UAAWO,EAAS,IAAI,CAAC,CAAC,SAAAM,CAAQ,IAAyBA,CAAQ,EACnE,QAAS,CAAC,GAAGpB,EAAS,GAAIiB,
+
"sourcesContent": ["import {toNullable} from '@dfinity/utils';\nimport {isBrowser} from '@junobuild/utils';\nimport type {\n _SERVICE as ConsoleActor,\n InitAssetKey as ConsoleInitAssetKey,\n InitUploadResult as ConsoleInitUploadResult\n} from '../../declarations/console/console.did';\nimport type {\n _SERVICE as SatelliteActor,\n InitAssetKey as SatelliteInitAssetKey,\n InitUploadResult as SatelliteInitUploadResult\n} from '../../declarations/satellite/satellite.did';\nimport type {ENCODING_TYPE, Storage} from '../types/storage.types';\n\nexport type UploadAsset = Required<Omit<Storage, 'token' | 'encoding' | 'description'>> &\n Pick<Storage, 'token' | 'encoding' | 'description'>;\n\nexport const uploadAsset = async ({\n asset: {data, filename, collection, headers, token, fullPath, encoding, description},\n actor,\n init_asset_upload\n}: {\n asset: UploadAsset;\n actor: SatelliteActor | ConsoleActor;\n init_asset_upload: (\n initAssetKey: SatelliteInitAssetKey | ConsoleInitAssetKey\n ) => Promise<SatelliteInitUploadResult | ConsoleInitUploadResult>;\n}): Promise<void> => {\n const {batch_id: batchId} = await init_asset_upload({\n collection,\n full_path: fullPath,\n name: filename,\n token: toNullable<string>(token),\n encoding_type: toNullable<ENCODING_TYPE>(encoding),\n description: toNullable(description)\n });\n\n // https://forum.dfinity.org/t/optimal-upload-chunk-size/20444/23?u=peterparker\n const chunkSize = 1900000;\n\n const uploadChunks: UploadChunkParams[] = [];\n\n // Prevent transforming chunk to arrayBuffer error: The requested file could not be read, typically due to permission problems that have occurred after a reference to a file was acquired.\n const clone: Blob = isBrowser() ? new Blob([await data.arrayBuffer()]) : data;\n\n // Split data into chunks\n let orderId = 0n;\n for (let start = 0; start < clone.size; start += chunkSize) {\n const chunk: Blob = clone.slice(start, start + chunkSize);\n\n uploadChunks.push({\n batchId,\n chunk,\n actor,\n orderId\n });\n\n orderId++;\n }\n\n // Upload chunks to the IC in batch - i.e. 12 chunks uploaded at a time.\n let chunkIds: UploadChunkResult[] = [];\n for await (const results of batchUploadChunks({uploadChunks})) {\n chunkIds = [...chunkIds, ...results];\n }\n\n const contentType: [[string, string]] | undefined =\n headers.find(([type, _]) => type.toLowerCase() === 'content-type') === undefined &&\n data.type !== undefined &&\n data.type !== ''\n ? [['Content-Type', data.type]]\n : undefined;\n\n await actor.commit_asset_upload({\n batch_id: batchId,\n chunk_ids: chunkIds.map(({chunk_id}: UploadChunkResult) => chunk_id),\n headers: [...headers, ...(contentType ?? [])]\n });\n};\n\nasync function* batchUploadChunks({\n uploadChunks,\n limit = 12\n}: {\n uploadChunks: UploadChunkParams[];\n limit?: number;\n}): AsyncGenerator<UploadChunkResult[], void> {\n for (let i = 0; i < uploadChunks.length; i = i + limit) {\n const batch = uploadChunks.slice(i, i + limit);\n const result = await Promise.all(batch.map((params) => uploadChunk(params)));\n yield result;\n }\n}\n\ninterface UploadChunkResult {\n chunk_id: bigint;\n}\n\ninterface UploadChunkParams {\n batchId: bigint;\n chunk: Blob;\n actor: SatelliteActor | ConsoleActor;\n orderId: bigint;\n}\n\nconst uploadChunk = async ({\n batchId,\n chunk,\n actor,\n orderId\n}: UploadChunkParams): Promise<UploadChunkResult> =>\n actor.upload_asset_chunk({\n batch_id: batchId,\n content: new Uint8Array(await chunk.arrayBuffer()),\n order_id: toNullable(orderId)\n });\n"],
+
"mappings": ";;AAAA,OAAQ,cAAAA,MAAiB,iBACzB,OAAQ,aAAAC,MAAgB,mBAgBjB,IAAMC,EAAc,MAAO,CAChC,MAAO,CAAC,KAAAC,EAAM,SAAAC,EAAU,WAAAC,EAAY,QAAAC,EAAS,MAAAC,EAAO,SAAAC,EAAU,SAAAC,EAAU,YAAAC,CAAW,EACnF,MAAAC,EACA,kBAAAC,CACF,IAMqB,CACnB,GAAM,CAAC,SAAUC,CAAO,EAAI,MAAMD,EAAkB,CAClD,WAAAP,EACA,UAAWG,EACX,KAAMJ,EACN,MAAOJ,EAAmBO,CAAK,EAC/B,cAAeP,EAA0BS,CAAQ,EACjD,YAAaT,EAAWU,CAAW,CACrC,CAAC,EAGKI,EAAY,KAEZC,EAAoC,CAAC,EAGrCC,EAAcf,EAAU,EAAI,IAAI,KAAK,CAAC,MAAME,EAAK,YAAY,CAAC,CAAC,EAAIA,EAGrEc,EAAU,GACd,QAASC,EAAQ,EAAGA,EAAQF,EAAM,KAAME,GAASJ,EAAW,CAC1D,IAAMK,EAAcH,EAAM,MAAME,EAAOA,EAAQJ,CAAS,EAExDC,EAAa,KAAK,CAChB,QAAAF,EACA,MAAAM,EACA,MAAAR,EACA,QAAAM,CACF,CAAC,EAEDA,GACF,CAGA,IAAIG,EAAgC,CAAC,EACrC,cAAiBC,KAAWC,EAAkB,CAAC,aAAAP,CAAY,CAAC,EAC1DK,EAAW,CAAC,GAAGA,EAAU,GAAGC,CAAO,EAGrC,IAAME,EACJjB,EAAQ,KAAK,CAAC,CAACkB,EAAMC,CAAC,IAAMD,EAAK,YAAY,IAAM,cAAc,IAAM,QACvErB,EAAK,OAAS,QACdA,EAAK,OAAS,GACV,CAAC,CAAC,eAAgBA,EAAK,IAAI,CAAC,EAC5B,OAEN,MAAMQ,EAAM,oBAAoB,CAC9B,SAAUE,EACV,UAAWO,EAAS,IAAI,CAAC,CAAC,SAAAM,CAAQ,IAAyBA,CAAQ,EACnE,QAAS,CAAC,GAAGpB,EAAS,GAAIiB,GAAe,CAAC,CAAE,CAC9C,CAAC,CACH,EAEA,eAAgBD,EAAkB,CAChC,aAAAP,EACA,MAAAY,EAAQ,EACV,EAG8C,CAC5C,QAASC,EAAI,EAAGA,EAAIb,EAAa,OAAQa,EAAIA,EAAID,EAAO,CACtD,IAAME,EAAQd,EAAa,MAAMa,EAAGA,EAAID,CAAK,EAE7C,MADe,MAAM,QAAQ,IAAIE,EAAM,IAAKC,GAAWC,EAAYD,CAAM,CAAC,CAAC,CAE7E,CACF,CAaA,IAAMC,EAAc,MAAO,CACzB,QAAAlB,EACA,MAAAM,EACA,MAAAR,EACA,QAAAM,CACF,IACEN,EAAM,mBAAmB,CACvB,SAAUE,EACV,QAAS,IAAI,WAAW,MAAMM,EAAM,YAAY,CAAC,EACjD,SAAUnB,EAAWiB,CAAO,CAC9B,CAAC",
"names": ["toNullable", "isBrowser", "uploadAsset", "data", "filename", "collection", "headers", "token", "fullPath", "encoding", "description", "actor", "init_asset_upload", "batchId", "chunkSize", "uploadChunks", "clone", "orderId", "start", "chunk", "chunkIds", "results", "batchUploadChunks", "contentType", "type", "_", "chunk_id", "limit", "i", "batch", "params", "uploadChunk"]
}