@shelby-protocol/react 0.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46) hide show
  1. package/CHANGELOG.md +9 -0
  2. package/README.md +104 -0
  3. package/dist/index.cjs +2 -0
  4. package/dist/index.cjs.map +1 -0
  5. package/dist/index.d.ts +3 -0
  6. package/dist/index.d.ts.map +1 -0
  7. package/dist/index.js +2 -0
  8. package/dist/index.js.map +1 -0
  9. package/dist/mutations/index.d.ts +5 -0
  10. package/dist/mutations/index.d.ts.map +1 -0
  11. package/dist/mutations/useCommitBlobs.d.ts +50 -0
  12. package/dist/mutations/useCommitBlobs.d.ts.map +1 -0
  13. package/dist/mutations/useEncodeBlobs.d.ts +78 -0
  14. package/dist/mutations/useEncodeBlobs.d.ts.map +1 -0
  15. package/dist/mutations/useRegisterCommitments.d.ts +69 -0
  16. package/dist/mutations/useRegisterCommitments.d.ts.map +1 -0
  17. package/dist/mutations/useUploadBlobs.d.ts +79 -0
  18. package/dist/mutations/useUploadBlobs.d.ts.map +1 -0
  19. package/dist/queries/index.d.ts +3 -0
  20. package/dist/queries/index.d.ts.map +1 -0
  21. package/dist/queries/useAccountBlobs.d.ts +30 -0
  22. package/dist/queries/useAccountBlobs.d.ts.map +1 -0
  23. package/dist/queries/useBlobMetadata.d.ts +29 -0
  24. package/dist/queries/useBlobMetadata.d.ts.map +1 -0
  25. package/dist/types/mutations.d.ts +6 -0
  26. package/dist/types/mutations.d.ts.map +1 -0
  27. package/dist/types/queries.d.ts +6 -0
  28. package/dist/types/queries.d.ts.map +1 -0
  29. package/dist/types/signers.d.ts +9 -0
  30. package/dist/types/signers.d.ts.map +1 -0
  31. package/dist/types/walletAdapter.d.ts +3 -0
  32. package/dist/types/walletAdapter.d.ts.map +1 -0
  33. package/package.json +80 -0
  34. package/src/index.ts +2 -0
  35. package/src/mutations/index.ts +4 -0
  36. package/src/mutations/useCommitBlobs.tsx +83 -0
  37. package/src/mutations/useEncodeBlobs.tsx +122 -0
  38. package/src/mutations/useRegisterCommitments.tsx +129 -0
  39. package/src/mutations/useUploadBlobs.tsx +194 -0
  40. package/src/queries/index.ts +2 -0
  41. package/src/queries/useAccountBlobs.tsx +72 -0
  42. package/src/queries/useBlobMetadata.tsx +56 -0
  43. package/src/types/mutations.ts +16 -0
  44. package/src/types/queries.ts +16 -0
  45. package/src/types/signers.ts +11 -0
  46. package/src/types/walletAdapter.ts +4 -0
package/CHANGELOG.md ADDED
@@ -0,0 +1,9 @@
1
+ # @shelby-protocol/react
2
+
3
+ ## 0.0.2
4
+
5
+ ### Patch Changes
6
+
7
+ - ecc0789: Initial Release
8
+ - Updated dependencies [4486dfc]
9
+ - @shelby-protocol/sdk@0.0.7
package/README.md ADDED
@@ -0,0 +1,104 @@
1
+ # Shelby Protocol React SDK
2
+
3
+ A collection of React hooks and utilities built on top of the `@shelby-protocol/sdk`.
4
+
5
+ ## Installation
6
+
7
+ ```bash
8
+ pnpm install @shelby-protocol/react @shelby-protocol/sdk @aptos-labs/ts-sdk @tanstack/react-query
9
+ ```
10
+
11
+ ## Documentation
12
+
13
+ Visit the [Shelby React SDK Documentation](https://docs.shelby.network/sdks/react) for more information.
14
+
15
+ ## Basic Usage
16
+
17
+ 1. Wrap your application with the `QueryClientProvider` component.
18
+
19
+ ```tsx
20
+ // src/App.tsx
21
+ import { QueryClient, QueryClientProvider } from "@tanstack/react-query";
22
+
23
+ const queryClient = new QueryClient();
24
+
25
+ function App({ children }: { children: React.ReactNode }) {
26
+ return (
27
+ <QueryClientProvider client={queryClient}>
28
+ {children}
29
+ </QueryClientProvider>
30
+ );
31
+ }
32
+ ```
33
+
34
+ 2. Setup the Shelby Client.
35
+
36
+ ```tsx
37
+ // src/shelbyClient.ts
38
+ import { ShelbyClient } from "@shelby-protocol/sdk/browser";
39
+ import { Network } from "@aptos-labs/ts-sdk";
40
+
41
+ export const shelbyClient = new ShelbyClient({ network: Network.SHELBYNET });
42
+ ```
43
+
44
+ 3. Use the queries to interact with the Shelby Protocol.
45
+
46
+ ```tsx
47
+ // src/AccountBlobs.tsx
48
+ import { useAccountBlobs } from "@shelby-protocol/react";
49
+ import { shelbyClient } from "./shelbyClient";
50
+
51
+ export function AccountBlobs() {
52
+ const { data: accountBlobs } = useAccountBlobs({
53
+ client: shelbyClient,
54
+ account: "0x123...",
55
+ });
56
+ return (
57
+ <div>
58
+ {accountBlobs?.map((blob) =>
59
+ <div key={blob.name}>
60
+ <div>{blob.name}</div>
61
+ <div>{blob.size}</div>
62
+ <div>{blob.expirationMicros}</div>
63
+ </div>
64
+ )}
65
+ </div>
66
+ );
67
+ }
68
+ ```
69
+
70
+ 4. Use the mutations to interact with the Shelby Protocol.
71
+
72
+ ```tsx
73
+ // src/UploadBlobs.tsx
74
+ import { Ed25519Account, Ed25519PrivateKey } from "@aptos-labs/ts-sdk";
+ import { useUploadBlobs } from "@shelby-protocol/react";
+ import { shelbyClient } from "./shelbyClient";
75
+
76
+ export function UploadBlobs() {
77
+ const signer = new Ed25519Account({
78
+ privateKey: new Ed25519PrivateKey("ed25519-priv-..."),
79
+ });
80
+
81
+ const { mutate: uploadBlobs, isPending } = useUploadBlobs({
82
+ client: shelbyClient,
83
+ });
84
+
85
+ const handleUploadBlobs = () =>
86
+ uploadBlobs({
87
+ signer,
88
+ blobs: [
89
+ { blobName: "file1.txt", blobData: new Uint8Array([...]) },
90
+ { blobName: "file2.txt", blobData: new Uint8Array([...]) },
91
+ ],
92
+ expirationMicros: Date.now() * 1000 + 86400000000
93
+ });
94
+
95
+
96
+ return (
97
+ <div>
98
+ <button onClick={handleUploadBlobs}>
99
+ {isPending ? "Uploading..." : "Upload Blobs"}
100
+ </button>
101
+ </div>
102
+ );
103
+ }
104
+ ```
package/dist/index.cjs ADDED
@@ -0,0 +1,2 @@
1
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } async function _asyncNullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return await rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _reactquery = require('@tanstack/react-query');var _plimit = require('p-limit'); var _plimit2 = _interopRequireDefault(_plimit);function I({client:t,...s}){return _reactquery.useMutation.call(void 0, {mutationFn:async({account:o,blobs:e,maxConcurrentUploads:n=3})=>{let r=_plimit2.default.call(void 0, n),m=e.map(i=>r(()=>t.rpc.putBlob({account:o,blobName:i.blobName,blobData:i.blobData})));await Promise.all(m)},...s})}var _browser = require('@shelby-protocol/sdk/browser');function Z({client:t,...s}){return _reactquery.useMutation.call(void 0, {mutationFn:async({blobs:o,provider:e,onChunk:n})=>{let r=await _asyncNullishCoalesce(e, async () => (await _browser.createDefaultErasureCodingProvider.call(void 0, ))),m=new Array(o.length);return await Promise.all(o.map(async({blobData:i},p)=>{let a=Math.ceil(i.length/r.config.chunkSizeBytes),c=await _browser.generateCommitments.call(void 0, r,i,(l,d,y)=>_optionalChain([n, 'optionalCall', _2 => _2({blobIndex:p,chunksetIndex:l,chunkIndex:d,chunkData:y,progress:l/a})]));m[p]=c})),m},...s})}var _tssdk = 
require('@aptos-labs/ts-sdk');function $({client:t,...s}){return _reactquery.useMutation.call(void 0, {mutationFn:async({commitments:o,expirationMicros:e,options:n,signer:r})=>{if(!("account"in r)){let a=r,{transaction:c}=await t.coordination.batchRegisterBlobs({account:a,expirationMicros:e,options:n,blobs:o.map(l=>({blobName:l.blobName,blobSize:l.commitment.raw_data_size,blobMerkleRoot:l.commitment.blob_merkle_root}))});return c}let{account:m,signAndSubmitTransaction:i}=r;return await i({data:_browser.ShelbyBlobClient.createBatchRegisterBlobsPayload({account:_tssdk.AccountAddress.from(m),expirationMicros:e,blobs:o.map(a=>({blobName:a.blobName,blobSize:a.commitment.raw_data_size,blobMerkleRoot:a.commitment.blob_merkle_root,numChunksets:_browser.expectedTotalChunksets.call(void 0, a.commitment.raw_data_size,_nullishCoalesce(_optionalChain([n, 'optionalAccess', _3 => _3.chunksetSizeBytes]), () => (_browser.DEFAULT_CHUNKSET_SIZE_BYTES)))}))}),options:_optionalChain([n, 'optionalAccess', _4 => _4.build, 'optionalAccess', _5 => _5.options])})},...s})}function it({client:t,...s}){return _reactquery.useMutation.call(void 0, {mutationFn:async({blobs:o,expirationMicros:e,options:n,signer:r,maxConcurrentUploads:m=3})=>{if("account"in r){let{account:i,signAndSubmitTransaction:p}=r,a=_nullishCoalesce(_optionalChain([n, 'optionalAccess', _6 => _6.chunksetSizeBytes]), () => (_browser.DEFAULT_CHUNKSET_SIZE_BYTES)),c=await t.coordination.getBlobs({where:{blob_name:{_in:o.map(b=>_browser.createBlobKey.call(void 0, {account:i,blobName:b.blobName}))}}}),l=o.filter(b=>!c.some(B=>B.name===_browser.createBlobKey.call(void 0, {account:i,blobName:b.blobName})));if(l.length>0){let b=await _browser.createDefaultErasureCodingProvider.call(void 0, ),B=await Promise.all(l.map(async u=>_browser.generateCommitments.call(void 0, b,u.blobData))),g=await 
p({data:_browser.ShelbyBlobClient.createBatchRegisterBlobsPayload({account:_tssdk.AccountAddress.from(i),expirationMicros:e,blobs:l.map((u,f)=>({blobName:u.blobName,blobSize:u.blobData.length,blobMerkleRoot:B[f].blob_merkle_root,numChunksets:_browser.expectedTotalChunksets.call(void 0, u.blobData.length,a)}))}),options:_optionalChain([n, 'optionalAccess', _7 => _7.build, 'optionalAccess', _8 => _8.options])});await t.coordination.aptos.waitForTransaction({transactionHash:g.hash})}let d=_plimit2.default.call(void 0, m),y=o.map(b=>d(()=>t.rpc.putBlob({account:i,blobName:b.blobName,blobData:b.blobData})));await Promise.all(y)}else{let i=r;await t.batchUpload({blobs:o.map(({blobData:p,blobName:a})=>({blobData:p,blobName:a})),expirationMicros:e,signer:i,options:n})}},...s})}var V=t=>["account-blobs",t.network,t.account.toString(),_optionalChain([t, 'access', _9 => _9.pagination, 'optionalAccess', _10 => _10.limit]),_optionalChain([t, 'access', _11 => _11.pagination, 'optionalAccess', _12 => _12.offset]),t.orderBy,t.where];function ut({account:t,pagination:s,orderBy:o,where:e,client:n,...r}){return _reactquery.useQuery.call(void 0, {queryKey:V({network:n.config.network,account:t,pagination:s,orderBy:o,where:e}),queryFn:async()=>await n.coordination.getAccountBlobs({account:t,pagination:s,orderBy:o,where:e}),...r})}var K=t=>["blob-metadata",t.network,t.account.toString(),t.name];function Bt({account:t,name:s,client:o,...e}){return _reactquery.useQuery.call(void 0, {queryKey:K({network:o.config.network,account:t,name:s}),queryFn:async()=>await _asyncNullishCoalesce(await o.coordination.getBlobMetadata({account:t,name:s}), async () => (null)),...e})}exports.getUseAccountBlobsQueryKey = V; exports.getUseBlobMetadataQueryKey = K; exports.useAccountBlobs = ut; exports.useBlobMetadata = Bt; exports.useCommitBlobs = I; exports.useEncodeBlobs = Z; exports.useRegisterCommitments = $; exports.useUploadBlobs = it;
2
+ //# sourceMappingURL=index.cjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/mutations/useCommitBlobs.tsx","../src/mutations/useEncodeBlobs.tsx","../src/mutations/useRegisterCommitments.tsx","../src/mutations/useUploadBlobs.tsx","../src/queries/useAccountBlobs.tsx","../src/queries/useBlobMetadata.tsx"],"names":["useCommitBlobs","shelbyClient","options","useMutation","account","blobs","maxConcurrentUploads","limit","pLimit","uploadPromises","blob","useEncodeBlobs","provider","onChunk","activeProvider","createDefaultErasureCodingProvider","results","blobData","blobIndex","chunksetCount","blobCommitments","generateCommitments","chunksetIndex","chunkIndex","chunkData","useRegisterCommitments","commitments","expirationMicros","signerOrFn","accountSigner","transaction","commitment","signAndSubmitTransaction","ShelbyBlobClient","AccountAddress","expectedTotalChunksets","DEFAULT_CHUNKSET_SIZE_BYTES","useUploadBlobs","chunksetSize","existingBlobs","createBlobKey","blobsToRegister","existingBlob","pendingRegisterBlobTransaction","index","blobName","getUseAccountBlobsQueryKey","params","useAccountBlobs","pagination","orderBy","where","useQuery","getUseBlobMetadataQueryKey","useBlobMetadata","name"],"mappings":"AACA,y7BAA4B,iFACT,SAuDHA,CAAAA,CAAe,CAC7B,MAAA,CAAQC,CAAAA,CACR,GAAGC,CACL,CAAA,CAA0B,CACxB,OAAOC,qCAAAA,CACL,UAAA,CAAY,KAAA,CAAO,CACjB,OAAA,CAAAC,CAAAA,CACA,KAAA,CAAAC,CAAAA,CACA,oBAAA,CAAAC,CAAAA,CAAuB,CACzB,CAAA,CAAA,EAA+B,CAC7B,IAAMC,CAAAA,CAAQC,8BAAAA,CAA2B,CAAA,CACnCC,CAAAA,CAAiBJ,CAAAA,CAAM,GAAA,CAAKK,CAAAA,EAChCH,CAAAA,CAAM,CAAA,CAAA,EACJN,CAAAA,CAAa,GAAA,CAAI,OAAA,CAAQ,CACvB,OAAA,CAAAG,CAAAA,CACA,QAAA,CAAUM,CAAAA,CAAK,QAAA,CACf,QAAA,CAAUA,CAAAA,CAAK,QACjB,CAAC,CACH,CACF,CAAA,CAEA,MAAM,OAAA,CAAQ,GAAA,CAAID,CAAc,CAClC,CAAA,CACA,GAAGP,CACL,CAAC,CACH,CClFA,uDAKO,SA2ESS,CAAAA,CAAe,CAC7B,MAAA,CAAQV,CAAAA,CACR,GAAGC,CACL,CAAA,CAA0B,CACxB,OAAOC,qCAAAA,CACL,UAAA,CAAY,KAAA,CAAO,CAAE,KAAA,CAAAE,CAAAA,CAAO,QAAA,CAAAO,CAAAA,CAAU,OAAA,CAAAC,CAAQ,CAAA,CAAA,EAAM,CAClD,IAAMC,CAAAA,6BACJF,CAAAA,eAAa,MAAMG,yDAAAA,GAAmC,CA
ElDC,CAAAA,CAA6B,IAAI,KAAA,CAAMX,CAAAA,CAAM,MAAM,CAAA,CACzD,OAAA,MAAM,OAAA,CAAQ,GAAA,CACZA,CAAAA,CAAM,GAAA,CAAI,KAAA,CAAO,CAAE,QAAA,CAAAY,CAAS,CAAA,CAAGC,CAAAA,CAAAA,EAAc,CAC3C,IAAMC,CAAAA,CAAgB,IAAA,CAAK,IAAA,CACzBF,CAAAA,CAAS,MAAA,CAASH,CAAAA,CAAe,MAAA,CAAO,cAC1C,CAAA,CAEMM,CAAAA,CAAkB,MAAMC,0CAAAA,CAC5BP,CACAG,CAAAA,CACA,CACEK,CAAAA,CACAC,CAAAA,CACAC,CAAAA,CAAAA,kBAEAX,CAAAA,0BAAAA,CAAU,CACR,SAAA,CAAAK,CAAAA,CACA,aAAA,CAAAI,CAAAA,CACA,UAAA,CAAAC,CAAAA,CACA,SAAA,CAAAC,CAAAA,CACA,QAAA,CAAUF,CAAAA,CAAgBH,CAC5B,CAAC,GACL,CAAA,CAEAH,CAAAA,CAAQE,CAAS,CAAA,CAAIE,CACvB,CAAC,CACH,CAAA,CAEOJ,CACT,CAAA,CACA,GAAGd,CACL,CAAC,CACH,CCzHA,2CAA+B,SA6EfuB,CAAAA,CAAuB,CACrC,MAAA,CAAQxB,CAAAA,CACR,GAAGC,CACL,CAAA,CAAkC,CAChC,OAAOC,qCAAAA,CACL,UAAA,CAAY,KAAA,CAAO,CACjB,WAAA,CAAAuB,CAAAA,CACA,gBAAA,CAAAC,CAAAA,CACA,OAAA,CAAAzB,CAAAA,CACA,MAAA,CAAQ0B,CACV,CAAA,CAAA,EAAM,CACJ,EAAA,CAAI,CAAA,CAAE,SAAA,GAAaA,CAAAA,CAAAA,CAAa,CAC9B,IAAMC,CAAAA,CAA+BD,CAAAA,CAE/B,CAAE,WAAA,CAAAE,CAAY,CAAA,CAClB,MAAM7B,CAAAA,CAAa,YAAA,CAAa,kBAAA,CAAmB,CACjD,OAAA,CAAS4B,CAAAA,CACT,gBAAA,CAAAF,CAAAA,CACA,OAAA,CAAAzB,CAAAA,CACA,KAAA,CAAOwB,CAAAA,CAAY,GAAA,CAAKK,CAAAA,EAAAA,CAAgB,CACtC,QAAA,CAAUA,CAAAA,CAAW,QAAA,CACrB,QAAA,CAAUA,CAAAA,CAAW,UAAA,CAAW,aAAA,CAChC,cAAA,CAAgBA,CAAAA,CAAW,UAAA,CAAW,gBACxC,CAAA,CAAE,CACJ,CAAC,CAAA,CAEH,OAAOD,CACT,CAEA,GAAM,CAAE,OAAA,CAAA1B,CAAAA,CAAS,wBAAA,CAAA4B,CAAyB,CAAA,CAAIJ,CAAAA,CAkB9C,OAjBoB,MAAMI,CAAAA,CAAyB,CACjD,IAAA,CAAMC,yBAAAA,CAAiB,+BAAA,CAAgC,CACrD,OAAA,CAASC,qBAAAA,CAAe,IAAA,CAAK9B,CAAO,CAAA,CACpC,gBAAA,CAAAuB,CAAAA,CACA,KAAA,CAAOD,CAAAA,CAAY,GAAA,CAAKK,CAAAA,EAAAA,CAAgB,CACtC,QAAA,CAAUA,CAAAA,CAAW,QAAA,CACrB,QAAA,CAAUA,CAAAA,CAAW,UAAA,CAAW,aAAA,CAChC,cAAA,CAAgBA,CAAAA,CAAW,UAAA,CAAW,gBAAA,CACtC,YAAA,CAAcI,6CAAAA,CACZJ,CAAW,UAAA,CAAW,aAAA,kCACtB7B,CAAAA,6BAAS,mBAAA,SAAqBkC,sCAChC,CACF,CAAA,CAAE,CACJ,CAAC,CAAA,CACD,OAAA,iBAASlC,CAAAA,6BAAS,KAAA,6BAAO,SAC3B,CAAC,CAGH,CAAA,CACA,GAAGA,CACL,CAAC,CACH,CChIA,SA+FgBmC,EAAAA,CAAe,CAC7B,MAAA,CAAQpC,CAAAA,CACR,GAAGC,CACL,CAAA,CAA
0B,CACxB,OAAOC,qCAAAA,CACL,UAAA,CAAY,KAAA,CAAO,CACjB,KAAA,CAAAE,CAAAA,CACA,gBAAA,CAAAsB,CAAAA,CACA,OAAA,CAAAzB,CAAAA,CACA,MAAA,CAAQ0B,CAAAA,CACR,oBAAA,CAAAtB,CAAAA,CAAuB,CACzB,CAAA,CAAA,EAA+B,CAC7B,EAAA,CAAM,SAAA,GAAasB,CAAAA,CAYZ,CACL,GAAM,CAAE,OAAA,CAAAxB,CAAAA,CAAS,wBAAA,CAAA4B,CAAyB,CAAA,CAAIJ,CAAAA,CAExCU,CAAAA,kCACJpC,CAAAA,6BAAS,mBAAA,SAAqBkC,sCAAAA,CAE1BG,CAAAA,CAAgB,MAAMtC,CAAAA,CAAa,YAAA,CAAa,QAAA,CAAS,CAC7D,KAAA,CAAO,CACL,SAAA,CAAW,CACT,GAAA,CAAKI,CAAAA,CAAM,GAAA,CAAKK,CAAAA,EACd8B,oCAAAA,CAAgB,OAAA,CAAApC,CAAAA,CAAS,QAAA,CAAUM,CAAAA,CAAK,QAAS,CAAC,CACpD,CACF,CACF,CACF,CAAC,CAAA,CAEK+B,CAAAA,CAAkBpC,CAAAA,CAAM,MAAA,CAC3BK,CAAAA,EACC,CAAC6B,CAAAA,CAAc,IAAA,CACZG,CAAAA,EACCA,CAAAA,CAAa,IAAA,GACbF,oCAAAA,CAAgB,OAAA,CAAApC,CAAAA,CAAS,QAAA,CAAUM,CAAAA,CAAK,QAAS,CAAC,CACtD,CACJ,CAAA,CAEA,EAAA,CAAI+B,CAAAA,CAAgB,MAAA,CAAS,CAAA,CAAG,CAC9B,IAAM7B,CAAAA,CAAW,MAAMG,yDAAAA,CAAmC,CAEpDK,CAAAA,CAAkB,MAAM,OAAA,CAAQ,GAAA,CACpCqB,CAAAA,CAAgB,GAAA,CAAI,MAAO/B,CAAAA,EACzBW,0CAAAA,CAAoBT,CAAUF,CAAAA,CAAK,QAAQ,CAC7C,CACF,CAAA,CAEMiC,CAAAA,CAAiC,MAAMX,CAAAA,CAC3C,CACE,IAAA,CAAMC,yBAAAA,CAAiB,+BAAA,CAAgC,CACrD,OAAA,CAASC,qBAAAA,CAAe,IAAA,CAAK9B,CAAO,CAAA,CACpC,gBAAA,CAAAuB,CAAAA,CACA,KAAA,CAAOc,CAAAA,CAAgB,GAAA,CAAI,CAAC/B,CAAAA,CAAMkC,CAAAA,CAAAA,EAAAA,CAAW,CAC3C,QAAA,CAAUlC,CAAAA,CAAK,QAAA,CACf,QAAA,CAAUA,CAAAA,CAAK,QAAA,CAAS,MAAA,CACxB,cAAA,CAAgBU,CAAAA,CAAgBwB,CAAK,CAAA,CAAE,gBAAA,CACvC,YAAA,CAAcT,6CAAAA,CACZzB,CAAK,QAAA,CAAS,MAAA,CACd4B,CACF,CACF,CAAA,CAAE,CACJ,CAAC,CAAA,CACD,OAAA,iBAASpC,CAAAA,6BAAS,KAAA,6BAAO,SAC3B,CACF,CAAA,CAEA,MAAMD,CAAAA,CAAa,YAAA,CAAa,KAAA,CAAM,kBAAA,CAAmB,CACvD,eAAA,CAAiB0C,CAAAA,CAA+B,IAClD,CAAC,CACH,CAEA,IAAMpC,CAAAA,CAAQC,8BAAAA,CAA2B,CAAA,CACnCC,CAAAA,CAAiBJ,CAAAA,CAAM,GAAA,CAAKK,CAAAA,EAChCH,CAAAA,CAAM,CAAA,CAAA,EACJN,CAAAA,CAAa,GAAA,CAAI,OAAA,CAAQ,CACvB,OAAA,CAAAG,CAAAA,CACA,QAAA,CAAUM,CAAAA,CAAK,QAAA,CACf,QAAA,CAAUA,CAAAA,CAAK,QACjB,CAAC,CACH,CACF,CAAA,CAEA,MAAM,OAAA,CAAQ,GAAA,CAAID,CAAc,CAClC,CAAA,IAlFgC,CAC9B,IAAMoB,CAAAA,CAA+BD,CAAAA,CAEr
C,MAAM3B,CAAAA,CAAa,WAAA,CAAY,CAC7B,KAAA,CAAOI,CAAAA,CAAM,GAAA,CAAI,CAAC,CAAE,QAAA,CAAAY,CAAAA,CAAU,QAAA,CAAA4B,CAAS,CAAA,CAAA,EAAA,CAAO,CAC5C,QAAA,CAAA5B,CAAAA,CACA,QAAA,CAAA4B,CACF,CAAA,CAAE,CAAA,CACF,gBAAA,CAAAlB,CAAAA,CACA,MAAA,CAAQE,CAAAA,CACR,OAAA,CAAA3B,CACF,CAAC,CACH,CAuEF,CAAA,CACA,GAAGA,CACL,CAAC,CACH,CC5LA,IAGa4C,CAAAA,CACXC,CAAAA,EAGG,CACH,eAAA,CACAA,CAAAA,CAAO,OAAA,CACPA,CAAAA,CAAO,OAAA,CAAQ,QAAA,CAAS,CAAA,iBACxBA,CAAAA,qBAAO,UAAA,+BAAY,OAAA,iBACnBA,CAAAA,uBAAO,UAAA,+BAAY,QAAA,CACnBA,CAAAA,CAAO,OAAA,CACPA,CAAAA,CAAO,KACT,CAAA,CA0BO,SAASC,EAAAA,CAAgB,CAC9B,OAAA,CAAA5C,CAAAA,CACA,UAAA,CAAA6C,CAAAA,CACA,OAAA,CAAAC,CAAAA,CACA,KAAA,CAAAC,CAAAA,CACA,MAAA,CAAQlD,CAAAA,CACR,GAAGC,CACL,CAAA,CAA2B,CACzB,OAAOkD,kCAAAA,CACL,QAAA,CAAUN,CAAAA,CAA2B,CACnC,OAAA,CAAS7C,CAAAA,CAAa,MAAA,CAAO,OAAA,CAC7B,OAAA,CAAAG,CAAAA,CACA,UAAA,CAAA6C,CAAAA,CACA,OAAA,CAAAC,CAAAA,CACA,KAAA,CAAAC,CACF,CAAC,CAAA,CACD,OAAA,CAAS,KAAA,CAAA,CAAA,EACP,MAAMlD,CAAAA,CAAa,YAAA,CAAa,eAAA,CAAgB,CAC9C,OAAA,CAAAG,CAAAA,CACA,UAAA,CAAA6C,CAAAA,CACA,OAAA,CAAAC,CAAAA,CACA,KAAA,CAAAC,CACF,CAAC,CAAA,CACH,GAAGjD,CACL,CAAC,CACH,CClEA,IAGamD,CAAAA,CACXN,CAAAA,EAGG,CAAC,eAAA,CAAiBA,CAAAA,CAAO,OAAA,CAASA,CAAAA,CAAO,OAAA,CAAQ,QAAA,CAAS,CAAA,CAAGA,CAAAA,CAAO,IAAI,CAAA,CA0BtE,SAASO,EAAAA,CAAgB,CAC9B,OAAA,CAAAlD,CAAAA,CACA,IAAA,CAAAmD,CAAAA,CACA,MAAA,CAAQtD,CAAAA,CACR,GAAGC,CACL,CAAA,CAA2B,CACzB,OAAOkD,kCAAAA,CACL,QAAA,CAAUC,CAAAA,CAA2B,CACnC,OAAA,CAASpD,CAAAA,CAAa,MAAA,CAAO,OAAA,CAC7B,OAAA,CAAAG,CAAAA,CACA,IAAA,CAAAmD,CACF,CAAC,CAAA,CACD,OAAA,CAAS,KAAA,CAAA,CAAA,8BACN,MAAMtD,CAAAA,CAAa,YAAA,CAAa,eAAA,CAAgB,CAAE,OAAA,CAAAG,CAAAA,CAAS,IAAA,CAAAmD,CAAK,CAAC,CAAA,eAClE,MAAA,CACF,GAAGrD,CACL,CAAC,CACH,CAAA,oQAAA","file":"/data/svc_shelby/actions-runner/shelby/shelby/_work/shelby/shelby/typescript/packages/react/dist/index.cjs","sourcesContent":["import type { AccountAddressInput } from \"@aptos-labs/ts-sdk\";\nimport { useMutation } from \"@tanstack/react-query\";\nimport pLimit from \"p-limit\";\nimport type { 
UseMutationOptionsWithClient } from \"../types/mutations\";\n\nexport type UseCommitBlobsVariables = {\n /**\n * The account to commit the blobs to.\n */\n account: AccountAddressInput;\n /**\n * The blobs to commit.\n */\n blobs: {\n blobName: string;\n blobData: Uint8Array;\n }[];\n /**\n * The maximum number of concurrent uploads.\n * @default 3\n */\n maxConcurrentUploads?: number;\n};\n\nexport type UseCommitBlobsOptions = UseMutationOptionsWithClient<\n void,\n Error,\n UseCommitBlobsVariables\n>;\n\n/**\n * Uploads blobs data to the Shelby RPCs.\n *\n * This mutation uploads multiple blobs data to the RPC endpoint with configurable\n * concurrency control.\n *\n * @example\n * ```tsx\n * import { ShelbyClient } from \"@shelby-protocol/sdk/browser\";\n * import { Network } from \"@aptos-labs/ts-sdk\";\n * import { useCommitBlobs } from \"@shelby-protocol/react\";\n *\n * const shelbyClient = new ShelbyClient({ network: Network.SHELBYNET });\n * const commitBlobs = useCommitBlobs({\n * client: shelbyClient,\n * onSuccess: () => console.log('Blobs committed successfully'),\n * });\n *\n * commitBlobs.mutate({\n * account: '0x123...',\n * blobs: [\n * { blobName: 'file1.txt', blobData: new Uint8Array([...]) },\n * { blobName: 'file2.txt', blobData: new Uint8Array([...]) },\n * ],\n * });\n * ```\n */\nexport function useCommitBlobs({\n client: shelbyClient,\n ...options\n}: UseCommitBlobsOptions) {\n return useMutation({\n mutationFn: async ({\n account,\n blobs,\n maxConcurrentUploads = 3,\n }: UseCommitBlobsVariables) => {\n const limit = pLimit(maxConcurrentUploads);\n const uploadPromises = blobs.map((blob) =>\n limit(() =>\n shelbyClient.rpc.putBlob({\n account,\n blobName: blob.blobName,\n blobData: blob.blobData,\n }),\n ),\n );\n\n await Promise.all(uploadPromises);\n },\n ...options,\n });\n}\n","import {\n type BlobCommitments,\n createDefaultErasureCodingProvider,\n type ErasureCodingProvider,\n generateCommitments,\n} from 
\"@shelby-protocol/sdk/browser\";\nimport { useMutation } from \"@tanstack/react-query\";\nimport type { UseMutationOptionsWithClient } from \"../types/mutations\";\n\nexport type UseEncodeBlobsOnChunkEvent = {\n /**\n * The index of the blob being encoded.\n */\n blobIndex: number;\n /**\n * The index of the chunkset being encoded.\n */\n chunksetIndex: number;\n /**\n * The index of the chunk being encoded.\n */\n chunkIndex: number;\n /**\n * The data of the chunk being encoded.\n */\n chunkData: Uint8Array;\n /**\n * The progress of the encoding.\n */\n progress: number;\n};\n\nexport type UseEncodeBlobsVariables = {\n /**\n * The blobs to encode.\n */\n blobs: { blobData: Uint8Array }[];\n /**\n * The erasure coding provider to use.\n */\n provider?: ErasureCodingProvider;\n /**\n * The callback to call when a chunk is encoded.\n */\n onChunk?: (event: UseEncodeBlobsOnChunkEvent) => void;\n};\n\nexport type UseEncodeBlobsOptions = UseMutationOptionsWithClient<\n BlobCommitments[],\n Error,\n UseEncodeBlobsVariables\n>;\n\n/**\n * Encodes blobs using erasure coding.\n *\n * This mutation generates blob commitments (merkle roots and erasure coding chunks)\n * from raw blob data. 
It supports custom erasure coding providers and progress callbacks\n * for tracking encoding progress.\n *\n * @example\n * ```tsx\n * import { ShelbyClient } from \"@shelby-protocol/sdk/browser\";\n * import { Network } from \"@aptos-labs/ts-sdk\";\n * import { useEncodeBlobs } from \"@shelby-protocol/react\";\n *\n * const shelbyClient = new ShelbyClient({ network: Network.SHELBYNET });\n * const encodeBlobs = useEncodeBlobs({\n * client: shelbyClient,\n * onSuccess: (commitments) => console.log('Encoded', commitments.length, 'blobs'),\n * });\n *\n * encodeBlobs.mutate({\n * blobs: [{ blobData: new Uint8Array([...]) }],\n * onChunk: ({ blobIndex, progress }) => {\n * console.log(`Blob ${blobIndex}: ${(progress * 100).toFixed(1)}%`);\n * },\n * });\n * ```\n */\nexport function useEncodeBlobs({\n client: shelbyClient,\n ...options\n}: UseEncodeBlobsOptions) {\n return useMutation({\n mutationFn: async ({ blobs, provider, onChunk }) => {\n const activeProvider =\n provider ?? (await createDefaultErasureCodingProvider());\n\n const results: BlobCommitments[] = new Array(blobs.length);\n await Promise.all(\n blobs.map(async ({ blobData }, blobIndex) => {\n const chunksetCount = Math.ceil(\n blobData.length / activeProvider.config.chunkSizeBytes,\n );\n\n const blobCommitments = await generateCommitments(\n activeProvider,\n blobData,\n (\n chunksetIndex: number,\n chunkIndex: number,\n chunkData: Uint8Array,\n ) =>\n onChunk?.({\n blobIndex,\n chunksetIndex,\n chunkIndex,\n chunkData,\n progress: chunksetIndex / chunksetCount,\n }),\n );\n\n results[blobIndex] = blobCommitments;\n }),\n );\n\n return results;\n },\n ...options,\n });\n}\n","import { AccountAddress } from \"@aptos-labs/ts-sdk\";\nimport {\n type BlobCommitments,\n DEFAULT_CHUNKSET_SIZE_BYTES,\n expectedTotalChunksets,\n ShelbyBlobClient,\n type UploadOptions,\n} from \"@shelby-protocol/sdk/browser\";\nimport { useMutation } from \"@tanstack/react-query\";\nimport type { UseMutationOptionsWithClient } 
from \"../types/mutations\";\nimport type { AccountSigner, Signer } from \"../types/signers\";\n\nexport type UseRegisterCommitmentsVariables = {\n /**\n * The signer to use for the transaction.\n * @see {@link Signer}\n *\n * @example\n * ```tsx\n * const signer = new Account.generate();\n * registerCommitments.mutate({\n * signer,\n * commitments: [\n * { blobName: 'file1.txt', commitment: blobCommitments[0] },\n * ],\n * });\n * ```\n */\n signer: Signer;\n /**\n * The commitments to register.\n */\n commitments: { blobName: string; commitment: BlobCommitments }[];\n /**\n * The expiration time of the commitments in microseconds.\n */\n expirationMicros: number;\n /**\n * Optional transaction building options.\n */\n options?: UploadOptions;\n};\n\nexport type UseRegisterCommitmentsOptions = UseMutationOptionsWithClient<\n { hash: string },\n Error,\n UseRegisterCommitmentsVariables\n>;\n\n/**\n * Registers blob commitments on-chain.\n *\n * This mutation registers blob commitments (merkle roots) on the Aptos blockchain\n * as part of the blob upload process. 
It supports both account signers and wallet adapter\n * signers, and handles batch registration of multiple blobs.\n *\n * @example\n * ```tsx\n * import { ShelbyClient } from \"@shelby-protocol/sdk/browser\";\n * import { Network } from \"@aptos-labs/ts-sdk\";\n * import { useRegisterCommitments } from \"@shelby-protocol/react\";\n *\n * const shelbyClient = new ShelbyClient({ network: Network.SHELBYNET });\n * const registerCommitments = useRegisterCommitments({\n * client: shelbyClient,\n * onSuccess: ({ hash }) => console.log('Transaction hash:', hash),\n * });\n *\n * registerCommitments.mutate({\n * signer: accountSigner,\n * commitments: [\n * { blobName: 'file1.txt', commitment: blobCommitments[0] },\n * ],\n * expirationMicros: Date.now() * 1000 + 86400000000, // 1 day\n * });\n * ```\n */\nexport function useRegisterCommitments({\n client: shelbyClient,\n ...options\n}: UseRegisterCommitmentsOptions) {\n return useMutation({\n mutationFn: async ({\n commitments,\n expirationMicros,\n options,\n signer: signerOrFn,\n }) => {\n if (!(\"account\" in signerOrFn)) {\n const accountSigner: AccountSigner = signerOrFn;\n\n const { transaction } =\n await shelbyClient.coordination.batchRegisterBlobs({\n account: accountSigner,\n expirationMicros,\n options,\n blobs: commitments.map((commitment) => ({\n blobName: commitment.blobName,\n blobSize: commitment.commitment.raw_data_size,\n blobMerkleRoot: commitment.commitment.blob_merkle_root,\n })),\n });\n\n return transaction;\n }\n\n const { account, signAndSubmitTransaction } = signerOrFn;\n const transaction = await signAndSubmitTransaction({\n data: ShelbyBlobClient.createBatchRegisterBlobsPayload({\n account: AccountAddress.from(account),\n expirationMicros,\n blobs: commitments.map((commitment) => ({\n blobName: commitment.blobName,\n blobSize: commitment.commitment.raw_data_size,\n blobMerkleRoot: commitment.commitment.blob_merkle_root,\n numChunksets: expectedTotalChunksets(\n 
commitment.commitment.raw_data_size,\n options?.chunksetSizeBytes ?? DEFAULT_CHUNKSET_SIZE_BYTES,\n ),\n })),\n }),\n options: options?.build?.options,\n });\n\n return transaction;\n },\n ...options,\n });\n}\n","import { AccountAddress } from \"@aptos-labs/ts-sdk\";\nimport {\n createBlobKey,\n createDefaultErasureCodingProvider,\n DEFAULT_CHUNKSET_SIZE_BYTES,\n expectedTotalChunksets,\n generateCommitments,\n ShelbyBlobClient,\n type UploadOptions,\n} from \"@shelby-protocol/sdk/browser\";\nimport { useMutation } from \"@tanstack/react-query\";\nimport pLimit from \"p-limit\";\nimport type { UseMutationOptionsWithClient } from \"../types/mutations\";\nimport type { AccountSigner, Signer } from \"../types/signers\";\n\nexport type UseUploadBlobsVariables = {\n /**\n * The signer to use for the transaction.\n *\n * @see {@link Signer}\n *\n * @example\n * ```tsx\n * const signer = new Account.generate();\n * uploadBlobs.mutate({\n * signer,\n * blobs: [\n * { blobName: 'file1.txt', blobData: new Uint8Array([...]) },\n * ],\n * expirationMicros: Date.now() * 1000 + 86400000000,\n * });\n * ```\n */\n signer: Signer;\n /**\n * The blobs to upload.\n */\n blobs: {\n blobName: string;\n blobData: Uint8Array;\n }[];\n /**\n * The expiration time of the blobs in microseconds.\n */\n expirationMicros: number;\n /**\n * Optional transaction building options.\n */\n options?: UploadOptions;\n /**\n * The maximum number of concurrent uploads.\n * @default 3\n */\n maxConcurrentUploads?: number;\n};\n\nexport type UseUploadBlobsOptions = UseMutationOptionsWithClient<\n void,\n Error,\n UseUploadBlobsVariables\n>;\n\n/**\n * Uploads blobs to the Shelby network.\n *\n * This mutation handles the complete blob upload process including:\n * - Encoding blobs with erasure coding\n * - Registering commitments on-chain (if not already registered)\n * - Uploading blob data to the RPC endpoint\n *\n * It supports both account signers and wallet adapter signers, and includes\n * logic 
to skip registration for blobs that already exist.\n *\n * @example\n * ```tsx\n * import { ShelbyClient } from \"@shelby-protocol/sdk/browser\";\n * import { Network } from \"@aptos-labs/ts-sdk\";\n * import { useUploadBlobs } from \"@shelby-protocol/react\";\n *\n * const shelbyClient = new ShelbyClient({ network: Network.SHELBYNET });\n * const uploadBlobs = useUploadBlobs({\n * client: shelbyClient,\n * onSuccess: () => console.log('Upload complete'),\n * });\n *\n * const signer = new Account.generate();\n * uploadBlobs.mutate({\n * signer,\n * blobs: [\n * { blobName: 'file1.txt', blobData: new Uint8Array([...]) },\n * ],\n * expirationMicros: Date.now() * 1000 + 86400000000,\n * });\n * ```\n */\nexport function useUploadBlobs({\n client: shelbyClient,\n ...options\n}: UseUploadBlobsOptions) {\n return useMutation({\n mutationFn: async ({\n blobs,\n expirationMicros,\n options,\n signer: signerOrFn,\n maxConcurrentUploads = 3,\n }: UseUploadBlobsVariables) => {\n if (!(\"account\" in signerOrFn)) {\n const accountSigner: AccountSigner = signerOrFn;\n\n await shelbyClient.batchUpload({\n blobs: blobs.map(({ blobData, blobName }) => ({\n blobData,\n blobName,\n })),\n expirationMicros,\n signer: accountSigner,\n options,\n });\n } else {\n const { account, signAndSubmitTransaction } = signerOrFn;\n\n const chunksetSize =\n options?.chunksetSizeBytes ?? 
DEFAULT_CHUNKSET_SIZE_BYTES;\n\n const existingBlobs = await shelbyClient.coordination.getBlobs({\n where: {\n blob_name: {\n _in: blobs.map((blob) =>\n createBlobKey({ account, blobName: blob.blobName }),\n ),\n },\n },\n });\n\n const blobsToRegister = blobs.filter(\n (blob) =>\n !existingBlobs.some(\n (existingBlob) =>\n existingBlob.name ===\n createBlobKey({ account, blobName: blob.blobName }),\n ),\n );\n\n if (blobsToRegister.length > 0) {\n const provider = await createDefaultErasureCodingProvider();\n\n const blobCommitments = await Promise.all(\n blobsToRegister.map(async (blob) =>\n generateCommitments(provider, blob.blobData),\n ),\n );\n\n const pendingRegisterBlobTransaction = await signAndSubmitTransaction(\n {\n data: ShelbyBlobClient.createBatchRegisterBlobsPayload({\n account: AccountAddress.from(account),\n expirationMicros,\n blobs: blobsToRegister.map((blob, index) => ({\n blobName: blob.blobName,\n blobSize: blob.blobData.length,\n blobMerkleRoot: blobCommitments[index].blob_merkle_root,\n numChunksets: expectedTotalChunksets(\n blob.blobData.length,\n chunksetSize,\n ),\n })),\n }),\n options: options?.build?.options,\n },\n );\n\n await shelbyClient.coordination.aptos.waitForTransaction({\n transactionHash: pendingRegisterBlobTransaction.hash,\n });\n }\n\n const limit = pLimit(maxConcurrentUploads);\n const uploadPromises = blobs.map((blob) =>\n limit(() =>\n shelbyClient.rpc.putBlob({\n account,\n blobName: blob.blobName,\n blobData: blob.blobData,\n }),\n ),\n );\n\n await Promise.all(uploadPromises);\n }\n },\n ...options,\n });\n}\n","import type { Network } from \"@aptos-labs/ts-sdk\";\nimport type {\n BlobMetadata,\n ShelbyBlobClient,\n} from \"@shelby-protocol/sdk/browser\";\nimport { useQuery } from \"@tanstack/react-query\";\nimport type { UseQueryOptionsWithClient } from \"../types/queries\";\n\nexport const getUseAccountBlobsQueryKey = (\n params: Parameters<ShelbyBlobClient[\"getAccountBlobs\"]>[0] & {\n network: Network;\n 
},\n) => [\n \"account-blobs\",\n params.network,\n params.account.toString(),\n params.pagination?.limit,\n params.pagination?.offset,\n params.orderBy,\n params.where,\n];\n\nexport type UseAccountBlobsOptions = UseQueryOptionsWithClient<BlobMetadata[]> &\n Parameters<ShelbyBlobClient[\"getAccountBlobs\"]>[0];\n\n/**\n * Queries blobs associated with an account.\n *\n * This query fetches blob metadata for a specific account with support for\n * pagination, filtering, and ordering.\n *\n * @example\n * ```tsx\n * import { ShelbyClient, Order_By} from \"@shelby-protocol/sdk/browser\";\n * import { Network } from \"@aptos-labs/ts-sdk\";\n * import { useAccountBlobs } from \"@shelby-protocol/react\";\n *\n * const shelbyClient = new ShelbyClient({ network: Network.SHELBYNET });\n * const { data: blobs, isLoading } = useAccountBlobs({\n * client: shelbyClient,\n * account: '0x123...',\n * pagination: { limit: 10, offset: 0 },\n * orderBy: { updated_at: Order_By.Desc },\n * });\n * ```\n */\nexport function useAccountBlobs({\n account,\n pagination,\n orderBy,\n where,\n client: shelbyClient,\n ...options\n}: UseAccountBlobsOptions) {\n return useQuery<BlobMetadata[]>({\n queryKey: getUseAccountBlobsQueryKey({\n network: shelbyClient.config.network,\n account,\n pagination,\n orderBy,\n where,\n }),\n queryFn: async () =>\n await shelbyClient.coordination.getAccountBlobs({\n account,\n pagination,\n orderBy,\n where,\n }),\n ...options,\n });\n}\n","import type { Network } from \"@aptos-labs/ts-sdk\";\nimport type {\n BlobMetadata,\n ShelbyBlobClient,\n} from \"@shelby-protocol/sdk/browser\";\nimport { useQuery } from \"@tanstack/react-query\";\nimport type { UseQueryOptionsWithClient } from \"../types/queries\";\n\nexport const getUseBlobMetadataQueryKey = (\n params: Parameters<ShelbyBlobClient[\"getBlobMetadata\"]>[0] & {\n network: Network;\n },\n) => [\"blob-metadata\", params.network, params.account.toString(), params.name];\n\nexport type UseBlobMetadataOptions 
=\n UseQueryOptionsWithClient<BlobMetadata | null> &\n Parameters<ShelbyBlobClient[\"getBlobMetadata\"]>[0];\n\n/**\n * Queries the metadata for a specific blob.\n *\n * This query fetches the metadata for a single blob identified by account and blob name.\n * Returns `null` if the blob is not found.\n *\n * @example\n * ```tsx\n * import { ShelbyClient } from \"@shelby-protocol/sdk/browser\";\n * import { Network } from \"@aptos-labs/ts-sdk\";\n * import { useBlobMetadata } from \"@shelby-protocol/react\";\n *\n * const shelbyClient = new ShelbyClient({ network: Network.SHELBYNET });\n * const { data: metadata } = useBlobMetadata({\n * client: shelbyClient,\n * account: '0x123...',\n * name: 'file1.txt',\n * });\n * ```\n */\nexport function useBlobMetadata({\n account,\n name,\n client: shelbyClient,\n ...options\n}: UseBlobMetadataOptions) {\n return useQuery<BlobMetadata | null>({\n queryKey: getUseBlobMetadataQueryKey({\n network: shelbyClient.config.network,\n account,\n name,\n }),\n queryFn: async () =>\n (await shelbyClient.coordination.getBlobMetadata({ account, name })) ??\n null,\n ...options,\n });\n}\n"]}
@@ -0,0 +1,3 @@
1
+ export * from "./mutations/index";
2
+ export * from "./queries/index";
3
+ //# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,mBAAmB,CAAC;AAClC,cAAc,iBAAiB,CAAC"}
package/dist/index.js ADDED
@@ -0,0 +1,2 @@
1
+ import{useMutation as C}from"@tanstack/react-query";import h from"p-limit";function I({client:t,...s}){return C({mutationFn:async({account:o,blobs:e,maxConcurrentUploads:n=3})=>{let r=h(n),m=e.map(i=>r(()=>t.rpc.putBlob({account:o,blobName:i.blobName,blobData:i.blobData})));await Promise.all(m)},...s})}import{createDefaultErasureCodingProvider as k,generateCommitments as w}from"@shelby-protocol/sdk/browser";import{useMutation as S}from"@tanstack/react-query";function Z({client:t,...s}){return S({mutationFn:async({blobs:o,provider:e,onChunk:n})=>{let r=e??await k(),m=new Array(o.length);return await Promise.all(o.map(async({blobData:i},p)=>{let a=Math.ceil(i.length/r.config.chunkSizeBytes),c=await w(r,i,(l,d,y)=>n?.({blobIndex:p,chunksetIndex:l,chunkIndex:d,chunkData:y,progress:l/a}));m[p]=c})),m},...s})}import{AccountAddress as x}from"@aptos-labs/ts-sdk";import{DEFAULT_CHUNKSET_SIZE_BYTES as A,expectedTotalChunksets as M,ShelbyBlobClient as O}from"@shelby-protocol/sdk/browser";import{useMutation as E}from"@tanstack/react-query";function $({client:t,...s}){return E({mutationFn:async({commitments:o,expirationMicros:e,options:n,signer:r})=>{if(!("account"in r)){let a=r,{transaction:c}=await t.coordination.batchRegisterBlobs({account:a,expirationMicros:e,options:n,blobs:o.map(l=>({blobName:l.blobName,blobSize:l.commitment.raw_data_size,blobMerkleRoot:l.commitment.blob_merkle_root}))});return c}let{account:m,signAndSubmitTransaction:i}=r;return await i({data:O.createBatchRegisterBlobsPayload({account:x.from(m),expirationMicros:e,blobs:o.map(a=>({blobName:a.blobName,blobSize:a.commitment.raw_data_size,blobMerkleRoot:a.commitment.blob_merkle_root,numChunksets:M(a.commitment.raw_data_size,n?.chunksetSizeBytes??A)}))}),options:n?.build?.options})},...s})}import{AccountAddress as N}from"@aptos-labs/ts-sdk";import{createBlobKey as U,createDefaultErasureCodingProvider as _,DEFAULT_CHUNKSET_SIZE_BYTES as P,expectedTotalChunksets as v,generateCommitments as D,ShelbyBlobClient 
as R}from"@shelby-protocol/sdk/browser";import{useMutation as T}from"@tanstack/react-query";import W from"p-limit";function it({client:t,...s}){return T({mutationFn:async({blobs:o,expirationMicros:e,options:n,signer:r,maxConcurrentUploads:m=3})=>{if("account"in r){let{account:i,signAndSubmitTransaction:p}=r,a=n?.chunksetSizeBytes??P,c=await t.coordination.getBlobs({where:{blob_name:{_in:o.map(b=>U({account:i,blobName:b.blobName}))}}}),l=o.filter(b=>!c.some(B=>B.name===U({account:i,blobName:b.blobName})));if(l.length>0){let b=await _(),B=await Promise.all(l.map(async u=>D(b,u.blobData))),g=await p({data:R.createBatchRegisterBlobsPayload({account:N.from(i),expirationMicros:e,blobs:l.map((u,f)=>({blobName:u.blobName,blobSize:u.blobData.length,blobMerkleRoot:B[f].blob_merkle_root,numChunksets:v(u.blobData.length,a)}))}),options:n?.build?.options});await t.coordination.aptos.waitForTransaction({transactionHash:g.hash})}let d=W(m),y=o.map(b=>d(()=>t.rpc.putBlob({account:i,blobName:b.blobName,blobData:b.blobData})));await Promise.all(y)}else{let i=r;await t.batchUpload({blobs:o.map(({blobData:p,blobName:a})=>({blobData:p,blobName:a})),expirationMicros:e,signer:i,options:n})}},...s})}import{useQuery as z}from"@tanstack/react-query";var V=t=>["account-blobs",t.network,t.account.toString(),t.pagination?.limit,t.pagination?.offset,t.orderBy,t.where];function ut({account:t,pagination:s,orderBy:o,where:e,client:n,...r}){return z({queryKey:V({network:n.config.network,account:t,pagination:s,orderBy:o,where:e}),queryFn:async()=>await n.coordination.getAccountBlobs({account:t,pagination:s,orderBy:o,where:e}),...r})}import{useQuery as Q}from"@tanstack/react-query";var K=t=>["blob-metadata",t.network,t.account.toString(),t.name];function Bt({account:t,name:s,client:o,...e}){return Q({queryKey:K({network:o.config.network,account:t,name:s}),queryFn:async()=>await o.coordination.getBlobMetadata({account:t,name:s})??null,...e})}export{V as getUseAccountBlobsQueryKey,K as 
getUseBlobMetadataQueryKey,ut as useAccountBlobs,Bt as useBlobMetadata,I as useCommitBlobs,Z as useEncodeBlobs,$ as useRegisterCommitments,it as useUploadBlobs};
2
+ //# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/mutations/useCommitBlobs.tsx","../src/mutations/useEncodeBlobs.tsx","../src/mutations/useRegisterCommitments.tsx","../src/mutations/useUploadBlobs.tsx","../src/queries/useAccountBlobs.tsx","../src/queries/useBlobMetadata.tsx"],"sourcesContent":["import type { AccountAddressInput } from \"@aptos-labs/ts-sdk\";\nimport { useMutation } from \"@tanstack/react-query\";\nimport pLimit from \"p-limit\";\nimport type { UseMutationOptionsWithClient } from \"../types/mutations\";\n\nexport type UseCommitBlobsVariables = {\n /**\n * The account to commit the blobs to.\n */\n account: AccountAddressInput;\n /**\n * The blobs to commit.\n */\n blobs: {\n blobName: string;\n blobData: Uint8Array;\n }[];\n /**\n * The maximum number of concurrent uploads.\n * @default 3\n */\n maxConcurrentUploads?: number;\n};\n\nexport type UseCommitBlobsOptions = UseMutationOptionsWithClient<\n void,\n Error,\n UseCommitBlobsVariables\n>;\n\n/**\n * Uploads blobs data to the Shelby RPCs.\n *\n * This mutation uploads multiple blobs data to the RPC endpoint with configurable\n * concurrency control.\n *\n * @example\n * ```tsx\n * import { ShelbyClient } from \"@shelby-protocol/sdk/browser\";\n * import { Network } from \"@aptos-labs/ts-sdk\";\n * import { useCommitBlobs } from \"@shelby-protocol/react\";\n *\n * const shelbyClient = new ShelbyClient({ network: Network.SHELBYNET });\n * const commitBlobs = useCommitBlobs({\n * client: shelbyClient,\n * onSuccess: () => console.log('Blobs committed successfully'),\n * });\n *\n * commitBlobs.mutate({\n * account: '0x123...',\n * blobs: [\n * { blobName: 'file1.txt', blobData: new Uint8Array([...]) },\n * { blobName: 'file2.txt', blobData: new Uint8Array([...]) },\n * ],\n * });\n * ```\n */\nexport function useCommitBlobs({\n client: shelbyClient,\n ...options\n}: UseCommitBlobsOptions) {\n return useMutation({\n mutationFn: async ({\n account,\n blobs,\n maxConcurrentUploads = 3,\n }: UseCommitBlobsVariables) 
=> {\n const limit = pLimit(maxConcurrentUploads);\n const uploadPromises = blobs.map((blob) =>\n limit(() =>\n shelbyClient.rpc.putBlob({\n account,\n blobName: blob.blobName,\n blobData: blob.blobData,\n }),\n ),\n );\n\n await Promise.all(uploadPromises);\n },\n ...options,\n });\n}\n","import {\n type BlobCommitments,\n createDefaultErasureCodingProvider,\n type ErasureCodingProvider,\n generateCommitments,\n} from \"@shelby-protocol/sdk/browser\";\nimport { useMutation } from \"@tanstack/react-query\";\nimport type { UseMutationOptionsWithClient } from \"../types/mutations\";\n\nexport type UseEncodeBlobsOnChunkEvent = {\n /**\n * The index of the blob being encoded.\n */\n blobIndex: number;\n /**\n * The index of the chunkset being encoded.\n */\n chunksetIndex: number;\n /**\n * The index of the chunk being encoded.\n */\n chunkIndex: number;\n /**\n * The data of the chunk being encoded.\n */\n chunkData: Uint8Array;\n /**\n * The progress of the encoding.\n */\n progress: number;\n};\n\nexport type UseEncodeBlobsVariables = {\n /**\n * The blobs to encode.\n */\n blobs: { blobData: Uint8Array }[];\n /**\n * The erasure coding provider to use.\n */\n provider?: ErasureCodingProvider;\n /**\n * The callback to call when a chunk is encoded.\n */\n onChunk?: (event: UseEncodeBlobsOnChunkEvent) => void;\n};\n\nexport type UseEncodeBlobsOptions = UseMutationOptionsWithClient<\n BlobCommitments[],\n Error,\n UseEncodeBlobsVariables\n>;\n\n/**\n * Encodes blobs using erasure coding.\n *\n * This mutation generates blob commitments (merkle roots and erasure coding chunks)\n * from raw blob data. 
It supports custom erasure coding providers and progress callbacks\n * for tracking encoding progress.\n *\n * @example\n * ```tsx\n * import { ShelbyClient } from \"@shelby-protocol/sdk/browser\";\n * import { Network } from \"@aptos-labs/ts-sdk\";\n * import { useEncodeBlobs } from \"@shelby-protocol/react\";\n *\n * const shelbyClient = new ShelbyClient({ network: Network.SHELBYNET });\n * const encodeBlobs = useEncodeBlobs({\n * client: shelbyClient,\n * onSuccess: (commitments) => console.log('Encoded', commitments.length, 'blobs'),\n * });\n *\n * encodeBlobs.mutate({\n * blobs: [{ blobData: new Uint8Array([...]) }],\n * onChunk: ({ blobIndex, progress }) => {\n * console.log(`Blob ${blobIndex}: ${(progress * 100).toFixed(1)}%`);\n * },\n * });\n * ```\n */\nexport function useEncodeBlobs({\n client: shelbyClient,\n ...options\n}: UseEncodeBlobsOptions) {\n return useMutation({\n mutationFn: async ({ blobs, provider, onChunk }) => {\n const activeProvider =\n provider ?? (await createDefaultErasureCodingProvider());\n\n const results: BlobCommitments[] = new Array(blobs.length);\n await Promise.all(\n blobs.map(async ({ blobData }, blobIndex) => {\n const chunksetCount = Math.ceil(\n blobData.length / activeProvider.config.chunkSizeBytes,\n );\n\n const blobCommitments = await generateCommitments(\n activeProvider,\n blobData,\n (\n chunksetIndex: number,\n chunkIndex: number,\n chunkData: Uint8Array,\n ) =>\n onChunk?.({\n blobIndex,\n chunksetIndex,\n chunkIndex,\n chunkData,\n progress: chunksetIndex / chunksetCount,\n }),\n );\n\n results[blobIndex] = blobCommitments;\n }),\n );\n\n return results;\n },\n ...options,\n });\n}\n","import { AccountAddress } from \"@aptos-labs/ts-sdk\";\nimport {\n type BlobCommitments,\n DEFAULT_CHUNKSET_SIZE_BYTES,\n expectedTotalChunksets,\n ShelbyBlobClient,\n type UploadOptions,\n} from \"@shelby-protocol/sdk/browser\";\nimport { useMutation } from \"@tanstack/react-query\";\nimport type { UseMutationOptionsWithClient } 
from \"../types/mutations\";\nimport type { AccountSigner, Signer } from \"../types/signers\";\n\nexport type UseRegisterCommitmentsVariables = {\n /**\n * The signer to use for the transaction.\n * @see {@link Signer}\n *\n * @example\n * ```tsx\n * const signer = new Account.generate();\n * registerCommitments.mutate({\n * signer,\n * commitments: [\n * { blobName: 'file1.txt', commitment: blobCommitments[0] },\n * ],\n * });\n * ```\n */\n signer: Signer;\n /**\n * The commitments to register.\n */\n commitments: { blobName: string; commitment: BlobCommitments }[];\n /**\n * The expiration time of the commitments in microseconds.\n */\n expirationMicros: number;\n /**\n * Optional transaction building options.\n */\n options?: UploadOptions;\n};\n\nexport type UseRegisterCommitmentsOptions = UseMutationOptionsWithClient<\n { hash: string },\n Error,\n UseRegisterCommitmentsVariables\n>;\n\n/**\n * Registers blob commitments on-chain.\n *\n * This mutation registers blob commitments (merkle roots) on the Aptos blockchain\n * as part of the blob upload process. 
It supports both account signers and wallet adapter\n * signers, and handles batch registration of multiple blobs.\n *\n * @example\n * ```tsx\n * import { ShelbyClient } from \"@shelby-protocol/sdk/browser\";\n * import { Network } from \"@aptos-labs/ts-sdk\";\n * import { useRegisterCommitments } from \"@shelby-protocol/react\";\n *\n * const shelbyClient = new ShelbyClient({ network: Network.SHELBYNET });\n * const registerCommitments = useRegisterCommitments({\n * client: shelbyClient,\n * onSuccess: ({ hash }) => console.log('Transaction hash:', hash),\n * });\n *\n * registerCommitments.mutate({\n * signer: accountSigner,\n * commitments: [\n * { blobName: 'file1.txt', commitment: blobCommitments[0] },\n * ],\n * expirationMicros: Date.now() * 1000 + 86400000000, // 1 day\n * });\n * ```\n */\nexport function useRegisterCommitments({\n client: shelbyClient,\n ...options\n}: UseRegisterCommitmentsOptions) {\n return useMutation({\n mutationFn: async ({\n commitments,\n expirationMicros,\n options,\n signer: signerOrFn,\n }) => {\n if (!(\"account\" in signerOrFn)) {\n const accountSigner: AccountSigner = signerOrFn;\n\n const { transaction } =\n await shelbyClient.coordination.batchRegisterBlobs({\n account: accountSigner,\n expirationMicros,\n options,\n blobs: commitments.map((commitment) => ({\n blobName: commitment.blobName,\n blobSize: commitment.commitment.raw_data_size,\n blobMerkleRoot: commitment.commitment.blob_merkle_root,\n })),\n });\n\n return transaction;\n }\n\n const { account, signAndSubmitTransaction } = signerOrFn;\n const transaction = await signAndSubmitTransaction({\n data: ShelbyBlobClient.createBatchRegisterBlobsPayload({\n account: AccountAddress.from(account),\n expirationMicros,\n blobs: commitments.map((commitment) => ({\n blobName: commitment.blobName,\n blobSize: commitment.commitment.raw_data_size,\n blobMerkleRoot: commitment.commitment.blob_merkle_root,\n numChunksets: expectedTotalChunksets(\n 
commitment.commitment.raw_data_size,\n options?.chunksetSizeBytes ?? DEFAULT_CHUNKSET_SIZE_BYTES,\n ),\n })),\n }),\n options: options?.build?.options,\n });\n\n return transaction;\n },\n ...options,\n });\n}\n","import { AccountAddress } from \"@aptos-labs/ts-sdk\";\nimport {\n createBlobKey,\n createDefaultErasureCodingProvider,\n DEFAULT_CHUNKSET_SIZE_BYTES,\n expectedTotalChunksets,\n generateCommitments,\n ShelbyBlobClient,\n type UploadOptions,\n} from \"@shelby-protocol/sdk/browser\";\nimport { useMutation } from \"@tanstack/react-query\";\nimport pLimit from \"p-limit\";\nimport type { UseMutationOptionsWithClient } from \"../types/mutations\";\nimport type { AccountSigner, Signer } from \"../types/signers\";\n\nexport type UseUploadBlobsVariables = {\n /**\n * The signer to use for the transaction.\n *\n * @see {@link Signer}\n *\n * @example\n * ```tsx\n * const signer = new Account.generate();\n * uploadBlobs.mutate({\n * signer,\n * blobs: [\n * { blobName: 'file1.txt', blobData: new Uint8Array([...]) },\n * ],\n * expirationMicros: Date.now() * 1000 + 86400000000,\n * });\n * ```\n */\n signer: Signer;\n /**\n * The blobs to upload.\n */\n blobs: {\n blobName: string;\n blobData: Uint8Array;\n }[];\n /**\n * The expiration time of the blobs in microseconds.\n */\n expirationMicros: number;\n /**\n * Optional transaction building options.\n */\n options?: UploadOptions;\n /**\n * The maximum number of concurrent uploads.\n * @default 3\n */\n maxConcurrentUploads?: number;\n};\n\nexport type UseUploadBlobsOptions = UseMutationOptionsWithClient<\n void,\n Error,\n UseUploadBlobsVariables\n>;\n\n/**\n * Uploads blobs to the Shelby network.\n *\n * This mutation handles the complete blob upload process including:\n * - Encoding blobs with erasure coding\n * - Registering commitments on-chain (if not already registered)\n * - Uploading blob data to the RPC endpoint\n *\n * It supports both account signers and wallet adapter signers, and includes\n * logic 
to skip registration for blobs that already exist.\n *\n * @example\n * ```tsx\n * import { ShelbyClient } from \"@shelby-protocol/sdk/browser\";\n * import { Network } from \"@aptos-labs/ts-sdk\";\n * import { useUploadBlobs } from \"@shelby-protocol/react\";\n *\n * const shelbyClient = new ShelbyClient({ network: Network.SHELBYNET });\n * const uploadBlobs = useUploadBlobs({\n * client: shelbyClient,\n * onSuccess: () => console.log('Upload complete'),\n * });\n *\n * const signer = new Account.generate();\n * uploadBlobs.mutate({\n * signer,\n * blobs: [\n * { blobName: 'file1.txt', blobData: new Uint8Array([...]) },\n * ],\n * expirationMicros: Date.now() * 1000 + 86400000000,\n * });\n * ```\n */\nexport function useUploadBlobs({\n client: shelbyClient,\n ...options\n}: UseUploadBlobsOptions) {\n return useMutation({\n mutationFn: async ({\n blobs,\n expirationMicros,\n options,\n signer: signerOrFn,\n maxConcurrentUploads = 3,\n }: UseUploadBlobsVariables) => {\n if (!(\"account\" in signerOrFn)) {\n const accountSigner: AccountSigner = signerOrFn;\n\n await shelbyClient.batchUpload({\n blobs: blobs.map(({ blobData, blobName }) => ({\n blobData,\n blobName,\n })),\n expirationMicros,\n signer: accountSigner,\n options,\n });\n } else {\n const { account, signAndSubmitTransaction } = signerOrFn;\n\n const chunksetSize =\n options?.chunksetSizeBytes ?? 
DEFAULT_CHUNKSET_SIZE_BYTES;\n\n const existingBlobs = await shelbyClient.coordination.getBlobs({\n where: {\n blob_name: {\n _in: blobs.map((blob) =>\n createBlobKey({ account, blobName: blob.blobName }),\n ),\n },\n },\n });\n\n const blobsToRegister = blobs.filter(\n (blob) =>\n !existingBlobs.some(\n (existingBlob) =>\n existingBlob.name ===\n createBlobKey({ account, blobName: blob.blobName }),\n ),\n );\n\n if (blobsToRegister.length > 0) {\n const provider = await createDefaultErasureCodingProvider();\n\n const blobCommitments = await Promise.all(\n blobsToRegister.map(async (blob) =>\n generateCommitments(provider, blob.blobData),\n ),\n );\n\n const pendingRegisterBlobTransaction = await signAndSubmitTransaction(\n {\n data: ShelbyBlobClient.createBatchRegisterBlobsPayload({\n account: AccountAddress.from(account),\n expirationMicros,\n blobs: blobsToRegister.map((blob, index) => ({\n blobName: blob.blobName,\n blobSize: blob.blobData.length,\n blobMerkleRoot: blobCommitments[index].blob_merkle_root,\n numChunksets: expectedTotalChunksets(\n blob.blobData.length,\n chunksetSize,\n ),\n })),\n }),\n options: options?.build?.options,\n },\n );\n\n await shelbyClient.coordination.aptos.waitForTransaction({\n transactionHash: pendingRegisterBlobTransaction.hash,\n });\n }\n\n const limit = pLimit(maxConcurrentUploads);\n const uploadPromises = blobs.map((blob) =>\n limit(() =>\n shelbyClient.rpc.putBlob({\n account,\n blobName: blob.blobName,\n blobData: blob.blobData,\n }),\n ),\n );\n\n await Promise.all(uploadPromises);\n }\n },\n ...options,\n });\n}\n","import type { Network } from \"@aptos-labs/ts-sdk\";\nimport type {\n BlobMetadata,\n ShelbyBlobClient,\n} from \"@shelby-protocol/sdk/browser\";\nimport { useQuery } from \"@tanstack/react-query\";\nimport type { UseQueryOptionsWithClient } from \"../types/queries\";\n\nexport const getUseAccountBlobsQueryKey = (\n params: Parameters<ShelbyBlobClient[\"getAccountBlobs\"]>[0] & {\n network: Network;\n 
},\n) => [\n \"account-blobs\",\n params.network,\n params.account.toString(),\n params.pagination?.limit,\n params.pagination?.offset,\n params.orderBy,\n params.where,\n];\n\nexport type UseAccountBlobsOptions = UseQueryOptionsWithClient<BlobMetadata[]> &\n Parameters<ShelbyBlobClient[\"getAccountBlobs\"]>[0];\n\n/**\n * Queries blobs associated with an account.\n *\n * This query fetches blob metadata for a specific account with support for\n * pagination, filtering, and ordering.\n *\n * @example\n * ```tsx\n * import { ShelbyClient, Order_By} from \"@shelby-protocol/sdk/browser\";\n * import { Network } from \"@aptos-labs/ts-sdk\";\n * import { useAccountBlobs } from \"@shelby-protocol/react\";\n *\n * const shelbyClient = new ShelbyClient({ network: Network.SHELBYNET });\n * const { data: blobs, isLoading } = useAccountBlobs({\n * client: shelbyClient,\n * account: '0x123...',\n * pagination: { limit: 10, offset: 0 },\n * orderBy: { updated_at: Order_By.Desc },\n * });\n * ```\n */\nexport function useAccountBlobs({\n account,\n pagination,\n orderBy,\n where,\n client: shelbyClient,\n ...options\n}: UseAccountBlobsOptions) {\n return useQuery<BlobMetadata[]>({\n queryKey: getUseAccountBlobsQueryKey({\n network: shelbyClient.config.network,\n account,\n pagination,\n orderBy,\n where,\n }),\n queryFn: async () =>\n await shelbyClient.coordination.getAccountBlobs({\n account,\n pagination,\n orderBy,\n where,\n }),\n ...options,\n });\n}\n","import type { Network } from \"@aptos-labs/ts-sdk\";\nimport type {\n BlobMetadata,\n ShelbyBlobClient,\n} from \"@shelby-protocol/sdk/browser\";\nimport { useQuery } from \"@tanstack/react-query\";\nimport type { UseQueryOptionsWithClient } from \"../types/queries\";\n\nexport const getUseBlobMetadataQueryKey = (\n params: Parameters<ShelbyBlobClient[\"getBlobMetadata\"]>[0] & {\n network: Network;\n },\n) => [\"blob-metadata\", params.network, params.account.toString(), params.name];\n\nexport type UseBlobMetadataOptions 
=\n UseQueryOptionsWithClient<BlobMetadata | null> &\n Parameters<ShelbyBlobClient[\"getBlobMetadata\"]>[0];\n\n/**\n * Queries the metadata for a specific blob.\n *\n * This query fetches the metadata for a single blob identified by account and blob name.\n * Returns `null` if the blob is not found.\n *\n * @example\n * ```tsx\n * import { ShelbyClient } from \"@shelby-protocol/sdk/browser\";\n * import { Network } from \"@aptos-labs/ts-sdk\";\n * import { useBlobMetadata } from \"@shelby-protocol/react\";\n *\n * const shelbyClient = new ShelbyClient({ network: Network.SHELBYNET });\n * const { data: metadata } = useBlobMetadata({\n * client: shelbyClient,\n * account: '0x123...',\n * name: 'file1.txt',\n * });\n * ```\n */\nexport function useBlobMetadata({\n account,\n name,\n client: shelbyClient,\n ...options\n}: UseBlobMetadataOptions) {\n return useQuery<BlobMetadata | null>({\n queryKey: getUseBlobMetadataQueryKey({\n network: shelbyClient.config.network,\n account,\n name,\n }),\n queryFn: async () =>\n (await shelbyClient.coordination.getBlobMetadata({ account, name })) ??\n null,\n ...options,\n 
});\n}\n"],"mappings":"AACA,OAAS,eAAAA,MAAmB,wBAC5B,OAAOC,MAAY,UAuDZ,SAASC,EAAe,CAC7B,OAAQC,EACR,GAAGC,CACL,EAA0B,CACxB,OAAOJ,EAAY,CACjB,WAAY,MAAO,CACjB,QAAAK,EACA,MAAAC,EACA,qBAAAC,EAAuB,CACzB,IAA+B,CAC7B,IAAMC,EAAQP,EAAOM,CAAoB,EACnCE,EAAiBH,EAAM,IAAKI,GAChCF,EAAM,IACJL,EAAa,IAAI,QAAQ,CACvB,QAAAE,EACA,SAAUK,EAAK,SACf,SAAUA,EAAK,QACjB,CAAC,CACH,CACF,EAEA,MAAM,QAAQ,IAAID,CAAc,CAClC,EACA,GAAGL,CACL,CAAC,CACH,CClFA,OAEE,sCAAAO,EAEA,uBAAAC,MACK,+BACP,OAAS,eAAAC,MAAmB,wBA0ErB,SAASC,EAAe,CAC7B,OAAQC,EACR,GAAGC,CACL,EAA0B,CACxB,OAAOH,EAAY,CACjB,WAAY,MAAO,CAAE,MAAAI,EAAO,SAAAC,EAAU,QAAAC,CAAQ,IAAM,CAClD,IAAMC,EACJF,GAAa,MAAMP,EAAmC,EAElDU,EAA6B,IAAI,MAAMJ,EAAM,MAAM,EACzD,aAAM,QAAQ,IACZA,EAAM,IAAI,MAAO,CAAE,SAAAK,CAAS,EAAGC,IAAc,CAC3C,IAAMC,EAAgB,KAAK,KACzBF,EAAS,OAASF,EAAe,OAAO,cAC1C,EAEMK,EAAkB,MAAMb,EAC5BQ,EACAE,EACA,CACEI,EACAC,EACAC,IAEAT,IAAU,CACR,UAAAI,EACA,cAAAG,EACA,WAAAC,EACA,UAAAC,EACA,SAAUF,EAAgBF,CAC5B,CAAC,CACL,EAEAH,EAAQE,CAAS,EAAIE,CACvB,CAAC,CACH,EAEOJ,CACT,EACA,GAAGL,CACL,CAAC,CACH,CCzHA,OAAS,kBAAAa,MAAsB,qBAC/B,OAEE,+BAAAC,EACA,0BAAAC,EACA,oBAAAC,MAEK,+BACP,OAAS,eAAAC,MAAmB,wBAqErB,SAASC,EAAuB,CACrC,OAAQC,EACR,GAAGC,CACL,EAAkC,CAChC,OAAOH,EAAY,CACjB,WAAY,MAAO,CACjB,YAAAI,EACA,iBAAAC,EACA,QAAAF,EACA,OAAQG,CACV,IAAM,CACJ,GAAI,EAAE,YAAaA,GAAa,CAC9B,IAAMC,EAA+BD,EAE/B,CAAE,YAAAE,CAAY,EAClB,MAAMN,EAAa,aAAa,mBAAmB,CACjD,QAASK,EACT,iBAAAF,EACA,QAAAF,EACA,MAAOC,EAAY,IAAKK,IAAgB,CACtC,SAAUA,EAAW,SACrB,SAAUA,EAAW,WAAW,cAChC,eAAgBA,EAAW,WAAW,gBACxC,EAAE,CACJ,CAAC,EAEH,OAAOD,CACT,CAEA,GAAM,CAAE,QAAAE,EAAS,yBAAAC,CAAyB,EAAIL,EAkB9C,OAjBoB,MAAMK,EAAyB,CACjD,KAAMZ,EAAiB,gCAAgC,CACrD,QAASH,EAAe,KAAKc,CAAO,EACpC,iBAAAL,EACA,MAAOD,EAAY,IAAKK,IAAgB,CACtC,SAAUA,EAAW,SACrB,SAAUA,EAAW,WAAW,cAChC,eAAgBA,EAAW,WAAW,iBACtC,aAAcX,EACZW,EAAW,WAAW,cACtBN,GAAS,mBAAqBN,CAChC,CACF,EAAE,CACJ,CAAC,EACD,QAASM,GAAS,OAAO,OAC3B,CAAC,CAGH,EACA,GAAGA,CACL,CAAC,CACH,CChIA,OAAS,kBAAAS,MAAsB,qBAC/B,OACE,iBAAAC,EACA,sCAAAC,EACA,+BAAAC,EACA,0BAAAC,EACA,uBAAAC,EACA,oBAAAC,MAEK,+BACP,OAAS,eAAAC,MAAmB,
wBAC5B,OAAOC,MAAY,UAoFZ,SAASC,GAAe,CAC7B,OAAQC,EACR,GAAGC,CACL,EAA0B,CACxB,OAAOJ,EAAY,CACjB,WAAY,MAAO,CACjB,MAAAK,EACA,iBAAAC,EACA,QAAAF,EACA,OAAQG,EACR,qBAAAC,EAAuB,CACzB,IAA+B,CAC7B,GAAM,YAAaD,EAYZ,CACL,GAAM,CAAE,QAAAE,EAAS,yBAAAC,CAAyB,EAAIH,EAExCI,EACJP,GAAS,mBAAqBR,EAE1BgB,EAAgB,MAAMT,EAAa,aAAa,SAAS,CAC7D,MAAO,CACL,UAAW,CACT,IAAKE,EAAM,IAAKQ,GACdnB,EAAc,CAAE,QAAAe,EAAS,SAAUI,EAAK,QAAS,CAAC,CACpD,CACF,CACF,CACF,CAAC,EAEKC,EAAkBT,EAAM,OAC3BQ,GACC,CAACD,EAAc,KACZG,GACCA,EAAa,OACbrB,EAAc,CAAE,QAAAe,EAAS,SAAUI,EAAK,QAAS,CAAC,CACtD,CACJ,EAEA,GAAIC,EAAgB,OAAS,EAAG,CAC9B,IAAME,EAAW,MAAMrB,EAAmC,EAEpDsB,EAAkB,MAAM,QAAQ,IACpCH,EAAgB,IAAI,MAAOD,GACzBf,EAAoBkB,EAAUH,EAAK,QAAQ,CAC7C,CACF,EAEMK,EAAiC,MAAMR,EAC3C,CACE,KAAMX,EAAiB,gCAAgC,CACrD,QAASN,EAAe,KAAKgB,CAAO,EACpC,iBAAAH,EACA,MAAOQ,EAAgB,IAAI,CAACD,EAAMM,KAAW,CAC3C,SAAUN,EAAK,SACf,SAAUA,EAAK,SAAS,OACxB,eAAgBI,EAAgBE,CAAK,EAAE,iBACvC,aAActB,EACZgB,EAAK,SAAS,OACdF,CACF,CACF,EAAE,CACJ,CAAC,EACD,QAASP,GAAS,OAAO,OAC3B,CACF,EAEA,MAAMD,EAAa,aAAa,MAAM,mBAAmB,CACvD,gBAAiBe,EAA+B,IAClD,CAAC,CACH,CAEA,IAAME,EAAQnB,EAAOO,CAAoB,EACnCa,EAAiBhB,EAAM,IAAKQ,GAChCO,EAAM,IACJjB,EAAa,IAAI,QAAQ,CACvB,QAAAM,EACA,SAAUI,EAAK,SACf,SAAUA,EAAK,QACjB,CAAC,CACH,CACF,EAEA,MAAM,QAAQ,IAAIQ,CAAc,CAClC,KAlFgC,CAC9B,IAAMC,EAA+Bf,EAErC,MAAMJ,EAAa,YAAY,CAC7B,MAAOE,EAAM,IAAI,CAAC,CAAE,SAAAkB,EAAU,SAAAC,CAAS,KAAO,CAC5C,SAAAD,EACA,SAAAC,CACF,EAAE,EACF,iBAAAlB,EACA,OAAQgB,EACR,QAAAlB,CACF,CAAC,CACH,CAuEF,EACA,GAAGA,CACL,CAAC,CACH,CC5LA,OAAS,YAAAqB,MAAgB,wBAGlB,IAAMC,EACXC,GAGG,CACH,gBACAA,EAAO,QACPA,EAAO,QAAQ,SAAS,EACxBA,EAAO,YAAY,MACnBA,EAAO,YAAY,OACnBA,EAAO,QACPA,EAAO,KACT,EA0BO,SAASC,GAAgB,CAC9B,QAAAC,EACA,WAAAC,EACA,QAAAC,EACA,MAAAC,EACA,OAAQC,EACR,GAAGC,CACL,EAA2B,CACzB,OAAOT,EAAyB,CAC9B,SAAUC,EAA2B,CACnC,QAASO,EAAa,OAAO,QAC7B,QAAAJ,EACA,WAAAC,EACA,QAAAC,EACA,MAAAC,CACF,CAAC,EACD,QAAS,SACP,MAAMC,EAAa,aAAa,gBAAgB,CAC9C,QAAAJ,EACA,WAAAC,EACA,QAAAC,EACA,MAAAC,CACF,CAAC,EACH,GAAGE,CACL,CAAC,CACH,CClEA,OAAS,YAAAC,MAAgB,wBAGlB,IAAMC,EACXC,GAGG,CAAC,gBAAiBA,EAA
O,QAASA,EAAO,QAAQ,SAAS,EAAGA,EAAO,IAAI,EA0BtE,SAASC,GAAgB,CAC9B,QAAAC,EACA,KAAAC,EACA,OAAQC,EACR,GAAGC,CACL,EAA2B,CACzB,OAAOP,EAA8B,CACnC,SAAUC,EAA2B,CACnC,QAASK,EAAa,OAAO,QAC7B,QAAAF,EACA,KAAAC,CACF,CAAC,EACD,QAAS,SACN,MAAMC,EAAa,aAAa,gBAAgB,CAAE,QAAAF,EAAS,KAAAC,CAAK,CAAC,GAClE,KACF,GAAGE,CACL,CAAC,CACH","names":["useMutation","pLimit","useCommitBlobs","shelbyClient","options","account","blobs","maxConcurrentUploads","limit","uploadPromises","blob","createDefaultErasureCodingProvider","generateCommitments","useMutation","useEncodeBlobs","shelbyClient","options","blobs","provider","onChunk","activeProvider","results","blobData","blobIndex","chunksetCount","blobCommitments","chunksetIndex","chunkIndex","chunkData","AccountAddress","DEFAULT_CHUNKSET_SIZE_BYTES","expectedTotalChunksets","ShelbyBlobClient","useMutation","useRegisterCommitments","shelbyClient","options","commitments","expirationMicros","signerOrFn","accountSigner","transaction","commitment","account","signAndSubmitTransaction","AccountAddress","createBlobKey","createDefaultErasureCodingProvider","DEFAULT_CHUNKSET_SIZE_BYTES","expectedTotalChunksets","generateCommitments","ShelbyBlobClient","useMutation","pLimit","useUploadBlobs","shelbyClient","options","blobs","expirationMicros","signerOrFn","maxConcurrentUploads","account","signAndSubmitTransaction","chunksetSize","existingBlobs","blob","blobsToRegister","existingBlob","provider","blobCommitments","pendingRegisterBlobTransaction","index","limit","uploadPromises","accountSigner","blobData","blobName","useQuery","getUseAccountBlobsQueryKey","params","useAccountBlobs","account","pagination","orderBy","where","shelbyClient","options","useQuery","getUseBlobMetadataQueryKey","params","useBlobMetadata","account","name","shelbyClient","options"]}
@@ -0,0 +1,5 @@
1
+ export * from "./useCommitBlobs";
2
+ export * from "./useEncodeBlobs";
3
+ export * from "./useRegisterCommitments";
4
+ export * from "./useUploadBlobs";
5
+ //# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/mutations/index.ts"],"names":[],"mappings":"AAAA,cAAc,kBAAkB,CAAC;AACjC,cAAc,kBAAkB,CAAC;AACjC,cAAc,0BAA0B,CAAC;AACzC,cAAc,kBAAkB,CAAC"}
@@ -0,0 +1,50 @@
1
+ import type { AccountAddressInput } from "@aptos-labs/ts-sdk";
2
+ import type { UseMutationOptionsWithClient } from "../types/mutations";
3
+ export type UseCommitBlobsVariables = {
4
+ /**
5
+ * The account to commit the blobs to.
6
+ */
7
+ account: AccountAddressInput;
8
+ /**
9
+ * The blobs to commit.
10
+ */
11
+ blobs: {
12
+ blobName: string;
13
+ blobData: Uint8Array;
14
+ }[];
15
+ /**
16
+ * The maximum number of concurrent uploads.
17
+ * @default 3
18
+ */
19
+ maxConcurrentUploads?: number;
20
+ };
21
+ export type UseCommitBlobsOptions = UseMutationOptionsWithClient<void, Error, UseCommitBlobsVariables>;
22
+ /**
23
+ * Uploads blobs data to the Shelby RPCs.
24
+ *
25
+ * This mutation uploads multiple blobs data to the RPC endpoint with configurable
26
+ * concurrency control.
27
+ *
28
+ * @example
29
+ * ```tsx
30
+ * import { ShelbyClient } from "@shelby-protocol/sdk/browser";
31
+ * import { Network } from "@aptos-labs/ts-sdk";
32
+ * import { useCommitBlobs } from "@shelby-protocol/react";
33
+ *
34
+ * const shelbyClient = new ShelbyClient({ network: Network.SHELBYNET });
35
+ * const commitBlobs = useCommitBlobs({
36
+ * client: shelbyClient,
37
+ * onSuccess: () => console.log('Blobs committed successfully'),
38
+ * });
39
+ *
40
+ * commitBlobs.mutate({
41
+ * account: '0x123...',
42
+ * blobs: [
43
+ * { blobName: 'file1.txt', blobData: new Uint8Array([...]) },
44
+ * { blobName: 'file2.txt', blobData: new Uint8Array([...]) },
45
+ * ],
46
+ * });
47
+ * ```
48
+ */
49
+ export declare function useCommitBlobs({ client: shelbyClient, ...options }: UseCommitBlobsOptions): import("@tanstack/react-query").UseMutationResult<void, Error, UseCommitBlobsVariables, readonly unknown[]>;
50
+ //# sourceMappingURL=useCommitBlobs.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"useCommitBlobs.d.ts","sourceRoot":"","sources":["../../src/mutations/useCommitBlobs.tsx"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,mBAAmB,EAAE,MAAM,oBAAoB,CAAC;AAG9D,OAAO,KAAK,EAAE,4BAA4B,EAAE,MAAM,oBAAoB,CAAC;AAEvE,MAAM,MAAM,uBAAuB,GAAG;IACpC;;OAEG;IACH,OAAO,EAAE,mBAAmB,CAAC;IAC7B;;OAEG;IACH,KAAK,EAAE;QACL,QAAQ,EAAE,MAAM,CAAC;QACjB,QAAQ,EAAE,UAAU,CAAC;KACtB,EAAE,CAAC;IACJ;;;OAGG;IACH,oBAAoB,CAAC,EAAE,MAAM,CAAC;CAC/B,CAAC;AAEF,MAAM,MAAM,qBAAqB,GAAG,4BAA4B,CAC9D,IAAI,EACJ,KAAK,EACL,uBAAuB,CACxB,CAAC;AAEF;;;;;;;;;;;;;;;;;;;;;;;;;;GA0BG;AACH,wBAAgB,cAAc,CAAC,EAC7B,MAAM,EAAE,YAAY,EACpB,GAAG,OAAO,EACX,EAAE,qBAAqB,+GAsBvB"}
@@ -0,0 +1,78 @@
1
+ import { type BlobCommitments, type ErasureCodingProvider } from "@shelby-protocol/sdk/browser";
2
+ import type { UseMutationOptionsWithClient } from "../types/mutations";
3
+ export type UseEncodeBlobsOnChunkEvent = {
4
+ /**
5
+ * The index of the blob being encoded.
6
+ */
7
+ blobIndex: number;
8
+ /**
9
+ * The index of the chunkset being encoded.
10
+ */
11
+ chunksetIndex: number;
12
+ /**
13
+ * The index of the chunk being encoded.
14
+ */
15
+ chunkIndex: number;
16
+ /**
17
+ * The data of the chunk being encoded.
18
+ */
19
+ chunkData: Uint8Array;
20
+ /**
21
+ * The progress of the encoding.
22
+ */
23
+ progress: number;
24
+ };
25
+ export type UseEncodeBlobsVariables = {
26
+ /**
27
+ * The blobs to encode.
28
+ */
29
+ blobs: {
30
+ blobData: Uint8Array;
31
+ }[];
32
+ /**
33
+ * The erasure coding provider to use.
34
+ */
35
+ provider?: ErasureCodingProvider;
36
+ /**
37
+ * The callback to call when a chunk is encoded.
38
+ */
39
+ onChunk?: (event: UseEncodeBlobsOnChunkEvent) => void;
40
+ };
41
+ export type UseEncodeBlobsOptions = UseMutationOptionsWithClient<BlobCommitments[], Error, UseEncodeBlobsVariables>;
42
+ /**
43
+ * Encodes blobs using erasure coding.
44
+ *
45
+ * This mutation generates blob commitments (merkle roots and erasure coding chunks)
46
+ * from raw blob data. It supports custom erasure coding providers and progress callbacks
47
+ * for tracking encoding progress.
48
+ *
49
+ * @example
50
+ * ```tsx
51
+ * import { ShelbyClient } from "@shelby-protocol/sdk/browser";
52
+ * import { Network } from "@aptos-labs/ts-sdk";
53
+ * import { useEncodeBlobs } from "@shelby-protocol/react";
54
+ *
55
+ * const shelbyClient = new ShelbyClient({ network: Network.SHELBYNET });
56
+ * const encodeBlobs = useEncodeBlobs({
57
+ * client: shelbyClient,
58
+ * onSuccess: (commitments) => console.log('Encoded', commitments.length, 'blobs'),
59
+ * });
60
+ *
61
+ * encodeBlobs.mutate({
62
+ * blobs: [{ blobData: new Uint8Array([...]) }],
63
+ * onChunk: ({ blobIndex, progress }) => {
64
+ * console.log(`Blob ${blobIndex}: ${(progress * 100).toFixed(1)}%`);
65
+ * },
66
+ * });
67
+ * ```
68
+ */
69
+ export declare function useEncodeBlobs({ client: shelbyClient, ...options }: UseEncodeBlobsOptions): import("@tanstack/react-query").UseMutationResult<{
70
+ schema_version: string;
71
+ raw_data_size: number;
72
+ blob_merkle_root: string;
73
+ chunkset_commitments: {
74
+ chunkset_root: string | null;
75
+ chunk_commitments: string[];
76
+ }[];
77
+ }[], Error, UseEncodeBlobsVariables, readonly unknown[]>;
78
+ //# sourceMappingURL=useEncodeBlobs.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"useEncodeBlobs.d.ts","sourceRoot":"","sources":["../../src/mutations/useEncodeBlobs.tsx"],"names":[],"mappings":"AAAA,OAAO,EACL,KAAK,eAAe,EAEpB,KAAK,qBAAqB,EAE3B,MAAM,8BAA8B,CAAC;AAEtC,OAAO,KAAK,EAAE,4BAA4B,EAAE,MAAM,oBAAoB,CAAC;AAEvE,MAAM,MAAM,0BAA0B,GAAG;IACvC;;OAEG;IACH,SAAS,EAAE,MAAM,CAAC;IAClB;;OAEG;IACH,aAAa,EAAE,MAAM,CAAC;IACtB;;OAEG;IACH,UAAU,EAAE,MAAM,CAAC;IACnB;;OAEG;IACH,SAAS,EAAE,UAAU,CAAC;IACtB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;CAClB,CAAC;AAEF,MAAM,MAAM,uBAAuB,GAAG;IACpC;;OAEG;IACH,KAAK,EAAE;QAAE,QAAQ,EAAE,UAAU,CAAA;KAAE,EAAE,CAAC;IAClC;;OAEG;IACH,QAAQ,CAAC,EAAE,qBAAqB,CAAC;IACjC;;OAEG;IACH,OAAO,CAAC,EAAE,CAAC,KAAK,EAAE,0BAA0B,KAAK,IAAI,CAAC;CACvD,CAAC;AAEF,MAAM,MAAM,qBAAqB,GAAG,4BAA4B,CAC9D,eAAe,EAAE,EACjB,KAAK,EACL,uBAAuB,CACxB,CAAC;AAEF;;;;;;;;;;;;;;;;;;;;;;;;;;GA0BG;AACH,wBAAgB,cAAc,CAAC,EAC7B,MAAM,EAAE,YAAY,EACpB,GAAG,OAAO,EACX,EAAE,qBAAqB;;;;;;;;yDAsCvB"}
@@ -0,0 +1,69 @@
1
+ import { type BlobCommitments, type UploadOptions } from "@shelby-protocol/sdk/browser";
2
+ import type { UseMutationOptionsWithClient } from "../types/mutations";
3
+ import type { Signer } from "../types/signers";
4
+ export type UseRegisterCommitmentsVariables = {
5
+ /**
6
+ * The signer to use for the transaction.
7
+ * @see {@link Signer}
8
+ *
9
+ * @example
10
+ * ```tsx
11
+ * const signer = new Account.generate();
12
+ * registerCommitments.mutate({
13
+ * signer,
14
+ * commitments: [
15
+ * { blobName: 'file1.txt', commitment: blobCommitments[0] },
16
+ * ],
17
+ * });
18
+ * ```
19
+ */
20
+ signer: Signer;
21
+ /**
22
+ * The commitments to register.
23
+ */
24
+ commitments: {
25
+ blobName: string;
26
+ commitment: BlobCommitments;
27
+ }[];
28
+ /**
29
+ * The expiration time of the commitments in microseconds.
30
+ */
31
+ expirationMicros: number;
32
+ /**
33
+ * Optional transaction building options.
34
+ */
35
+ options?: UploadOptions;
36
+ };
37
+ export type UseRegisterCommitmentsOptions = UseMutationOptionsWithClient<{
38
+ hash: string;
39
+ }, Error, UseRegisterCommitmentsVariables>;
40
+ /**
41
+ * Registers blob commitments on-chain.
42
+ *
43
+ * This mutation registers blob commitments (merkle roots) on the Aptos blockchain
44
+ * as part of the blob upload process. It supports both account signers and wallet adapter
45
+ * signers, and handles batch registration of multiple blobs.
46
+ *
47
+ * @example
48
+ * ```tsx
49
+ * import { ShelbyClient } from "@shelby-protocol/sdk/browser";
50
+ * import { Network } from "@aptos-labs/ts-sdk";
51
+ * import { useRegisterCommitments } from "@shelby-protocol/react";
52
+ *
53
+ * const shelbyClient = new ShelbyClient({ network: Network.SHELBYNET });
54
+ * const registerCommitments = useRegisterCommitments({
55
+ * client: shelbyClient,
56
+ * onSuccess: ({ hash }) => console.log('Transaction hash:', hash),
57
+ * });
58
+ *
59
+ * registerCommitments.mutate({
60
+ * signer: accountSigner,
61
+ * commitments: [
62
+ * { blobName: 'file1.txt', commitment: blobCommitments[0] },
63
+ * ],
64
+ * expirationMicros: Date.now() * 1000 + 86400000000, // 1 day
65
+ * });
66
+ * ```
67
+ */
68
+ export declare function useRegisterCommitments({ client: shelbyClient, ...options }: UseRegisterCommitmentsOptions): import("@tanstack/react-query").UseMutationResult<import("@aptos-labs/wallet-adapter-react").AptosSignAndSubmitTransactionOutput, Error, UseRegisterCommitmentsVariables, readonly unknown[]>;
69
+ //# sourceMappingURL=useRegisterCommitments.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"useRegisterCommitments.d.ts","sourceRoot":"","sources":["../../src/mutations/useRegisterCommitments.tsx"],"names":[],"mappings":"AACA,OAAO,EACL,KAAK,eAAe,EAIpB,KAAK,aAAa,EACnB,MAAM,8BAA8B,CAAC;AAEtC,OAAO,KAAK,EAAE,4BAA4B,EAAE,MAAM,oBAAoB,CAAC;AACvE,OAAO,KAAK,EAAiB,MAAM,EAAE,MAAM,kBAAkB,CAAC;AAE9D,MAAM,MAAM,+BAA+B,GAAG;IAC5C;;;;;;;;;;;;;;OAcG;IACH,MAAM,EAAE,MAAM,CAAC;IACf;;OAEG;IACH,WAAW,EAAE;QAAE,QAAQ,EAAE,MAAM,CAAC;QAAC,UAAU,EAAE,eAAe,CAAA;KAAE,EAAE,CAAC;IACjE;;OAEG;IACH,gBAAgB,EAAE,MAAM,CAAC;IACzB;;OAEG;IACH,OAAO,CAAC,EAAE,aAAa,CAAC;CACzB,CAAC;AAEF,MAAM,MAAM,6BAA6B,GAAG,4BAA4B,CACtE;IAAE,IAAI,EAAE,MAAM,CAAA;CAAE,EAChB,KAAK,EACL,+BAA+B,CAChC,CAAC;AAEF;;;;;;;;;;;;;;;;;;;;;;;;;;;GA2BG;AACH,wBAAgB,sBAAsB,CAAC,EACrC,MAAM,EAAE,YAAY,EACpB,GAAG,OAAO,EACX,EAAE,6BAA6B,iMAgD/B"}
@@ -0,0 +1,79 @@
1
+ import { type UploadOptions } from "@shelby-protocol/sdk/browser";
2
+ import type { UseMutationOptionsWithClient } from "../types/mutations";
3
+ import type { Signer } from "../types/signers";
4
+ export type UseUploadBlobsVariables = {
5
+ /**
6
+ * The signer to use for the transaction.
7
+ *
8
+ * @see {@link Signer}
9
+ *
10
+ * @example
11
+ * ```tsx
12
+ * const signer = new Account.generate();
13
+ * uploadBlobs.mutate({
14
+ * signer,
15
+ * blobs: [
16
+ * { blobName: 'file1.txt', blobData: new Uint8Array([...]) },
17
+ * ],
18
+ * expirationMicros: Date.now() * 1000 + 86400000000,
19
+ * });
20
+ * ```
21
+ */
22
+ signer: Signer;
23
+ /**
24
+ * The blobs to upload.
25
+ */
26
+ blobs: {
27
+ blobName: string;
28
+ blobData: Uint8Array;
29
+ }[];
30
+ /**
31
+ * The expiration time of the blobs in microseconds.
32
+ */
33
+ expirationMicros: number;
34
+ /**
35
+ * Optional transaction building options.
36
+ */
37
+ options?: UploadOptions;
38
+ /**
39
+ * The maximum number of concurrent uploads.
40
+ * @default 3
41
+ */
42
+ maxConcurrentUploads?: number;
43
+ };
44
+ export type UseUploadBlobsOptions = UseMutationOptionsWithClient<void, Error, UseUploadBlobsVariables>;
45
+ /**
46
+ * Uploads blobs to the Shelby network.
47
+ *
48
+ * This mutation handles the complete blob upload process including:
49
+ * - Encoding blobs with erasure coding
50
+ * - Registering commitments on-chain (if not already registered)
51
+ * - Uploading blob data to the RPC endpoint
52
+ *
53
+ * It supports both account signers and wallet adapter signers, and includes
54
+ * logic to skip registration for blobs that already exist.
55
+ *
56
+ * @example
57
+ * ```tsx
58
+ * import { ShelbyClient } from "@shelby-protocol/sdk/browser";
59
+ * import { Network } from "@aptos-labs/ts-sdk";
60
+ * import { useUploadBlobs } from "@shelby-protocol/react";
61
+ *
62
+ * const shelbyClient = new ShelbyClient({ network: Network.SHELBYNET });
63
+ * const uploadBlobs = useUploadBlobs({
64
+ * client: shelbyClient,
65
+ * onSuccess: () => console.log('Upload complete'),
66
+ * });
67
+ *
68
+ * const signer = new Account.generate();
69
+ * uploadBlobs.mutate({
70
+ * signer,
71
+ * blobs: [
72
+ * { blobName: 'file1.txt', blobData: new Uint8Array([...]) },
73
+ * ],
74
+ * expirationMicros: Date.now() * 1000 + 86400000000,
75
+ * });
76
+ * ```
77
+ */
78
+ export declare function useUploadBlobs({ client: shelbyClient, ...options }: UseUploadBlobsOptions): import("@tanstack/react-query").UseMutationResult<void, Error, UseUploadBlobsVariables, readonly unknown[]>;
79
+ //# sourceMappingURL=useUploadBlobs.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"useUploadBlobs.d.ts","sourceRoot":"","sources":["../../src/mutations/useUploadBlobs.tsx"],"names":[],"mappings":"AACA,OAAO,EAOL,KAAK,aAAa,EACnB,MAAM,8BAA8B,CAAC;AAGtC,OAAO,KAAK,EAAE,4BAA4B,EAAE,MAAM,oBAAoB,CAAC;AACvE,OAAO,KAAK,EAAiB,MAAM,EAAE,MAAM,kBAAkB,CAAC;AAE9D,MAAM,MAAM,uBAAuB,GAAG;IACpC;;;;;;;;;;;;;;;;OAgBG;IACH,MAAM,EAAE,MAAM,CAAC;IACf;;OAEG;IACH,KAAK,EAAE;QACL,QAAQ,EAAE,MAAM,CAAC;QACjB,QAAQ,EAAE,UAAU,CAAC;KACtB,EAAE,CAAC;IACJ;;OAEG;IACH,gBAAgB,EAAE,MAAM,CAAC;IACzB;;OAEG;IACH,OAAO,CAAC,EAAE,aAAa,CAAC;IACxB;;;OAGG;IACH,oBAAoB,CAAC,EAAE,MAAM,CAAC;CAC/B,CAAC;AAEF,MAAM,MAAM,qBAAqB,GAAG,4BAA4B,CAC9D,IAAI,EACJ,KAAK,EACL,uBAAuB,CACxB,CAAC;AAEF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAgCG;AACH,wBAAgB,cAAc,CAAC,EAC7B,MAAM,EAAE,YAAY,EACpB,GAAG,OAAO,EACX,EAAE,qBAAqB,+GA+FvB"}
@@ -0,0 +1,3 @@
1
+ export * from "./useAccountBlobs";
2
+ export * from "./useBlobMetadata";
3
+ //# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/queries/index.ts"],"names":[],"mappings":"AAAA,cAAc,mBAAmB,CAAC;AAClC,cAAc,mBAAmB,CAAC"}
@@ -0,0 +1,30 @@
1
+ import type { Network } from "@aptos-labs/ts-sdk";
2
+ import type { BlobMetadata, ShelbyBlobClient } from "@shelby-protocol/sdk/browser";
3
+ import type { UseQueryOptionsWithClient } from "../types/queries";
4
+ export declare const getUseAccountBlobsQueryKey: (params: Parameters<ShelbyBlobClient["getAccountBlobs"]>[0] & {
5
+ network: Network;
6
+ }) => (string | number | import("@shelby-protocol/sdk/browser").Blobs_Order_By | Omit<import("@shelby-protocol/sdk/browser").Blobs_Bool_Exp, "owner"> | undefined)[];
7
+ export type UseAccountBlobsOptions = UseQueryOptionsWithClient<BlobMetadata[]> & Parameters<ShelbyBlobClient["getAccountBlobs"]>[0];
8
+ /**
9
+ * Queries blobs associated with an account.
10
+ *
11
+ * This query fetches blob metadata for a specific account with support for
12
+ * pagination, filtering, and ordering.
13
+ *
14
+ * @example
15
+ * ```tsx
16
+ * import { ShelbyClient, Order_By} from "@shelby-protocol/sdk/browser";
17
+ * import { Network } from "@aptos-labs/ts-sdk";
18
+ * import { useAccountBlobs } from "@shelby-protocol/react";
19
+ *
20
+ * const shelbyClient = new ShelbyClient({ network: Network.SHELBYNET });
21
+ * const { data: blobs, isLoading } = useAccountBlobs({
22
+ * client: shelbyClient,
23
+ * account: '0x123...',
24
+ * pagination: { limit: 10, offset: 0 },
25
+ * orderBy: { updated_at: Order_By.Desc },
26
+ * });
27
+ * ```
28
+ */
29
+ export declare function useAccountBlobs({ account, pagination, orderBy, where, client: shelbyClient, ...options }: UseAccountBlobsOptions): import("@tanstack/react-query").UseQueryResult<BlobMetadata[], Error>;
30
+ //# sourceMappingURL=useAccountBlobs.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"useAccountBlobs.d.ts","sourceRoot":"","sources":["../../src/queries/useAccountBlobs.tsx"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,oBAAoB,CAAC;AAClD,OAAO,KAAK,EACV,YAAY,EACZ,gBAAgB,EACjB,MAAM,8BAA8B,CAAC;AAEtC,OAAO,KAAK,EAAE,yBAAyB,EAAE,MAAM,kBAAkB,CAAC;AAElE,eAAO,MAAM,0BAA0B,GACrC,QAAQ,UAAU,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG;IAC3D,OAAO,EAAE,OAAO,CAAC;CAClB,mKASF,CAAC;AAEF,MAAM,MAAM,sBAAsB,GAAG,yBAAyB,CAAC,YAAY,EAAE,CAAC,GAC5E,UAAU,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAErD;;;;;;;;;;;;;;;;;;;;GAoBG;AACH,wBAAgB,eAAe,CAAC,EAC9B,OAAO,EACP,UAAU,EACV,OAAO,EACP,KAAK,EACL,MAAM,EAAE,YAAY,EACpB,GAAG,OAAO,EACX,EAAE,sBAAsB,yEAkBxB"}