@bopen-io/wallet-toolbox 1.7.18
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/settings.local.json +10 -0
- package/.env.template +22 -0
- package/.github/ISSUE_TEMPLATE/bug_report.md +40 -0
- package/.github/ISSUE_TEMPLATE/discussion.md +24 -0
- package/.github/pull_request_template.md +22 -0
- package/.github/workflows/push.yaml +145 -0
- package/.prettierrc +10 -0
- package/CHANGELOG.md +280 -0
- package/CONTRIBUTING.md +89 -0
- package/README.md +43 -0
- package/docs/README.md +85 -0
- package/docs/client.md +19627 -0
- package/docs/monitor.md +953 -0
- package/docs/open-rpc/index.html +46 -0
- package/docs/services.md +6377 -0
- package/docs/setup.md +1268 -0
- package/docs/storage.md +5367 -0
- package/docs/wallet.md +19626 -0
- package/jest.config.ts +25 -0
- package/license.md +28 -0
- package/out/tsconfig.all.tsbuildinfo +1 -0
- package/package.json +63 -0
- package/src/CWIStyleWalletManager.ts +1999 -0
- package/src/Setup.ts +579 -0
- package/src/SetupClient.ts +322 -0
- package/src/SetupWallet.ts +108 -0
- package/src/SimpleWalletManager.ts +526 -0
- package/src/Wallet.ts +1169 -0
- package/src/WalletAuthenticationManager.ts +153 -0
- package/src/WalletLogger.ts +213 -0
- package/src/WalletPermissionsManager.ts +3660 -0
- package/src/WalletSettingsManager.ts +114 -0
- package/src/__tests/CWIStyleWalletManager.test.d.ts.map +1 -0
- package/src/__tests/CWIStyleWalletManager.test.js.map +1 -0
- package/src/__tests/CWIStyleWalletManager.test.ts +675 -0
- package/src/__tests/WalletPermissionsManager.callbacks.test.ts +323 -0
- package/src/__tests/WalletPermissionsManager.checks.test.ts +844 -0
- package/src/__tests/WalletPermissionsManager.encryption.test.ts +412 -0
- package/src/__tests/WalletPermissionsManager.fixtures.ts +307 -0
- package/src/__tests/WalletPermissionsManager.flows.test.ts +462 -0
- package/src/__tests/WalletPermissionsManager.initialization.test.ts +300 -0
- package/src/__tests/WalletPermissionsManager.pmodules.test.ts +798 -0
- package/src/__tests/WalletPermissionsManager.proxying.test.ts +724 -0
- package/src/__tests/WalletPermissionsManager.tokens.test.ts +503 -0
- package/src/index.all.ts +27 -0
- package/src/index.client.ts +25 -0
- package/src/index.mobile.ts +21 -0
- package/src/index.ts +1 -0
- package/src/monitor/Monitor.ts +412 -0
- package/src/monitor/MonitorDaemon.ts +188 -0
- package/src/monitor/README.md +3 -0
- package/src/monitor/__test/MonitorDaemon.man.test.ts +45 -0
- package/src/monitor/tasks/TaskCheckForProofs.ts +243 -0
- package/src/monitor/tasks/TaskCheckNoSends.ts +73 -0
- package/src/monitor/tasks/TaskClock.ts +33 -0
- package/src/monitor/tasks/TaskFailAbandoned.ts +54 -0
- package/src/monitor/tasks/TaskMonitorCallHistory.ts +26 -0
- package/src/monitor/tasks/TaskNewHeader.ts +93 -0
- package/src/monitor/tasks/TaskPurge.ts +68 -0
- package/src/monitor/tasks/TaskReorg.ts +89 -0
- package/src/monitor/tasks/TaskReviewStatus.ts +48 -0
- package/src/monitor/tasks/TaskSendWaiting.ts +122 -0
- package/src/monitor/tasks/TaskSyncWhenIdle.ts +26 -0
- package/src/monitor/tasks/TaskUnFail.ts +151 -0
- package/src/monitor/tasks/WalletMonitorTask.ts +47 -0
- package/src/sdk/CertOpsWallet.ts +18 -0
- package/src/sdk/PrivilegedKeyManager.ts +372 -0
- package/src/sdk/README.md +13 -0
- package/src/sdk/WERR_errors.ts +234 -0
- package/src/sdk/WalletError.ts +170 -0
- package/src/sdk/WalletErrorFromJson.ts +80 -0
- package/src/sdk/WalletServices.interfaces.ts +700 -0
- package/src/sdk/WalletSigner.interfaces.ts +11 -0
- package/src/sdk/WalletStorage.interfaces.ts +606 -0
- package/src/sdk/__test/CertificateLifeCycle.test.ts +131 -0
- package/src/sdk/__test/PrivilegedKeyManager.test.ts +738 -0
- package/src/sdk/__test/WalletError.test.ts +318 -0
- package/src/sdk/__test/validationHelpers.test.ts +21 -0
- package/src/sdk/index.ts +10 -0
- package/src/sdk/types.ts +226 -0
- package/src/services/README.md +11 -0
- package/src/services/ServiceCollection.ts +248 -0
- package/src/services/Services.ts +603 -0
- package/src/services/__tests/ARC.man.test.ts +123 -0
- package/src/services/__tests/ARC.timeout.man.test.ts +79 -0
- package/src/services/__tests/ArcGorillaPool.man.test.ts +108 -0
- package/src/services/__tests/arcServices.test.ts +8 -0
- package/src/services/__tests/bitrails.test.ts +56 -0
- package/src/services/__tests/getMerklePath.test.ts +15 -0
- package/src/services/__tests/getRawTx.test.ts +13 -0
- package/src/services/__tests/postBeef.test.ts +104 -0
- package/src/services/__tests/verifyBeef.test.ts +50 -0
- package/src/services/chaintracker/BHServiceClient.ts +212 -0
- package/src/services/chaintracker/ChaintracksChainTracker.ts +71 -0
- package/src/services/chaintracker/__tests/ChaintracksChainTracker.test.ts +33 -0
- package/src/services/chaintracker/__tests/ChaintracksServiceClient.test.ts +29 -0
- package/src/services/chaintracker/chaintracks/Api/BlockHeaderApi.ts +72 -0
- package/src/services/chaintracker/chaintracks/Api/BulkIngestorApi.ts +83 -0
- package/src/services/chaintracker/chaintracks/Api/BulkStorageApi.ts +92 -0
- package/src/services/chaintracker/chaintracks/Api/ChaintracksApi.ts +64 -0
- package/src/services/chaintracker/chaintracks/Api/ChaintracksClientApi.ts +189 -0
- package/src/services/chaintracker/chaintracks/Api/ChaintracksFetchApi.ts +18 -0
- package/src/services/chaintracker/chaintracks/Api/ChaintracksFsApi.ts +58 -0
- package/src/services/chaintracker/chaintracks/Api/ChaintracksStorageApi.ts +386 -0
- package/src/services/chaintracker/chaintracks/Api/LiveIngestorApi.ts +25 -0
- package/src/services/chaintracker/chaintracks/Chaintracks.ts +609 -0
- package/src/services/chaintracker/chaintracks/ChaintracksService.ts +199 -0
- package/src/services/chaintracker/chaintracks/ChaintracksServiceClient.ts +154 -0
- package/src/services/chaintracker/chaintracks/Ingest/BulkIngestorBase.ts +176 -0
- package/src/services/chaintracker/chaintracks/Ingest/BulkIngestorCDN.ts +174 -0
- package/src/services/chaintracker/chaintracks/Ingest/BulkIngestorCDNBabbage.ts +18 -0
- package/src/services/chaintracker/chaintracks/Ingest/BulkIngestorWhatsOnChainCdn.ts +113 -0
- package/src/services/chaintracker/chaintracks/Ingest/BulkIngestorWhatsOnChainWs.ts +81 -0
- package/src/services/chaintracker/chaintracks/Ingest/LiveIngestorBase.ts +86 -0
- package/src/services/chaintracker/chaintracks/Ingest/LiveIngestorTeranodeP2P.ts +59 -0
- package/src/services/chaintracker/chaintracks/Ingest/LiveIngestorWhatsOnChainPoll.ts +104 -0
- package/src/services/chaintracker/chaintracks/Ingest/LiveIngestorWhatsOnChainWs.ts +66 -0
- package/src/services/chaintracker/chaintracks/Ingest/WhatsOnChainIngestorWs.ts +566 -0
- package/src/services/chaintracker/chaintracks/Ingest/WhatsOnChainServices.ts +219 -0
- package/src/services/chaintracker/chaintracks/Ingest/__tests/BulkIngestorCDNBabbage.test.ts +54 -0
- package/src/services/chaintracker/chaintracks/Ingest/__tests/LiveIngestorWhatsOnChainPoll.test.ts +33 -0
- package/src/services/chaintracker/chaintracks/Ingest/__tests/WhatsOnChainServices.test.ts +124 -0
- package/src/services/chaintracker/chaintracks/Storage/BulkStorageBase.ts +92 -0
- package/src/services/chaintracker/chaintracks/Storage/ChaintracksKnexMigrations.ts +104 -0
- package/src/services/chaintracker/chaintracks/Storage/ChaintracksStorageBase.ts +382 -0
- package/src/services/chaintracker/chaintracks/Storage/ChaintracksStorageIdb.ts +574 -0
- package/src/services/chaintracker/chaintracks/Storage/ChaintracksStorageKnex.ts +438 -0
- package/src/services/chaintracker/chaintracks/Storage/ChaintracksStorageMemory.ts +29 -0
- package/src/services/chaintracker/chaintracks/Storage/ChaintracksStorageNoDb.ts +304 -0
- package/src/services/chaintracker/chaintracks/Storage/__tests/ChaintracksStorageIdb.test.ts +102 -0
- package/src/services/chaintracker/chaintracks/Storage/__tests/ChaintracksStorageKnex.test.ts +45 -0
- package/src/services/chaintracker/chaintracks/__tests/Chaintracks.test.ts +77 -0
- package/src/services/chaintracker/chaintracks/__tests/ChaintracksClientApi.test.ts +192 -0
- package/src/services/chaintracker/chaintracks/__tests/LocalCdnServer.ts +75 -0
- package/src/services/chaintracker/chaintracks/__tests/createIdbChaintracks.test.ts +62 -0
- package/src/services/chaintracker/chaintracks/__tests/data/cdnTest349/mainNetBlockHeaders.json +1 -0
- package/src/services/chaintracker/chaintracks/__tests/data/cdnTest349/mainNet_0.headers +0 -0
- package/src/services/chaintracker/chaintracks/__tests/data/cdnTest349/mainNet_1.headers +0 -0
- package/src/services/chaintracker/chaintracks/__tests/data/cdnTest349/mainNet_2.headers +0 -0
- package/src/services/chaintracker/chaintracks/__tests/data/cdnTest349/mainNet_3.headers +0 -0
- package/src/services/chaintracker/chaintracks/__tests/data/cdnTest379/mainNetBlockHeaders.json +1 -0
- package/src/services/chaintracker/chaintracks/__tests/data/cdnTest379/mainNet_0.headers +0 -0
- package/src/services/chaintracker/chaintracks/__tests/data/cdnTest379/mainNet_1.headers +0 -0
- package/src/services/chaintracker/chaintracks/__tests/data/cdnTest379/mainNet_2.headers +0 -0
- package/src/services/chaintracker/chaintracks/__tests/data/cdnTest379/mainNet_3.headers +0 -0
- package/src/services/chaintracker/chaintracks/__tests/data/cdnTest399/mainNetBlockHeaders.json +1 -0
- package/src/services/chaintracker/chaintracks/__tests/data/cdnTest399/mainNet_0.headers +0 -0
- package/src/services/chaintracker/chaintracks/__tests/data/cdnTest399/mainNet_1.headers +0 -0
- package/src/services/chaintracker/chaintracks/__tests/data/cdnTest399/mainNet_2.headers +0 -0
- package/src/services/chaintracker/chaintracks/__tests/data/cdnTest399/mainNet_3.headers +0 -0
- package/src/services/chaintracker/chaintracks/__tests/data/cdnTest402/mainNetBlockHeaders.json +1 -0
- package/src/services/chaintracker/chaintracks/__tests/data/cdnTest402/mainNet_0.headers +0 -0
- package/src/services/chaintracker/chaintracks/__tests/data/cdnTest402/mainNet_1.headers +0 -0
- package/src/services/chaintracker/chaintracks/__tests/data/cdnTest402/mainNet_2.headers +0 -0
- package/src/services/chaintracker/chaintracks/__tests/data/cdnTest402/mainNet_3.headers +0 -0
- package/src/services/chaintracker/chaintracks/__tests/data/cdnTest402/mainNet_4.headers +0 -0
- package/src/services/chaintracker/chaintracks/__tests/data/cdnTest499/mainNetBlockHeaders.json +1 -0
- package/src/services/chaintracker/chaintracks/__tests/data/cdnTest499/mainNet_0.headers +0 -0
- package/src/services/chaintracker/chaintracks/__tests/data/cdnTest499/mainNet_1.headers +0 -0
- package/src/services/chaintracker/chaintracks/__tests/data/cdnTest499/mainNet_2.headers +0 -0
- package/src/services/chaintracker/chaintracks/__tests/data/cdnTest499/mainNet_3.headers +0 -0
- package/src/services/chaintracker/chaintracks/__tests/data/cdnTest499/mainNet_4.headers +0 -0
- package/src/services/chaintracker/chaintracks/createDefaultIdbChaintracksOptions.ts +92 -0
- package/src/services/chaintracker/chaintracks/createDefaultKnexChaintracksOptions.ts +111 -0
- package/src/services/chaintracker/chaintracks/createDefaultNoDbChaintracksOptions.ts +91 -0
- package/src/services/chaintracker/chaintracks/createIdbChaintracks.ts +60 -0
- package/src/services/chaintracker/chaintracks/createKnexChaintracks.ts +65 -0
- package/src/services/chaintracker/chaintracks/createNoDbChaintracks.ts +60 -0
- package/src/services/chaintracker/chaintracks/index.all.ts +12 -0
- package/src/services/chaintracker/chaintracks/index.client.ts +4 -0
- package/src/services/chaintracker/chaintracks/index.mobile.ts +37 -0
- package/src/services/chaintracker/chaintracks/util/BulkFileDataManager.ts +975 -0
- package/src/services/chaintracker/chaintracks/util/BulkFileDataReader.ts +60 -0
- package/src/services/chaintracker/chaintracks/util/BulkFilesReader.ts +336 -0
- package/src/services/chaintracker/chaintracks/util/BulkHeaderFile.ts +247 -0
- package/src/services/chaintracker/chaintracks/util/ChaintracksFetch.ts +69 -0
- package/src/services/chaintracker/chaintracks/util/ChaintracksFs.ts +141 -0
- package/src/services/chaintracker/chaintracks/util/HeightRange.ts +153 -0
- package/src/services/chaintracker/chaintracks/util/SingleWriterMultiReaderLock.ts +76 -0
- package/src/services/chaintracker/chaintracks/util/__tests/BulkFileDataManager.test.ts +304 -0
- package/src/services/chaintracker/chaintracks/util/__tests/ChaintracksFetch.test.ts +60 -0
- package/src/services/chaintracker/chaintracks/util/__tests/HeightRange.test.ts +67 -0
- package/src/services/chaintracker/chaintracks/util/__tests/SingleWriterMultiReaderLock.test.ts +49 -0
- package/src/services/chaintracker/chaintracks/util/blockHeaderUtilities.ts +573 -0
- package/src/services/chaintracker/chaintracks/util/dirtyHashes.ts +29 -0
- package/src/services/chaintracker/chaintracks/util/validBulkHeaderFilesByFileHash.ts +432 -0
- package/src/services/chaintracker/index.all.ts +4 -0
- package/src/services/chaintracker/index.client.ts +4 -0
- package/src/services/chaintracker/index.mobile.ts +4 -0
- package/src/services/createDefaultWalletServicesOptions.ts +77 -0
- package/src/services/index.ts +1 -0
- package/src/services/processingErrors/arcSuccessError.json +76 -0
- package/src/services/providers/ARC.ts +350 -0
- package/src/services/providers/Bitails.ts +256 -0
- package/src/services/providers/SdkWhatsOnChain.ts +83 -0
- package/src/services/providers/WhatsOnChain.ts +883 -0
- package/src/services/providers/__tests/WhatsOnChain.test.ts +242 -0
- package/src/services/providers/__tests/exchangeRates.test.ts +18 -0
- package/src/services/providers/exchangeRates.ts +265 -0
- package/src/services/providers/getBeefForTxid.ts +369 -0
- package/src/signer/README.md +5 -0
- package/src/signer/WalletSigner.ts +17 -0
- package/src/signer/methods/acquireDirectCertificate.ts +52 -0
- package/src/signer/methods/buildSignableTransaction.ts +183 -0
- package/src/signer/methods/completeSignedTransaction.ts +117 -0
- package/src/signer/methods/createAction.ts +172 -0
- package/src/signer/methods/internalizeAction.ts +106 -0
- package/src/signer/methods/proveCertificate.ts +43 -0
- package/src/signer/methods/signAction.ts +54 -0
- package/src/storage/README.md +14 -0
- package/src/storage/StorageIdb.ts +2304 -0
- package/src/storage/StorageKnex.ts +1425 -0
- package/src/storage/StorageProvider.ts +810 -0
- package/src/storage/StorageReader.ts +194 -0
- package/src/storage/StorageReaderWriter.ts +432 -0
- package/src/storage/StorageSyncReader.ts +34 -0
- package/src/storage/WalletStorageManager.ts +943 -0
- package/src/storage/__test/StorageIdb.test.ts +43 -0
- package/src/storage/__test/WalletStorageManager.test.ts +275 -0
- package/src/storage/__test/adminStats.man.test.ts +89 -0
- package/src/storage/__test/getBeefForTransaction.test.ts +385 -0
- package/src/storage/index.all.ts +11 -0
- package/src/storage/index.client.ts +7 -0
- package/src/storage/index.mobile.ts +6 -0
- package/src/storage/methods/ListActionsSpecOp.ts +70 -0
- package/src/storage/methods/ListOutputsSpecOp.ts +129 -0
- package/src/storage/methods/__test/GenerateChange/generateChangeSdk.test.ts +1057 -0
- package/src/storage/methods/__test/GenerateChange/randomValsUsed1.ts +20 -0
- package/src/storage/methods/__test/offsetKey.test.ts +274 -0
- package/src/storage/methods/attemptToPostReqsToNetwork.ts +389 -0
- package/src/storage/methods/createAction.ts +947 -0
- package/src/storage/methods/generateChange.ts +556 -0
- package/src/storage/methods/getBeefForTransaction.ts +139 -0
- package/src/storage/methods/getSyncChunk.ts +293 -0
- package/src/storage/methods/internalizeAction.ts +562 -0
- package/src/storage/methods/listActionsIdb.ts +183 -0
- package/src/storage/methods/listActionsKnex.ts +226 -0
- package/src/storage/methods/listCertificates.ts +73 -0
- package/src/storage/methods/listOutputsIdb.ts +203 -0
- package/src/storage/methods/listOutputsKnex.ts +263 -0
- package/src/storage/methods/offsetKey.ts +89 -0
- package/src/storage/methods/processAction.ts +420 -0
- package/src/storage/methods/purgeData.ts +251 -0
- package/src/storage/methods/purgeDataIdb.ts +10 -0
- package/src/storage/methods/reviewStatus.ts +101 -0
- package/src/storage/methods/reviewStatusIdb.ts +43 -0
- package/src/storage/methods/utils.Buffer.ts +33 -0
- package/src/storage/methods/utils.ts +56 -0
- package/src/storage/remoting/StorageClient.ts +567 -0
- package/src/storage/remoting/StorageMobile.ts +544 -0
- package/src/storage/remoting/StorageServer.ts +291 -0
- package/src/storage/remoting/__test/StorageClient.test.ts +113 -0
- package/src/storage/schema/KnexMigrations.ts +489 -0
- package/src/storage/schema/StorageIdbSchema.ts +150 -0
- package/src/storage/schema/entities/EntityBase.ts +210 -0
- package/src/storage/schema/entities/EntityCertificate.ts +188 -0
- package/src/storage/schema/entities/EntityCertificateField.ts +136 -0
- package/src/storage/schema/entities/EntityCommission.ts +148 -0
- package/src/storage/schema/entities/EntityOutput.ts +290 -0
- package/src/storage/schema/entities/EntityOutputBasket.ts +153 -0
- package/src/storage/schema/entities/EntityOutputTag.ts +121 -0
- package/src/storage/schema/entities/EntityOutputTagMap.ts +123 -0
- package/src/storage/schema/entities/EntityProvenTx.ts +319 -0
- package/src/storage/schema/entities/EntityProvenTxReq.ts +580 -0
- package/src/storage/schema/entities/EntitySyncState.ts +389 -0
- package/src/storage/schema/entities/EntityTransaction.ts +306 -0
- package/src/storage/schema/entities/EntityTxLabel.ts +121 -0
- package/src/storage/schema/entities/EntityTxLabelMap.ts +123 -0
- package/src/storage/schema/entities/EntityUser.ts +112 -0
- package/src/storage/schema/entities/MergeEntity.ts +73 -0
- package/src/storage/schema/entities/__tests/CertificateFieldTests.test.ts +353 -0
- package/src/storage/schema/entities/__tests/CertificateTests.test.ts +354 -0
- package/src/storage/schema/entities/__tests/CommissionTests.test.ts +371 -0
- package/src/storage/schema/entities/__tests/OutputBasketTests.test.ts +278 -0
- package/src/storage/schema/entities/__tests/OutputTagMapTests.test.ts +242 -0
- package/src/storage/schema/entities/__tests/OutputTagTests.test.ts +288 -0
- package/src/storage/schema/entities/__tests/OutputTests.test.ts +464 -0
- package/src/storage/schema/entities/__tests/ProvenTxReqTests.test.ts +340 -0
- package/src/storage/schema/entities/__tests/ProvenTxTests.test.ts +504 -0
- package/src/storage/schema/entities/__tests/SyncStateTests.test.ts +288 -0
- package/src/storage/schema/entities/__tests/TransactionTests.test.ts +604 -0
- package/src/storage/schema/entities/__tests/TxLabelMapTests.test.ts +361 -0
- package/src/storage/schema/entities/__tests/TxLabelTests.test.ts +198 -0
- package/src/storage/schema/entities/__tests/stampLogTests.test.ts +90 -0
- package/src/storage/schema/entities/__tests/usersTests.test.ts +340 -0
- package/src/storage/schema/entities/index.ts +16 -0
- package/src/storage/schema/tables/TableCertificate.ts +21 -0
- package/src/storage/schema/tables/TableCertificateField.ts +12 -0
- package/src/storage/schema/tables/TableCommission.ts +13 -0
- package/src/storage/schema/tables/TableMonitorEvent.ts +9 -0
- package/src/storage/schema/tables/TableOutput.ts +64 -0
- package/src/storage/schema/tables/TableOutputBasket.ts +12 -0
- package/src/storage/schema/tables/TableOutputTag.ts +10 -0
- package/src/storage/schema/tables/TableOutputTagMap.ts +9 -0
- package/src/storage/schema/tables/TableProvenTx.ts +14 -0
- package/src/storage/schema/tables/TableProvenTxReq.ts +65 -0
- package/src/storage/schema/tables/TableSettings.ts +17 -0
- package/src/storage/schema/tables/TableSyncState.ts +18 -0
- package/src/storage/schema/tables/TableTransaction.ts +54 -0
- package/src/storage/schema/tables/TableTxLabel.ts +10 -0
- package/src/storage/schema/tables/TableTxLabelMap.ts +9 -0
- package/src/storage/schema/tables/TableUser.ts +16 -0
- package/src/storage/schema/tables/index.ts +16 -0
- package/src/storage/sync/StorageMySQLDojoReader.ts +696 -0
- package/src/storage/sync/index.ts +1 -0
- package/src/utility/Format.ts +133 -0
- package/src/utility/README.md +3 -0
- package/src/utility/ReaderUint8Array.ts +187 -0
- package/src/utility/ScriptTemplateBRC29.ts +73 -0
- package/src/utility/__tests/utilityHelpers.noBuffer.test.ts +109 -0
- package/src/utility/aggregateResults.ts +68 -0
- package/src/utility/identityUtils.ts +159 -0
- package/src/utility/index.all.ts +7 -0
- package/src/utility/index.client.ts +7 -0
- package/src/utility/parseTxScriptOffsets.ts +29 -0
- package/src/utility/stampLog.ts +69 -0
- package/src/utility/tscProofToMerklePath.ts +48 -0
- package/src/utility/utilityHelpers.buffer.ts +34 -0
- package/src/utility/utilityHelpers.noBuffer.ts +60 -0
- package/src/utility/utilityHelpers.ts +275 -0
- package/src/wab-client/WABClient.ts +94 -0
- package/src/wab-client/__tests/WABClient.man.test.ts +59 -0
- package/src/wab-client/auth-method-interactors/AuthMethodInteractor.ts +47 -0
- package/src/wab-client/auth-method-interactors/DevConsoleInteractor.ts +73 -0
- package/src/wab-client/auth-method-interactors/PersonaIDInteractor.ts +35 -0
- package/src/wab-client/auth-method-interactors/TwilioPhoneInteractor.ts +72 -0
- package/syncVersions.js +71 -0
- package/test/Wallet/StorageClient/storageClient.man.test.ts +75 -0
- package/test/Wallet/action/abortAction.test.ts +47 -0
- package/test/Wallet/action/createAction.test.ts +299 -0
- package/test/Wallet/action/createAction2.test.ts +1273 -0
- package/test/Wallet/action/createActionToGenerateBeefs.man.test.ts +293 -0
- package/test/Wallet/action/internalizeAction.a.test.ts +286 -0
- package/test/Wallet/action/internalizeAction.test.ts +682 -0
- package/test/Wallet/action/relinquishOutput.test.ts +37 -0
- package/test/Wallet/certificate/acquireCertificate.test.ts +298 -0
- package/test/Wallet/certificate/listCertificates.test.ts +346 -0
- package/test/Wallet/construct/Wallet.constructor.test.ts +57 -0
- package/test/Wallet/get/getHeaderForHeight.test.ts +82 -0
- package/test/Wallet/get/getHeight.test.ts +52 -0
- package/test/Wallet/get/getKnownTxids.test.ts +86 -0
- package/test/Wallet/get/getNetwork.test.ts +27 -0
- package/test/Wallet/get/getVersion.test.ts +27 -0
- package/test/Wallet/list/listActions.test.ts +279 -0
- package/test/Wallet/list/listActions2.test.ts +1381 -0
- package/test/Wallet/list/listCertificates.test.ts +118 -0
- package/test/Wallet/list/listOutputs.test.ts +447 -0
- package/test/Wallet/live/walletLive.man.test.ts +521 -0
- package/test/Wallet/local/localWallet.man.test.ts +93 -0
- package/test/Wallet/local/localWallet2.man.test.ts +277 -0
- package/test/Wallet/signAction/mountaintop.man.test.ts +130 -0
- package/test/Wallet/specOps/specOps.man.test.ts +220 -0
- package/test/Wallet/support/janitor.man.test.ts +40 -0
- package/test/Wallet/support/operations.man.test.ts +407 -0
- package/test/Wallet/support/reqErrorReview.2025.05.06.man.test.ts +347 -0
- package/test/Wallet/sync/Wallet.sync.test.ts +215 -0
- package/test/Wallet/sync/Wallet.updateWalletLegacyTestData.man.test.ts +203 -0
- package/test/Wallet/sync/setActive.test.ts +170 -0
- package/test/WalletClient/LocalKVStore.man.test.ts +114 -0
- package/test/WalletClient/WERR.man.test.ts +35 -0
- package/test/bsv-ts-sdk/LocalKVStore.test.ts +102 -0
- package/test/checkDB.ts +57 -0
- package/test/checkdb +0 -0
- package/test/examples/backup.man.test.ts +59 -0
- package/test/examples/pushdrop.test.ts +282 -0
- package/test/monitor/Monitor.test.ts +620 -0
- package/test/services/Services.test.ts +263 -0
- package/test/storage/KnexMigrations.test.ts +86 -0
- package/test/storage/StorageMySQLDojoReader.man.test.ts +60 -0
- package/test/storage/count.test.ts +177 -0
- package/test/storage/find.test.ts +195 -0
- package/test/storage/findLegacy.test.ts +67 -0
- package/test/storage/idb/allocateChange.test.ts +251 -0
- package/test/storage/idb/count.test.ts +158 -0
- package/test/storage/idb/find.test.ts +177 -0
- package/test/storage/idb/idbSpeed.test.ts +36 -0
- package/test/storage/idb/insert.test.ts +268 -0
- package/test/storage/idb/transactionAbort.test.ts +108 -0
- package/test/storage/idb/update.test.ts +999 -0
- package/test/storage/insert.test.ts +278 -0
- package/test/storage/update.test.ts +1021 -0
- package/test/storage/update2.test.ts +897 -0
- package/test/utils/TestUtilsWalletStorage.ts +2526 -0
- package/test/utils/localWalletMethods.ts +363 -0
- package/test/utils/removeFailedFromDatabase.sql +17 -0
- package/ts2md.json +44 -0
- package/tsconfig.all.json +31 -0
- package/tsconfig.client.json +29 -0
- package/tsconfig.json +17 -0
- package/tsconfig.mobile.json +28 -0
|
@@ -0,0 +1,975 @@
|
|
|
1
|
+
import { ChaintracksFetchApi } from '../Api/ChaintracksFetchApi'
|
|
2
|
+
import { BlockHeader, Chain, WERR_INTERNAL, WERR_INVALID_OPERATION, WERR_INVALID_PARAMETER } from '../../../../sdk'
|
|
3
|
+
import { Hash } from '@bsv/sdk'
|
|
4
|
+
import { asArray, asString, asUint8Array } from '../../../../utility/utilityHelpers.noBuffer'
|
|
5
|
+
import { BulkHeaderFileInfo, BulkHeaderFilesInfo } from './BulkHeaderFile'
|
|
6
|
+
import { isKnownValidBulkHeaderFile, validBulkHeaderFiles } from './validBulkHeaderFilesByFileHash'
|
|
7
|
+
import { HeightRange } from './HeightRange'
|
|
8
|
+
import {
|
|
9
|
+
addWork,
|
|
10
|
+
convertBitsToWork,
|
|
11
|
+
deserializeBlockHeader,
|
|
12
|
+
serializeBaseBlockHeaders,
|
|
13
|
+
subWork,
|
|
14
|
+
validateBufferOfHeaders,
|
|
15
|
+
validateGenesisHeader
|
|
16
|
+
} from './blockHeaderUtilities'
|
|
17
|
+
import { ChaintracksStorageBulkFileApi } from '../Api/ChaintracksStorageApi'
|
|
18
|
+
import { ChaintracksFetch } from './ChaintracksFetch'
|
|
19
|
+
import { ChaintracksFsApi } from '../Api/ChaintracksFsApi'
|
|
20
|
+
import { SingleWriterMultiReaderLock } from './SingleWriterMultiReaderLock'
|
|
21
|
+
|
|
22
|
+
/**
 * Construction options for `BulkFileDataManager`.
 */
export interface BulkFileDataManagerOptions {
  /** Which chain the managed bulk header files belong to; files whose `chain` differs are rejected. */
  chain: Chain
  /** Maximum number of headers held in a single bulk file. */
  maxPerFile: number
  /** Optional limit on how many bulk file payloads are retained in memory at once. */
  maxRetained?: number
  /** Optional fetch service used to download bulk file data by `sourceUrl` on demand. */
  fetch?: ChaintracksFetchApi
  /** Optional CDN base URL; when set, the manager seeds itself from known-valid files published there. */
  fromKnownSourceUrl?: string
}
|
|
29
|
+
|
|
30
|
+
/**
|
|
31
|
+
* Manages bulk file data (typically 8MB chunks of 100,000 headers each).
|
|
32
|
+
*
|
|
33
|
+
* If not cached in memory,
|
|
34
|
+
* optionally fetches data by `sourceUrl` from CDN on demand,
|
|
35
|
+
* optionally finds data by `fileId` in a database on demand,
|
|
36
|
+
* and retains a limited number of files in memory,
|
|
37
|
+
* subject to the optional `maxRetained` limit.
|
|
38
|
+
*/
|
|
39
|
+
export class BulkFileDataManager {
|
|
40
|
+
/**
 * Builds the default option set for a `BulkFileDataManager`:
 * 100,000 headers per file, at most 2 files retained in memory,
 * a fresh `ChaintracksFetch`, and the Project Babbage CDN as the known source.
 *
 * @param chain which chain the manager will serve
 * @returns fully-populated default options for `chain`
 */
static createDefaultOptions(chain: Chain): BulkFileDataManagerOptions {
  const defaults: BulkFileDataManagerOptions = {
    chain,
    maxPerFile: 100000,
    maxRetained: 2,
    fetch: new ChaintracksFetch(),
    fromKnownSourceUrl: 'https://cdn.projectbabbage.com/blockheaders'
  }
  return defaults
}
|
|
49
|
+
|
|
50
|
+
// Logger sink; a no-op until `setStorage` supplies a real logger.
private log: (...args: any[]) => void = () => {}

// In-memory list of managed bulk file data records.
private bfds: BulkFileData[] = []
// Maps a file's hash to its index within `bfds`.
private fileHashToIndex: Record<string, number> = {}
// Serializes writers against concurrent readers of the in-memory state.
private lock: SingleWriterMultiReaderLock = new SingleWriterMultiReaderLock()
// Optional persistent backing store, attached via `setStorage`.
private storage?: ChaintracksStorageBulkFileApi

// The following are copied from the constructor options; see BulkFileDataManagerOptions.
readonly chain: Chain
readonly maxPerFile: number
readonly fetch?: ChaintracksFetchApi
readonly maxRetained?: number
readonly fromKnownSourceUrl?: string
|
|
62
|
+
|
|
63
|
+
/**
 * Creates a manager from explicit options, or from just a `Chain`
 * (in which case `createDefaultOptions` supplies the rest).
 *
 * Seeds the in-memory file list from `fromKnownSourceUrl` (when configured)
 * via `deleteBulkFilesNoLock`.
 *
 * @param options full option object, or a bare chain selector
 */
constructor(options: BulkFileDataManagerOptions | Chain) {
  const opts: BulkFileDataManagerOptions =
    typeof options === 'object'
      ? (options as BulkFileDataManagerOptions)
      : BulkFileDataManager.createDefaultOptions(options as Chain)

  this.chain = opts.chain
  this.maxPerFile = opts.maxPerFile
  this.maxRetained = opts.maxRetained
  this.fromKnownSourceUrl = opts.fromKnownSourceUrl
  this.fetch = opts.fetch

  this.deleteBulkFilesNoLock()
}
|
|
74
|
+
|
|
75
|
+
async deleteBulkFiles(): Promise<void> {
|
|
76
|
+
return this.lock.withWriteLock(async () => this.deleteBulkFilesNoLock())
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
private deleteBulkFilesNoLock(): void {
|
|
80
|
+
this.bfds = []
|
|
81
|
+
this.fileHashToIndex = {}
|
|
82
|
+
|
|
83
|
+
if (this.fromKnownSourceUrl) {
|
|
84
|
+
const vbhfs = validBulkHeaderFiles
|
|
85
|
+
const filtered = vbhfs.filter(f => f.sourceUrl === this.fromKnownSourceUrl)
|
|
86
|
+
const files = selectBulkHeaderFiles(filtered, this.chain, this.maxPerFile)
|
|
87
|
+
for (const file of files) {
|
|
88
|
+
this.add({ ...file, fileHash: file.fileHash!, mru: Date.now() })
|
|
89
|
+
}
|
|
90
|
+
}
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
/**
|
|
94
|
+
* If `bfds` are going to be backed by persistent storage,
|
|
95
|
+
* must be called before making storage available.
|
|
96
|
+
*
|
|
97
|
+
* Synchronizes bfds and storage files, after which this manager maintains sync.
|
|
98
|
+
* There should be no changes to bulk files by direct access to storage bulk file methods.
|
|
99
|
+
*/
|
|
100
|
+
async setStorage(storage: ChaintracksStorageBulkFileApi, log: (...args: any[]) => void): Promise<void> {
|
|
101
|
+
return this.lock.withWriteLock(async () => this.setStorageNoLock(storage, log))
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
/**
 * Attaches persistent storage and reconciles it with the in-memory `bfds`.
 *
 * Reconciliation rule: whichever side has at least as much CDN-backed height
 * coverage wins, and the losing side is overwritten wholesale.
 *
 * Caller must hold the write lock.
 *
 * @param storage persistent bulk file store to keep in sync from now on
 * @param log logger sink adopted by this manager
 */
private async setStorageNoLock(storage: ChaintracksStorageBulkFileApi, log: (...args: any[]) => void): Promise<void> {
this.storage = storage
this.log = log

// Get files currently in persistent storage.
let sfs = await this.storage.getBulkFiles()

// Sync bfds with storage. Two scenarios supported:

const bfdsRanges = this.heightRangesFromBulkFiles(this.bfds)
const sfsRanges = this.heightRangesFromBulkFiles(sfs)

if (sfsRanges.cdn.length >= bfdsRanges.cdn.length) {
// Storage win if it has greater or equal CDN coverage
// Replace all bfds with sfs
this.bfds = []
for (const file of sfs) {
// Each storage record is validated before being trusted in memory.
const vbf: BulkFileData = await this.validateFileInfo(file)
this.bfds.push(vbf)
}
} else {
// Bfds win if they have greater CDN coverage
// Replace all sfs with bfds
// reverse() mutates sfs in place; deletion proceeds from last file to first.
for (const s of sfs.reverse()) await this.storage.deleteBulkFile(s.fileId!)
for (const bfd of this.bfds) {
// Data payload must be present before it can be persisted.
await this.ensureData(bfd)
bfd.fileId = await this.storage.insertBulkFile(bfdToInfo(bfd, true))
}
}
}
|
|
134
|
+
|
|
135
|
+
/**
 * Computes the combined height coverage of a set of bulk files,
 * broken out by category.
 *
 * @param files bulk file descriptors to measure
 * @returns `all` — union of every file's range; `cdn` — union over CDN-backed
 *   files only; `incremental` — union over incremental files only. Each starts
 *   as the empty range `HeightRange(0, -1)`.
 */
heightRangesFromBulkFiles(files: BulkHeaderFileInfo[]): {
  all: HeightRange
  cdn: HeightRange
  incremental: HeightRange
} {
  let all = new HeightRange(0, -1)
  let cdn = new HeightRange(0, -1)
  let incremental = new HeightRange(0, -1)
  for (const f of files) {
    const span = new HeightRange(f.firstHeight, f.firstHeight + f.count - 1)
    all = all.union(span)
    if (isBdfCdn(f)) cdn = cdn.union(span)
    if (isBdfIncremental(f)) incremental = incremental.union(span)
  }
  return { all, cdn, incremental }
}
|
|
149
|
+
|
|
150
|
+
/**
 * Creates a sequential reader over the managed bulk header data.
 * @param range heights to read; defaults to the full managed height range.
 * @param maxBufferSize maximum bytes returned per read; defaults to 80,000,000.
 * @returns a BulkFileDataReader positioned at the start of `range`.
 */
async createReader(range?: HeightRange, maxBufferSize?: number): Promise<BulkFileDataReader> {
  range = range || (await this.getHeightRange())
  maxBufferSize = maxBufferSize || 1000000 * 80 // 1,000,000 headers at 80 bytes each (80MB)
  return new BulkFileDataReader(this, range, maxBufferSize)
}
|
|
155
|
+
|
|
156
|
+
async updateFromUrl(cdnUrl: string): Promise<void> {
|
|
157
|
+
if (!this.fetch) throw new WERR_INVALID_OPERATION('fetch is not defined in the BulkFileDataManager.')
|
|
158
|
+
|
|
159
|
+
const toUrl = (file: string) => this.fetch!.pathJoin(cdnUrl, file)
|
|
160
|
+
const url = toUrl(`${this.chain}NetBlockHeaders.json`)
|
|
161
|
+
|
|
162
|
+
const availableBulkFiles = (await this.fetch.fetchJson(url)) as BulkHeaderFilesInfo
|
|
163
|
+
if (!availableBulkFiles)
|
|
164
|
+
throw new WERR_INVALID_PARAMETER(`cdnUrl`, `a valid BulkHeaderFilesInfo JSON resource available from ${url}`)
|
|
165
|
+
|
|
166
|
+
const selectedFiles = selectBulkHeaderFiles(
|
|
167
|
+
availableBulkFiles.files,
|
|
168
|
+
this.chain,
|
|
169
|
+
this.maxPerFile || availableBulkFiles.headersPerFile
|
|
170
|
+
)
|
|
171
|
+
for (const bf of selectedFiles) {
|
|
172
|
+
if (!bf.fileHash) {
|
|
173
|
+
throw new WERR_INVALID_PARAMETER(`fileHash`, `valid for all files in json downloaded from ${url}`)
|
|
174
|
+
}
|
|
175
|
+
if (!bf.chain || bf.chain !== this.chain) {
|
|
176
|
+
throw new WERR_INVALID_PARAMETER(`chain`, `"${this.chain}" for all files in json downloaded from ${url}`)
|
|
177
|
+
}
|
|
178
|
+
if (!bf.sourceUrl || bf.sourceUrl !== cdnUrl) bf.sourceUrl = cdnUrl
|
|
179
|
+
}
|
|
180
|
+
|
|
181
|
+
const rangeBefore = await this.getHeightRange()
|
|
182
|
+
const r = await this.merge(selectedFiles)
|
|
183
|
+
const rangeAfter = await this.getHeightRange()
|
|
184
|
+
|
|
185
|
+
let log = 'BulkDataFileManager.updateFromUrl\n'
|
|
186
|
+
log += ` url: ${url}\n`
|
|
187
|
+
log += ` bulk range before: ${rangeBefore}\n`
|
|
188
|
+
log += ` bulk range after: ${rangeAfter}\n`
|
|
189
|
+
this.log(log)
|
|
190
|
+
}
|
|
191
|
+
|
|
192
|
+
async merge(files: BulkHeaderFileInfo[]): Promise<BulkFileDataManagerMergeResult> {
|
|
193
|
+
return this.lock.withWriteLock(async () => this.mergeNoLock(files))
|
|
194
|
+
}
|
|
195
|
+
|
|
196
|
+
/**
 * Merges files into the managed set without taking the write lock (caller must hold it).
 *
 * Per file, dispatches to one of four outcomes:
 * - unchanged: identical file (by hash, count, lastHash, lastChainWork) already present.
 * - updated: a file with the same firstHeight exists but differs — delegated to update().
 * - merged: both the incoming file and the current last file are incremental — concatenated.
 * - inserted: otherwise appended as a new file via add().
 *
 * @param files bulk header files to merge, expected in ascending height order.
 * @returns merge result buckets for logging and callers.
 */
private async mergeNoLock(files: BulkHeaderFileInfo[]): Promise<BulkFileDataManagerMergeResult> {
  const r: BulkFileDataManagerMergeResult = { inserted: [], updated: [], unchanged: [], dropped: [] }
  for (const file of files) {
    const hbf = this.getBfdForHeight(file.firstHeight)
    if (hbf && file.fileId) hbf.fileId = file.fileId // Always update fileId if provided
    // Note: lbf is captured before this iteration's mutations; each loop pass re-fetches it.
    const lbf = this.getLastBfd()
    if (
      hbf &&
      hbf.fileHash === file.fileHash &&
      hbf.count === file.count &&
      hbf.lastHash === file.lastHash &&
      hbf.lastChainWork === file.lastChainWork
    ) {
      // We already have an identical matching file...
      r.unchanged.push(bfdToInfo(hbf))
      continue
    }
    // Full validation (hash check, header chain check) before any mutation.
    const vbf: BulkFileData = await this.validateFileInfo(file)
    if (hbf) {
      // We have a matching file by firstHeight but count and fileHash differ
      await this.update(vbf, hbf, r)
    } else if (isBdfIncremental(vbf) && lbf && isBdfIncremental(lbf)) {
      await this.mergeIncremental(lbf, vbf, r)
    } else {
      const added = await this.add(vbf)
      r.inserted.push(added)
    }
  }
  this.log(`BulkFileDataManager.merge:\n${this.toLogString(r)}\n`)
  return r
}
|
|
227
|
+
|
|
228
|
+
/**
 * Appends the headers of incremental file `vbf` onto the existing last incremental
 * file `lbf`, recomputing its hash and syncing the change to storage.
 *
 * NOTE(review): lbf.count/lastHash/lastChainWork are mutated BEFORE ensureData runs;
 * ensureData only verifies lbf.fileHash (still the pre-merge hash at that point),
 * but if ensureData throws, lbf is left partially mutated — confirm callers treat
 * that as fatal.
 *
 * @param lbf existing last (incremental) bulk file; mutated in place.
 * @param vbf validated incoming incremental file whose data extends lbf.
 * @param r merge result; lbf's updated info is pushed to r.updated.
 */
private async mergeIncremental(lbf: BulkFileData, vbf: BulkFileData, r: BulkFileDataManagerMergeResult) {
  lbf.count += vbf.count
  lbf.lastHash = vbf.lastHash
  lbf.lastChainWork = vbf.lastChainWork
  await this.ensureData(lbf)
  // Concatenate existing and new header bytes into a single buffer.
  const newData = new Uint8Array(lbf.data!.length + vbf.data!.length)
  newData.set(lbf.data!)
  newData.set(vbf.data!, lbf.data!.length)
  lbf.data = newData
  // Re-key the hash index: the concatenated data has a new file hash.
  delete this.fileHashToIndex[lbf.fileHash!]
  lbf.fileHash = asString(Hash.sha256(asArray(newData)), 'base64')
  // lbf is the last bfd by construction, so its index is bfds.length - 1.
  this.fileHashToIndex[lbf.fileHash] = this.bfds.length - 1
  lbf.mru = Date.now()
  const lbfInfo = bfdToInfo(lbf, true)
  r.updated.push(lbfInfo)
  if (this.storage && lbf.fileId) {
    await this.storage.updateBulkFile(lbf.fileId, lbfInfo)
  }
}
|
|
247
|
+
|
|
248
|
+
/**
 * Renders a human-readable summary of bulk files or a merge result.
 *
 * Dispatch: no argument → logs the manager's current files; a merge result
 * (detected by the presence of an `updated` property) → logs each non-empty
 * category recursively; an array → one indexed line per file.
 *
 * @param what merge result, file list, or undefined for the managed files.
 * @returns multi-line summary string (empty for unrecognized input).
 */
toLogString(what?: BulkFileDataManagerMergeResult | BulkFileData[] | BulkHeaderFileInfo[]): string {
  let log = ''
  if (!what) {
    log += this.toLogString(this.bfds)
  } else if (what['updated']) {
    // Arrays never have an 'updated' property, so this safely identifies a merge result.
    what = what as BulkFileDataManagerMergeResult
    for (const { category, bfds } of [
      { category: 'unchanged', bfds: what.unchanged },
      { category: 'dropped', bfds: what.dropped },
      { category: 'updated', bfds: what.updated },
      { category: 'inserted', bfds: what.inserted }
    ]) {
      if (bfds.length > 0) {
        log += `  ${category}:\n`
        log += this.toLogString(bfds)
      }
    }
  } else if (Array.isArray(what)) {
    what = what as BulkHeaderFileInfo[]
    let i = -1
    for (const bfd of what) {
      i++
      log += `  ${i}: ${bfd.fileName} fileId=${bfd.fileId} ${bfd.firstHeight}-${bfd.firstHeight + bfd.count - 1}\n`
    }
  }

  return log
}
|
|
276
|
+
|
|
277
|
+
/**
 * Extends the managed bulk headers with new live-to-bulk headers, packaging them
 * as a new 'incremental' bulk file and merging it in.
 *
 * Headers at heights already covered are skipped (their work is subtracted from
 * `incrementalChainWork` when provided). The surviving headers must start exactly
 * at the next uncovered height and chain correctly from the last file's lastHash.
 *
 * @param newBulkHeaders headers to append, in ascending height order.
 * @param incrementalChainWork optional precomputed total work of `newBulkHeaders`;
 *   when omitted, work is computed per header with sequential chain validation.
 * @throws WERR_INVALID_PARAMETER when headers do not extend the existing chain.
 * @throws WERR_INTERNAL when the last bulk file lacks a lastHash.
 */
async mergeIncrementalBlockHeaders(newBulkHeaders: BlockHeader[], incrementalChainWork?: string): Promise<void> {
  if (newBulkHeaders.length === 0) return
  return this.lock.withWriteLock(async () => {
    const lbf = this.getLastFileNoLock()
    const nextHeight = lbf ? lbf.firstHeight + lbf.count : 0
    if (nextHeight > 0 && newBulkHeaders.length > 0 && newBulkHeaders[0].height < nextHeight) {
      // Don't modify the incoming array...
      newBulkHeaders = [...newBulkHeaders]
      // If we have more headers than we need, drop the incoming headers.
      while (newBulkHeaders.length > 0 && newBulkHeaders[0].height < nextHeight) {
        const h = newBulkHeaders.shift()
        if (h && incrementalChainWork) {
          // Keep the supplied total consistent with the headers actually kept.
          incrementalChainWork = subWork(incrementalChainWork, convertBitsToWork(h.bits))
        }
      }
    }
    if (newBulkHeaders.length === 0) return
    if (!lbf || nextHeight !== newBulkHeaders[0].height)
      throw new WERR_INVALID_PARAMETER('newBulkHeaders', 'an extension of existing bulk headers')
    if (!lbf.lastHash) throw new WERR_INTERNAL(`lastHash is not defined for the last bulk file ${lbf.fileName}`)

    const fbh = newBulkHeaders[0]
    const lbh = newBulkHeaders.slice(-1)[0]
    let lastChainWork = lbf.lastChainWork
    if (incrementalChainWork) {
      lastChainWork = addWork(incrementalChainWork, lastChainWork)
    } else {
      // If lastChainWork is not provided, calculate it from the last file with basic validation.
      let lastHeight = lbf.firstHeight + lbf.count - 1
      let lastHash = lbf.lastHash
      for (const h of newBulkHeaders) {
        if (h.height !== lastHeight + 1 || h.previousHash !== lastHash) {
          throw new WERR_INVALID_PARAMETER(
            'headers',
            `an extension of existing bulk headers, header with height ${h.height} is non-sequential`
          )
        }
        lastChainWork = addWork(lastChainWork, convertBitsToWork(h.bits))
        lastHeight = h.height
        lastHash = h.hash
      }
    }
    const data = serializeBaseBlockHeaders(newBulkHeaders)

    const fileHash = asString(Hash.sha256(asArray(data)), 'base64')
    // Package as an 'incremental' file: no CDN source, chained from lbf.
    const bf: BulkHeaderFileInfo = {
      fileId: undefined,
      chain: this.chain,
      sourceUrl: undefined,
      fileName: 'incremental',
      firstHeight: fbh.height,
      count: newBulkHeaders.length,
      prevChainWork: lbf.lastChainWork,
      lastChainWork,
      prevHash: lbf.lastHash,
      lastHash: lbh.hash,
      fileHash,
      data
    }
    await this.mergeNoLock([bf])
  })
}
|
|
339
|
+
|
|
340
|
+
async getBulkFiles(keepData?: boolean): Promise<BulkHeaderFileInfo[]> {
|
|
341
|
+
return this.lock.withReadLock(async () => {
|
|
342
|
+
return this.bfds.map(bfd => bfdToInfo(bfd, keepData))
|
|
343
|
+
})
|
|
344
|
+
}
|
|
345
|
+
|
|
346
|
+
async getHeightRange(): Promise<HeightRange> {
|
|
347
|
+
return this.lock.withReadLock(async () => {
|
|
348
|
+
if (this.bfds.length === 0) return HeightRange.empty
|
|
349
|
+
const first = this.bfds[0]
|
|
350
|
+
const last = this.bfds[this.bfds.length - 1]
|
|
351
|
+
return new HeightRange(first.firstHeight, last.firstHeight + last.count - 1)
|
|
352
|
+
})
|
|
353
|
+
}
|
|
354
|
+
|
|
355
|
+
async getDataFromFile(file: BulkHeaderFileInfo, offset?: number, length?: number): Promise<Uint8Array | undefined> {
|
|
356
|
+
const bfd = await this.getBfdForHeight(file.firstHeight)
|
|
357
|
+
if (!bfd || bfd.count < file.count)
|
|
358
|
+
throw new WERR_INVALID_PARAMETER(
|
|
359
|
+
'file',
|
|
360
|
+
`a match for ${file.firstHeight}, ${file.count} in the BulkFileDataManager.`
|
|
361
|
+
)
|
|
362
|
+
return this.lock.withReadLock(async () => this.getDataFromFileNoLock(bfd, offset, length))
|
|
363
|
+
}
|
|
364
|
+
|
|
365
|
+
private async getDataFromFileNoLock(
|
|
366
|
+
bfd: BulkFileData,
|
|
367
|
+
offset?: number,
|
|
368
|
+
length?: number
|
|
369
|
+
): Promise<Uint8Array | undefined> {
|
|
370
|
+
const fileLength = bfd.count * 80
|
|
371
|
+
offset = offset || 0
|
|
372
|
+
if (offset > fileLength - 1) return undefined
|
|
373
|
+
length = length || bfd.count * 80 - offset
|
|
374
|
+
length = Math.min(length, fileLength - offset)
|
|
375
|
+
let data: Uint8Array | undefined
|
|
376
|
+
if (bfd.data) {
|
|
377
|
+
data = bfd.data.slice(offset, offset + length)
|
|
378
|
+
} else if (bfd.fileId && this.storage) {
|
|
379
|
+
data = await this.storage.getBulkFileData(bfd.fileId, offset, length)
|
|
380
|
+
}
|
|
381
|
+
if (!data) {
|
|
382
|
+
await this.ensureData(bfd)
|
|
383
|
+
if (bfd.data) data = bfd.data.slice(offset, offset + length)
|
|
384
|
+
}
|
|
385
|
+
if (!data) return undefined
|
|
386
|
+
return data
|
|
387
|
+
}
|
|
388
|
+
|
|
389
|
+
async findHeaderForHeightOrUndefined(height: number): Promise<BlockHeader | undefined> {
|
|
390
|
+
return this.lock.withReadLock(async () => {
|
|
391
|
+
if (!Number.isInteger(height) || height < 0)
|
|
392
|
+
throw new WERR_INVALID_PARAMETER('height', `a non-negative integer (${height}).`)
|
|
393
|
+
const file = this.bfds.find(f => f.firstHeight <= height && f.firstHeight + f.count > height)
|
|
394
|
+
if (!file) return undefined
|
|
395
|
+
const offset = (height - file.firstHeight) * 80
|
|
396
|
+
const data = await this.getDataFromFileNoLock(file, offset, 80)
|
|
397
|
+
if (!data) return undefined
|
|
398
|
+
const header = deserializeBlockHeader(data, 0, height)
|
|
399
|
+
return header
|
|
400
|
+
})
|
|
401
|
+
}
|
|
402
|
+
|
|
403
|
+
async getFileForHeight(height: number): Promise<BulkHeaderFileInfo | undefined> {
|
|
404
|
+
return this.lock.withReadLock(async () => {
|
|
405
|
+
const bfd = this.getBfdForHeight(height)
|
|
406
|
+
if (!bfd) return undefined
|
|
407
|
+
return bfdToInfo(bfd)
|
|
408
|
+
})
|
|
409
|
+
}
|
|
410
|
+
|
|
411
|
+
/**
 * Finds the in-memory bulk file record whose height span contains `height`.
 * Caller is responsible for holding an appropriate lock.
 * @param height block height to locate.
 * @throws WERR_INVALID_PARAMETER for a negative or non-integer height.
 */
private getBfdForHeight(height: number): BulkFileData | undefined {
  if (!Number.isInteger(height) || height < 0)
    throw new WERR_INVALID_PARAMETER('height', `a non-negative integer (${height}).`)
  // Each file covers [firstHeight, firstHeight + count).
  return this.bfds.find(f => height >= f.firstHeight && height < f.firstHeight + f.count)
}
|
|
417
|
+
|
|
418
|
+
/**
 * Returns the bulk file record `fromEnd` positions from the end of the list
 * (1 = last, 2 = second to last), or undefined when out of range.
 */
private getLastBfd(fromEnd = 1): BulkFileData | undefined {
  const index = this.bfds.length - fromEnd
  if (index < 0) return undefined
  return this.bfds[index]
}
|
|
423
|
+
|
|
424
|
+
async getLastFile(fromEnd = 1): Promise<BulkHeaderFileInfo | undefined> {
|
|
425
|
+
return this.lock.withReadLock(async () => this.getLastFileNoLock(fromEnd))
|
|
426
|
+
}
|
|
427
|
+
|
|
428
|
+
/**
 * Lock-free variant of getLastFile; caller must hold an appropriate lock.
 */
private getLastFileNoLock(fromEnd = 1): BulkHeaderFileInfo | undefined {
  const bfd = this.getLastBfd(fromEnd)
  return bfd ? bfdToInfo(bfd) : undefined
}
|
|
433
|
+
|
|
434
|
+
private async getDataByFileHash(fileHash: string): Promise<Uint8Array | undefined> {
|
|
435
|
+
const index = this.fileHashToIndex[fileHash]
|
|
436
|
+
if (index === undefined)
|
|
437
|
+
throw new WERR_INVALID_PARAMETER('fileHash', `known to the BulkFileDataManager. ${fileHash} is unknown.`)
|
|
438
|
+
const bfd = this.bfds[index]
|
|
439
|
+
const data = await this.ensureData(bfd)
|
|
440
|
+
return data
|
|
441
|
+
}
|
|
442
|
+
|
|
443
|
+
private async getDataByFileId(fileId: number): Promise<Uint8Array | undefined> {
|
|
444
|
+
const bfd = this.bfds.find(f => f.fileId === fileId)
|
|
445
|
+
if (bfd === undefined)
|
|
446
|
+
throw new WERR_INVALID_PARAMETER('fileId', `known to the BulkFileDataManager. ${fileId} is unknown.`)
|
|
447
|
+
const data = await this.ensureData(bfd)
|
|
448
|
+
return data
|
|
449
|
+
}
|
|
450
|
+
|
|
451
|
+
/**
 * Validates a bulk header file and promotes it to an in-memory BulkFileData record.
 *
 * Checks (unless `file.validated` is already true): chain match, positive count,
 * maxPerFile limit (incremental files exempt), fileHash presence, at least one
 * data source, data length, data hash, and — unless the file is a known-valid
 * published file — full header-chain validation against the preceding file
 * (or the genesis sentinel of 64 zero hex chars for height 0).
 *
 * @param file candidate bulk header file.
 * @returns a new BulkFileData (the input object is not mutated) with
 *   validated=true, lastHash/lastChainWork filled in, and fresh mru.
 * @throws WERR_INVALID_PARAMETER on any validation failure.
 */
private async validateFileInfo(file: BulkHeaderFileInfo): Promise<BulkFileData> {
  if (file.chain !== this.chain) throw new WERR_INVALID_PARAMETER('chain', `${this.chain}`)
  if (file.count <= 0)
    throw new WERR_INVALID_PARAMETER('bf.count', `expected count to be greater than 0, but got ${file.count}`)
  if (file.count > this.maxPerFile && file.fileName !== 'incremental')
    throw new WERR_INVALID_PARAMETER('count', `less than or equal to maxPerFile ${this.maxPerFile}`)
  if (!file.fileHash) throw new WERR_INVALID_PARAMETER('fileHash', `defined`)
  if (!file.sourceUrl && !file.fileId && !file.data)
    throw new WERR_INVALID_PARAMETER('data', `defined when sourceUrl and fileId are undefined`)

  // Shallow copy: validation never mutates the caller's object.
  let bfd: BulkFileData = {
    ...file,
    fileHash: file.fileHash,
    mru: Date.now()
  }

  if (!bfd.validated) {
    await this.ensureData(bfd)

    if (!bfd.data || bfd.data.length !== bfd.count * 80)
      throw new WERR_INVALID_PARAMETER(
        'file.data',
        `bulk file ${bfd.fileName} data length ${bfd.data?.length} does not match expected count ${bfd.count}`
      )

    // Recompute the hash from the actual bytes and cross-check the declared value.
    bfd.fileHash = asString(Hash.sha256(asArray(bfd.data)), 'base64')
    if (file.fileHash && file.fileHash !== bfd.fileHash)
      throw new WERR_INVALID_PARAMETER('file.fileHash', `expected ${file.fileHash} but got ${bfd.fileHash}`)

    if (!isKnownValidBulkHeaderFile(bfd)) {
      // Chain-link this file to the file covering the previous height, or to
      // the all-zeros sentinel when this file starts at the genesis block.
      const pbf = bfd.firstHeight > 0 ? this.getBfdForHeight(bfd.firstHeight - 1) : undefined
      const prevHash = pbf ? pbf.lastHash! : '00'.repeat(32)
      const prevChainWork = pbf ? pbf.lastChainWork : '00'.repeat(32)

      const { lastHeaderHash, lastChainWork } = validateBufferOfHeaders(
        bfd.data,
        prevHash,
        0,
        undefined,
        prevChainWork
      )

      if (bfd.lastHash && bfd.lastHash !== lastHeaderHash)
        throw new WERR_INVALID_PARAMETER('file.lastHash', `expected ${bfd.lastHash} but got ${lastHeaderHash}`)
      if (bfd.lastChainWork && bfd.lastChainWork !== lastChainWork)
        throw new WERR_INVALID_PARAMETER(
          'file.lastChainWork',
          `expected ${bfd.lastChainWork} but got ${lastChainWork}`
        )

      bfd.lastHash = lastHeaderHash
      bfd.lastChainWork = lastChainWork!

      if (bfd.firstHeight === 0) {
        // Extra safety: the first header must be this chain's genesis block.
        validateGenesisHeader(bfd.data, bfd.chain!)
      }
    }
    bfd.validated = true
  }

  return bfd
}
|
|
513
|
+
|
|
514
|
+
async ReValidate(): Promise<void> {
|
|
515
|
+
return this.lock.withReadLock(async () => this.ReValidateNoLock())
|
|
516
|
+
}
|
|
517
|
+
|
|
518
|
+
/**
 * Re-validates every managed bulk file in place.
 *
 * NOTE(review): validateFileInfo returns a NEW object built from `file`; only its
 * `validated` flag is inspected here, and any fields it recomputed (fileHash,
 * lastHash, lastChainWork) are NOT copied back onto `file` — confirm that is the
 * intended behavior (mismatches still throw inside validateFileInfo).
 *
 * @throws WERR_INTERNAL when a file fails to come back validated.
 */
private async ReValidateNoLock(): Promise<void> {
  for (const file of this.bfds) {
    await this.ensureData(file)
    file.validated = false // Reset validation to re-validate on next access
    const bfd = await this.validateFileInfo(file)
    if (!bfd.validated) throw new WERR_INTERNAL(`BulkFileDataManager.ReValidate failed for file ${bfd.fileName}`)
    file.validated = true
  }
}
|
|
527
|
+
|
|
528
|
+
private validateBfdForAdd(bfd: BulkFileData): void {
|
|
529
|
+
if (this.bfds.length === 0 && bfd.firstHeight !== 0)
|
|
530
|
+
throw new WERR_INVALID_PARAMETER('firstHeight', `0 for the first file`)
|
|
531
|
+
if (this.bfds.length > 0) {
|
|
532
|
+
const last = this.bfds[this.bfds.length - 1]
|
|
533
|
+
if (bfd.firstHeight !== last.firstHeight + last.count)
|
|
534
|
+
throw new WERR_INVALID_PARAMETER('firstHeight', `the last file's firstHeight + count`)
|
|
535
|
+
if (bfd.prevHash !== last.lastHash || bfd.prevChainWork !== last.lastChainWork)
|
|
536
|
+
throw new WERR_INVALID_PARAMETER('prevHash/prevChainWork', `the last file's lastHash/lastChainWork`)
|
|
537
|
+
}
|
|
538
|
+
}
|
|
539
|
+
|
|
540
|
+
private async add(bfd: BulkFileData): Promise<BulkHeaderFileInfo> {
|
|
541
|
+
this.validateBfdForAdd(bfd)
|
|
542
|
+
const index = this.bfds.length
|
|
543
|
+
this.bfds.push(bfd)
|
|
544
|
+
this.fileHashToIndex[bfd.fileHash] = index
|
|
545
|
+
this.ensureMaxRetained()
|
|
546
|
+
const info = bfdToInfo(bfd, true)
|
|
547
|
+
if (this.storage) {
|
|
548
|
+
info.fileId = bfd.fileId = await this.storage.insertBulkFile(info)
|
|
549
|
+
}
|
|
550
|
+
return info
|
|
551
|
+
}
|
|
552
|
+
|
|
553
|
+
private replaceBfdAtIndex(index: number, update: BulkFileData): void {
|
|
554
|
+
const oldBfd = this.bfds[index]
|
|
555
|
+
delete this.fileHashToIndex[oldBfd.fileHash]
|
|
556
|
+
this.bfds[index] = update
|
|
557
|
+
this.fileHashToIndex[update.fileHash] = index
|
|
558
|
+
}
|
|
559
|
+
|
|
560
|
+
/**
 * Updating an existing file occurs in two specific contexts:
 *
 * 1. CDN Update: CDN files of a specific `maxPerFile` series typically ends in a partial file
 * which may periodically add more headers until the next file is started.
 * If the CDN update is the second to last file (followed by an incremental file),
 * then the incremental file is updated or deleted and also returned as the result (with a count of zero if deleted).
 *
 * 2. Incremental Update: The last bulk file is almost always an "incremental" file
 * which is not limited by "maxPerFile" and holds all non-CDN bulk headers.
 * It is updated with new bulk headers which come either from non CDN ingestors or from live header migration to bulk.
 *
 * Updating preserves the following properties:
 *
 * - Any existing headers following this update are preserved and must form an unbroken chain.
 * - There can be at most one incremental file and it must be the last file.
 * - The update start conditions (height, prevHash, prevChainWork) must match an existing file which may be either CDN or internal.
 * - The update fileId must match, it may be undefined.
 * - The fileName does not need to match.
 * - The incremental file must always have fileName "incremental" and sourceUrl must be undefined.
 * - The update count must be greater than 0.
 * - The update count must be greater than current count for CDN to CDN update.
 *
 * @param update new validated BulkFileData to update.
 * @param hbf corresponding existing BulkFileData to update.
 * @param r merge result accumulating updated / inserted / dropped file infos.
 * @throws WERR_INVALID_PARAMETER when the update violates any property above.
 */
private async update(update: BulkFileData, hbf: BulkFileData, r: BulkFileDataManagerMergeResult): Promise<void> {
  if (
    !hbf ||
    hbf.firstHeight !== update.firstHeight ||
    hbf.prevChainWork !== update.prevChainWork ||
    hbf.prevHash !== update.prevHash
  )
    throw new WERR_INVALID_PARAMETER('file', `an existing file by height, prevChainWork and prevHash`)
  if (isBdfCdn(update) === isBdfCdn(hbf) && update.count <= hbf.count)
    throw new WERR_INVALID_PARAMETER('file.count', `greater than the current count ${hbf.count}`)

  const lbf = this.getLastBfd()!
  let index = this.bfds.length - 1
  // truncate: an incremental file losing headers to the update (re-hashed, kept as last file).
  let truncate: BulkFileData | undefined = undefined
  // replaced: a second-to-last CDN file fully superseded by the update.
  let replaced: BulkFileData | undefined = undefined
  // drop: an incremental last file fully superseded by the update (deleted).
  let drop: BulkFileData | undefined = undefined

  if (hbf.firstHeight === lbf.firstHeight) {
    // If the update is for the last file, there are three cases:

    if (isBdfIncremental(update)) {
      // 1. Incremental file may only be extended with more incremental headers.
      if (!isBdfIncremental(lbf))
        throw new WERR_INVALID_PARAMETER('file', `an incremental file to update an existing incremental file`)
    } else {
      // The update is a CDN bulk file.
      if (isBdfCdn(lbf)) {
        // 2. An updated CDN file replaces a partial CDN file.
        if (update.count <= lbf.count)
          throw new WERR_INVALID_PARAMETER(
            'update.count',
            `CDN update must have more headers. ${update.count} <= ${lbf.count}`
          )
      } else {
        // 3. A new CDN file replaces some or all of current incremental file.
        // Retain extra incremental headers if any.
        if (update.count < lbf.count) {
          // The new CDN partially replaces the last incremental file, prepare to shift work and re-add it.
          await this.ensureData(lbf)
          truncate = lbf
        }
      }
    }
  } else {
    // If the update is NOT for the last file, then it MUST be for the second to last file which MUST be a CDN file:
    // - it must be a CDN file update with more headers than the current CDN file.
    // - the last file must be an incremental file which is updated or deleted. The updated (or deleted) last file is returned.
    const lbf2 = this.getLastBfd(2)
    if (!lbf2 || hbf.firstHeight !== lbf2.firstHeight)
      throw new WERR_INVALID_PARAMETER('file', `an update to last or second to last file`)
    if (!isBdfCdn(update) || !isBdfCdn(lbf2) || update.count <= lbf2.count)
      throw new WERR_INVALID_PARAMETER('file', `a CDN file update with more headers than the current CDN file`)
    if (!isBdfIncremental(lbf))
      throw new WERR_INVALID_PARAMETER('file', `a CDN file update followed by an incremental file`)
    if (!update.fileId) update.fileId = lbf2.fileId // Update fileId if not provided
    if (update.count >= lbf2.count + lbf.count) {
      // The current last file is fully replaced by the CDN update.
      drop = lbf
    } else {
      // If the update doesn't fully replace the last incremental file, make sure data is available to be truncated.
      await this.ensureData(lbf)
      truncate = lbf
      // The existing second to last file is fully replaced by the update.
      replaced = lbf2
    }

    index = index - 1 // The update replaces the second to last file.
  }

  // In all cases the bulk file at the current fileId if any is updated.
  this.replaceBfdAtIndex(index, update)
  if (truncate) {
    // If there is a bulk file to be truncated, it becomes the new (reduced) last file.
    await this.shiftWork(update, truncate, replaced)
  }
  if (drop) {
    this.dropLastBulkFile(drop)
  }

  const updateInfo = bfdToInfo(update, true)
  const truncateInfo = truncate ? bfdToInfo(truncate, true) : undefined

  if (this.storage) {
    // Keep storage in sync.
    if (update.fileId) {
      await this.storage.updateBulkFile(update.fileId, updateInfo)
    }
    if (truncate && truncateInfo) {
      if (replaced) {
        // The truncated file keeps its storage row (it still follows the update).
        await this.storage.updateBulkFile(truncate.fileId!, truncateInfo)
      } else {
        truncateInfo.fileId = undefined // Make sure truncate is a new file.
        truncate.fileId = await this.storage.insertBulkFile(truncateInfo)
      }
    }
    if (drop && drop.fileId) {
      await this.storage.deleteBulkFile(drop.fileId)
    }
  }

  if (r) {
    // Update results for logging...
    r.updated.push(updateInfo)
    if (truncateInfo) {
      if (replaced) {
        r.updated.push(truncateInfo)
      } else {
        r.inserted.push(truncateInfo)
      }
    }
    if (drop) {
      r.dropped.push(bfdToInfo(drop))
    }
  }

  this.ensureMaxRetained()
}
|
|
703
|
+
|
|
704
|
+
private dropLastBulkFile(lbf: BulkFileData): void {
|
|
705
|
+
delete this.fileHashToIndex[lbf.fileHash]
|
|
706
|
+
const index = this.bfds.indexOf(lbf)
|
|
707
|
+
if (index !== this.bfds.length - 1)
|
|
708
|
+
throw new WERR_INTERNAL(`dropLastBulkFile requires lbf is the current last file.`)
|
|
709
|
+
this.bfds.pop()
|
|
710
|
+
}
|
|
711
|
+
|
|
712
|
+
/**
 * Remove work (and headers) from `truncate` that now exists in `update`.
 * There are two scenarios:
 * 1. `replaced` is undefined: update is a CDN file that splits an incremental file that must be truncated.
 * 2. `replaced` is valid: update is a CDN update that replaced an existing CDN file and splits an incremental file that must be truncated.
 * @param update the new CDN update file.
 * @param truncate the incremental file to be truncated (losing work which now exists in `update`).
 * @param replaced the existing CDN file that was replaced by `update` (if any).
 * @throws WERR_INTERNAL when the positional invariants between update and truncate are violated.
 */
private async shiftWork(update: BulkFileData, truncate: BulkFileData, replaced?: BulkFileData): Promise<void> {
  const updateIndex = this.fileHashToIndex[update.fileHash]
  // replaced will be valid if the update replaced it and it must become the new last file.
  // truncateIndex will be updateIndex + 1 if the existing last file is being truncated and update is second to last.
  const truncateIndex = this.fileHashToIndex[truncate.fileHash]
  if (truncateIndex !== undefined && truncateIndex !== updateIndex + 1)
    throw new WERR_INTERNAL(`shiftWork requires update to have replaced truncate or truncate to follow update`)
  if (truncateIndex !== undefined && !replaced)
    throw new WERR_INTERNAL(`shiftWork requires valid replaced when update hasn't replaced truncate`)

  // truncate now chains from the end of the update.
  truncate.prevHash = update.lastHash!
  truncate.prevChainWork = update.lastChainWork
  // truncate.lastChainWork, truncate.lastHash remain unchanged
  let count = update.count
  if (replaced) {
    // Only the headers the update added beyond the replaced CDN file come out of truncate.
    count -= replaced.count
  } else {
    // The truncated file is itself being replaced by the update and must be inserted as a new file.
    truncate.fileId = undefined
    this.bfds.push(truncate) // Add the truncated file as a new entry.
  }
  truncate.count -= count
  truncate.firstHeight += count

  // Drop the first `count` headers (80 bytes each) and re-hash the remainder.
  truncate.data = truncate.data?.slice(count * 80)
  delete this.fileHashToIndex[truncate.fileHash]
  truncate.fileHash = asString(Hash.sha256(asArray(truncate.data!)), 'base64')
  this.fileHashToIndex[truncate.fileHash] = updateIndex + 1
}
|
|
750
|
+
|
|
751
|
+
/**
 * Ensures the raw header data for `bfd` is resident in memory, loading it from
 * persistent storage (by fileId) or downloading it (by sourceUrl) as needed,
 * then verifies the sha256 of the bytes against bfd.fileHash and refreshes mru.
 *
 * @param bfd managed bulk file record; its `data` and `mru` fields are set.
 * @returns the file's raw header bytes.
 * @throws WERR_INVALID_PARAMETER when no source yields data or the hash mismatches.
 */
private async ensureData(bfd: BulkFileData): Promise<Uint8Array> {
  if (bfd.data) return bfd.data

  if (this.storage && bfd.fileId) {
    bfd.data = await this.storage.getBulkFileData(bfd.fileId)
    if (!bfd.data) throw new WERR_INVALID_PARAMETER('fileId', `valid, data not found for fileId ${bfd.fileId}`)
  }

  if (!bfd.data && this.fetch && bfd.sourceUrl) {
    // TODO - restore this change
    const url = this.fetch.pathJoin(bfd.sourceUrl, bfd.fileName)
    //const url = this.fetch.pathJoin('http://localhost:8842/blockheaders', bfd.fileName)

    try {
      bfd.data = await this.fetch.download(url)
    } catch (err) {
      // NOTE(review): single blind retry — the first error is discarded and a
      // second failure propagates. Confirm a logged/backed-off retry isn't intended.
      bfd.data = await this.fetch.download(url)
    }
    if (!bfd.data) throw new WERR_INVALID_PARAMETER('sourceUrl', `data not found for sourceUrl ${url}`)
  }

  if (!bfd.data) throw new WERR_INVALID_PARAMETER('data', `defined. Unable to retrieve data for ${bfd.fileName}`)

  bfd.mru = Date.now()

  // Validate retrieved data.
  const fileHash = asString(Hash.sha256(asArray(bfd.data)), 'base64')
  if (fileHash !== bfd.fileHash)
    throw new WERR_INVALID_PARAMETER('fileHash', `a match for retrieved data for ${bfd.fileName}`)

  // Newly resident data may push the retained-data count over the cap.
  this.ensureMaxRetained()
  return bfd.data
}
|
|
789
|
+
|
|
790
|
+
private ensureMaxRetained(): void {
|
|
791
|
+
if (this.maxRetained === undefined) return
|
|
792
|
+
let withData = this.bfds.filter(bfd => bfd.data && (bfd.fileId || bfd.sourceUrl))
|
|
793
|
+
let countToRelease = withData.length - this.maxRetained
|
|
794
|
+
if (countToRelease <= 0) return
|
|
795
|
+
const sorted = withData.sort((a, b) => a.mru - b.mru)
|
|
796
|
+
while (countToRelease-- > 0 && sorted.length > 0) {
|
|
797
|
+
const oldest = sorted.shift()!
|
|
798
|
+
// Release the least recently used data
|
|
799
|
+
oldest.data = undefined // Release the data
|
|
800
|
+
}
|
|
801
|
+
}
|
|
802
|
+
|
|
803
|
+
/**
 * Exports all managed bulk headers to a filesystem as a series of
 * `${chain}Net_{i}.headers` files plus a `${chain}NetBlockHeaders.json` index,
 * re-validating the full header chain as it streams.
 *
 * @param toFs destination filesystem abstraction.
 * @param toHeadersPerFile maximum headers written per output file.
 * @param toFolder destination folder path.
 * @param sourceUrl optional URL recorded as each file's sourceUrl and the index rootFolder.
 * @param maxHeight optional cap on the exported height range.
 */
async exportHeadersToFs(
  toFs: ChaintracksFsApi,
  toHeadersPerFile: number,
  toFolder: string,
  sourceUrl?: string,
  maxHeight?: number
): Promise<void> {
  const chain = this.chain
  const toFileName = (i: number) => `${chain}Net_${i}.headers`
  const toPath = (i: number) => toFs.pathJoin(toFolder, toFileName(i))
  const toJsonPath = () => toFs.pathJoin(toFolder, `${chain}NetBlockHeaders.json`)

  const toBulkFiles: BulkHeaderFilesInfo = {
    rootFolder: sourceUrl || toFolder,
    jsonFilename: `${chain}NetBlockHeaders.json`,
    headersPerFile: toHeadersPerFile,
    files: []
  }

  let range = await this.getHeightRange()
  if (maxHeight) range = range.intersect(new HeightRange(0, maxHeight))
  // Reader buffer sized so each read() yields exactly one output file's worth of headers.
  const reader = await this.createReader(range, toHeadersPerFile * 80)

  // Chain-link state: start from the genesis sentinel (all-zeros hash and work).
  let firstHeight = 0
  let lastHeaderHash = '00'.repeat(32)
  let lastChainWork = '00'.repeat(32)

  let i = -1
  for (;;) {
    i++
    const data = await reader.read()
    if (!data || data.length === 0) {
      break
    }

    // Validates the chunk chains from the previous file and returns its end state.
    const last = validateBufferOfHeaders(data, lastHeaderHash, 0, undefined, lastChainWork)

    await toFs.writeFile(toPath(i), data)

    const fileHash = asString(Hash.sha256(asArray(data)), 'base64')
    const file: BulkHeaderFileInfo = {
      chain,
      count: data.length / 80,
      fileHash,
      fileName: toFileName(i),
      firstHeight,
      lastChainWork: last.lastChainWork!,
      lastHash: last.lastHeaderHash,
      prevChainWork: lastChainWork,
      prevHash: lastHeaderHash,
      sourceUrl
    }
    toBulkFiles.files.push(file)
    // Advance chain-link state for the next output file.
    firstHeight += file.count
    lastHeaderHash = file.lastHash!
    lastChainWork = file.lastChainWork!
  }

  await toFs.writeFile(toJsonPath(), asUint8Array(JSON.stringify(toBulkFiles), 'utf8'))
}
|
|
863
|
+
}
|
|
864
|
+
|
|
865
|
+
/**
 * Internal cache record for a bulk header file, extending the public
 * `BulkHeaderFileInfo` with cache-management state.
 * NOTE(review): appears to be used only by the enclosing manager class — confirm.
 */
interface BulkFileData extends BulkHeaderFileInfo {
  // Most-recently-used timestamp (Date.now()), refreshed when the entry's
  // data is accessed; entries with the smallest mru are released first
  // when the retained-data limit is exceeded.
  mru: number
  // sha256 of the file's data, base64 encoded; retrieved data is verified
  // against this value. Required here (non-optional) unlike the base type.
  fileHash: string
}
|
|
869
|
+
|
|
870
|
+
export function selectBulkHeaderFiles(
|
|
871
|
+
files: BulkHeaderFileInfo[],
|
|
872
|
+
chain: Chain,
|
|
873
|
+
maxPerFile: number
|
|
874
|
+
): BulkHeaderFileInfo[] {
|
|
875
|
+
const r: BulkHeaderFileInfo[] = []
|
|
876
|
+
let height = 0
|
|
877
|
+
for (;;) {
|
|
878
|
+
const choices = files.filter(f => f.firstHeight === height && f.count <= maxPerFile && f.chain === chain)
|
|
879
|
+
// Pick the file with the maximum count
|
|
880
|
+
const choice = choices.reduce((a, b) => (a.count > b.count ? a : b), choices[0])
|
|
881
|
+
if (!choice) break // no more files to select
|
|
882
|
+
r.push(choice)
|
|
883
|
+
height += choice.count
|
|
884
|
+
}
|
|
885
|
+
return r
|
|
886
|
+
}
|
|
887
|
+
|
|
888
|
+
function isBdfIncremental(bfd: BulkFileData | BulkHeaderFileInfo): boolean {
|
|
889
|
+
return bfd.fileName === 'incremental' && !bfd.sourceUrl
|
|
890
|
+
}
|
|
891
|
+
|
|
892
|
+
function isBdfCdn(bfd: BulkFileData | BulkHeaderFileInfo): boolean {
|
|
893
|
+
return !isBdfIncremental(bfd)
|
|
894
|
+
}
|
|
895
|
+
|
|
896
|
+
function bfdToInfo(bfd: BulkFileData, keepData?: boolean): BulkHeaderFileInfo {
|
|
897
|
+
return {
|
|
898
|
+
chain: bfd.chain,
|
|
899
|
+
fileHash: bfd.fileHash,
|
|
900
|
+
fileName: bfd.fileName,
|
|
901
|
+
sourceUrl: bfd.sourceUrl,
|
|
902
|
+
fileId: bfd.fileId,
|
|
903
|
+
count: bfd.count,
|
|
904
|
+
prevChainWork: bfd.prevChainWork,
|
|
905
|
+
lastChainWork: bfd.lastChainWork,
|
|
906
|
+
firstHeight: bfd.firstHeight,
|
|
907
|
+
prevHash: bfd.prevHash,
|
|
908
|
+
lastHash: bfd.lastHash,
|
|
909
|
+
validated: bfd.validated || false,
|
|
910
|
+
data: keepData ? bfd.data : undefined
|
|
911
|
+
}
|
|
912
|
+
}
|
|
913
|
+
|
|
914
|
+
/**
 * Outcome of merging a set of bulk header file descriptions.
 * NOTE(review): presumably produced by a `BulkFileDataManager` merge
 * operation not visible in this chunk — confirm at call sites.
 */
export interface BulkFileDataManagerMergeResult {
  /** Files already present and left untouched by the merge. */
  unchanged: BulkHeaderFileInfo[]
  /** Files newly added by the merge. */
  inserted: BulkHeaderFileInfo[]
  /** Existing files whose properties were updated by the merge. */
  updated: BulkHeaderFileInfo[]
  /** Files removed as a result of the merge. */
  dropped: BulkHeaderFileInfo[]
}
|
|
920
|
+
|
|
921
|
+
export class BulkFileDataReader {
|
|
922
|
+
readonly manager: BulkFileDataManager
|
|
923
|
+
readonly range: HeightRange
|
|
924
|
+
readonly maxBufferSize: number
|
|
925
|
+
nextHeight: number
|
|
926
|
+
|
|
927
|
+
constructor(manager: BulkFileDataManager, range: HeightRange, maxBufferSize: number) {
|
|
928
|
+
this.manager = manager
|
|
929
|
+
this.range = range
|
|
930
|
+
this.maxBufferSize = maxBufferSize
|
|
931
|
+
this.nextHeight = range.minHeight
|
|
932
|
+
}
|
|
933
|
+
|
|
934
|
+
/**
|
|
935
|
+
* Returns the Buffer of block headers from the given `file` for the given `range`.
|
|
936
|
+
* If `range` is undefined, the file's full height range is read.
|
|
937
|
+
* The returned Buffer will only contain headers in `file` and in `range`
|
|
938
|
+
* @param file
|
|
939
|
+
* @param range
|
|
940
|
+
*/
|
|
941
|
+
private async readBufferFromFile(file: BulkHeaderFileInfo, range?: HeightRange): Promise<Uint8Array | undefined> {
|
|
942
|
+
// Constrain the range to the file's contents...
|
|
943
|
+
let fileRange = new HeightRange(file.firstHeight, file.firstHeight + file.count - 1)
|
|
944
|
+
if (range) fileRange = fileRange.intersect(range)
|
|
945
|
+
if (fileRange.isEmpty) return undefined
|
|
946
|
+
const offset = (fileRange.minHeight - file.firstHeight) * 80
|
|
947
|
+
const length = fileRange.length * 80
|
|
948
|
+
return await this.manager.getDataFromFile(file, offset, length)
|
|
949
|
+
}
|
|
950
|
+
|
|
951
|
+
/**
|
|
952
|
+
* @returns an array containing the next `maxBufferSize` bytes of headers from the files.
|
|
953
|
+
*/
|
|
954
|
+
async read(): Promise<Uint8Array | undefined> {
|
|
955
|
+
if (this.nextHeight === undefined || !this.range || this.range.isEmpty || this.nextHeight > this.range.maxHeight)
|
|
956
|
+
return undefined
|
|
957
|
+
let lastHeight = this.nextHeight + this.maxBufferSize / 80 - 1
|
|
958
|
+
lastHeight = Math.min(lastHeight, this.range.maxHeight)
|
|
959
|
+
let file = await this.manager.getFileForHeight(this.nextHeight)
|
|
960
|
+
if (!file) throw new WERR_INTERNAL(`logic error`)
|
|
961
|
+
const readRange = new HeightRange(this.nextHeight, lastHeight)
|
|
962
|
+
let buffers = new Uint8Array(readRange.length * 80)
|
|
963
|
+
let offset = 0
|
|
964
|
+
while (file) {
|
|
965
|
+
const buffer = await this.readBufferFromFile(file, readRange)
|
|
966
|
+
if (!buffer) break
|
|
967
|
+
buffers.set(buffer, offset)
|
|
968
|
+
offset += buffer.length
|
|
969
|
+
file = await this.manager.getFileForHeight(file.firstHeight + file.count)
|
|
970
|
+
}
|
|
971
|
+
if (!buffers.length || offset !== readRange.length * 80) return undefined
|
|
972
|
+
this.nextHeight = lastHeight + 1
|
|
973
|
+
return buffers
|
|
974
|
+
}
|
|
975
|
+
}
|