harperdb 3.2.0 → 3.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +20 -11
- package/bin/BinObjects.jsc +0 -0
- package/bin/harperdb.jsc +0 -0
- package/bin/install.jsc +0 -0
- package/bin/register.jsc +0 -0
- package/bin/run.jsc +0 -0
- package/bin/stop.jsc +0 -0
- package/bin/upgrade.jsc +0 -0
- package/bin/utility.jsc +0 -0
- package/bin/version.jsc +0 -0
- package/coverage/lcov.info +6624 -6141
- package/data_layer/CreateAttributeObject.jsc +0 -0
- package/data_layer/CreateTableObject.jsc +0 -0
- package/data_layer/DataLayerObjects.jsc +0 -0
- package/data_layer/DeleteBeforeObject.jsc +0 -0
- package/data_layer/DeleteObject.jsc +0 -0
- package/data_layer/DropAttributeObject.jsc +0 -0
- package/data_layer/InsertObject.jsc +0 -0
- package/data_layer/ReadTransactionLogObject.jsc +0 -0
- package/data_layer/SQLSearch.jsc +0 -0
- package/data_layer/SearchByConditionsObject.jsc +0 -0
- package/data_layer/SearchByHashObject.jsc +0 -0
- package/data_layer/SearchObject.jsc +0 -0
- package/data_layer/SqlSearchObject.jsc +0 -0
- package/data_layer/UpdateObject.jsc +0 -0
- package/data_layer/UpsertObject.jsc +0 -0
- package/data_layer/bulkLoad.jsc +0 -0
- package/data_layer/data_objects/BulkLoadObjects.jsc +0 -0
- package/data_layer/data_objects/UpsertObject.jsc +0 -0
- package/data_layer/delete.jsc +0 -0
- package/data_layer/export.jsc +0 -0
- package/data_layer/harperBridge/BridgeMethods.jsc +0 -0
- package/data_layer/harperBridge/bridgeUtility/checkForNewAttr.jsc +0 -0
- package/data_layer/harperBridge/bridgeUtility/convertOperationToTransaction.jsc +0 -0
- package/data_layer/harperBridge/bridgeUtility/evaluateTableGetAttributes.jsc +0 -0
- package/data_layer/harperBridge/bridgeUtility/insertUpdateReturnObj.jsc +0 -0
- package/data_layer/harperBridge/bridgeUtility/insertUpdateValidate.jsc +0 -0
- package/data_layer/harperBridge/harperBridge.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/LMDBBridge.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/DeleteTransactionsBeforeResults.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/lmdbCreateAttribute.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/lmdbCreateRecords.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/lmdbCreateSchema.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/lmdbCreateTable.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/lmdbDeleteRecords.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/lmdbDeleteRecordsBefore.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/lmdbDeleteTransactionLogsBefore.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/lmdbDropAttribute.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/lmdbDropSchema.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/lmdbDropTable.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/lmdbGetDataByHash.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/lmdbGetDataByValue.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/lmdbReadTransactionLog.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/lmdbSearchByConditions.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/lmdbSearchByHash.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/lmdbSearchByValue.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/lmdbUpdateRecords.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/lmdbUpsertRecords.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbUtility/LMDBCreateAttributeObject.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbUtility/LMDBDeleteTransactionObject.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbUtility/LMDBInsertTransactionObject.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbUtility/LMDBTransactionObject.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbUtility/LMDBUpdateTransactionObject.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbUtility/LMDBUpsertTransactionObject.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbUtility/TableSizeObject.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbUtility/ThreadSearchObject.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbUtility/initializeHashSearch.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbUtility/initializePaths.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbUtility/lmdbCheckForNewAttributes.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbUtility/lmdbCreateTransactionsEnvironment.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbUtility/lmdbDropAllAttributes.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbUtility/lmdbGetTableSize.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbUtility/lmdbProcessRows.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbUtility/lmdbSearch.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbUtility/lmdbThreadSearch.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbUtility/lmdbWriteTransaction.jsc +0 -0
- package/data_layer/hdbInfoController.jsc +0 -0
- package/data_layer/insert.jsc +0 -0
- package/data_layer/readTransactionLog.jsc +0 -0
- package/data_layer/schema.jsc +0 -0
- package/data_layer/schemaDescribe.jsc +0 -0
- package/data_layer/search.jsc +0 -0
- package/data_layer/update.jsc +0 -0
- package/events/ClusterStatusEmitter.jsc +0 -0
- package/events/SioServerStoppedEvent.jsc +0 -0
- package/events/SocketClusterStatusEmitter.jsc +0 -0
- package/license/LICENSE +91 -1
- package/node_modules/@msgpackr-extract/msgpackr-extract-linux-x64/README.md +1 -0
- package/node_modules/{node-addon-api/src/nothing.c → @msgpackr-extract/msgpackr-extract-linux-x64/index.js} +0 -0
- package/node_modules/@msgpackr-extract/msgpackr-extract-linux-x64/node.abi93.glibc.node +0 -0
- package/node_modules/@msgpackr-extract/msgpackr-extract-linux-x64/node.abi93.musl.node +0 -0
- package/node_modules/@msgpackr-extract/msgpackr-extract-linux-x64/node.napi.glibc.node +0 -0
- package/node_modules/@msgpackr-extract/msgpackr-extract-linux-x64/node.napi.musl.node +0 -0
- package/node_modules/@msgpackr-extract/msgpackr-extract-linux-x64/package.json +53 -0
- package/node_modules/{msgpackr-extract → lmdb-store}/.github/workflows/prebuild.yml +9 -10
- package/node_modules/lmdb-store/.idea/lmdb-store.iml +12 -0
- package/node_modules/lmdb-store/.idea/misc.xml +6 -0
- package/node_modules/lmdb-store/.idea/modules.xml +8 -0
- package/node_modules/lmdb-store/.idea/workspace.xml +4 -0
- package/node_modules/lmdb-store/README.md +393 -388
- package/node_modules/lmdb-store/benchmark/index.js +162 -162
- package/node_modules/lmdb-store/binding.gyp +79 -88
- package/node_modules/lmdb-store/caching.js +113 -113
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/COPYRIGHT +20 -20
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/Doxyfile +1631 -1631
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/LICENSE +47 -47
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/chacha8.c +183 -183
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/chacha8.h +14 -14
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/crypto.c +121 -121
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/intro.doc +192 -192
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mdb.c +12125 -12125
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mdb_copy.1 +74 -74
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mdb_copy.c +106 -106
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mdb_drop.1 +53 -53
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mdb_drop.c +154 -154
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mdb_dump.1 +94 -94
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mdb_dump.c +333 -333
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mdb_load.1 +97 -97
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mdb_load.c +530 -530
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mdb_stat.1 +83 -83
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mdb_stat.c +276 -276
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/midl.c +452 -452
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/midl.h +208 -208
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/module.c +101 -101
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/module.h +16 -16
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mtest.c +178 -178
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mtest2.c +124 -124
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mtest3.c +133 -133
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mtest4.c +168 -168
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mtest5.c +135 -135
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mtest6.c +141 -141
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mtest_enc.c +190 -190
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mtest_enc2.c +189 -189
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mtest_remap.c +177 -177
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/sample-bdb.txt +73 -73
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/sample-mdb.txt +62 -62
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/tooltag +27 -27
- package/node_modules/lmdb-store/dependencies/lz4/LICENSE +11 -11
- package/node_modules/lmdb-store/dependencies/lz4/lib/README.md +137 -137
- package/node_modules/lmdb-store/dependencies/lz4/lib/dll/example/README.md +69 -69
- package/node_modules/lmdb-store/dependencies/lz4/lib/lz4frame.c +1899 -1899
- package/node_modules/lmdb-store/dependencies/lz4/lib/xxhash.c +1030 -1030
- package/node_modules/lmdb-store/dependencies/lz4/lib/xxhash.h +328 -328
- package/node_modules/lmdb-store/dist/index.cjs +2591 -0
- package/node_modules/lmdb-store/dist/index.cjs.map +1 -0
- package/node_modules/lmdb-store/index.d.ts +323 -323
- package/node_modules/lmdb-store/index.js +1274 -1274
- package/node_modules/lmdb-store/index.mjs +3 -3
- package/node_modules/lmdb-store/package.json +16 -11
- package/node_modules/lmdb-store/prebuilds/darwin-arm64/electron.abi98.node +0 -0
- package/node_modules/lmdb-store/prebuilds/darwin-arm64/node.abi83.node +0 -0
- package/node_modules/lmdb-store/prebuilds/darwin-arm64/node.abi93.node +0 -0
- package/node_modules/lmdb-store/prebuilds/darwin-x64/electron.abi98.node +0 -0
- package/node_modules/lmdb-store/prebuilds/darwin-x64/node.abi83.node +0 -0
- package/node_modules/lmdb-store/prebuilds/darwin-x64/node.abi93.node +0 -0
- package/node_modules/lmdb-store/prebuilds/linux-arm64/electron.abi98.node +0 -0
- package/node_modules/lmdb-store/prebuilds/linux-arm64/node.abi83.node +0 -0
- package/node_modules/lmdb-store/prebuilds/linux-arm64/node.abi93.node +0 -0
- package/node_modules/lmdb-store/prebuilds/linux-x64/electron.abi98.node +0 -0
- package/node_modules/lmdb-store/prebuilds/linux-x64/node.abi83.musl.node +0 -0
- package/node_modules/lmdb-store/prebuilds/linux-x64/node.abi83.node +0 -0
- package/node_modules/lmdb-store/prebuilds/linux-x64/node.abi93.musl.node +0 -0
- package/node_modules/lmdb-store/prebuilds/linux-x64/node.abi93.node +0 -0
- package/node_modules/lmdb-store/prebuilds/win32-x64/electron.abi98.node +0 -0
- package/node_modules/lmdb-store/prebuilds/win32-x64/node.abi83.node +0 -0
- package/node_modules/lmdb-store/prebuilds/win32-x64/node.abi93.node +0 -0
- package/node_modules/lmdb-store/src/compression.cpp +181 -181
- package/node_modules/lmdb-store/src/cursor.cpp +407 -407
- package/node_modules/lmdb-store/src/dbi.cpp +354 -354
- package/node_modules/lmdb-store/src/env.cpp +1134 -1134
- package/node_modules/lmdb-store/src/misc.cpp +528 -528
- package/node_modules/lmdb-store/src/node-lmdb.cpp +44 -44
- package/node_modules/lmdb-store/src/node-lmdb.h +965 -965
- package/node_modules/lmdb-store/src/ordered-binary.cpp +337 -337
- package/node_modules/lmdb-store/src/txn.cpp +513 -513
- package/node_modules/lmdb-store/src/v8-fast-api-calls.h +419 -419
- package/node_modules/lmdb-store/src/windows.c +30 -30
- package/node_modules/lmdb-store/test/index.test.js +584 -584
- package/node_modules/lmdb-store/test/node-lmdb.test.js +1525 -1525
- package/node_modules/lmdb-store/test/threads.js +100 -100
- package/node_modules/lmdb-store/util/ArrayLikeIterable.js +136 -136
- package/node_modules/lmdb-store/util/WeakValueMap.js +40 -40
- package/node_modules/lmdb-store/util/upgrade-lmdb.js +46 -46
- package/node_modules/lmdb-store/util/when.js +8 -8
- package/node_modules/microtime/.github/workflows/release.yml +76 -0
- package/node_modules/microtime/.github/workflows/test.yml +46 -0
- package/node_modules/microtime/README.md +0 -2
- package/node_modules/microtime/binding.gyp +28 -10
- package/node_modules/microtime/package.json +25 -20
- package/node_modules/microtime/prebuilds/darwin-x64+arm64/electron.napi.node +0 -0
- package/node_modules/microtime/prebuilds/darwin-x64+arm64/node.napi.node +0 -0
- package/node_modules/microtime/prebuilds/linux-arm/node.napi.armv7.node +0 -0
- package/node_modules/microtime/prebuilds/linux-arm64/node.napi.armv8.node +0 -0
- package/node_modules/microtime/prebuilds/linux-x64/electron.napi.node +0 -0
- package/node_modules/microtime/prebuilds/linux-x64/node.napi.node +0 -0
- package/node_modules/microtime/prebuilds/win32-ia32/electron.napi.node +0 -0
- package/node_modules/microtime/prebuilds/win32-ia32/node.napi.node +0 -0
- package/node_modules/microtime/prebuilds/win32-x64/electron.napi.node +0 -0
- package/node_modules/microtime/prebuilds/win32-x64/node.napi.node +0 -0
- package/node_modules/msgpackr/dist/index.js +1929 -1917
- package/node_modules/msgpackr/dist/index.min.js +67 -68
- package/node_modules/msgpackr/dist/node.cjs +1994 -1980
- package/node_modules/msgpackr/dist/test.js +683 -1235
- package/node_modules/msgpackr/index.d.ts +23 -12
- package/node_modules/msgpackr/node-index.js +23 -21
- package/node_modules/msgpackr/pack.js +935 -931
- package/node_modules/msgpackr/package.json +24 -12
- package/node_modules/msgpackr/unpack.d.ts +52 -50
- package/node_modules/msgpackr/unpack.js +1061 -1053
- package/node_modules/msgpackr-extract/bin/download-prebuilds.js +11 -0
- package/node_modules/msgpackr-extract/binding.gyp +22 -5
- package/node_modules/msgpackr-extract/index.js +1 -1
- package/node_modules/msgpackr-extract/package.json +46 -21
- package/node_modules/msgpackr-extract/src/.vs/ProjectSettings.json +3 -0
- package/node_modules/msgpackr-extract/src/.vs/VSWorkspaceState.json +7 -0
- package/node_modules/msgpackr-extract/src/.vs/slnx.sqlite +0 -0
- package/node_modules/msgpackr-extract/src/.vs/src/v16/.suo +0 -0
- package/node_modules/msgpackr-extract/src/.vs/src/v16/Browse.VC.db +0 -0
- package/node_modules/msgpackr-extract/{prebuilds/darwin-x64/node.abi72.node → src/.vs/src/v16/Browse.VC.db-shm} +0 -0
- package/node_modules/msgpackr-extract/src/.vs/src/v16/Browse.VC.db-wal +0 -0
- package/node_modules/msgpackr-extract/src/extract.cpp +272 -269
- package/node_modules/nan/package.json +0 -1
- package/node_modules/node-addon-api/README.md +146 -53
- package/node_modules/node-addon-api/common.gypi +21 -0
- package/node_modules/node-addon-api/except.gypi +25 -0
- package/node_modules/node-addon-api/index.js +7 -41
- package/node_modules/node-addon-api/napi-inl.deprecated.h +8 -8
- package/node_modules/node-addon-api/napi-inl.h +2795 -633
- package/node_modules/node-addon-api/napi.h +1547 -597
- package/node_modules/node-addon-api/node_api.gyp +9 -0
- package/node_modules/node-addon-api/noexcept.gypi +26 -0
- package/node_modules/node-addon-api/nothing.c +0 -0
- package/node_modules/node-addon-api/package-support.json +21 -0
- package/node_modules/node-addon-api/package.json +203 -13
- package/node_modules/node-addon-api/tools/README.md +12 -6
- package/node_modules/node-addon-api/tools/clang-format.js +71 -0
- package/node_modules/node-addon-api/tools/conversion.js +4 -8
- package/node_modules/node-addon-api/tools/eslint-format.js +71 -0
- package/node_modules/node-gyp-build/README.md +17 -14
- package/node_modules/node-gyp-build/bin.js +28 -15
- package/node_modules/node-gyp-build/index.js +145 -34
- package/node_modules/node-gyp-build/package.json +18 -15
- package/node_modules/{lmdb-store/node_modules/node-gyp-build → node-gyp-build-optional-packages}/LICENSE +0 -0
- package/node_modules/{lmdb-store/node_modules/node-gyp-build → node-gyp-build-optional-packages}/README.md +0 -0
- package/node_modules/{lmdb-store/node_modules/node-gyp-build → node-gyp-build-optional-packages}/bin.js +1 -1
- package/node_modules/{lmdb-store/node_modules/node-gyp-build → node-gyp-build-optional-packages}/build-test.js +0 -0
- package/node_modules/{lmdb-store/node_modules/node-gyp-build → node-gyp-build-optional-packages}/index.js +17 -11
- package/node_modules/{lmdb-store/node_modules/node-gyp-build → node-gyp-build-optional-packages}/optional.js +0 -0
- package/node_modules/{msgpackr-extract/node_modules/node-gyp-build → node-gyp-build-optional-packages}/package.json +17 -17
- package/package.json +12 -11
- package/security/JWTObjects.jsc +0 -0
- package/security/auth.jsc +0 -0
- package/security/cryptoHash.jsc +0 -0
- package/security/data_objects/PermissionAttributeResponseObject.jsc +0 -0
- package/security/data_objects/PermissionResponseObject.jsc +0 -0
- package/security/data_objects/PermissionTableResponseObject.jsc +0 -0
- package/security/permissionsTranslator.jsc +0 -0
- package/security/role.jsc +0 -0
- package/security/tokenAuthentication.jsc +0 -0
- package/security/user.jsc +0 -0
- package/server/ClusteringOriginObject.jsc +0 -0
- package/server/JobObject.jsc +0 -0
- package/server/clustering/ClusterStatusObject.jsc +0 -0
- package/server/clustering/NodeObject.jsc +0 -0
- package/server/clustering/clusterUtilities.jsc +0 -0
- package/server/configuration.jsc +0 -0
- package/server/customFunctions/customFunctionsServer.jsc +0 -0
- package/server/customFunctions/helpers/getCORSOptions.jsc +0 -0
- package/server/customFunctions/helpers/getHeaderTimeoutConfig.jsc +0 -0
- package/server/customFunctions/helpers/getServerOptions.jsc +0 -0
- package/server/customFunctions/operations.jsc +0 -0
- package/server/customFunctions/operationsValidation.jsc +0 -0
- package/server/harperdb/hdbServer.jsc +0 -0
- package/server/ipc/IPCClient.jsc +0 -0
- package/server/ipc/hdbIpcServer.jsc +0 -0
- package/server/ipc/serverHandlers.jsc +0 -0
- package/server/ipc/utility/IPCEventObject.jsc +0 -0
- package/server/ipc/utility/ipcUtils.jsc +0 -0
- package/server/jobRunner.jsc +0 -0
- package/server/jobThread.jsc +0 -0
- package/server/jobs.jsc +0 -0
- package/server/serverHelpers/OperationFunctionObject.jsc +0 -0
- package/server/serverHelpers/requestTimePlugin.jsc +0 -0
- package/server/serverHelpers/serverHandlers.jsc +0 -0
- package/server/serverHelpers/serverUtilities.jsc +0 -0
- package/server/socketcluster/Server.jsc +0 -0
- package/server/socketcluster/broker.jsc +0 -0
- package/server/socketcluster/connector/HDBSocketConnector.jsc +0 -0
- package/server/socketcluster/connector/InterNodeSocketConnector.jsc +0 -0
- package/server/socketcluster/connector/SocketConnector.jsc +0 -0
- package/server/socketcluster/connector/spawnSCConnection.jsc +0 -0
- package/server/socketcluster/decisionMatrix/CoreDecisionMatrix.jsc +0 -0
- package/server/socketcluster/decisionMatrix/DecisionMatrixIF.jsc +0 -0
- package/server/socketcluster/decisionMatrix/rules/AssignToHdbChildWorkerRule.jsc +0 -0
- package/server/socketcluster/decisionMatrix/rules/CallRoomMsgHandlerRule.jsc +0 -0
- package/server/socketcluster/decisionMatrix/rules/CleanDataObjectRule.jsc +0 -0
- package/server/socketcluster/decisionMatrix/rules/CommandCollection.jsc +0 -0
- package/server/socketcluster/decisionMatrix/rules/DummyRule.jsc +0 -0
- package/server/socketcluster/decisionMatrix/rules/RulesIF.jsc +0 -0
- package/server/socketcluster/decisionMatrix/rules/StripHdbHeaderRule.jsc +0 -0
- package/server/socketcluster/decisionMatrix/rules/TestRule.jsc +0 -0
- package/server/socketcluster/handlers/NodeConnectionsHandler.jsc +0 -0
- package/server/socketcluster/handlers/SCServer.jsc +0 -0
- package/server/socketcluster/handlers/ServerSocket.jsc +0 -0
- package/server/socketcluster/interNodeConnectionLauncher.jsc +0 -0
- package/server/socketcluster/messageQueue/MessageQueueIF.jsc +0 -0
- package/server/socketcluster/middleware/AuthMiddleware.jsc +0 -0
- package/server/socketcluster/middleware/ConnectionNameCheckMiddleware.jsc +0 -0
- package/server/socketcluster/middleware/GenericMiddleware.jsc +0 -0
- package/server/socketcluster/middleware/MessagePrepMiddleware.jsc +0 -0
- package/server/socketcluster/middleware/MiddlewareFactory.jsc +0 -0
- package/server/socketcluster/middleware/MiddlewareIF.jsc +0 -0
- package/server/socketcluster/middleware/OriginatorCheckMiddleware.jsc +0 -0
- package/server/socketcluster/middleware/RequestDataValidMiddleware.jsc +0 -0
- package/server/socketcluster/middleware/StampOriginatorMiddleware.jsc +0 -0
- package/server/socketcluster/middleware/StampRequestMiddleware.jsc +0 -0
- package/server/socketcluster/observer/EventableIF.jsc +0 -0
- package/server/socketcluster/room/AddUserRoom.jsc +0 -0
- package/server/socketcluster/room/AlterUserRoom.jsc +0 -0
- package/server/socketcluster/room/CoreRoom.jsc +0 -0
- package/server/socketcluster/room/CreateAttributeRoom.jsc +0 -0
- package/server/socketcluster/room/CreateSchemaRoom.jsc +0 -0
- package/server/socketcluster/room/CreateTableRoom.jsc +0 -0
- package/server/socketcluster/room/DropUserRoom.jsc +0 -0
- package/server/socketcluster/room/HDBNodeRoom.jsc +0 -0
- package/server/socketcluster/room/RoomIF.jsc +0 -0
- package/server/socketcluster/room/RoomMessageObjects.jsc +0 -0
- package/server/socketcluster/room/UsersRoom.jsc +0 -0
- package/server/socketcluster/room/WatchHDBWorkersRoom.jsc +0 -0
- package/server/socketcluster/room/WorkerRoom.jsc +0 -0
- package/server/socketcluster/room/roomFactory.jsc +0 -0
- package/server/socketcluster/socketClusterObjects.jsc +0 -0
- package/server/socketcluster/types.jsc +0 -0
- package/server/socketcluster/util/clusterData.jsc +0 -0
- package/server/socketcluster/util/socketClusterUtils.jsc +0 -0
- package/server/socketcluster/worker/ClusterWorker.jsc +0 -0
- package/server/socketcluster/worker/WorkerIF.jsc +0 -0
- package/server/socketcluster/worker/WorkerObjects.jsc +0 -0
- package/server/transactToClusteringUtilities.jsc +0 -0
- package/sqlTranslator/SelectValidator.jsc +0 -0
- package/sqlTranslator/alasqlFunctionImporter.jsc +0 -0
- package/sqlTranslator/conditionPatterns.jsc +0 -0
- package/sqlTranslator/deleteTranslator.jsc +0 -0
- package/sqlTranslator/index.jsc +0 -0
- package/sqlTranslator/sql_statement_bucket.jsc +0 -0
- package/upgrade/EnvironmentVariable.jsc +0 -0
- package/upgrade/UpgradeDirective.jsc +0 -0
- package/upgrade/UpgradeObjects.jsc +0 -0
- package/upgrade/directives/3-0-0.jsc +0 -0
- package/upgrade/directives/3-1-0.jsc +0 -0
- package/upgrade/directives/directivesController.jsc +0 -0
- package/upgrade/directives/upgrade_scripts/3_0_0_reindex_script.jsc +0 -0
- package/upgrade/directivesManager.jsc +0 -0
- package/upgrade/lmdb/nodeLMDB/DBIDefinition.jsc +0 -0
- package/upgrade/lmdb/nodeLMDB/OpenDBIObject.jsc +0 -0
- package/upgrade/lmdb/nodeLMDB/OpenEnvironmentObject.jsc +0 -0
- package/upgrade/lmdb/nodeLMDB/commonErrors.jsc +0 -0
- package/upgrade/lmdb/nodeLMDB/commonUtility.jsc +0 -0
- package/upgrade/lmdb/nodeLMDB/environmentUtility.jsc +0 -0
- package/upgrade/lmdb/nodeLMDB/terms.jsc +0 -0
- package/upgrade/upgradePrompt.jsc +0 -0
- package/upgrade/upgradeUtilities.jsc +0 -0
- package/utility/AWS/AWSConnector.jsc +0 -0
- package/utility/OperationFunctionCaller.jsc +0 -0
- package/utility/common_utils.jsc +0 -0
- package/utility/environment/SystemInformationObject.jsc +0 -0
- package/utility/environment/SystemInformationOperation.jsc +0 -0
- package/utility/environment/environmentManager.jsc +0 -0
- package/utility/environment/systemInformation.jsc +0 -0
- package/utility/errors/commonErrors.jsc +0 -0
- package/utility/errors/hdbError.jsc +0 -0
- package/utility/functions/date/dateFunctions.jsc +0 -0
- package/utility/functions/geo.jsc +0 -0
- package/utility/functions/math/avg.jsc +0 -0
- package/utility/functions/math/count.jsc +0 -0
- package/utility/functions/sql/alaSQLExtension.jsc +0 -0
- package/utility/functions/string/compare.jsc +0 -0
- package/utility/globalSchema.jsc +0 -0
- package/utility/hdbTerms.jsc +0 -0
- package/utility/install/checkJWTTokensExist.jsc +0 -0
- package/utility/install/installer.jsc +0 -0
- package/utility/install_user_permission.jsc +0 -0
- package/utility/lmdb/DBIDefinition.jsc +0 -0
- package/utility/lmdb/DeleteRecordsResponseObject.jsc +0 -0
- package/utility/lmdb/InsertRecordsResponseObject.jsc +0 -0
- package/utility/lmdb/OpenDBIObject.jsc +0 -0
- package/utility/lmdb/OpenEnvironmentObject.jsc +0 -0
- package/utility/lmdb/UpdateRecordsResponseObject.jsc +0 -0
- package/utility/lmdb/UpsertRecordsResponseObject.jsc +0 -0
- package/utility/lmdb/cleanLMDBMap.jsc +0 -0
- package/utility/lmdb/commonUtility.jsc +0 -0
- package/utility/lmdb/deleteUtility.jsc +0 -0
- package/utility/lmdb/environmentUtility.jsc +0 -0
- package/utility/lmdb/searchCursorFunctions.jsc +0 -0
- package/utility/lmdb/searchUtility.jsc +0 -0
- package/utility/lmdb/terms.jsc +0 -0
- package/utility/lmdb/writeUtility.jsc +0 -0
- package/utility/logging/harper_logger.jsc +0 -0
- package/utility/mount_hdb.jsc +0 -0
- package/utility/npmUtilities.jsc +0 -0
- package/utility/operation_authorization.jsc +0 -0
- package/utility/password.jsc +0 -0
- package/utility/pm2/servicesConfig.jsc +0 -0
- package/utility/pm2/utilityFunctions.jsc +0 -0
- package/utility/psList.jsc +0 -0
- package/utility/registration/hdb_license.jsc +0 -0
- package/utility/registration/licenseObjects.jsc +0 -0
- package/utility/registration/registrationHandler.jsc +0 -0
- package/utility/scripts/restartHdb.jsc +0 -0
- package/utility/signalling.jsc +0 -0
- package/utility/system_info.jsc +0 -0
- package/validation/bulkDeleteValidator.jsc +0 -0
- package/validation/check_permissions.jsc +0 -0
- package/validation/clustering/configureValidator.jsc +0 -0
- package/validation/common_validators.jsc +0 -0
- package/validation/conditionalDeleteValidator.jsc +0 -0
- package/validation/deleteValidator.jsc +0 -0
- package/validation/fileLoadValidator.jsc +0 -0
- package/validation/insertValidator.jsc +0 -0
- package/validation/nodeSubscriptionValidator.jsc +0 -0
- package/validation/nodeValidator.jsc +0 -0
- package/validation/readLogValidator.jsc +0 -0
- package/validation/registration/license_key_object.jsc +0 -0
- package/validation/role_validation.jsc +0 -0
- package/validation/schemaMetadataValidator.jsc +0 -0
- package/validation/schema_validator.jsc +0 -0
- package/validation/searchValidator.jsc +0 -0
- package/validation/user_validation.jsc +0 -0
- package/validation/validationWrapper.jsc +0 -0
- package/node_modules/lmdb-store/build/Makefile +0 -324
- package/node_modules/lmdb-store/build/Release/.deps/Release/lmdb-store.node.d +0 -1
- package/node_modules/lmdb-store/build/Release/.deps/Release/obj.target/lmdb-store/dependencies/lmdb/libraries/liblmdb/chacha8.o.d +0 -6
- package/node_modules/lmdb-store/build/Release/.deps/Release/obj.target/lmdb-store/dependencies/lmdb/libraries/liblmdb/mdb.o.d +0 -8
- package/node_modules/lmdb-store/build/Release/.deps/Release/obj.target/lmdb-store/dependencies/lmdb/libraries/liblmdb/midl.o.d +0 -8
- package/node_modules/lmdb-store/build/Release/.deps/Release/obj.target/lmdb-store/dependencies/lz4/lib/lz4.o.d +0 -5
- package/node_modules/lmdb-store/build/Release/.deps/Release/obj.target/lmdb-store/src/compression.o.d +0 -72
- package/node_modules/lmdb-store/build/Release/.deps/Release/obj.target/lmdb-store/src/cursor.o.d +0 -73
- package/node_modules/lmdb-store/build/Release/.deps/Release/obj.target/lmdb-store/src/dbi.o.d +0 -73
- package/node_modules/lmdb-store/build/Release/.deps/Release/obj.target/lmdb-store/src/env.o.d +0 -73
- package/node_modules/lmdb-store/build/Release/.deps/Release/obj.target/lmdb-store/src/misc.o.d +0 -73
- package/node_modules/lmdb-store/build/Release/.deps/Release/obj.target/lmdb-store/src/node-lmdb.o.d +0 -73
- package/node_modules/lmdb-store/build/Release/.deps/Release/obj.target/lmdb-store/src/ordered-binary.o.d +0 -73
- package/node_modules/lmdb-store/build/Release/.deps/Release/obj.target/lmdb-store/src/txn.o.d +0 -73
- package/node_modules/lmdb-store/build/Release/.deps/Release/obj.target/lmdb-store/src/windows.o.d +0 -3
- package/node_modules/lmdb-store/build/Release/.deps/Release/obj.target/lmdb-store.node.d +0 -1
- package/node_modules/lmdb-store/build/Release/lmdb-store.node +0 -0
- package/node_modules/lmdb-store/build/Release/obj.target/lmdb-store/dependencies/lmdb/libraries/liblmdb/chacha8.o +0 -0
- package/node_modules/lmdb-store/build/Release/obj.target/lmdb-store/dependencies/lmdb/libraries/liblmdb/mdb.o +0 -0
- package/node_modules/lmdb-store/build/Release/obj.target/lmdb-store/dependencies/lmdb/libraries/liblmdb/midl.o +0 -0
- package/node_modules/lmdb-store/build/Release/obj.target/lmdb-store/dependencies/lz4/lib/lz4.o +0 -0
- package/node_modules/lmdb-store/build/Release/obj.target/lmdb-store/src/compression.o +0 -0
- package/node_modules/lmdb-store/build/Release/obj.target/lmdb-store/src/cursor.o +0 -0
- package/node_modules/lmdb-store/build/Release/obj.target/lmdb-store/src/dbi.o +0 -0
- package/node_modules/lmdb-store/build/Release/obj.target/lmdb-store/src/env.o +0 -0
- package/node_modules/lmdb-store/build/Release/obj.target/lmdb-store/src/misc.o +0 -0
- package/node_modules/lmdb-store/build/Release/obj.target/lmdb-store/src/node-lmdb.o +0 -0
- package/node_modules/lmdb-store/build/Release/obj.target/lmdb-store/src/ordered-binary.o +0 -0
- package/node_modules/lmdb-store/build/Release/obj.target/lmdb-store/src/txn.o +0 -0
- package/node_modules/lmdb-store/build/Release/obj.target/lmdb-store/src/windows.o +0 -0
- package/node_modules/lmdb-store/build/Release/obj.target/lmdb-store.node +0 -0
- package/node_modules/lmdb-store/build/binding.Makefile +0 -6
- package/node_modules/lmdb-store/build/config.gypi +0 -426
- package/node_modules/lmdb-store/build/lmdb-store.target.mk +0 -206
- package/node_modules/lmdb-store/node_modules/node-gyp-build/package.json +0 -60
- package/node_modules/lmdb-store/prebuilds/darwin-x64/electron.abi87.node +0 -0
- package/node_modules/lmdb-store/prebuilds/darwin-x64/node.abi72.node +0 -0
- package/node_modules/lmdb-store/prebuilds/darwin-x64/node.abi88.node +0 -0
- package/node_modules/lmdb-store/prebuilds/win32-x64/electron.abi87.node +0 -0
- package/node_modules/lmdb-store/prebuilds/win32-x64/node.abi72.node +0 -0
- package/node_modules/lmdb-store/prebuilds/win32-x64/node.abi88.node +0 -0
- package/node_modules/microtime/prebuilds/darwin-x64/electron-napi.node +0 -0
- package/node_modules/microtime/prebuilds/darwin-x64/node-napi.node +0 -0
- package/node_modules/microtime/prebuilds/linux-arm/electron-napi.node +0 -0
- package/node_modules/microtime/prebuilds/linux-arm/node-napi.node +0 -0
- package/node_modules/microtime/prebuilds/linux-x64/electron-napi.node +0 -0
- package/node_modules/microtime/prebuilds/linux-x64/node-napi.node +0 -0
- package/node_modules/microtime/prebuilds/win32-x64/electron-napi.node +0 -0
- package/node_modules/microtime/prebuilds/win32-x64/node-napi.node +0 -0
- package/node_modules/msgpackr-extract/.circleci/config.yml +0 -19
- package/node_modules/msgpackr-extract/.travis.yml +0 -30
- package/node_modules/msgpackr-extract/node_modules/node-gyp-build/LICENSE +0 -21
- package/node_modules/msgpackr-extract/node_modules/node-gyp-build/README.md +0 -58
- package/node_modules/msgpackr-extract/node_modules/node-gyp-build/bin.js +0 -77
- package/node_modules/msgpackr-extract/node_modules/node-gyp-build/build-test.js +0 -19
- package/node_modules/msgpackr-extract/node_modules/node-gyp-build/index.js +0 -202
- package/node_modules/msgpackr-extract/node_modules/node-gyp-build/optional.js +0 -7
- package/node_modules/msgpackr-extract/prebuilds/darwin-arm64/electron.abi98.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/darwin-arm64/node.abi102.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/darwin-arm64/node.abi83.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/darwin-arm64/node.abi93.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/darwin-x64/electron.abi98.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/darwin-x64/node.abi102.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/darwin-x64/node.abi83.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/darwin-x64/node.abi88.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/darwin-x64/node.abi93.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/linux-arm64/electron.abi98.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/linux-arm64/node.abi102.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/linux-arm64/node.abi83.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/linux-arm64/node.abi93.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/linux-x64/electron.abi98.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/linux-x64/node.abi102.musl.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/linux-x64/node.abi102.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/linux-x64/node.abi72.musl.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/linux-x64/node.abi72.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/linux-x64/node.abi83.musl.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/linux-x64/node.abi83.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/linux-x64/node.abi88.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/linux-x64/node.abi93.musl.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/linux-x64/node.abi93.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/win32-x64/electron.abi98.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/win32-x64/node.abi102.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/win32-x64/node.abi72.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/win32-x64/node.abi83.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/win32-x64/node.abi88.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/win32-x64/node.abi93.node +0 -0
- package/node_modules/node-addon-api/.editorconfig +0 -8
- package/node_modules/node-addon-api/.travis.yml +0 -65
- package/node_modules/node-addon-api/CHANGELOG.md +0 -325
- package/node_modules/node-addon-api/CODE_OF_CONDUCT.md +0 -4
- package/node_modules/node-addon-api/CONTRIBUTING.md +0 -66
- package/node_modules/node-addon-api/appveyor.yml +0 -48
- package/node_modules/node-addon-api/doc/Doxyfile +0 -2450
- package/node_modules/node-addon-api/doc/array_buffer.md +0 -129
- package/node_modules/node-addon-api/doc/async_context.md +0 -76
- package/node_modules/node-addon-api/doc/async_operations.md +0 -31
- package/node_modules/node-addon-api/doc/async_worker.md +0 -397
- package/node_modules/node-addon-api/doc/basic_types.md +0 -415
- package/node_modules/node-addon-api/doc/bigint.md +0 -92
- package/node_modules/node-addon-api/doc/boolean.md +0 -64
- package/node_modules/node-addon-api/doc/buffer.md +0 -140
- package/node_modules/node-addon-api/doc/callback_scope.md +0 -54
- package/node_modules/node-addon-api/doc/callbackinfo.md +0 -97
- package/node_modules/node-addon-api/doc/checker-tool.md +0 -32
- package/node_modules/node-addon-api/doc/class_property_descriptor.md +0 -118
- package/node_modules/node-addon-api/doc/cmake-js.md +0 -19
- package/node_modules/node-addon-api/doc/conversion-tool.md +0 -28
- package/node_modules/node-addon-api/doc/creating_a_release.md +0 -62
- package/node_modules/node-addon-api/doc/dataview.md +0 -244
- package/node_modules/node-addon-api/doc/env.md +0 -63
- package/node_modules/node-addon-api/doc/error.md +0 -115
- package/node_modules/node-addon-api/doc/error_handling.md +0 -186
- package/node_modules/node-addon-api/doc/escapable_handle_scope.md +0 -82
- package/node_modules/node-addon-api/doc/external.md +0 -59
- package/node_modules/node-addon-api/doc/function.md +0 -294
- package/node_modules/node-addon-api/doc/function_reference.md +0 -238
- package/node_modules/node-addon-api/doc/generator.md +0 -13
- package/node_modules/node-addon-api/doc/handle_scope.md +0 -65
- package/node_modules/node-addon-api/doc/memory_management.md +0 -27
- package/node_modules/node-addon-api/doc/node-gyp.md +0 -82
- package/node_modules/node-addon-api/doc/number.md +0 -163
- package/node_modules/node-addon-api/doc/object.md +0 -202
- package/node_modules/node-addon-api/doc/object_lifetime_management.md +0 -83
- package/node_modules/node-addon-api/doc/object_reference.md +0 -117
- package/node_modules/node-addon-api/doc/object_wrap.md +0 -546
- package/node_modules/node-addon-api/doc/prebuild_tools.md +0 -16
- package/node_modules/node-addon-api/doc/promises.md +0 -74
- package/node_modules/node-addon-api/doc/property_descriptor.md +0 -231
- package/node_modules/node-addon-api/doc/range_error.md +0 -59
- package/node_modules/node-addon-api/doc/reference.md +0 -111
- package/node_modules/node-addon-api/doc/setup.md +0 -82
- package/node_modules/node-addon-api/doc/string.md +0 -89
- package/node_modules/node-addon-api/doc/symbol.md +0 -44
- package/node_modules/node-addon-api/doc/threadsafe_function.md +0 -303
- package/node_modules/node-addon-api/doc/type_error.md +0 -59
- package/node_modules/node-addon-api/doc/typed_array.md +0 -74
- package/node_modules/node-addon-api/doc/typed_array_of.md +0 -133
- package/node_modules/node-addon-api/doc/value.md +0 -269
- package/node_modules/node-addon-api/doc/version_management.md +0 -43
- package/node_modules/node-addon-api/doc/working_with_javascript_values.md +0 -14
- package/node_modules/node-addon-api/external-napi/node_api.h +0 -7
- package/node_modules/node-addon-api/src/node_api.cc +0 -3655
- package/node_modules/node-addon-api/src/node_api.gyp +0 -21
- package/node_modules/node-addon-api/src/node_api.h +0 -588
- package/node_modules/node-addon-api/src/node_api_types.h +0 -115
- package/node_modules/node-addon-api/src/node_internals.cc +0 -142
- package/node_modules/node-addon-api/src/node_internals.h +0 -157
- package/node_modules/node-addon-api/src/util-inl.h +0 -38
- package/node_modules/node-addon-api/src/util.h +0 -7
|
@@ -1,1274 +1,1274 @@
|
|
|
1
|
-
const { sync: mkdirpSync } = require('mkdirp')
|
|
2
|
-
const fs = require('fs')
|
|
3
|
-
const { extname, basename, dirname} = require('path')
|
|
4
|
-
const { ArrayLikeIterable } = require('./util/ArrayLikeIterable')
|
|
5
|
-
const when = require('./util/when')
|
|
6
|
-
const EventEmitter = require('events')
|
|
7
|
-
Object.assign(exports, require('node-gyp-build')(__dirname))
|
|
8
|
-
const { Env, Cursor, Compression, getLastVersion, setLastVersion, getBufferForAddress, keyValueToBuffer, bufferToKeyValue } = exports
|
|
9
|
-
const { CachingStore, setGetLastVersion } = require('./caching')
|
|
10
|
-
const os = require('os')
|
|
11
|
-
setGetLastVersion(getLastVersion)
|
|
12
|
-
Uint8ArraySlice = Uint8Array.prototype.slice
|
|
13
|
-
const syncInstructions = Buffer.allocUnsafeSlow(2048)
|
|
14
|
-
const syncInstructionsView = new DataView(syncInstructions.buffer, 0, 2048)
|
|
15
|
-
const buffers = []
|
|
16
|
-
|
|
17
|
-
const DEFAULT_SYNC_BATCH_THRESHOLD = 200000000 // 200MB
|
|
18
|
-
const DEFAULT_IMMEDIATE_BATCH_THRESHOLD = 10000000 // 10MB
|
|
19
|
-
const DEFAULT_COMMIT_DELAY = 0
|
|
20
|
-
const READING_TNX = {
|
|
21
|
-
readOnly: true
|
|
22
|
-
}
|
|
23
|
-
const ABORT = {}
|
|
24
|
-
|
|
25
|
-
const allDbs = exports.allDbs = new Map()
|
|
26
|
-
const SYNC_PROMISE_RESULT = Promise.resolve(true)
|
|
27
|
-
const SYNC_PROMISE_FAIL = Promise.resolve(false)
|
|
28
|
-
SYNC_PROMISE_RESULT.isSync = true
|
|
29
|
-
SYNC_PROMISE_FAIL.isSync = true
|
|
30
|
-
const LAST_KEY = String.fromCharCode(0xffff)
|
|
31
|
-
const LAST_BUFFER_KEY = Buffer.from([255, 255, 255, 255])
|
|
32
|
-
const FIRST_BUFFER_KEY = Buffer.from([0])
|
|
33
|
-
const ITERATOR_DONE = { done: true, value: undefined }
|
|
34
|
-
let env
|
|
35
|
-
let defaultCompression
|
|
36
|
-
let lastSize, lastOffset
|
|
37
|
-
exports.open = open
|
|
38
|
-
exports.ABORT = ABORT
|
|
39
|
-
let abortedNonChildTransactionWarn
|
|
40
|
-
function open(path, options) {
|
|
41
|
-
let env = new Env()
|
|
42
|
-
let committingWrites
|
|
43
|
-
let scheduledTransactions
|
|
44
|
-
let scheduledOperations
|
|
45
|
-
let asyncTransactionAfter = true, asyncTransactionStrictOrder
|
|
46
|
-
let transactionWarned
|
|
47
|
-
let readTxn, writeTxn, pendingBatch, currentCommit, runNextBatch, readTxnRenewed, cursorTxns = []
|
|
48
|
-
let renewId = 1
|
|
49
|
-
if (typeof path == 'object' && !options) {
|
|
50
|
-
options = path
|
|
51
|
-
path = options.path
|
|
52
|
-
}
|
|
53
|
-
let extension = extname(path)
|
|
54
|
-
let name = basename(path, extension)
|
|
55
|
-
let is32Bit = os.arch().endsWith('32')
|
|
56
|
-
let remapChunks = (options && options.remapChunks) || ((options && options.mapSize) ?
|
|
57
|
-
(is32Bit && options.mapSize > 0x100000000) : // larger than fits in address space, must use dynamic maps
|
|
58
|
-
is32Bit) // without a known map size, we default to being able to handle large data correctly/well*/
|
|
59
|
-
options = Object.assign({
|
|
60
|
-
path,
|
|
61
|
-
noSubdir: Boolean(extension),
|
|
62
|
-
isRoot: true,
|
|
63
|
-
maxDbs: 12,
|
|
64
|
-
remapChunks,
|
|
65
|
-
syncInstructions,
|
|
66
|
-
//winMemoryPriority: 4,
|
|
67
|
-
// default map size limit of 4 exabytes when using remapChunks, since it is not preallocated and we can
|
|
68
|
-
// make it super huge.
|
|
69
|
-
mapSize: remapChunks ? 0x10000000000000 :
|
|
70
|
-
0x20000, // Otherwise we start small with 128KB
|
|
71
|
-
}, options)
|
|
72
|
-
if (options.asyncTransactionOrder == 'before')
|
|
73
|
-
asyncTransactionAfter = false
|
|
74
|
-
else if (options.asyncTransactionOrder == 'strict') {
|
|
75
|
-
asyncTransactionStrictOrder = true
|
|
76
|
-
asyncTransactionAfter = false
|
|
77
|
-
}
|
|
78
|
-
if (!fs.existsSync(options.noSubdir ? dirname(path) : path))
|
|
79
|
-
mkdirpSync(options.noSubdir ? dirname(path) : path)
|
|
80
|
-
if (options.compression) {
|
|
81
|
-
let setDefault
|
|
82
|
-
if (options.compression == true) {
|
|
83
|
-
if (defaultCompression)
|
|
84
|
-
options.compression = defaultCompression
|
|
85
|
-
else
|
|
86
|
-
defaultCompression = options.compression = new Compression({
|
|
87
|
-
threshold: 1000,
|
|
88
|
-
dictionary: fs.readFileSync(require.resolve('./dict/dict.txt')),
|
|
89
|
-
})
|
|
90
|
-
} else
|
|
91
|
-
options.compression = new Compression(Object.assign({
|
|
92
|
-
threshold: 1000,
|
|
93
|
-
dictionary: fs.readFileSync(require.resolve('./dict/dict.txt')),
|
|
94
|
-
}), options.compression)
|
|
95
|
-
}
|
|
96
|
-
|
|
97
|
-
if (options && options.clearOnStart) {
|
|
98
|
-
console.info('Removing', path)
|
|
99
|
-
fs.removeSync(path)
|
|
100
|
-
console.info('Removed', path)
|
|
101
|
-
}
|
|
102
|
-
let useWritemap = options.useWritemap
|
|
103
|
-
try {
|
|
104
|
-
env.open(options)
|
|
105
|
-
} catch(error) {
|
|
106
|
-
if (error.message.startsWith('MDB_INVALID')) {
|
|
107
|
-
require('./util/upgrade-lmdb').upgrade(path, options, open)
|
|
108
|
-
env = new Env()
|
|
109
|
-
env.open(options)
|
|
110
|
-
} else
|
|
111
|
-
throw error
|
|
112
|
-
}
|
|
113
|
-
/* let filePath = noSubdir ? path : (path + '/data.mdb')
|
|
114
|
-
if (fs.statSync(filePath).size == env.info().mapSize && !options.remapChunks) {
|
|
115
|
-
// if the file size is identical to the map size, that means the OS is taking full disk space for
|
|
116
|
-
// mapping and we need to revert back to remapChunks
|
|
117
|
-
env.close()
|
|
118
|
-
options.remapChunks = true
|
|
119
|
-
env.open(options)
|
|
120
|
-
}*/
|
|
121
|
-
env.readerCheck() // clear out any stale entries
|
|
122
|
-
function renewReadTxn() {
|
|
123
|
-
if (readTxn)
|
|
124
|
-
readTxn.renew()
|
|
125
|
-
else
|
|
126
|
-
readTxn = env.beginTxn(READING_TNX)
|
|
127
|
-
readTxnRenewed = setImmediate(resetReadTxn)
|
|
128
|
-
return readTxn
|
|
129
|
-
}
|
|
130
|
-
function resetReadTxn() {
|
|
131
|
-
if (readTxnRenewed) {
|
|
132
|
-
renewId++
|
|
133
|
-
readTxnRenewed = null
|
|
134
|
-
if (readTxn.cursorCount - (readTxn.renewingCursorCount || 0) > 0) {
|
|
135
|
-
readTxn.onlyCursor = true
|
|
136
|
-
cursorTxns.push(readTxn)
|
|
137
|
-
readTxn = null
|
|
138
|
-
}
|
|
139
|
-
else
|
|
140
|
-
readTxn.reset()
|
|
141
|
-
}
|
|
142
|
-
}
|
|
143
|
-
let stores = []
|
|
144
|
-
class LMDBStore extends EventEmitter {
|
|
145
|
-
constructor(dbName, dbOptions) {
|
|
146
|
-
super()
|
|
147
|
-
if (dbName === undefined)
|
|
148
|
-
throw new Error('Database name must be supplied in name property (may be null for root database)')
|
|
149
|
-
|
|
150
|
-
const openDB = () => {
|
|
151
|
-
try {
|
|
152
|
-
this.db = env.openDbi(Object.assign({
|
|
153
|
-
name: dbName,
|
|
154
|
-
create: true,
|
|
155
|
-
txn: writeTxn,
|
|
156
|
-
}, dbOptions))
|
|
157
|
-
this.db.name = dbName || null
|
|
158
|
-
} catch(error) {
|
|
159
|
-
handleError(error, null, null, openDB)
|
|
160
|
-
}
|
|
161
|
-
}
|
|
162
|
-
if (dbOptions.compression && !(dbOptions.compression instanceof Compression)) {
|
|
163
|
-
if (dbOptions.compression == true && options.compression)
|
|
164
|
-
dbOptions.compression = options.compression // use the parent compression if available
|
|
165
|
-
else
|
|
166
|
-
dbOptions.compression = new Compression(Object.assign({
|
|
167
|
-
threshold: 1000,
|
|
168
|
-
dictionary: fs.readFileSync(require.resolve('./dict/dict.txt')),
|
|
169
|
-
}), dbOptions.compression)
|
|
170
|
-
}
|
|
171
|
-
|
|
172
|
-
if (dbOptions.dupSort && (dbOptions.useVersions || dbOptions.cache)) {
|
|
173
|
-
throw new Error('The dupSort flag can not be combined with versions or caching')
|
|
174
|
-
}
|
|
175
|
-
openDB()
|
|
176
|
-
resetReadTxn() // a read transaction becomes invalid after opening another db
|
|
177
|
-
this.name = dbName
|
|
178
|
-
this.env = env
|
|
179
|
-
this.reads = 0
|
|
180
|
-
this.writes = 0
|
|
181
|
-
this.transactions = 0
|
|
182
|
-
this.averageTransactionTime = 5
|
|
183
|
-
if (dbOptions.syncBatchThreshold)
|
|
184
|
-
console.warn('syncBatchThreshold is no longer supported')
|
|
185
|
-
if (dbOptions.immediateBatchThreshold)
|
|
186
|
-
console.warn('immediateBatchThreshold is no longer supported')
|
|
187
|
-
this.commitDelay = DEFAULT_COMMIT_DELAY
|
|
188
|
-
Object.assign(this, { // these are the options that are inherited
|
|
189
|
-
path: options.path,
|
|
190
|
-
encoding: options.encoding,
|
|
191
|
-
strictAsyncOrder: options.strictAsyncOrder,
|
|
192
|
-
}, dbOptions)
|
|
193
|
-
if (!this.encoding || this.encoding == 'msgpack' || this.encoding == 'cbor') {
|
|
194
|
-
this.encoder = this.decoder = new (this.encoding == 'cbor' ? require('cbor-x').Encoder : require('msgpackr').Encoder)
|
|
195
|
-
(Object.assign(this.sharedStructuresKey ?
|
|
196
|
-
this.setupSharedStructures() : {
|
|
197
|
-
copyBuffers: true // need to copy any embedded buffers that are found since we use unsafe buffers
|
|
198
|
-
}, options, dbOptions))
|
|
199
|
-
} else if (this.encoding == 'json') {
|
|
200
|
-
this.encoder = {
|
|
201
|
-
encode: JSON.stringify,
|
|
202
|
-
}
|
|
203
|
-
} else if (this.encoding == 'ordered-binary') {
|
|
204
|
-
this.encoder = this.decoder = {
|
|
205
|
-
encode(value) { return keyValueToBuffer(value) },
|
|
206
|
-
decode(buffer, end) { return bufferToKeyValue(buffer.slice(0, end)) }
|
|
207
|
-
}
|
|
208
|
-
}
|
|
209
|
-
allDbs.set(dbName ? name + '-' + dbName : name, this)
|
|
210
|
-
stores.push(this)
|
|
211
|
-
}
|
|
212
|
-
openDB(dbName, dbOptions) {
|
|
213
|
-
if (typeof dbName == 'object' && !dbOptions) {
|
|
214
|
-
dbOptions = dbName
|
|
215
|
-
dbName = options.name
|
|
216
|
-
} else
|
|
217
|
-
dbOptions = dbOptions || {}
|
|
218
|
-
try {
|
|
219
|
-
return dbOptions.cache ?
|
|
220
|
-
new (CachingStore(LMDBStore))(dbName, dbOptions) :
|
|
221
|
-
new LMDBStore(dbName, dbOptions)
|
|
222
|
-
} catch(error) {
|
|
223
|
-
if (error.message.indexOf('MDB_DBS_FULL') > -1) {
|
|
224
|
-
error.message += ' (increase your maxDbs option)'
|
|
225
|
-
}
|
|
226
|
-
throw error
|
|
227
|
-
}
|
|
228
|
-
}
|
|
229
|
-
transactionAsync(callback, asChild) {
|
|
230
|
-
if (writeTxn) {
|
|
231
|
-
// already nested in a transaction, just execute and return
|
|
232
|
-
return callback()
|
|
233
|
-
}
|
|
234
|
-
let lastOperation
|
|
235
|
-
let after, strictOrder
|
|
236
|
-
if (scheduledOperations) {
|
|
237
|
-
lastOperation = asyncTransactionAfter ? scheduledOperations.appendAsyncTxn :
|
|
238
|
-
scheduledOperations[asyncTransactionStrictOrder ? scheduledOperations.length - 1 : 0]
|
|
239
|
-
} else {
|
|
240
|
-
scheduledOperations = []
|
|
241
|
-
scheduledOperations.bytes = 0
|
|
242
|
-
}
|
|
243
|
-
let transactionSet
|
|
244
|
-
let transactionSetIndex
|
|
245
|
-
if (lastOperation === true) { // continue last set of transactions
|
|
246
|
-
transactionSetIndex = scheduledTransactions.length - 1
|
|
247
|
-
transactionSet = scheduledTransactions[transactionSetIndex]
|
|
248
|
-
} else {
|
|
249
|
-
// for now we signify transactions as a true
|
|
250
|
-
if (asyncTransactionAfter) // by default we add a flag to put transactions after other operations
|
|
251
|
-
scheduledOperations.appendAsyncTxn = true
|
|
252
|
-
else if (asyncTransactionStrictOrder)
|
|
253
|
-
scheduledOperations.push(true)
|
|
254
|
-
else // in before mode, we put all the async transaction at the beginning
|
|
255
|
-
scheduledOperations.unshift(true)
|
|
256
|
-
if (!scheduledTransactions) {
|
|
257
|
-
scheduledTransactions = []
|
|
258
|
-
}
|
|
259
|
-
transactionSetIndex = scheduledTransactions.push(transactionSet = []) - 1
|
|
260
|
-
}
|
|
261
|
-
let index = (transactionSet.push(asChild ?
|
|
262
|
-
{asChild, callback } : callback) - 1) << 1
|
|
263
|
-
return this.scheduleCommit().results.then((results) => {
|
|
264
|
-
let transactionResults = results.transactionResults[transactionSetIndex]
|
|
265
|
-
let error = transactionResults[index]
|
|
266
|
-
if (error)
|
|
267
|
-
throw error
|
|
268
|
-
return transactionResults[index + 1]
|
|
269
|
-
})
|
|
270
|
-
}
|
|
271
|
-
childTransaction(callback) {
|
|
272
|
-
if (useWritemap)
|
|
273
|
-
throw new Error('Child transactions are not supported in writemap mode')
|
|
274
|
-
if (writeTxn) {
|
|
275
|
-
let parentTxn = writeTxn
|
|
276
|
-
let childTxn = writeTxn = env.beginTxn(null, parentTxn)
|
|
277
|
-
try {
|
|
278
|
-
return when(callback(), (result) => {
|
|
279
|
-
writeTxn = parentTxn
|
|
280
|
-
if (result === ABORT)
|
|
281
|
-
childTxn.abort()
|
|
282
|
-
else
|
|
283
|
-
childTxn.commit()
|
|
284
|
-
return result
|
|
285
|
-
}, (error) => {
|
|
286
|
-
writeTxn = parentTxn
|
|
287
|
-
childTxn.abort()
|
|
288
|
-
throw error
|
|
289
|
-
})
|
|
290
|
-
} catch(error) {
|
|
291
|
-
writeTxn = parentTxn
|
|
292
|
-
childTxn.abort()
|
|
293
|
-
throw error
|
|
294
|
-
}
|
|
295
|
-
}
|
|
296
|
-
return this.transactionAsync(callback, true)
|
|
297
|
-
}
|
|
298
|
-
transaction(callback, abort) {
|
|
299
|
-
if (!transactionWarned) {
|
|
300
|
-
console.warn('transaction is deprecated, use transactionSync if you want a synchronous transaction or transactionAsync for asynchronous transaction. In this future this will always call transactionAsync.')
|
|
301
|
-
transactionWarned = true
|
|
302
|
-
}
|
|
303
|
-
let result = this.transactionSync(callback, abort)
|
|
304
|
-
return abort ? ABORT : result
|
|
305
|
-
}
|
|
306
|
-
transactionSync(callback, abort) {
|
|
307
|
-
if (writeTxn) {
|
|
308
|
-
if (!useWritemap && !this.cache)
|
|
309
|
-
// already nested in a transaction, execute as child transaction (if possible) and return
|
|
310
|
-
return this.childTransaction(callback)
|
|
311
|
-
let result = callback() // else just run in current transaction
|
|
312
|
-
if (result == ABORT && !abortedNonChildTransactionWarn) {
|
|
313
|
-
console.warn('Can not abort a transaction inside another transaction with ' + (this.cache ? 'caching enabled' : 'useWritemap enabled'))
|
|
314
|
-
abortedNonChildTransactionWarn = true
|
|
315
|
-
}
|
|
316
|
-
return result
|
|
317
|
-
}
|
|
318
|
-
let txn
|
|
319
|
-
try {
|
|
320
|
-
this.transactions++
|
|
321
|
-
txn = writeTxn = env.beginTxn()
|
|
322
|
-
/*if (scheduledOperations && runNextBatch) {
|
|
323
|
-
runNextBatch((operations, callback) => {
|
|
324
|
-
try {
|
|
325
|
-
callback(null, this.commitBatchNow(operations))
|
|
326
|
-
} catch (error) {
|
|
327
|
-
callback(error)
|
|
328
|
-
}
|
|
329
|
-
})
|
|
330
|
-
}
|
|
331
|
-
TODO: To reenable forced sequential writes, we need to re-execute the operations if we get an env resize
|
|
332
|
-
*/
|
|
333
|
-
return when(callback(), (result) => {
|
|
334
|
-
try {
|
|
335
|
-
if (result === ABORT)
|
|
336
|
-
txn.abort()
|
|
337
|
-
else {
|
|
338
|
-
txn.commit()
|
|
339
|
-
resetReadTxn()
|
|
340
|
-
}
|
|
341
|
-
writeTxn = null
|
|
342
|
-
return result
|
|
343
|
-
} catch(error) {
|
|
344
|
-
if (error.message == 'The transaction is already closed.') {
|
|
345
|
-
return result
|
|
346
|
-
}
|
|
347
|
-
return handleError(error, this, txn, () => this.transaction(callback))
|
|
348
|
-
}
|
|
349
|
-
}, (error) => {
|
|
350
|
-
return handleError(error, this, txn, () => this.transaction(callback))
|
|
351
|
-
})
|
|
352
|
-
} catch(error) {
|
|
353
|
-
return handleError(error, this, txn, () => this.transaction(callback))
|
|
354
|
-
}
|
|
355
|
-
}
|
|
356
|
-
getBinaryLocation(id) {
|
|
357
|
-
syncInstructionsView.setUint32(8, 4, true)
|
|
358
|
-
syncInstructionsView.setUint32(16, id, true)
|
|
359
|
-
//syncInstructions.utf8Write(id, 16, 1000)
|
|
360
|
-
syncInstructionsView.setUint32(20, 0)
|
|
361
|
-
// keyToBinary(syncInstructions, id)
|
|
362
|
-
this.db.get(id)
|
|
363
|
-
lastSize = syncInstructionsView.getUint32(0, true)
|
|
364
|
-
let bufferIndex = syncInstructionsView.getUint32(12, true)
|
|
365
|
-
let lastOffset = syncInstructionsView.getUint32(8, true)
|
|
366
|
-
if (lastOffset == 0 && bufferIndex == 0) {
|
|
367
|
-
return // not found
|
|
368
|
-
}
|
|
369
|
-
let buffer = buffers[bufferIndex]
|
|
370
|
-
let startOffset
|
|
371
|
-
if (!buffer || lastOffset < (startOffset = buffer.startOffset) || (lastOffset + lastSize > startOffset + 0x100000000)) {
|
|
372
|
-
if (buffer)
|
|
373
|
-
env.detachBuffer(buffer.buffer)
|
|
374
|
-
startOffset = (lastOffset >>> 16) * 0x10000
|
|
375
|
-
console.log('make buffer for address', bufferIndex * 0x100000000 + startOffset)
|
|
376
|
-
buffer = buffers[bufferIndex] = Buffer.from(getBufferForAddress(bufferIndex * 0x100000000 + startOffset))
|
|
377
|
-
buffer.startOffset = startOffset
|
|
378
|
-
}
|
|
379
|
-
lastOffset -= startOffset
|
|
380
|
-
return buffer.slice(lastOffset, lastOffset + lastSize)
|
|
381
|
-
}
|
|
382
|
-
|
|
383
|
-
getSizeBinaryFast(id) {
|
|
384
|
-
return lastSize = (writeTxn || (readTxnRenewed ? readTxn : renewReadTxn()))
|
|
385
|
-
.getBinaryUnsafe(this.db, id)
|
|
386
|
-
}
|
|
387
|
-
getString(id) {
|
|
388
|
-
let string = (writeTxn || (readTxnRenewed ? readTxn : renewReadTxn()))
|
|
389
|
-
.getUtf8(this.db, id)
|
|
390
|
-
if (string)
|
|
391
|
-
lastSize = string.length
|
|
392
|
-
return string
|
|
393
|
-
}
|
|
394
|
-
getBinaryFast(id) {
|
|
395
|
-
this.getSizeBinaryFast(id)
|
|
396
|
-
return lastSize === undefined ? undefined : this.db.unsafeBuffer.slice(0, lastSize)
|
|
397
|
-
}
|
|
398
|
-
getBinary(id) {
|
|
399
|
-
this.getSizeBinaryFast(id)
|
|
400
|
-
return lastSize === undefined ? undefined : Uint8ArraySlice.call(this.db.unsafeBuffer, 0, lastSize)
|
|
401
|
-
}
|
|
402
|
-
get(id) {
|
|
403
|
-
if (this.decoder) {
|
|
404
|
-
this.getSizeBinaryFast(id)
|
|
405
|
-
return lastSize === undefined ? undefined : this.decoder.decode(this.db.unsafeBuffer, lastSize)
|
|
406
|
-
}
|
|
407
|
-
if (this.encoding == 'binary')
|
|
408
|
-
return this.getBinary(id)
|
|
409
|
-
|
|
410
|
-
let result = this.getString(id)
|
|
411
|
-
if (result) {
|
|
412
|
-
if (this.encoding == 'json')
|
|
413
|
-
return JSON.parse(result)
|
|
414
|
-
}
|
|
415
|
-
return result
|
|
416
|
-
}
|
|
417
|
-
getEntry(id) {
|
|
418
|
-
let value = this.get(id)
|
|
419
|
-
if (value !== undefined) {
|
|
420
|
-
if (this.useVersions)
|
|
421
|
-
return {
|
|
422
|
-
value,
|
|
423
|
-
version: getLastVersion(),
|
|
424
|
-
//size: lastSize
|
|
425
|
-
}
|
|
426
|
-
else
|
|
427
|
-
return {
|
|
428
|
-
value,
|
|
429
|
-
//size: lastSize
|
|
430
|
-
}
|
|
431
|
-
}
|
|
432
|
-
}
|
|
433
|
-
resetReadTxn() {
|
|
434
|
-
resetReadTxn()
|
|
435
|
-
}
|
|
436
|
-
ifNoExists(key, callback) {
|
|
437
|
-
return this.ifVersion(key, null, callback)
|
|
438
|
-
}
|
|
439
|
-
ifVersion(key, version, callback) {
|
|
440
|
-
if (typeof version != 'number') {
|
|
441
|
-
if (version == null) {
|
|
442
|
-
if (version === null)
|
|
443
|
-
version = -4.2434325325532E-199 // NO_EXIST_VERSION
|
|
444
|
-
else {// if undefined, just do callback without any condition being added
|
|
445
|
-
callback()
|
|
446
|
-
// TODO: if we are inside another ifVersion, use that promise, or use ANY_VERSION
|
|
447
|
-
return pendingBatch ? pendingBatch.unconditionalResults : Promise.resolve(true) // be consistent in returning a promise, indicate success
|
|
448
|
-
}
|
|
449
|
-
} else {
|
|
450
|
-
throw new Error('Version must be a number or null')
|
|
451
|
-
}
|
|
452
|
-
}
|
|
453
|
-
let scheduledOperations = this.getScheduledOperations()
|
|
454
|
-
let index = scheduledOperations.push([key, version]) - 1
|
|
455
|
-
try {
|
|
456
|
-
callback()
|
|
457
|
-
let commit = this.scheduleCommit()
|
|
458
|
-
return commit.results.then((writeResults) => {
|
|
459
|
-
if (writeResults[index] === 0)
|
|
460
|
-
return true
|
|
461
|
-
if (writeResults[index] === 3) {
|
|
462
|
-
throw new Error('The key size was 0 or too large')
|
|
463
|
-
}
|
|
464
|
-
return false
|
|
465
|
-
})
|
|
466
|
-
} finally {
|
|
467
|
-
scheduledOperations.push(false) // reset condition
|
|
468
|
-
}
|
|
469
|
-
}
|
|
470
|
-
doesExist(key, versionOrValue) {
|
|
471
|
-
let txn
|
|
472
|
-
try {
|
|
473
|
-
if (writeTxn) {
|
|
474
|
-
txn = writeTxn
|
|
475
|
-
} else {
|
|
476
|
-
txn = readTxnRenewed ? readTxn : renewReadTxn()
|
|
477
|
-
}
|
|
478
|
-
if (versionOrValue === undefined)
|
|
479
|
-
return txn.getBinaryUnsafe(this.db, key) !== undefined
|
|
480
|
-
else if (this.useVersions)
|
|
481
|
-
return txn.getBinaryUnsafe(this.db, key) !== undefined && matches(getLastVersion(), versionOrValue)
|
|
482
|
-
else {
|
|
483
|
-
let cursor = new Cursor(txn, this.db)
|
|
484
|
-
if (this.encoder) {
|
|
485
|
-
versionOrValue = this.encoder.encode(versionOrValue)
|
|
486
|
-
}
|
|
487
|
-
if (typeof versionOrValue == 'string')
|
|
488
|
-
versionOrValue = Buffer.from(versionOrValue)
|
|
489
|
-
let result = cursor.goToDup(key, versionOrValue) !== undefined
|
|
490
|
-
cursor.close()
|
|
491
|
-
return result
|
|
492
|
-
}
|
|
493
|
-
} catch(error) {
|
|
494
|
-
return handleError(error, this, txn, () => this.doesExist(key, versionOrValue))
|
|
495
|
-
}
|
|
496
|
-
}
|
|
497
|
-
getScheduledOperations() {
|
|
498
|
-
if (!scheduledOperations) {
|
|
499
|
-
scheduledOperations = []
|
|
500
|
-
scheduledOperations.bytes = 0
|
|
501
|
-
}
|
|
502
|
-
if (scheduledOperations.store != this) {
|
|
503
|
-
// issue action to switch dbs
|
|
504
|
-
scheduledOperations.store = this
|
|
505
|
-
scheduledOperations.push(this.db)
|
|
506
|
-
}
|
|
507
|
-
return scheduledOperations
|
|
508
|
-
}
|
|
509
|
-
put(id, value, version, ifVersion) {
|
|
510
|
-
if (id.length > 1978) {
|
|
511
|
-
throw new Error('Key is larger than maximum key size (1978)')
|
|
512
|
-
}
|
|
513
|
-
this.writes++
|
|
514
|
-
if (writeTxn) {
|
|
515
|
-
if (ifVersion !== undefined) {
|
|
516
|
-
this.get(id)
|
|
517
|
-
let previousVersion = this.get(id) ? getLastVersion() : null
|
|
518
|
-
if (!matches(previousVersion, ifVersion)) {
|
|
519
|
-
return SYNC_PROMISE_FAIL
|
|
520
|
-
}
|
|
521
|
-
}
|
|
522
|
-
putSync.call(this, id, value, version)
|
|
523
|
-
return SYNC_PROMISE_RESULT
|
|
524
|
-
}
|
|
525
|
-
if (this.encoder) {
|
|
526
|
-
//if (!(value instanceof Uint8Array)) TODO: in a future version, directly store buffers that are provided
|
|
527
|
-
value = this.encoder.encode(value)
|
|
528
|
-
} else if (typeof value != 'string' && !(value instanceof Uint8Array))
|
|
529
|
-
throw new Error('Invalid value to put in database ' + value + ' (' + (typeof value) +'), consider using encoder')
|
|
530
|
-
let operations = this.getScheduledOperations()
|
|
531
|
-
let index = operations.push(ifVersion == null ? version == null ? [id, value] : [id, value, version] : [id, value, version, ifVersion]) - 1
|
|
532
|
-
// track the size of the scheduled operations (and include the approx size of the array structure too)
|
|
533
|
-
operations.bytes += (id.length || 6) + (value && value.length || 0) + 100
|
|
534
|
-
let commit = this.scheduleCommit()
|
|
535
|
-
return ifVersion === undefined ? commit.unconditionalResults : // TODO: Technically you can get a bad key if an array is passed in there is no ifVersion and still fail
|
|
536
|
-
commit.results.then((writeResults) => {
|
|
537
|
-
if (writeResults[index] === 0)
|
|
538
|
-
return true
|
|
539
|
-
if (writeResults[index] === 3) {
|
|
540
|
-
throw new Error('The key size was 0 or too large')
|
|
541
|
-
}
|
|
542
|
-
return false
|
|
543
|
-
})
|
|
544
|
-
}
|
|
545
|
-
putSync(id, value, version) {
|
|
546
|
-
if (id.length > 1978) {
|
|
547
|
-
throw new Error('Key is larger than maximum key size (1978)')
|
|
548
|
-
}
|
|
549
|
-
let localTxn, hadWriteTxn = writeTxn
|
|
550
|
-
try {
|
|
551
|
-
this.writes++
|
|
552
|
-
if (!writeTxn)
|
|
553
|
-
localTxn = writeTxn = env.beginTxn()
|
|
554
|
-
if (this.encoder)
|
|
555
|
-
value = this.encoder.encode(value)
|
|
556
|
-
if (typeof value == 'string') {
|
|
557
|
-
writeTxn.putUtf8(this.db, id, value, version)
|
|
558
|
-
} else {
|
|
559
|
-
if (!(value instanceof Uint8Array)) {
|
|
560
|
-
throw new Error('Invalid value type ' + typeof value + ' used ' + value)
|
|
561
|
-
}
|
|
562
|
-
writeTxn.putBinary(this.db, id, value, version)
|
|
563
|
-
}
|
|
564
|
-
if (localTxn) {
|
|
565
|
-
writeTxn.commit()
|
|
566
|
-
writeTxn = null
|
|
567
|
-
resetReadTxn()
|
|
568
|
-
}
|
|
569
|
-
} catch(error) {
|
|
570
|
-
if (hadWriteTxn)
|
|
571
|
-
throw error // if we are in a transaction, the whole transaction probably needs to restart
|
|
572
|
-
return handleError(error, this, localTxn, () => this.putSync(id, value, version))
|
|
573
|
-
}
|
|
574
|
-
}
|
|
575
|
-
removeSync(id, ifVersionOrValue) {
|
|
576
|
-
if (id.length > 1978) {
|
|
577
|
-
throw new Error('Key is larger than maximum key size (1978)')
|
|
578
|
-
}
|
|
579
|
-
let localTxn, hadWriteTxn = writeTxn
|
|
580
|
-
try {
|
|
581
|
-
if (!writeTxn)
|
|
582
|
-
localTxn = writeTxn = env.beginTxn()
|
|
583
|
-
let deleteValue
|
|
584
|
-
if (ifVersionOrValue !== undefined) {
|
|
585
|
-
if (this.useVersions) {
|
|
586
|
-
let previousVersion = this.get(id) ? getLastVersion() : null
|
|
587
|
-
if (!matches(previousVersion, ifVersionOrValue))
|
|
588
|
-
return false
|
|
589
|
-
} else if (this.encoder)
|
|
590
|
-
deleteValue = this.encoder.encode(ifVersionOrValue)
|
|
591
|
-
else
|
|
592
|
-
deleteValue = ifVersionOrValue
|
|
593
|
-
}
|
|
594
|
-
this.writes++
|
|
595
|
-
let result
|
|
596
|
-
if (deleteValue)
|
|
597
|
-
result = writeTxn.del(this.db, id, deleteValue)
|
|
598
|
-
else
|
|
599
|
-
result = writeTxn.del(this.db, id)
|
|
600
|
-
if (localTxn) {
|
|
601
|
-
writeTxn.commit()
|
|
602
|
-
writeTxn = null
|
|
603
|
-
resetReadTxn()
|
|
604
|
-
}
|
|
605
|
-
return result // object found and deleted
|
|
606
|
-
} catch(error) {
|
|
607
|
-
if (hadWriteTxn)
|
|
608
|
-
throw error // if we are in a transaction, the whole transaction probably needs to restart
|
|
609
|
-
return handleError(error, this, localTxn, () => this.removeSync(id))
|
|
610
|
-
}
|
|
611
|
-
}
|
|
612
|
-
remove(id, ifVersionOrValue) {
|
|
613
|
-
if (id.length > 1978) {
|
|
614
|
-
throw new Error('Key is larger than maximum key size (1978)')
|
|
615
|
-
}
|
|
616
|
-
this.writes++
|
|
617
|
-
if (writeTxn) {
|
|
618
|
-
if (removeSync.call(this, id, ifVersionOrValue) === false)
|
|
619
|
-
return SYNC_PROMISE_FAIL
|
|
620
|
-
return SYNC_PROMISE_RESULT
|
|
621
|
-
}
|
|
622
|
-
let scheduledOperations = this.getScheduledOperations()
|
|
623
|
-
let operation
|
|
624
|
-
if (ifVersionOrValue === undefined)
|
|
625
|
-
operation = [id]
|
|
626
|
-
else if (this.useVersions)
|
|
627
|
-
operation = [id, undefined, undefined, ifVersionOrValue] // version condition
|
|
628
|
-
else {
|
|
629
|
-
if (this.encoder)
|
|
630
|
-
operation = [id, this.encoder.encode(ifVersionOrValue), true]
|
|
631
|
-
else
|
|
632
|
-
operation = [id, ifVersionOrValue, true]
|
|
633
|
-
}
|
|
634
|
-
let index = scheduledOperations.push(operation) - 1 // remove specific values
|
|
635
|
-
scheduledOperations.bytes += (id.length || 6) + 100
|
|
636
|
-
let commit = this.scheduleCommit()
|
|
637
|
-
return ifVersionOrValue === undefined ? commit.unconditionalResults :
|
|
638
|
-
commit.results.then((writeResults) => {
|
|
639
|
-
if (writeResults[index] === 0)
|
|
640
|
-
return true
|
|
641
|
-
if (writeResults[index] === 3) {
|
|
642
|
-
throw new Error('The key size was 0 or too large')
|
|
643
|
-
}
|
|
644
|
-
return false
|
|
645
|
-
})
|
|
646
|
-
}
|
|
647
|
-
getValues(key, options) {
|
|
648
|
-
let defaultOptions = {
|
|
649
|
-
start: key,
|
|
650
|
-
valuesForKey: true
|
|
651
|
-
}
|
|
652
|
-
if (options && options.snapshot === false)
|
|
653
|
-
throw new Error('Can not disable snapshots for getValues')
|
|
654
|
-
return this.getRange(options ? Object.assign(defaultOptions, options) : defaultOptions)
|
|
655
|
-
}
|
|
656
|
-
getKeys(options) {
|
|
657
|
-
if (!options)
|
|
658
|
-
options = {}
|
|
659
|
-
options.values = false
|
|
660
|
-
return this.getRange(options)
|
|
661
|
-
}
|
|
662
|
-
getCount(options) {
|
|
663
|
-
if (!options)
|
|
664
|
-
options = {}
|
|
665
|
-
options.onlyCount = true
|
|
666
|
-
return this.getRange(options)[Symbol.iterator]()
|
|
667
|
-
}
|
|
668
|
-
getKeysCount(options) {
|
|
669
|
-
if (!options)
|
|
670
|
-
options = {}
|
|
671
|
-
options.onlyCount = true
|
|
672
|
-
options.values = false
|
|
673
|
-
return this.getRange(options)[Symbol.iterator]()
|
|
674
|
-
}
|
|
675
|
-
getValuesCount(key, options) {
|
|
676
|
-
if (!options)
|
|
677
|
-
options = {}
|
|
678
|
-
options.start = key
|
|
679
|
-
options.valuesForKey = true
|
|
680
|
-
options.onlyCount = true
|
|
681
|
-
return this.getRange(options)[Symbol.iterator]()
|
|
682
|
-
}
|
|
683
|
-
getRange(options) {
|
|
684
|
-
let iterable = new ArrayLikeIterable()
|
|
685
|
-
if (!options)
|
|
686
|
-
options = {}
|
|
687
|
-
let includeValues = options.values !== false
|
|
688
|
-
let includeVersions = options.versions
|
|
689
|
-
let valuesForKey = options.valuesForKey
|
|
690
|
-
let limit = options.limit
|
|
691
|
-
let db = this.db
|
|
692
|
-
iterable[Symbol.iterator] = () => {
|
|
693
|
-
let currentKey = options.start !== undefined ? options.start :
|
|
694
|
-
(options.reverse ? this.keyIsUint32 ? 0xffffffff : this.keyIsBuffer ? LAST_BUFFER_KEY : LAST_KEY :
|
|
695
|
-
this.keyIsUint32 ? 0 : this.keyIsBuffer ? FIRST_BUFFER_KEY : false)
|
|
696
|
-
let endKey = options.end !== undefined ? options.end :
|
|
697
|
-
(options.reverse ? this.keyIsUint32 ? 0 : this.keyIsBuffer ? FIRST_BUFFER_KEY : false :
|
|
698
|
-
this.keyIsUint32 ? 0xffffffff : this.keyIsBuffer ? LAST_BUFFER_KEY : LAST_KEY)
|
|
699
|
-
const reverse = options.reverse
|
|
700
|
-
let count = 0
|
|
701
|
-
let cursor, cursorRenewId
|
|
702
|
-
let txn
|
|
703
|
-
function resetCursor() {
|
|
704
|
-
try {
|
|
705
|
-
if (cursor)
|
|
706
|
-
finishCursor()
|
|
707
|
-
|
|
708
|
-
txn = writeTxn || (readTxnRenewed ? readTxn : renewReadTxn())
|
|
709
|
-
cursor = new Cursor(txn, db)
|
|
710
|
-
txn.cursorCount = (txn.cursorCount || 0) + 1 // track transaction so we always use the same one
|
|
711
|
-
if (options.snapshot === false) {
|
|
712
|
-
cursorRenewId = renewId // use shared read transaction
|
|
713
|
-
txn.renewingCursorCount = (txn.renewingCursorCount || 0) + 1 // need to know how many are renewing cursors
|
|
714
|
-
}
|
|
715
|
-
if (reverse) {
|
|
716
|
-
if (valuesForKey) {
|
|
717
|
-
// position at key
|
|
718
|
-
currentKey = cursor.goToKey(currentKey)
|
|
719
|
-
// now move to next key and then previous entry to get to last value
|
|
720
|
-
if (currentKey) {
|
|
721
|
-
cursor.goToNextNoDup()
|
|
722
|
-
cursor.goToPrev()
|
|
723
|
-
}
|
|
724
|
-
} else {
|
|
725
|
-
// for reverse retrieval, goToRange is backwards because it positions at the key equal or *greater than* the provided key
|
|
726
|
-
let nextKey = cursor.goToRange(currentKey)
|
|
727
|
-
if (nextKey) {
|
|
728
|
-
if (compareKey(nextKey, currentKey)) {
|
|
729
|
-
// goToRange positioned us at a key after the provided key, so we need to go the previous key to be less than the provided key
|
|
730
|
-
currentKey = cursor.goToPrev()
|
|
731
|
-
} else
|
|
732
|
-
currentKey = nextKey // they match, we are good, and currentKey is already correct
|
|
733
|
-
} else {
|
|
734
|
-
// likewise, we have been position beyond the end of the index, need to go to last
|
|
735
|
-
currentKey = cursor.goToLast()
|
|
736
|
-
}
|
|
737
|
-
}
|
|
738
|
-
} else {
|
|
739
|
-
// for forward retrieval, goToRange does what we want
|
|
740
|
-
currentKey = valuesForKey ? cursor.goToKey(currentKey) : cursor.goToRange(currentKey)
|
|
741
|
-
}
|
|
742
|
-
// TODO: Make a makeCompare(endKey)
|
|
743
|
-
} catch(error) {
|
|
744
|
-
if (cursor) {
|
|
745
|
-
try {
|
|
746
|
-
cursor.close()
|
|
747
|
-
} catch(error) { }
|
|
748
|
-
}
|
|
749
|
-
return handleError(error, this, txn, resetCursor)
|
|
750
|
-
}
|
|
751
|
-
}
|
|
752
|
-
resetCursor()
|
|
753
|
-
let offset = options.offset
|
|
754
|
-
while(offset-- > 0 && currentKey !== undefined) {
|
|
755
|
-
currentKey = reverse ?
|
|
756
|
-
valuesForKey ? cursor.goToPrevDup() :
|
|
757
|
-
includeValues ? cursor.goToPrev() : cursor.goToPrevNoDup() :
|
|
758
|
-
valuesForKey ? cursor.goToNextDup() :
|
|
759
|
-
includeValues ? cursor.goToNext() : cursor.goToNextNoDup()
|
|
760
|
-
}
|
|
761
|
-
if (options.onlyCount) {
|
|
762
|
-
while (!(currentKey === undefined ||
|
|
763
|
-
(reverse ? compareKey(currentKey, endKey) <= 0 : compareKey(currentKey, endKey) >= 0) ||
|
|
764
|
-
(count++ >= limit))) {
|
|
765
|
-
currentKey = reverse ?
|
|
766
|
-
valuesForKey ? cursor.goToPrevDup() :
|
|
767
|
-
includeValues ? cursor.goToPrev() : cursor.goToPrevNoDup() :
|
|
768
|
-
valuesForKey ? cursor.goToNextDup() :
|
|
769
|
-
includeValues ? cursor.goToNext() : cursor.goToNextNoDup()
|
|
770
|
-
}
|
|
771
|
-
finishCursor()
|
|
772
|
-
return count
|
|
773
|
-
}
|
|
774
|
-
|
|
775
|
-
let store = this
|
|
776
|
-
function finishCursor() {
|
|
777
|
-
if (txn.isAborted)
|
|
778
|
-
return
|
|
779
|
-
cursor.close()
|
|
780
|
-
if (cursorRenewId)
|
|
781
|
-
txn.renewingCursorCount--
|
|
782
|
-
if (--txn.cursorCount <= 0 && txn.onlyCursor) {
|
|
783
|
-
let index = cursorTxns.indexOf(txn)
|
|
784
|
-
if (index > -1)
|
|
785
|
-
cursorTxns.splice(index, 1)
|
|
786
|
-
txn.abort() // this is no longer main read txn, abort it now that we are done
|
|
787
|
-
txn.isAborted = true
|
|
788
|
-
}
|
|
789
|
-
}
|
|
790
|
-
return {
|
|
791
|
-
next() {
|
|
792
|
-
if (cursorRenewId && cursorRenewId != renewId)
|
|
793
|
-
resetCursor()
|
|
794
|
-
if (count > 0)
|
|
795
|
-
currentKey = reverse ?
|
|
796
|
-
valuesForKey ? cursor.goToPrevDup() :
|
|
797
|
-
includeValues ? cursor.goToPrev() : cursor.goToPrevNoDup() :
|
|
798
|
-
valuesForKey ? cursor.goToNextDup() :
|
|
799
|
-
includeValues ? cursor.goToNext() : cursor.goToNextNoDup()
|
|
800
|
-
if (currentKey === undefined ||
|
|
801
|
-
(reverse ? compareKey(currentKey, endKey) <= 0 : compareKey(currentKey, endKey) >= 0) ||
|
|
802
|
-
(count++ >= limit)) {
|
|
803
|
-
finishCursor()
|
|
804
|
-
return ITERATOR_DONE
|
|
805
|
-
}
|
|
806
|
-
if (includeValues) {
|
|
807
|
-
let value
|
|
808
|
-
if (store.decoder) {
|
|
809
|
-
lastSize = value = cursor.getCurrentBinaryUnsafe()
|
|
810
|
-
if (value)
|
|
811
|
-
value = store.decoder.decode(store.db.unsafeBuffer, value)
|
|
812
|
-
} else if (store.encoding == 'binary')
|
|
813
|
-
value = cursor.getCurrentBinary()
|
|
814
|
-
else {
|
|
815
|
-
value = cursor.getCurrentUtf8()
|
|
816
|
-
if (store.encoding == 'json' && value)
|
|
817
|
-
value = JSON.parse(value)
|
|
818
|
-
}
|
|
819
|
-
if (includeVersions)
|
|
820
|
-
return {
|
|
821
|
-
value: {
|
|
822
|
-
key: currentKey,
|
|
823
|
-
value,
|
|
824
|
-
version: getLastVersion()
|
|
825
|
-
}
|
|
826
|
-
}
|
|
827
|
-
else if (valuesForKey)
|
|
828
|
-
return {
|
|
829
|
-
value
|
|
830
|
-
}
|
|
831
|
-
else
|
|
832
|
-
return {
|
|
833
|
-
value: {
|
|
834
|
-
key: currentKey,
|
|
835
|
-
value,
|
|
836
|
-
}
|
|
837
|
-
}
|
|
838
|
-
} else if (includeVersions) {
|
|
839
|
-
cursor.getCurrentBinaryUnsafe()
|
|
840
|
-
return {
|
|
841
|
-
value: {
|
|
842
|
-
key: currentKey,
|
|
843
|
-
version: getLastVersion()
|
|
844
|
-
}
|
|
845
|
-
}
|
|
846
|
-
} else {
|
|
847
|
-
return {
|
|
848
|
-
value: currentKey
|
|
849
|
-
}
|
|
850
|
-
}
|
|
851
|
-
},
|
|
852
|
-
return() {
|
|
853
|
-
finishCursor()
|
|
854
|
-
return ITERATOR_DONE
|
|
855
|
-
},
|
|
856
|
-
throw() {
|
|
857
|
-
finishCursor()
|
|
858
|
-
return ITERATOR_DONE
|
|
859
|
-
}
|
|
860
|
-
}
|
|
861
|
-
}
|
|
862
|
-
return iterable
|
|
863
|
-
}
|
|
864
|
-
scheduleCommit() {
|
|
865
|
-
if (!pendingBatch) {
|
|
866
|
-
// pendingBatch promise represents the completion of the transaction
|
|
867
|
-
let whenCommitted = new Promise((resolve, reject) => {
|
|
868
|
-
runNextBatch = (sync) => {
|
|
869
|
-
if (!whenCommitted)
|
|
870
|
-
return
|
|
871
|
-
runNextBatch = null
|
|
872
|
-
if (pendingBatch) {
|
|
873
|
-
for (const store of stores) {
|
|
874
|
-
store.emit('beforecommit', { scheduledOperations })
|
|
875
|
-
}
|
|
876
|
-
}
|
|
877
|
-
clearTimeout(timeout)
|
|
878
|
-
currentCommit = whenCommitted
|
|
879
|
-
whenCommitted = null
|
|
880
|
-
pendingBatch = null
|
|
881
|
-
if (scheduledOperations || scheduledTransactions) {
|
|
882
|
-
// operations to perform, collect them as an array and start doing them
|
|
883
|
-
let operations = scheduledOperations || []
|
|
884
|
-
let transactions = scheduledTransactions
|
|
885
|
-
if (operations.appendAsyncTxn) {
|
|
886
|
-
operations.push(true)
|
|
887
|
-
}
|
|
888
|
-
scheduledOperations = null
|
|
889
|
-
scheduledTransactions = null
|
|
890
|
-
const writeBatch = () => {
|
|
891
|
-
let start = Date.now()
|
|
892
|
-
let results = Buffer.alloc(operations.length)
|
|
893
|
-
let continuedWriteTxn
|
|
894
|
-
let transactionResults
|
|
895
|
-
let transactionSetIndex = 0
|
|
896
|
-
let callback = async (error) => {
|
|
897
|
-
if (error === true) {
|
|
898
|
-
// resume batch transaction
|
|
899
|
-
if (!transactionResults) {
|
|
900
|
-
// get the transaction we will use
|
|
901
|
-
continuedWriteTxn = env.beginTxn(true)
|
|
902
|
-
transactionResults = new Array(transactions.length)
|
|
903
|
-
results.transactionResults = transactionResults
|
|
904
|
-
}
|
|
905
|
-
let transactionSet = transactions[transactionSetIndex]
|
|
906
|
-
let transactionSetResults = transactionResults[transactionSetIndex++] = []
|
|
907
|
-
let promises
|
|
908
|
-
for (let i = 0, l = transactionSet.length; i < l; i++) {
|
|
909
|
-
let userTxn = transactionSet[i]
|
|
910
|
-
let asChild = userTxn.asChild
|
|
911
|
-
if (asChild) {
|
|
912
|
-
if (promises) {
|
|
913
|
-
// must complete any outstanding transactions before proceeding
|
|
914
|
-
await Promise.all(promises)
|
|
915
|
-
promises = null
|
|
916
|
-
}
|
|
917
|
-
let childTxn = writeTxn = env.beginTxn(null, continuedWriteTxn)
|
|
918
|
-
try {
|
|
919
|
-
let result = userTxn.callback()
|
|
920
|
-
if (result && result.then) {
|
|
921
|
-
await result
|
|
922
|
-
}
|
|
923
|
-
if (result === ABORT)
|
|
924
|
-
childTxn.abort()
|
|
925
|
-
else
|
|
926
|
-
childTxn.commit()
|
|
927
|
-
transactionSetResults[(i << 1) + 1] = result
|
|
928
|
-
} catch(error) {
|
|
929
|
-
childTxn.abort()
|
|
930
|
-
if (!txnError(error, i))
|
|
931
|
-
return
|
|
932
|
-
}
|
|
933
|
-
} else {
|
|
934
|
-
writeTxn = continuedWriteTxn
|
|
935
|
-
try {
|
|
936
|
-
let result = userTxn()
|
|
937
|
-
if (result && result.then) {
|
|
938
|
-
if (!promises)
|
|
939
|
-
promises = []
|
|
940
|
-
transactionSetResults[(i << 1) + 1] = result
|
|
941
|
-
promises.push(result.catch(() => {
|
|
942
|
-
txnError(error, i)
|
|
943
|
-
}))
|
|
944
|
-
} else
|
|
945
|
-
transactionSetResults[(i << 1) + 1] = result
|
|
946
|
-
} catch(error) {
|
|
947
|
-
if (!txnError(error, i))
|
|
948
|
-
return
|
|
949
|
-
}
|
|
950
|
-
}
|
|
951
|
-
}
|
|
952
|
-
if (promises) { // finish any outstanding commit functions
|
|
953
|
-
await Promise.all(promises)
|
|
954
|
-
}
|
|
955
|
-
writeTxn = null
|
|
956
|
-
return env.continueBatch(0)
|
|
957
|
-
function txnError(error, i) {
|
|
958
|
-
if (error.message.startsWith('MDB_MAP_FULL')) {
|
|
959
|
-
env.continueBatch(-30792)
|
|
960
|
-
writeTxn = null
|
|
961
|
-
return false
|
|
962
|
-
}
|
|
963
|
-
if (error.message.startsWith('MDB_MAP_RESIZED')) {
|
|
964
|
-
env.continueBatch(-30785)
|
|
965
|
-
writeTxn = null
|
|
966
|
-
return false
|
|
967
|
-
}
|
|
968
|
-
// user exception
|
|
969
|
-
transactionSetResults[i << 1] = error
|
|
970
|
-
return true
|
|
971
|
-
}
|
|
972
|
-
}
|
|
973
|
-
let duration = Date.now() - start
|
|
974
|
-
this.averageTransactionTime = (this.averageTransactionTime * 3 + duration) / 4
|
|
975
|
-
//console.log('did batch', (duration) + 'ms', name, operations.length/*map(o => o[1].toString('binary')).join(',')*/)
|
|
976
|
-
resetReadTxn()
|
|
977
|
-
if (error) {
|
|
978
|
-
if (error.message == 'Interrupted batch')
|
|
979
|
-
// if the batch was interrupted by a sync transaction request we just have to restart it
|
|
980
|
-
return writeBatch()
|
|
981
|
-
try {
|
|
982
|
-
// see if we can recover from recoverable error (like full map with a resize)
|
|
983
|
-
handleError(error, this, null, writeBatch)
|
|
984
|
-
} catch(error) {
|
|
985
|
-
currentCommit = null
|
|
986
|
-
for (const store of stores) {
|
|
987
|
-
store.emit('aftercommit', { operations })
|
|
988
|
-
}
|
|
989
|
-
reject(error)
|
|
990
|
-
}
|
|
991
|
-
} else {
|
|
992
|
-
currentCommit = null
|
|
993
|
-
for (const store of stores) {
|
|
994
|
-
store.emit('aftercommit', { operations, results })
|
|
995
|
-
}
|
|
996
|
-
resolve(results)
|
|
997
|
-
}
|
|
998
|
-
}
|
|
999
|
-
try {
|
|
1000
|
-
if (sync === true) {
|
|
1001
|
-
env.batchWrite(operations, results)
|
|
1002
|
-
callback()
|
|
1003
|
-
} else
|
|
1004
|
-
env.batchWrite(operations, results, callback)
|
|
1005
|
-
} catch (error) {
|
|
1006
|
-
callback(error)
|
|
1007
|
-
}
|
|
1008
|
-
}
|
|
1009
|
-
try {
|
|
1010
|
-
writeBatch()
|
|
1011
|
-
} catch(error) {
|
|
1012
|
-
reject(error)
|
|
1013
|
-
}
|
|
1014
|
-
} else {
|
|
1015
|
-
resolve([])
|
|
1016
|
-
}
|
|
1017
|
-
}
|
|
1018
|
-
let timeout
|
|
1019
|
-
if (this.commitDelay > 0) {
|
|
1020
|
-
timeout = setTimeout(() => {
|
|
1021
|
-
when(currentCommit, () => whenCommitted && runNextBatch(), () => whenCommitted && runNextBatch())
|
|
1022
|
-
}, this.commitDelay)
|
|
1023
|
-
} else {
|
|
1024
|
-
timeout = runNextBatch.immediate = setImmediate(() => {
|
|
1025
|
-
when(currentCommit, () => whenCommitted && runNextBatch(), () => whenCommitted && runNextBatch())
|
|
1026
|
-
})
|
|
1027
|
-
}
|
|
1028
|
-
})
|
|
1029
|
-
pendingBatch = {
|
|
1030
|
-
results: whenCommitted,
|
|
1031
|
-
unconditionalResults: whenCommitted.then(() => true) // for returning from non-conditional operations
|
|
1032
|
-
}
|
|
1033
|
-
}
|
|
1034
|
-
return pendingBatch
|
|
1035
|
-
}
|
|
1036
|
-
|
|
1037
|
-
batch(operations) {
|
|
1038
|
-
/*if (writeTxn) {
|
|
1039
|
-
this.commitBatchNow(operations.map(operation => [this.db, operation.key, operation.value]))
|
|
1040
|
-
return Promise.resolve(true)
|
|
1041
|
-
}*/
|
|
1042
|
-
let scheduledOperations = this.getScheduledOperations()
|
|
1043
|
-
for (let operation of operations) {
|
|
1044
|
-
let value = operation.value
|
|
1045
|
-
scheduledOperations.push([operation.key, value])
|
|
1046
|
-
scheduledOperations.bytes += operation.key.length + (value && value.length || 0) + 200
|
|
1047
|
-
}
|
|
1048
|
-
return this.scheduleCommit().unconditionalResults
|
|
1049
|
-
}
|
|
1050
|
-
backup(path) {
|
|
1051
|
-
return new Promise((resolve, reject) => env.copy(path, true, (error) => {
|
|
1052
|
-
if (error) {
|
|
1053
|
-
reject(error)
|
|
1054
|
-
} else {
|
|
1055
|
-
resolve()
|
|
1056
|
-
}
|
|
1057
|
-
}))
|
|
1058
|
-
}
|
|
1059
|
-
close() {
|
|
1060
|
-
this.db.close()
|
|
1061
|
-
if (this.isRoot) {
|
|
1062
|
-
if (readTxn) {
|
|
1063
|
-
try {
|
|
1064
|
-
readTxn.abort()
|
|
1065
|
-
} catch(error) {}
|
|
1066
|
-
}
|
|
1067
|
-
readTxnRenewed = null
|
|
1068
|
-
env.close()
|
|
1069
|
-
}
|
|
1070
|
-
}
|
|
1071
|
-
getStats() {
|
|
1072
|
-
try {
|
|
1073
|
-
let stats = this.db.stat(readTxnRenewed ? readTxn : renewReadTxn())
|
|
1074
|
-
return stats
|
|
1075
|
-
}
|
|
1076
|
-
catch(error) {
|
|
1077
|
-
return handleError(error, this, readTxn, () => this.getStats())
|
|
1078
|
-
}
|
|
1079
|
-
}
|
|
1080
|
-
sync(callback) {
|
|
1081
|
-
return env.sync(callback || function(error) {
|
|
1082
|
-
if (error) {
|
|
1083
|
-
console.error(error)
|
|
1084
|
-
}
|
|
1085
|
-
})
|
|
1086
|
-
}
|
|
1087
|
-
deleteDB() {
|
|
1088
|
-
try {
|
|
1089
|
-
this.db.drop({
|
|
1090
|
-
justFreePages: false,
|
|
1091
|
-
txn: writeTxn,
|
|
1092
|
-
})
|
|
1093
|
-
} catch(error) {
|
|
1094
|
-
handleError(error, this, null, () => this.deleteDB())
|
|
1095
|
-
}
|
|
1096
|
-
}
|
|
1097
|
-
clear() {
|
|
1098
|
-
try {
|
|
1099
|
-
this.db.drop({
|
|
1100
|
-
justFreePages: true,
|
|
1101
|
-
txn: writeTxn,
|
|
1102
|
-
})
|
|
1103
|
-
} catch(error) {
|
|
1104
|
-
handleError(error, this, null, () => this.clear())
|
|
1105
|
-
}
|
|
1106
|
-
if (this.encoder && this.encoder.structures)
|
|
1107
|
-
this.encoder.structures = []
|
|
1108
|
-
|
|
1109
|
-
}
|
|
1110
|
-
readerCheck() {
|
|
1111
|
-
return env.readerCheck()
|
|
1112
|
-
}
|
|
1113
|
-
readerList() {
|
|
1114
|
-
return env.readerList().join('')
|
|
1115
|
-
}
|
|
1116
|
-
setupSharedStructures() {
|
|
1117
|
-
const getStructures = () => {
|
|
1118
|
-
let lastVersion // because we are doing a read here, we may need to save and restore the lastVersion from the last read
|
|
1119
|
-
if (this.useVersions)
|
|
1120
|
-
lastVersion = getLastVersion()
|
|
1121
|
-
try {
|
|
1122
|
-
let buffer = (writeTxn || (readTxnRenewed ? readTxn : renewReadTxn())).getBinary(this.db, this.sharedStructuresKey)
|
|
1123
|
-
if (this.useVersions)
|
|
1124
|
-
setLastVersion(lastVersion)
|
|
1125
|
-
return buffer ? this.encoder.decode(buffer) : []
|
|
1126
|
-
} catch(error) {
|
|
1127
|
-
return handleError(error, this, null, getStructures)
|
|
1128
|
-
}
|
|
1129
|
-
}
|
|
1130
|
-
return {
|
|
1131
|
-
saveStructures: (structures, previousLength) => {
|
|
1132
|
-
return this.transactionSync(() => {
|
|
1133
|
-
let existingStructuresBuffer = writeTxn.getBinary(this.db, this.sharedStructuresKey)
|
|
1134
|
-
let existingStructures = existingStructuresBuffer ? this.encoder.decode(existingStructuresBuffer) : []
|
|
1135
|
-
if (existingStructures.length != previousLength)
|
|
1136
|
-
return false // it changed, we need to indicate that we couldn't update
|
|
1137
|
-
writeTxn.putBinary(this.db, this.sharedStructuresKey, this.encoder.encode(structures))
|
|
1138
|
-
})
|
|
1139
|
-
},
|
|
1140
|
-
getStructures,
|
|
1141
|
-
copyBuffers: true // need to copy any embedded buffers that are found since we use unsafe buffers
|
|
1142
|
-
}
|
|
1143
|
-
}
|
|
1144
|
-
}
|
|
1145
|
-
// if caching class overrides putSync, don't want to double call the caching code
|
|
1146
|
-
const putSync = LMDBStore.prototype.putSync
|
|
1147
|
-
const removeSync = LMDBStore.prototype.removeSync
|
|
1148
|
-
return options.cache ?
|
|
1149
|
-
new (CachingStore(LMDBStore))(options.name || null, options) :
|
|
1150
|
-
new LMDBStore(options.name || null, options)
|
|
1151
|
-
function handleError(error, store, txn, retry) {
|
|
1152
|
-
try {
|
|
1153
|
-
if (writeTxn)
|
|
1154
|
-
writeTxn.abort()
|
|
1155
|
-
} catch(error) {}
|
|
1156
|
-
if (writeTxn)
|
|
1157
|
-
writeTxn = null
|
|
1158
|
-
|
|
1159
|
-
if (error.message.startsWith('MDB_') &&
|
|
1160
|
-
!(error.message.startsWith('MDB_KEYEXIST') || error.message.startsWith('MDB_NOTFOUND')) ||
|
|
1161
|
-
error.message == 'The transaction is already closed.') {
|
|
1162
|
-
resetReadTxn() // separate out cursor-based read txns
|
|
1163
|
-
try {
|
|
1164
|
-
if (readTxn) {
|
|
1165
|
-
readTxn.abort()
|
|
1166
|
-
readTxn.isAborted = true
|
|
1167
|
-
}
|
|
1168
|
-
} catch(error) {}
|
|
1169
|
-
readTxn = null
|
|
1170
|
-
}
|
|
1171
|
-
if (error.message.startsWith('MDB_PROBLEM'))
|
|
1172
|
-
console.error(error)
|
|
1173
|
-
//if (error.message == 'The transaction is already closed.')
|
|
1174
|
-
// return handleError(error, store, null, retry)
|
|
1175
|
-
if (error.message.startsWith('MDB_MAP_FULL') || error.message.startsWith('MDB_MAP_RESIZED')) {
|
|
1176
|
-
const oldSize = env.info().mapSize
|
|
1177
|
-
const newSize = error.message.startsWith('MDB_MAP_FULL') ?
|
|
1178
|
-
Math.floor(((1.08 + 3000 / Math.sqrt(oldSize)) * oldSize) / 0x100000) * 0x100000 : // increase size, more rapidly at first, and round to nearest 1 MB
|
|
1179
|
-
oldSize + 0x2000//Math.pow(2, (Math.round(Math.log2(oldSize)) + 1)) // for resized notifications, we try to align to doubling each time
|
|
1180
|
-
for (const store of stores) {
|
|
1181
|
-
store.emit('remap')
|
|
1182
|
-
}
|
|
1183
|
-
try {
|
|
1184
|
-
env.resize(newSize)
|
|
1185
|
-
} catch(error) {
|
|
1186
|
-
throw new Error(error.message + ' trying to set map size to ' + newSize)
|
|
1187
|
-
}
|
|
1188
|
-
return retry()
|
|
1189
|
-
}
|
|
1190
|
-
error.message = 'In database ' + name + ': ' + error.message
|
|
1191
|
-
throw error
|
|
1192
|
-
}
|
|
1193
|
-
}
|
|
1194
|
-
|
|
1195
|
-
function matches(previousVersion, ifVersion){
|
|
1196
|
-
let matches
|
|
1197
|
-
if (previousVersion) {
|
|
1198
|
-
if (ifVersion) {
|
|
1199
|
-
matches = previousVersion == ifVersion
|
|
1200
|
-
} else {
|
|
1201
|
-
matches = false
|
|
1202
|
-
}
|
|
1203
|
-
} else {
|
|
1204
|
-
matches = !ifVersion
|
|
1205
|
-
}
|
|
1206
|
-
return matches
|
|
1207
|
-
}
|
|
1208
|
-
|
|
1209
|
-
function compareKey(a, b) {
|
|
1210
|
-
// compare with type consistency that matches ordered-binary
|
|
1211
|
-
if (typeof a == 'object') {
|
|
1212
|
-
if (!a) {
|
|
1213
|
-
return b == null ? 0 : -1
|
|
1214
|
-
}
|
|
1215
|
-
if (a.compare) {
|
|
1216
|
-
if (b == null) {
|
|
1217
|
-
return 1
|
|
1218
|
-
} else if (b.compare) {
|
|
1219
|
-
return a.compare(b)
|
|
1220
|
-
} else {
|
|
1221
|
-
return -1
|
|
1222
|
-
}
|
|
1223
|
-
}
|
|
1224
|
-
let arrayComparison
|
|
1225
|
-
if (b instanceof Array) {
|
|
1226
|
-
let i = 0
|
|
1227
|
-
while((arrayComparison = compareKey(a[i], b[i])) == 0 && i <= a.length) {
|
|
1228
|
-
i++
|
|
1229
|
-
}
|
|
1230
|
-
return arrayComparison
|
|
1231
|
-
}
|
|
1232
|
-
arrayComparison = compareKey(a[0], b)
|
|
1233
|
-
if (arrayComparison == 0 && a.length > 1)
|
|
1234
|
-
return 1
|
|
1235
|
-
return arrayComparison
|
|
1236
|
-
} else if (typeof a == typeof b) {
|
|
1237
|
-
if (typeof a === 'symbol') {
|
|
1238
|
-
a = Symbol.keyFor(a)
|
|
1239
|
-
b = Symbol.keyFor(b)
|
|
1240
|
-
}
|
|
1241
|
-
return a < b ? -1 : a === b ? 0 : 1
|
|
1242
|
-
}
|
|
1243
|
-
else if (typeof b == 'object') {
|
|
1244
|
-
if (b instanceof Array)
|
|
1245
|
-
return -compareKey(b, a)
|
|
1246
|
-
return 1
|
|
1247
|
-
} else {
|
|
1248
|
-
return typeOrder[typeof a] < typeOrder[typeof b] ? -1 : 1
|
|
1249
|
-
}
|
|
1250
|
-
}
|
|
1251
|
-
class Entry {
|
|
1252
|
-
constructor(value, version, db) {
|
|
1253
|
-
this.value = value
|
|
1254
|
-
this.version = version
|
|
1255
|
-
this.db = db
|
|
1256
|
-
}
|
|
1257
|
-
ifSamePut() {
|
|
1258
|
-
|
|
1259
|
-
}
|
|
1260
|
-
ifSameRemove() {
|
|
1261
|
-
|
|
1262
|
-
}
|
|
1263
|
-
}
|
|
1264
|
-
exports.compareKey = compareKey
|
|
1265
|
-
const typeOrder = {
|
|
1266
|
-
symbol: 0,
|
|
1267
|
-
undefined: 1,
|
|
1268
|
-
boolean: 2,
|
|
1269
|
-
number: 3,
|
|
1270
|
-
string: 4
|
|
1271
|
-
}
|
|
1272
|
-
exports.getLastEntrySize = function() {
|
|
1273
|
-
return lastSize
|
|
1274
|
-
}
|
|
1
|
+
const { sync: mkdirpSync } = require('mkdirp')
|
|
2
|
+
const fs = require('fs')
|
|
3
|
+
const { extname, basename, dirname} = require('path')
|
|
4
|
+
const { ArrayLikeIterable } = require('./util/ArrayLikeIterable')
|
|
5
|
+
const when = require('./util/when')
|
|
6
|
+
const EventEmitter = require('events')
|
|
7
|
+
Object.assign(exports, require('node-gyp-build')(__dirname))
|
|
8
|
+
const { Env, Cursor, Compression, getLastVersion, setLastVersion, getBufferForAddress, keyValueToBuffer, bufferToKeyValue } = exports
|
|
9
|
+
const { CachingStore, setGetLastVersion } = require('./caching')
|
|
10
|
+
const os = require('os')
|
|
11
|
+
setGetLastVersion(getLastVersion)
|
|
12
|
+
Uint8ArraySlice = Uint8Array.prototype.slice
|
|
13
|
+
const syncInstructions = Buffer.allocUnsafeSlow(2048)
|
|
14
|
+
const syncInstructionsView = new DataView(syncInstructions.buffer, 0, 2048)
|
|
15
|
+
const buffers = []
|
|
16
|
+
|
|
17
|
+
const DEFAULT_SYNC_BATCH_THRESHOLD = 200000000 // 200MB
|
|
18
|
+
const DEFAULT_IMMEDIATE_BATCH_THRESHOLD = 10000000 // 10MB
|
|
19
|
+
const DEFAULT_COMMIT_DELAY = 0
|
|
20
|
+
const READING_TNX = {
|
|
21
|
+
readOnly: true
|
|
22
|
+
}
|
|
23
|
+
const ABORT = {}
|
|
24
|
+
|
|
25
|
+
const allDbs = exports.allDbs = new Map()
|
|
26
|
+
const SYNC_PROMISE_RESULT = Promise.resolve(true)
|
|
27
|
+
const SYNC_PROMISE_FAIL = Promise.resolve(false)
|
|
28
|
+
SYNC_PROMISE_RESULT.isSync = true
|
|
29
|
+
SYNC_PROMISE_FAIL.isSync = true
|
|
30
|
+
const LAST_KEY = String.fromCharCode(0xffff)
|
|
31
|
+
const LAST_BUFFER_KEY = Buffer.from([255, 255, 255, 255])
|
|
32
|
+
const FIRST_BUFFER_KEY = Buffer.from([0])
|
|
33
|
+
const ITERATOR_DONE = { done: true, value: undefined }
|
|
34
|
+
let env
|
|
35
|
+
let defaultCompression
|
|
36
|
+
let lastSize, lastOffset
|
|
37
|
+
exports.open = open
|
|
38
|
+
exports.ABORT = ABORT
|
|
39
|
+
let abortedNonChildTransactionWarn
|
|
40
|
+
function open(path, options) {
|
|
41
|
+
let env = new Env()
|
|
42
|
+
let committingWrites
|
|
43
|
+
let scheduledTransactions
|
|
44
|
+
let scheduledOperations
|
|
45
|
+
let asyncTransactionAfter = true, asyncTransactionStrictOrder
|
|
46
|
+
let transactionWarned
|
|
47
|
+
let readTxn, writeTxn, pendingBatch, currentCommit, runNextBatch, readTxnRenewed, cursorTxns = []
|
|
48
|
+
let renewId = 1
|
|
49
|
+
if (typeof path == 'object' && !options) {
|
|
50
|
+
options = path
|
|
51
|
+
path = options.path
|
|
52
|
+
}
|
|
53
|
+
let extension = extname(path)
|
|
54
|
+
let name = basename(path, extension)
|
|
55
|
+
let is32Bit = os.arch().endsWith('32')
|
|
56
|
+
let remapChunks = (options && options.remapChunks) || ((options && options.mapSize) ?
|
|
57
|
+
(is32Bit && options.mapSize > 0x100000000) : // larger than fits in address space, must use dynamic maps
|
|
58
|
+
is32Bit) // without a known map size, we default to being able to handle large data correctly/well*/
|
|
59
|
+
options = Object.assign({
|
|
60
|
+
path,
|
|
61
|
+
noSubdir: Boolean(extension),
|
|
62
|
+
isRoot: true,
|
|
63
|
+
maxDbs: 12,
|
|
64
|
+
remapChunks,
|
|
65
|
+
syncInstructions,
|
|
66
|
+
//winMemoryPriority: 4,
|
|
67
|
+
// default map size limit of 4 exabytes when using remapChunks, since it is not preallocated and we can
|
|
68
|
+
// make it super huge.
|
|
69
|
+
mapSize: remapChunks ? 0x10000000000000 :
|
|
70
|
+
0x20000, // Otherwise we start small with 128KB
|
|
71
|
+
}, options)
|
|
72
|
+
if (options.asyncTransactionOrder == 'before')
|
|
73
|
+
asyncTransactionAfter = false
|
|
74
|
+
else if (options.asyncTransactionOrder == 'strict') {
|
|
75
|
+
asyncTransactionStrictOrder = true
|
|
76
|
+
asyncTransactionAfter = false
|
|
77
|
+
}
|
|
78
|
+
if (!fs.existsSync(options.noSubdir ? dirname(path) : path))
|
|
79
|
+
mkdirpSync(options.noSubdir ? dirname(path) : path)
|
|
80
|
+
if (options.compression) {
|
|
81
|
+
let setDefault
|
|
82
|
+
if (options.compression == true) {
|
|
83
|
+
if (defaultCompression)
|
|
84
|
+
options.compression = defaultCompression
|
|
85
|
+
else
|
|
86
|
+
defaultCompression = options.compression = new Compression({
|
|
87
|
+
threshold: 1000,
|
|
88
|
+
dictionary: fs.readFileSync(require.resolve('./dict/dict.txt')),
|
|
89
|
+
})
|
|
90
|
+
} else
|
|
91
|
+
options.compression = new Compression(Object.assign({
|
|
92
|
+
threshold: 1000,
|
|
93
|
+
dictionary: fs.readFileSync(require.resolve('./dict/dict.txt')),
|
|
94
|
+
}), options.compression)
|
|
95
|
+
}
|
|
96
|
+
|
|
97
|
+
if (options && options.clearOnStart) {
|
|
98
|
+
console.info('Removing', path)
|
|
99
|
+
fs.removeSync(path)
|
|
100
|
+
console.info('Removed', path)
|
|
101
|
+
}
|
|
102
|
+
let useWritemap = options.useWritemap
|
|
103
|
+
try {
|
|
104
|
+
env.open(options)
|
|
105
|
+
} catch(error) {
|
|
106
|
+
if (error.message.startsWith('MDB_INVALID')) {
|
|
107
|
+
require('./util/upgrade-lmdb').upgrade(path, options, open)
|
|
108
|
+
env = new Env()
|
|
109
|
+
env.open(options)
|
|
110
|
+
} else
|
|
111
|
+
throw error
|
|
112
|
+
}
|
|
113
|
+
/* let filePath = noSubdir ? path : (path + '/data.mdb')
|
|
114
|
+
if (fs.statSync(filePath).size == env.info().mapSize && !options.remapChunks) {
|
|
115
|
+
// if the file size is identical to the map size, that means the OS is taking full disk space for
|
|
116
|
+
// mapping and we need to revert back to remapChunks
|
|
117
|
+
env.close()
|
|
118
|
+
options.remapChunks = true
|
|
119
|
+
env.open(options)
|
|
120
|
+
}*/
|
|
121
|
+
env.readerCheck() // clear out any stale entries
|
|
122
|
+
function renewReadTxn() {
|
|
123
|
+
if (readTxn)
|
|
124
|
+
readTxn.renew()
|
|
125
|
+
else
|
|
126
|
+
readTxn = env.beginTxn(READING_TNX)
|
|
127
|
+
readTxnRenewed = setImmediate(resetReadTxn)
|
|
128
|
+
return readTxn
|
|
129
|
+
}
|
|
130
|
+
// Invalidate the shared read transaction snapshot. If cursors that are not
// renewing are still open on it, hand the transaction off to cursorTxns so
// those cursors stay valid; otherwise just reset it for later renewal.
function resetReadTxn() {
	if (!readTxnRenewed)
		return
	renewId++
	readTxnRenewed = null
	const nonRenewingCursors = readTxn.cursorCount - (readTxn.renewingCursorCount || 0)
	if (nonRenewingCursors > 0) {
		// Outstanding cursors own this txn now; it will be aborted when the
		// last one finishes.
		readTxn.onlyCursor = true
		cursorTxns.push(readTxn)
		readTxn = null
	} else {
		readTxn.reset()
	}
}
|
|
143
|
+
let stores = [] // every LMDBStore opened on this env; used to broadcast 'beforecommit'
|
|
144
|
+
class LMDBStore extends EventEmitter {
|
|
145
|
+
constructor(dbName, dbOptions) {
|
|
146
|
+
super()
|
|
147
|
+
if (dbName === undefined)
|
|
148
|
+
throw new Error('Database name must be supplied in name property (may be null for root database)')
|
|
149
|
+
|
|
150
|
+
const openDB = () => {
|
|
151
|
+
try {
|
|
152
|
+
this.db = env.openDbi(Object.assign({
|
|
153
|
+
name: dbName,
|
|
154
|
+
create: true,
|
|
155
|
+
txn: writeTxn,
|
|
156
|
+
}, dbOptions))
|
|
157
|
+
this.db.name = dbName || null
|
|
158
|
+
} catch(error) {
|
|
159
|
+
handleError(error, null, null, openDB)
|
|
160
|
+
}
|
|
161
|
+
}
|
|
162
|
+
if (dbOptions.compression && !(dbOptions.compression instanceof Compression)) {
|
|
163
|
+
if (dbOptions.compression == true && options.compression)
|
|
164
|
+
dbOptions.compression = options.compression // use the parent compression if available
|
|
165
|
+
else
|
|
166
|
+
dbOptions.compression = new Compression(Object.assign({
|
|
167
|
+
threshold: 1000,
|
|
168
|
+
dictionary: fs.readFileSync(require.resolve('./dict/dict.txt')),
|
|
169
|
+
}), dbOptions.compression)
|
|
170
|
+
}
|
|
171
|
+
|
|
172
|
+
if (dbOptions.dupSort && (dbOptions.useVersions || dbOptions.cache)) {
|
|
173
|
+
throw new Error('The dupSort flag can not be combined with versions or caching')
|
|
174
|
+
}
|
|
175
|
+
openDB()
|
|
176
|
+
resetReadTxn() // a read transaction becomes invalid after opening another db
|
|
177
|
+
this.name = dbName
|
|
178
|
+
this.env = env
|
|
179
|
+
this.reads = 0
|
|
180
|
+
this.writes = 0
|
|
181
|
+
this.transactions = 0
|
|
182
|
+
this.averageTransactionTime = 5
|
|
183
|
+
if (dbOptions.syncBatchThreshold)
|
|
184
|
+
console.warn('syncBatchThreshold is no longer supported')
|
|
185
|
+
if (dbOptions.immediateBatchThreshold)
|
|
186
|
+
console.warn('immediateBatchThreshold is no longer supported')
|
|
187
|
+
this.commitDelay = DEFAULT_COMMIT_DELAY
|
|
188
|
+
Object.assign(this, { // these are the options that are inherited
|
|
189
|
+
path: options.path,
|
|
190
|
+
encoding: options.encoding,
|
|
191
|
+
strictAsyncOrder: options.strictAsyncOrder,
|
|
192
|
+
}, dbOptions)
|
|
193
|
+
if (!this.encoding || this.encoding == 'msgpack' || this.encoding == 'cbor') {
|
|
194
|
+
this.encoder = this.decoder = new (this.encoding == 'cbor' ? require('cbor-x').Encoder : require('msgpackr').Encoder)
|
|
195
|
+
(Object.assign(this.sharedStructuresKey ?
|
|
196
|
+
this.setupSharedStructures() : {
|
|
197
|
+
copyBuffers: true // need to copy any embedded buffers that are found since we use unsafe buffers
|
|
198
|
+
}, options, dbOptions))
|
|
199
|
+
} else if (this.encoding == 'json') {
|
|
200
|
+
this.encoder = {
|
|
201
|
+
encode: JSON.stringify,
|
|
202
|
+
}
|
|
203
|
+
} else if (this.encoding == 'ordered-binary') {
|
|
204
|
+
this.encoder = this.decoder = {
|
|
205
|
+
encode(value) { return keyValueToBuffer(value) },
|
|
206
|
+
decode(buffer, end) { return bufferToKeyValue(buffer.slice(0, end)) }
|
|
207
|
+
}
|
|
208
|
+
}
|
|
209
|
+
allDbs.set(dbName ? name + '-' + dbName : name, this)
|
|
210
|
+
stores.push(this)
|
|
211
|
+
}
|
|
212
|
+
openDB(dbName, dbOptions) {
|
|
213
|
+
if (typeof dbName == 'object' && !dbOptions) {
|
|
214
|
+
dbOptions = dbName
|
|
215
|
+
dbName = options.name
|
|
216
|
+
} else
|
|
217
|
+
dbOptions = dbOptions || {}
|
|
218
|
+
try {
|
|
219
|
+
return dbOptions.cache ?
|
|
220
|
+
new (CachingStore(LMDBStore))(dbName, dbOptions) :
|
|
221
|
+
new LMDBStore(dbName, dbOptions)
|
|
222
|
+
} catch(error) {
|
|
223
|
+
if (error.message.indexOf('MDB_DBS_FULL') > -1) {
|
|
224
|
+
error.message += ' (increase your maxDbs option)'
|
|
225
|
+
}
|
|
226
|
+
throw error
|
|
227
|
+
}
|
|
228
|
+
}
|
|
229
|
+
/**
 * Queue `callback` to run inside the next batched write transaction.
 * Returns a promise for the callback's result (or rethrows its error).
 * If already inside a write transaction, runs the callback immediately.
 * @param {Function} callback - transaction body
 * @param {boolean} asChild - run in a nested (abortable) child transaction
 */
transactionAsync(callback, asChild) {
	if (writeTxn) {
		// already nested in a transaction, just execute and return
		return callback()
	}
	let lastOperation
	let after, strictOrder
	if (scheduledOperations) {
		// Find whether the most recently scheduled operation (per the current
		// ordering mode) is already an async-transaction marker (`true`).
		lastOperation = asyncTransactionAfter ? scheduledOperations.appendAsyncTxn :
			scheduledOperations[asyncTransactionStrictOrder ? scheduledOperations.length - 1 : 0]
	} else {
		scheduledOperations = []
		scheduledOperations.bytes = 0
	}
	let transactionSet
	let transactionSetIndex
	if (lastOperation === true) { // continue last set of transactions
		transactionSetIndex = scheduledTransactions.length - 1
		transactionSet = scheduledTransactions[transactionSetIndex]
	} else {
		// for now we signify transactions as a true
		if (asyncTransactionAfter) // by default we add a flag to put transactions after other operations
			scheduledOperations.appendAsyncTxn = true
		else if (asyncTransactionStrictOrder)
			scheduledOperations.push(true)
		else // in before mode, we put all the async transaction at the beginning
			scheduledOperations.unshift(true)
		if (!scheduledTransactions) {
			scheduledTransactions = []
		}
		transactionSetIndex = scheduledTransactions.push(transactionSet = []) - 1
	}
	// Each entry in a transaction set occupies two result slots:
	// [error, result] — hence the << 1.
	let index = (transactionSet.push(asChild ?
		{asChild, callback } : callback) - 1) << 1
	return this.scheduleCommit().results.then((results) => {
		let transactionResults = results.transactionResults[transactionSetIndex]
		let error = transactionResults[index]
		if (error)
			throw error
		return transactionResults[index + 1]
	})
}
|
|
271
|
+
/**
 * Run `callback` in a nested (child) LMDB transaction, so it can be aborted
 * (by returning ABORT or throwing) without rolling back the parent.
 * Not supported in writemap mode. Outside a write transaction, falls back to
 * queuing an async child transaction.
 */
childTransaction(callback) {
	if (useWritemap)
		throw new Error('Child transactions are not supported in writemap mode')
	if (writeTxn) {
		let parentTxn = writeTxn
		let childTxn = writeTxn = env.beginTxn(null, parentTxn)
		try {
			// when() handles both sync results and promises from the callback
			return when(callback(), (result) => {
				// restore the parent as the active txn before resolving
				writeTxn = parentTxn
				if (result === ABORT)
					childTxn.abort()
				else
					childTxn.commit()
				return result
			}, (error) => {
				writeTxn = parentTxn
				childTxn.abort()
				throw error
			})
		} catch(error) {
			// synchronous throw from callback(): abort the child and rethrow
			writeTxn = parentTxn
			childTxn.abort()
			throw error
		}
	}
	return this.transactionAsync(callback, true)
}
|
|
298
|
+
transaction(callback, abort) {
|
|
299
|
+
if (!transactionWarned) {
|
|
300
|
+
console.warn('transaction is deprecated, use transactionSync if you want a synchronous transaction or transactionAsync for asynchronous transaction. In this future this will always call transactionAsync.')
|
|
301
|
+
transactionWarned = true
|
|
302
|
+
}
|
|
303
|
+
let result = this.transactionSync(callback, abort)
|
|
304
|
+
return abort ? ABORT : result
|
|
305
|
+
}
|
|
306
|
+
/**
 * Run `callback` inside a synchronous write transaction, committing on
 * normal completion and aborting when it returns ABORT. Nested calls become
 * child transactions when possible (not writemap / not caching), otherwise
 * they run in the current transaction (where ABORT cannot take effect).
 */
transactionSync(callback, abort) {
	if (writeTxn) {
		if (!useWritemap && !this.cache)
			// already nested in a transaction, execute as child transaction (if possible) and return
			return this.childTransaction(callback)
		let result = callback() // else just run in current transaction
		if (result == ABORT && !abortedNonChildTransactionWarn) {
			console.warn('Can not abort a transaction inside another transaction with ' + (this.cache ? 'caching enabled' : 'useWritemap enabled'))
			abortedNonChildTransactionWarn = true
		}
		return result
	}
	let txn
	try {
		this.transactions++
		txn = writeTxn = env.beginTxn()
		/*if (scheduledOperations && runNextBatch) {
			runNextBatch((operations, callback) => {
				try {
					callback(null, this.commitBatchNow(operations))
				} catch (error) {
					callback(error)
				}
			})
		}
		TODO: To reenable forced sequential writes, we need to re-execute the operations if we get an env resize
		*/
		return when(callback(), (result) => {
			try {
				if (result === ABORT)
					txn.abort()
				else {
					txn.commit()
					// commits invalidate the shared read snapshot
					resetReadTxn()
				}
				writeTxn = null
				return result
			} catch(error) {
				if (error.message == 'The transaction is already closed.') {
					// the callback closed the txn itself; treat as success
					return result
				}
				// NOTE(review): the retry closures call this.transaction (which
				// warns as deprecated) rather than this.transactionSync — confirm
				// whether that is intentional.
				return handleError(error, this, txn, () => this.transaction(callback))
			}
		}, (error) => {
			return handleError(error, this, txn, () => this.transaction(callback))
		})
	} catch(error) {
		return handleError(error, this, txn, () => this.transaction(callback))
	}
}
|
|
356
|
+
/**
 * Experimental: look up `id` and return a slice of a memory-mapped buffer
 * pointing directly at the stored bytes, or undefined if not found.
 * Communicates with the native side through the shared syncInstructions
 * buffer (offsets/sizes read and written via syncInstructionsView).
 * NOTE(review): the exact syncInstructions field layout (offsets 0/8/12/16/20)
 * is defined by the native binding — confirm against its source.
 */
getBinaryLocation(id) {
	syncInstructionsView.setUint32(8, 4, true)
	syncInstructionsView.setUint32(16, id, true)
	//syncInstructions.utf8Write(id, 16, 1000)
	syncInstructionsView.setUint32(20, 0)
	// keyToBinary(syncInstructions, id)
	this.db.get(id)
	// native side reports the entry's size and location back through the view
	lastSize = syncInstructionsView.getUint32(0, true)
	let bufferIndex = syncInstructionsView.getUint32(12, true)
	let lastOffset = syncInstructionsView.getUint32(8, true)
	if (lastOffset == 0 && bufferIndex == 0) {
		return // not found
	}
	let buffer = buffers[bufferIndex]
	let startOffset
	// (Re)create the 4GB-window mapping if we have none, or the entry falls
	// outside the currently mapped window.
	if (!buffer || lastOffset < (startOffset = buffer.startOffset) || (lastOffset + lastSize > startOffset + 0x100000000)) {
		if (buffer)
			env.detachBuffer(buffer.buffer)
		// align the window start down to a 64KB boundary
		startOffset = (lastOffset >>> 16) * 0x10000
		console.log('make buffer for address', bufferIndex * 0x100000000 + startOffset)
		buffer = buffers[bufferIndex] = Buffer.from(getBufferForAddress(bufferIndex * 0x100000000 + startOffset))
		buffer.startOffset = startOffset
	}
	lastOffset -= startOffset
	return buffer.slice(lastOffset, lastOffset + lastSize)
}
|
|
382
|
+
|
|
383
|
+
getSizeBinaryFast(id) {
|
|
384
|
+
return lastSize = (writeTxn || (readTxnRenewed ? readTxn : renewReadTxn()))
|
|
385
|
+
.getBinaryUnsafe(this.db, id)
|
|
386
|
+
}
|
|
387
|
+
getString(id) {
|
|
388
|
+
let string = (writeTxn || (readTxnRenewed ? readTxn : renewReadTxn()))
|
|
389
|
+
.getUtf8(this.db, id)
|
|
390
|
+
if (string)
|
|
391
|
+
lastSize = string.length
|
|
392
|
+
return string
|
|
393
|
+
}
|
|
394
|
+
getBinaryFast(id) {
|
|
395
|
+
this.getSizeBinaryFast(id)
|
|
396
|
+
return lastSize === undefined ? undefined : this.db.unsafeBuffer.slice(0, lastSize)
|
|
397
|
+
}
|
|
398
|
+
getBinary(id) {
|
|
399
|
+
this.getSizeBinaryFast(id)
|
|
400
|
+
return lastSize === undefined ? undefined : Uint8ArraySlice.call(this.db.unsafeBuffer, 0, lastSize)
|
|
401
|
+
}
|
|
402
|
+
get(id) {
|
|
403
|
+
if (this.decoder) {
|
|
404
|
+
this.getSizeBinaryFast(id)
|
|
405
|
+
return lastSize === undefined ? undefined : this.decoder.decode(this.db.unsafeBuffer, lastSize)
|
|
406
|
+
}
|
|
407
|
+
if (this.encoding == 'binary')
|
|
408
|
+
return this.getBinary(id)
|
|
409
|
+
|
|
410
|
+
let result = this.getString(id)
|
|
411
|
+
if (result) {
|
|
412
|
+
if (this.encoding == 'json')
|
|
413
|
+
return JSON.parse(result)
|
|
414
|
+
}
|
|
415
|
+
return result
|
|
416
|
+
}
|
|
417
|
+
getEntry(id) {
|
|
418
|
+
let value = this.get(id)
|
|
419
|
+
if (value !== undefined) {
|
|
420
|
+
if (this.useVersions)
|
|
421
|
+
return {
|
|
422
|
+
value,
|
|
423
|
+
version: getLastVersion(),
|
|
424
|
+
//size: lastSize
|
|
425
|
+
}
|
|
426
|
+
else
|
|
427
|
+
return {
|
|
428
|
+
value,
|
|
429
|
+
//size: lastSize
|
|
430
|
+
}
|
|
431
|
+
}
|
|
432
|
+
}
|
|
433
|
+
// Public method delegating to the env-level resetReadTxn closure function
// (invalidates the shared read snapshot).
resetReadTxn() {
	resetReadTxn()
}
|
|
436
|
+
// Condition `callback`'s writes on `key` not existing; equivalent to
// ifVersion with the null (NO_EXIST_VERSION) condition.
ifNoExists(key, callback) {
	return this.ifVersion(key, null, callback)
}
|
|
439
|
+
/**
 * Make the writes queued inside `callback` conditional on `key` currently
 * having `version` (null means "must not exist"; undefined means no
 * condition). Returns a promise resolving to true when the condition held.
 */
ifVersion(key, version, callback) {
	if (typeof version != 'number') {
		if (version == null) {
			if (version === null)
				// sentinel understood by the native write batch as "no entry"
				version = -4.2434325325532E-199 // NO_EXIST_VERSION
			else {// if undefined, just do callback without any condition being added
				callback()
				// TODO: if we are inside another ifVersion, use that promise, or use ANY_VERSION
				return pendingBatch ? pendingBatch.unconditionalResults : Promise.resolve(true) // be consistent in returning a promise, indicate success
			}
		} else {
			throw new Error('Version must be a number or null')
		}
	}
	// push a [key, version] condition marker; operations queued by callback()
	// land after it and are applied only if the condition matches
	let scheduledOperations = this.getScheduledOperations()
	let index = scheduledOperations.push([key, version]) - 1
	try {
		callback()
		let commit = this.scheduleCommit()
		// result codes: 0 = success, 3 = bad key size
		return commit.results.then((writeResults) => {
			if (writeResults[index] === 0)
				return true
			if (writeResults[index] === 3) {
				throw new Error('The key size was 0 or too large')
			}
			return false
		})
	} finally {
		scheduledOperations.push(false) // reset condition
	}
}
|
|
470
|
+
/**
 * Test whether `key` exists; with a second argument, whether it exists with
 * that version (when useVersions) or with that exact value (dupSort stores,
 * checked via a duplicate-seeking cursor).
 */
doesExist(key, versionOrValue) {
	let txn
	try {
		if (writeTxn) {
			txn = writeTxn
		} else {
			txn = readTxnRenewed ? readTxn : renewReadTxn()
		}
		if (versionOrValue === undefined)
			return txn.getBinaryUnsafe(this.db, key) !== undefined
		else if (this.useVersions)
			// getBinaryUnsafe also records the entry's version for getLastVersion()
			return txn.getBinaryUnsafe(this.db, key) !== undefined && matches(getLastVersion(), versionOrValue)
		else {
			// value-match: encode the candidate the same way stored values are
			// encoded, then seek the exact (key, value) duplicate
			let cursor = new Cursor(txn, this.db)
			if (this.encoder) {
				versionOrValue = this.encoder.encode(versionOrValue)
			}
			if (typeof versionOrValue == 'string')
				versionOrValue = Buffer.from(versionOrValue)
			let result = cursor.goToDup(key, versionOrValue) !== undefined
			cursor.close()
			return result
		}
	} catch(error) {
		return handleError(error, this, txn, () => this.doesExist(key, versionOrValue))
	}
}
|
|
497
|
+
getScheduledOperations() {
|
|
498
|
+
if (!scheduledOperations) {
|
|
499
|
+
scheduledOperations = []
|
|
500
|
+
scheduledOperations.bytes = 0
|
|
501
|
+
}
|
|
502
|
+
if (scheduledOperations.store != this) {
|
|
503
|
+
// issue action to switch dbs
|
|
504
|
+
scheduledOperations.store = this
|
|
505
|
+
scheduledOperations.push(this.db)
|
|
506
|
+
}
|
|
507
|
+
return scheduledOperations
|
|
508
|
+
}
|
|
509
|
+
put(id, value, version, ifVersion) {
|
|
510
|
+
if (id.length > 1978) {
|
|
511
|
+
throw new Error('Key is larger than maximum key size (1978)')
|
|
512
|
+
}
|
|
513
|
+
this.writes++
|
|
514
|
+
if (writeTxn) {
|
|
515
|
+
if (ifVersion !== undefined) {
|
|
516
|
+
this.get(id)
|
|
517
|
+
let previousVersion = this.get(id) ? getLastVersion() : null
|
|
518
|
+
if (!matches(previousVersion, ifVersion)) {
|
|
519
|
+
return SYNC_PROMISE_FAIL
|
|
520
|
+
}
|
|
521
|
+
}
|
|
522
|
+
putSync.call(this, id, value, version)
|
|
523
|
+
return SYNC_PROMISE_RESULT
|
|
524
|
+
}
|
|
525
|
+
if (this.encoder) {
|
|
526
|
+
//if (!(value instanceof Uint8Array)) TODO: in a future version, directly store buffers that are provided
|
|
527
|
+
value = this.encoder.encode(value)
|
|
528
|
+
} else if (typeof value != 'string' && !(value instanceof Uint8Array))
|
|
529
|
+
throw new Error('Invalid value to put in database ' + value + ' (' + (typeof value) +'), consider using encoder')
|
|
530
|
+
let operations = this.getScheduledOperations()
|
|
531
|
+
let index = operations.push(ifVersion == null ? version == null ? [id, value] : [id, value, version] : [id, value, version, ifVersion]) - 1
|
|
532
|
+
// track the size of the scheduled operations (and include the approx size of the array structure too)
|
|
533
|
+
operations.bytes += (id.length || 6) + (value && value.length || 0) + 100
|
|
534
|
+
let commit = this.scheduleCommit()
|
|
535
|
+
return ifVersion === undefined ? commit.unconditionalResults : // TODO: Technically you can get a bad key if an array is passed in there is no ifVersion and still fail
|
|
536
|
+
commit.results.then((writeResults) => {
|
|
537
|
+
if (writeResults[index] === 0)
|
|
538
|
+
return true
|
|
539
|
+
if (writeResults[index] === 3) {
|
|
540
|
+
throw new Error('The key size was 0 or too large')
|
|
541
|
+
}
|
|
542
|
+
return false
|
|
543
|
+
})
|
|
544
|
+
}
|
|
545
|
+
/**
 * Synchronously write `value` at `id`, creating a short-lived write
 * transaction when none is active. Strings are stored as UTF-8; everything
 * else must be (or encode to) a Uint8Array.
 */
putSync(id, value, version) {
	if (id.length > 1978) {
		throw new Error('Key is larger than maximum key size (1978)')
	}
	let localTxn, hadWriteTxn = writeTxn
	try {
		this.writes++
		if (!writeTxn)
			localTxn = writeTxn = env.beginTxn()
		if (this.encoder)
			value = this.encoder.encode(value)
		if (typeof value == 'string') {
			writeTxn.putUtf8(this.db, id, value, version)
		} else {
			if (!(value instanceof Uint8Array)) {
				throw new Error('Invalid value type ' + typeof value + ' used ' + value)
			}
			writeTxn.putBinary(this.db, id, value, version)
		}
		if (localTxn) {
			// we created this transaction, so commit it and invalidate the
			// read snapshot so the write is visible
			writeTxn.commit()
			writeTxn = null
			resetReadTxn()
		}
	} catch(error) {
		if (hadWriteTxn)
			throw error // if we are in a transaction, the whole transaction probably needs to restart
		return handleError(error, this, localTxn, () => this.putSync(id, value, version))
	}
}
|
|
575
|
+
removeSync(id, ifVersionOrValue) {
|
|
576
|
+
if (id.length > 1978) {
|
|
577
|
+
throw new Error('Key is larger than maximum key size (1978)')
|
|
578
|
+
}
|
|
579
|
+
let localTxn, hadWriteTxn = writeTxn
|
|
580
|
+
try {
|
|
581
|
+
if (!writeTxn)
|
|
582
|
+
localTxn = writeTxn = env.beginTxn()
|
|
583
|
+
let deleteValue
|
|
584
|
+
if (ifVersionOrValue !== undefined) {
|
|
585
|
+
if (this.useVersions) {
|
|
586
|
+
let previousVersion = this.get(id) ? getLastVersion() : null
|
|
587
|
+
if (!matches(previousVersion, ifVersionOrValue))
|
|
588
|
+
return false
|
|
589
|
+
} else if (this.encoder)
|
|
590
|
+
deleteValue = this.encoder.encode(ifVersionOrValue)
|
|
591
|
+
else
|
|
592
|
+
deleteValue = ifVersionOrValue
|
|
593
|
+
}
|
|
594
|
+
this.writes++
|
|
595
|
+
let result
|
|
596
|
+
if (deleteValue)
|
|
597
|
+
result = writeTxn.del(this.db, id, deleteValue)
|
|
598
|
+
else
|
|
599
|
+
result = writeTxn.del(this.db, id)
|
|
600
|
+
if (localTxn) {
|
|
601
|
+
writeTxn.commit()
|
|
602
|
+
writeTxn = null
|
|
603
|
+
resetReadTxn()
|
|
604
|
+
}
|
|
605
|
+
return result // object found and deleted
|
|
606
|
+
} catch(error) {
|
|
607
|
+
if (hadWriteTxn)
|
|
608
|
+
throw error // if we are in a transaction, the whole transaction probably needs to restart
|
|
609
|
+
return handleError(error, this, localTxn, () => this.removeSync(id))
|
|
610
|
+
}
|
|
611
|
+
}
|
|
612
|
+
/**
 * Delete `id`, optionally conditioned on a version (useVersions) or exact
 * value. Inside a write transaction this is synchronous; otherwise the
 * delete is queued and a promise is returned (true on success, false when
 * the condition failed).
 */
remove(id, ifVersionOrValue) {
	if (id.length > 1978) {
		throw new Error('Key is larger than maximum key size (1978)')
	}
	this.writes++
	if (writeTxn) {
		// NOTE(review): removeSync here is a closure-level function, not
		// this.removeSync — confirm it is defined earlier in this file.
		if (removeSync.call(this, id, ifVersionOrValue) === false)
			return SYNC_PROMISE_FAIL
		return SYNC_PROMISE_RESULT
	}
	let scheduledOperations = this.getScheduledOperations()
	let operation
	// operation shapes understood by the batch writer:
	// [id] = plain delete; [id, undefined, undefined, version] = version-conditioned;
	// [id, value, true] = delete a specific duplicate value
	if (ifVersionOrValue === undefined)
		operation = [id]
	else if (this.useVersions)
		operation = [id, undefined, undefined, ifVersionOrValue] // version condition
	else {
		if (this.encoder)
			operation = [id, this.encoder.encode(ifVersionOrValue), true]
		else
			operation = [id, ifVersionOrValue, true]
	}
	let index = scheduledOperations.push(operation) - 1 // remove specific values
	scheduledOperations.bytes += (id.length || 6) + 100
	let commit = this.scheduleCommit()
	return ifVersionOrValue === undefined ? commit.unconditionalResults :
		commit.results.then((writeResults) => {
			if (writeResults[index] === 0)
				return true
			if (writeResults[index] === 3) {
				throw new Error('The key size was 0 or too large')
			}
			return false
		})
}
|
|
647
|
+
getValues(key, options) {
|
|
648
|
+
let defaultOptions = {
|
|
649
|
+
start: key,
|
|
650
|
+
valuesForKey: true
|
|
651
|
+
}
|
|
652
|
+
if (options && options.snapshot === false)
|
|
653
|
+
throw new Error('Can not disable snapshots for getValues')
|
|
654
|
+
return this.getRange(options ? Object.assign(defaultOptions, options) : defaultOptions)
|
|
655
|
+
}
|
|
656
|
+
getKeys(options) {
|
|
657
|
+
if (!options)
|
|
658
|
+
options = {}
|
|
659
|
+
options.values = false
|
|
660
|
+
return this.getRange(options)
|
|
661
|
+
}
|
|
662
|
+
getCount(options) {
|
|
663
|
+
if (!options)
|
|
664
|
+
options = {}
|
|
665
|
+
options.onlyCount = true
|
|
666
|
+
return this.getRange(options)[Symbol.iterator]()
|
|
667
|
+
}
|
|
668
|
+
getKeysCount(options) {
|
|
669
|
+
if (!options)
|
|
670
|
+
options = {}
|
|
671
|
+
options.onlyCount = true
|
|
672
|
+
options.values = false
|
|
673
|
+
return this.getRange(options)[Symbol.iterator]()
|
|
674
|
+
}
|
|
675
|
+
getValuesCount(key, options) {
|
|
676
|
+
if (!options)
|
|
677
|
+
options = {}
|
|
678
|
+
options.start = key
|
|
679
|
+
options.valuesForKey = true
|
|
680
|
+
options.onlyCount = true
|
|
681
|
+
return this.getRange(options)[Symbol.iterator]()
|
|
682
|
+
}
|
|
683
|
+
/**
 * Core range-query method: returns a lazy ArrayLikeIterable over entries
 * between options.start and options.end (exclusive), honoring reverse,
 * limit, offset, values/versions/valuesForKey and onlyCount flags.
 * Each call to the iterable opens an LMDB cursor; snapshot:false cursors
 * re-seek transparently when the shared read transaction is renewed.
 */
getRange(options) {
	let iterable = new ArrayLikeIterable()
	if (!options)
		options = {}
	let includeValues = options.values !== false
	let includeVersions = options.versions
	let valuesForKey = options.valuesForKey
	let limit = options.limit
	let db = this.db
	iterable[Symbol.iterator] = () => {
		// Default start/end to the extreme keys for this store's key type.
		let currentKey = options.start !== undefined ? options.start :
			(options.reverse ? this.keyIsUint32 ? 0xffffffff : this.keyIsBuffer ? LAST_BUFFER_KEY : LAST_KEY :
				this.keyIsUint32 ? 0 : this.keyIsBuffer ? FIRST_BUFFER_KEY : false)
		let endKey = options.end !== undefined ? options.end :
			(options.reverse ? this.keyIsUint32 ? 0 : this.keyIsBuffer ? FIRST_BUFFER_KEY : false :
				this.keyIsUint32 ? 0xffffffff : this.keyIsBuffer ? LAST_BUFFER_KEY : LAST_KEY)
		const reverse = options.reverse
		let count = 0
		let cursor, cursorRenewId
		let txn
		// (Re)open the cursor and position it at the first entry of the range.
		function resetCursor() {
			try {
				if (cursor)
					finishCursor()

				txn = writeTxn || (readTxnRenewed ? readTxn : renewReadTxn())
				cursor = new Cursor(txn, db)
				txn.cursorCount = (txn.cursorCount || 0) + 1 // track transaction so we always use the same one
				if (options.snapshot === false) {
					cursorRenewId = renewId // use shared read transaction
					txn.renewingCursorCount = (txn.renewingCursorCount || 0) + 1 // need to know how many are renewing cursors
				}
				if (reverse) {
					if (valuesForKey) {
						// position at key
						currentKey = cursor.goToKey(currentKey)
						// now move to next key and then previous entry to get to last value
						if (currentKey) {
							cursor.goToNextNoDup()
							cursor.goToPrev()
						}
					} else {
						// for reverse retrieval, goToRange is backwards because it positions at the key equal or *greater than* the provided key
						let nextKey = cursor.goToRange(currentKey)
						if (nextKey) {
							if (compareKey(nextKey, currentKey)) {
								// goToRange positioned us at a key after the provided key, so we need to go the previous key to be less than the provided key
								currentKey = cursor.goToPrev()
							} else
								currentKey = nextKey // they match, we are good, and currentKey is already correct
						} else {
							// likewise, we have been position beyond the end of the index, need to go to last
							currentKey = cursor.goToLast()
						}
					}
				} else {
					// for forward retrieval, goToRange does what we want
					currentKey = valuesForKey ? cursor.goToKey(currentKey) : cursor.goToRange(currentKey)
				}
				// TODO: Make a makeCompare(endKey)
			} catch(error) {
				if (cursor) {
					try {
						cursor.close()
					} catch(error) { }
				}
				return handleError(error, this, txn, resetCursor)
			}
		}
		resetCursor()
		// Skip `offset` entries before yielding.
		let offset = options.offset
		while(offset-- > 0 && currentKey !== undefined) {
			currentKey = reverse ?
				valuesForKey ? cursor.goToPrevDup() :
					includeValues ? cursor.goToPrev() : cursor.goToPrevNoDup() :
				valuesForKey ? cursor.goToNextDup() :
					includeValues ? cursor.goToNext() : cursor.goToNextNoDup()
		}
		if (options.onlyCount) {
			// Count mode: walk the range eagerly and return the number directly.
			while (!(currentKey === undefined ||
				(reverse ? compareKey(currentKey, endKey) <= 0 : compareKey(currentKey, endKey) >= 0) ||
				(count++ >= limit))) {
				currentKey = reverse ?
					valuesForKey ? cursor.goToPrevDup() :
						includeValues ? cursor.goToPrev() : cursor.goToPrevNoDup() :
					valuesForKey ? cursor.goToNextDup() :
						includeValues ? cursor.goToNext() : cursor.goToNextNoDup()
			}
			finishCursor()
			return count
		}

		let store = this
		// Close the cursor and, when this cursor was the last user of a
		// handed-off (onlyCursor) transaction, abort that transaction too.
		function finishCursor() {
			if (txn.isAborted)
				return
			cursor.close()
			if (cursorRenewId)
				txn.renewingCursorCount--
			if (--txn.cursorCount <= 0 && txn.onlyCursor) {
				let index = cursorTxns.indexOf(txn)
				if (index > -1)
					cursorTxns.splice(index, 1)
				txn.abort() // this is no longer main read txn, abort it now that we are done
				txn.isAborted = true
			}
		}
		return {
			next() {
				// snapshot:false cursors must re-seek after the shared read txn is renewed
				if (cursorRenewId && cursorRenewId != renewId)
					resetCursor()
				if (count > 0)
					currentKey = reverse ?
						valuesForKey ? cursor.goToPrevDup() :
							includeValues ? cursor.goToPrev() : cursor.goToPrevNoDup() :
						valuesForKey ? cursor.goToNextDup() :
							includeValues ? cursor.goToNext() : cursor.goToNextNoDup()
				if (currentKey === undefined ||
					(reverse ? compareKey(currentKey, endKey) <= 0 : compareKey(currentKey, endKey) >= 0) ||
					(count++ >= limit)) {
					finishCursor()
					return ITERATOR_DONE
				}
				if (includeValues) {
					let value
					if (store.decoder) {
						lastSize = value = cursor.getCurrentBinaryUnsafe()
						if (value)
							value = store.decoder.decode(store.db.unsafeBuffer, value)
					} else if (store.encoding == 'binary')
						value = cursor.getCurrentBinary()
					else {
						value = cursor.getCurrentUtf8()
						if (store.encoding == 'json' && value)
							value = JSON.parse(value)
					}
					if (includeVersions)
						return {
							value: {
								key: currentKey,
								value,
								version: getLastVersion()
							}
						}
					else if (valuesForKey)
						return {
							value
						}
					else
						return {
							value: {
								key: currentKey,
								value,
							}
						}
				} else if (includeVersions) {
					// read the entry solely to refresh getLastVersion()
					cursor.getCurrentBinaryUnsafe()
					return {
						value: {
							key: currentKey,
							version: getLastVersion()
						}
					}
				} else {
					return {
						value: currentKey
					}
				}
			},
			return() {
				finishCursor()
				return ITERATOR_DONE
			},
			throw() {
				finishCursor()
				return ITERATOR_DONE
			}
		}
	}
	return iterable
}
|
|
864
|
+
scheduleCommit() {
|
|
865
|
+
if (!pendingBatch) {
|
|
866
|
+
// pendingBatch promise represents the completion of the transaction
|
|
867
|
+
let whenCommitted = new Promise((resolve, reject) => {
|
|
868
|
+
runNextBatch = (sync) => {
|
|
869
|
+
if (!whenCommitted)
|
|
870
|
+
return
|
|
871
|
+
runNextBatch = null
|
|
872
|
+
if (pendingBatch) {
|
|
873
|
+
for (const store of stores) {
|
|
874
|
+
store.emit('beforecommit', { scheduledOperations })
|
|
875
|
+
}
|
|
876
|
+
}
|
|
877
|
+
clearTimeout(timeout)
|
|
878
|
+
currentCommit = whenCommitted
|
|
879
|
+
whenCommitted = null
|
|
880
|
+
pendingBatch = null
|
|
881
|
+
if (scheduledOperations || scheduledTransactions) {
|
|
882
|
+
// operations to perform, collect them as an array and start doing them
|
|
883
|
+
let operations = scheduledOperations || []
|
|
884
|
+
let transactions = scheduledTransactions
|
|
885
|
+
if (operations.appendAsyncTxn) {
|
|
886
|
+
operations.push(true)
|
|
887
|
+
}
|
|
888
|
+
scheduledOperations = null
|
|
889
|
+
scheduledTransactions = null
|
|
890
|
+
const writeBatch = () => {
|
|
891
|
+
let start = Date.now()
|
|
892
|
+
let results = Buffer.alloc(operations.length)
|
|
893
|
+
let continuedWriteTxn
|
|
894
|
+
let transactionResults
|
|
895
|
+
let transactionSetIndex = 0
|
|
896
|
+
let callback = async (error) => {
|
|
897
|
+
if (error === true) {
|
|
898
|
+
// resume batch transaction
|
|
899
|
+
if (!transactionResults) {
|
|
900
|
+
// get the transaction we will use
|
|
901
|
+
continuedWriteTxn = env.beginTxn(true)
|
|
902
|
+
transactionResults = new Array(transactions.length)
|
|
903
|
+
results.transactionResults = transactionResults
|
|
904
|
+
}
|
|
905
|
+
let transactionSet = transactions[transactionSetIndex]
|
|
906
|
+
let transactionSetResults = transactionResults[transactionSetIndex++] = []
|
|
907
|
+
let promises
|
|
908
|
+
for (let i = 0, l = transactionSet.length; i < l; i++) {
|
|
909
|
+
let userTxn = transactionSet[i]
|
|
910
|
+
let asChild = userTxn.asChild
|
|
911
|
+
if (asChild) {
|
|
912
|
+
if (promises) {
|
|
913
|
+
// must complete any outstanding transactions before proceeding
|
|
914
|
+
await Promise.all(promises)
|
|
915
|
+
promises = null
|
|
916
|
+
}
|
|
917
|
+
let childTxn = writeTxn = env.beginTxn(null, continuedWriteTxn)
|
|
918
|
+
try {
|
|
919
|
+
let result = userTxn.callback()
|
|
920
|
+
if (result && result.then) {
|
|
921
|
+
await result
|
|
922
|
+
}
|
|
923
|
+
if (result === ABORT)
|
|
924
|
+
childTxn.abort()
|
|
925
|
+
else
|
|
926
|
+
childTxn.commit()
|
|
927
|
+
transactionSetResults[(i << 1) + 1] = result
|
|
928
|
+
} catch(error) {
|
|
929
|
+
childTxn.abort()
|
|
930
|
+
if (!txnError(error, i))
|
|
931
|
+
return
|
|
932
|
+
}
|
|
933
|
+
} else {
|
|
934
|
+
writeTxn = continuedWriteTxn
|
|
935
|
+
try {
|
|
936
|
+
let result = userTxn()
|
|
937
|
+
if (result && result.then) {
|
|
938
|
+
if (!promises)
|
|
939
|
+
promises = []
|
|
940
|
+
transactionSetResults[(i << 1) + 1] = result
|
|
941
|
+
promises.push(result.catch(() => {
|
|
942
|
+
txnError(error, i)
|
|
943
|
+
}))
|
|
944
|
+
} else
|
|
945
|
+
transactionSetResults[(i << 1) + 1] = result
|
|
946
|
+
} catch(error) {
|
|
947
|
+
if (!txnError(error, i))
|
|
948
|
+
return
|
|
949
|
+
}
|
|
950
|
+
}
|
|
951
|
+
}
|
|
952
|
+
if (promises) { // finish any outstanding commit functions
|
|
953
|
+
await Promise.all(promises)
|
|
954
|
+
}
|
|
955
|
+
writeTxn = null
|
|
956
|
+
return env.continueBatch(0)
|
|
957
|
+
function txnError(error, i) {
|
|
958
|
+
if (error.message.startsWith('MDB_MAP_FULL')) {
|
|
959
|
+
env.continueBatch(-30792)
|
|
960
|
+
writeTxn = null
|
|
961
|
+
return false
|
|
962
|
+
}
|
|
963
|
+
if (error.message.startsWith('MDB_MAP_RESIZED')) {
|
|
964
|
+
env.continueBatch(-30785)
|
|
965
|
+
writeTxn = null
|
|
966
|
+
return false
|
|
967
|
+
}
|
|
968
|
+
// user exception
|
|
969
|
+
transactionSetResults[i << 1] = error
|
|
970
|
+
return true
|
|
971
|
+
}
|
|
972
|
+
}
|
|
973
|
+
let duration = Date.now() - start
|
|
974
|
+
this.averageTransactionTime = (this.averageTransactionTime * 3 + duration) / 4
|
|
975
|
+
//console.log('did batch', (duration) + 'ms', name, operations.length/*map(o => o[1].toString('binary')).join(',')*/)
|
|
976
|
+
resetReadTxn()
|
|
977
|
+
if (error) {
|
|
978
|
+
if (error.message == 'Interrupted batch')
|
|
979
|
+
// if the batch was interrupted by a sync transaction request we just have to restart it
|
|
980
|
+
return writeBatch()
|
|
981
|
+
try {
|
|
982
|
+
// see if we can recover from recoverable error (like full map with a resize)
|
|
983
|
+
handleError(error, this, null, writeBatch)
|
|
984
|
+
} catch(error) {
|
|
985
|
+
currentCommit = null
|
|
986
|
+
for (const store of stores) {
|
|
987
|
+
store.emit('aftercommit', { operations })
|
|
988
|
+
}
|
|
989
|
+
reject(error)
|
|
990
|
+
}
|
|
991
|
+
} else {
|
|
992
|
+
currentCommit = null
|
|
993
|
+
for (const store of stores) {
|
|
994
|
+
store.emit('aftercommit', { operations, results })
|
|
995
|
+
}
|
|
996
|
+
resolve(results)
|
|
997
|
+
}
|
|
998
|
+
}
|
|
999
|
+
try {
|
|
1000
|
+
if (sync === true) {
|
|
1001
|
+
env.batchWrite(operations, results)
|
|
1002
|
+
callback()
|
|
1003
|
+
} else
|
|
1004
|
+
env.batchWrite(operations, results, callback)
|
|
1005
|
+
} catch (error) {
|
|
1006
|
+
callback(error)
|
|
1007
|
+
}
|
|
1008
|
+
}
|
|
1009
|
+
try {
|
|
1010
|
+
writeBatch()
|
|
1011
|
+
} catch(error) {
|
|
1012
|
+
reject(error)
|
|
1013
|
+
}
|
|
1014
|
+
} else {
|
|
1015
|
+
resolve([])
|
|
1016
|
+
}
|
|
1017
|
+
}
|
|
1018
|
+
let timeout
|
|
1019
|
+
if (this.commitDelay > 0) {
|
|
1020
|
+
timeout = setTimeout(() => {
|
|
1021
|
+
when(currentCommit, () => whenCommitted && runNextBatch(), () => whenCommitted && runNextBatch())
|
|
1022
|
+
}, this.commitDelay)
|
|
1023
|
+
} else {
|
|
1024
|
+
timeout = runNextBatch.immediate = setImmediate(() => {
|
|
1025
|
+
when(currentCommit, () => whenCommitted && runNextBatch(), () => whenCommitted && runNextBatch())
|
|
1026
|
+
})
|
|
1027
|
+
}
|
|
1028
|
+
})
|
|
1029
|
+
pendingBatch = {
|
|
1030
|
+
results: whenCommitted,
|
|
1031
|
+
unconditionalResults: whenCommitted.then(() => true) // for returning from non-conditional operations
|
|
1032
|
+
}
|
|
1033
|
+
}
|
|
1034
|
+
return pendingBatch
|
|
1035
|
+
}
|
|
1036
|
+
|
|
1037
|
+
batch(operations) {
|
|
1038
|
+
/*if (writeTxn) {
|
|
1039
|
+
this.commitBatchNow(operations.map(operation => [this.db, operation.key, operation.value]))
|
|
1040
|
+
return Promise.resolve(true)
|
|
1041
|
+
}*/
|
|
1042
|
+
let scheduledOperations = this.getScheduledOperations()
|
|
1043
|
+
for (let operation of operations) {
|
|
1044
|
+
let value = operation.value
|
|
1045
|
+
scheduledOperations.push([operation.key, value])
|
|
1046
|
+
scheduledOperations.bytes += operation.key.length + (value && value.length || 0) + 200
|
|
1047
|
+
}
|
|
1048
|
+
return this.scheduleCommit().unconditionalResults
|
|
1049
|
+
}
|
|
1050
|
+
backup(path) {
|
|
1051
|
+
return new Promise((resolve, reject) => env.copy(path, true, (error) => {
|
|
1052
|
+
if (error) {
|
|
1053
|
+
reject(error)
|
|
1054
|
+
} else {
|
|
1055
|
+
resolve()
|
|
1056
|
+
}
|
|
1057
|
+
}))
|
|
1058
|
+
}
|
|
1059
|
+
close() {
|
|
1060
|
+
this.db.close()
|
|
1061
|
+
if (this.isRoot) {
|
|
1062
|
+
if (readTxn) {
|
|
1063
|
+
try {
|
|
1064
|
+
readTxn.abort()
|
|
1065
|
+
} catch(error) {}
|
|
1066
|
+
}
|
|
1067
|
+
readTxnRenewed = null
|
|
1068
|
+
env.close()
|
|
1069
|
+
}
|
|
1070
|
+
}
|
|
1071
|
+
getStats() {
|
|
1072
|
+
try {
|
|
1073
|
+
let stats = this.db.stat(readTxnRenewed ? readTxn : renewReadTxn())
|
|
1074
|
+
return stats
|
|
1075
|
+
}
|
|
1076
|
+
catch(error) {
|
|
1077
|
+
return handleError(error, this, readTxn, () => this.getStats())
|
|
1078
|
+
}
|
|
1079
|
+
}
|
|
1080
|
+
sync(callback) {
|
|
1081
|
+
return env.sync(callback || function(error) {
|
|
1082
|
+
if (error) {
|
|
1083
|
+
console.error(error)
|
|
1084
|
+
}
|
|
1085
|
+
})
|
|
1086
|
+
}
|
|
1087
|
+
deleteDB() {
|
|
1088
|
+
try {
|
|
1089
|
+
this.db.drop({
|
|
1090
|
+
justFreePages: false,
|
|
1091
|
+
txn: writeTxn,
|
|
1092
|
+
})
|
|
1093
|
+
} catch(error) {
|
|
1094
|
+
handleError(error, this, null, () => this.deleteDB())
|
|
1095
|
+
}
|
|
1096
|
+
}
|
|
1097
|
+
clear() {
|
|
1098
|
+
try {
|
|
1099
|
+
this.db.drop({
|
|
1100
|
+
justFreePages: true,
|
|
1101
|
+
txn: writeTxn,
|
|
1102
|
+
})
|
|
1103
|
+
} catch(error) {
|
|
1104
|
+
handleError(error, this, null, () => this.clear())
|
|
1105
|
+
}
|
|
1106
|
+
if (this.encoder && this.encoder.structures)
|
|
1107
|
+
this.encoder.structures = []
|
|
1108
|
+
|
|
1109
|
+
}
|
|
1110
|
+
// Delegates to the environment's readerCheck binding — presumably LMDB's
// stale-reader cleanup (mdb_reader_check); verify against the env binding.
readerCheck() {
	return env.readerCheck()
}
|
|
1113
|
+
readerList() {
|
|
1114
|
+
return env.readerList().join('')
|
|
1115
|
+
}
|
|
1116
|
+
setupSharedStructures() {
	// Builds the shared-structures hooks handed to the encoder: a reader,
	// a compare-and-swap style writer, and a flag forcing buffer copies
	// (reads here return unsafe/zero-copy buffers).
	const getStructures = () => {
		let lastVersion // because we are doing a read here, we may need to save and restore the lastVersion from the last read
		if (this.useVersions)
			lastVersion = getLastVersion()
		try {
			// read inside the active write txn when there is one, otherwise a (renewed) read txn
			let buffer = (writeTxn || (readTxnRenewed ? readTxn : renewReadTxn())).getBinary(this.db, this.sharedStructuresKey)
			if (this.useVersions)
				setLastVersion(lastVersion)
			return buffer ? this.encoder.decode(buffer) : []
		} catch(error) {
			// recoverable environment errors (e.g. a remap) retry the read
			return handleError(error, this, null, getStructures)
		}
	}
	return {
		// Persist `structures` only if the stored set still has `previousLength`
		// entries; returns false when another writer updated them first.
		saveStructures: (structures, previousLength) => {
			return this.transactionSync(() => {
				let existingStructuresBuffer = writeTxn.getBinary(this.db, this.sharedStructuresKey)
				let existingStructures = existingStructuresBuffer ? this.encoder.decode(existingStructuresBuffer) : []
				if (existingStructures.length != previousLength)
					return false // it changed, we need to indicate that we couldn't update
				writeTxn.putBinary(this.db, this.sharedStructuresKey, this.encoder.encode(structures))
			})
		},
		getStructures,
		copyBuffers: true // need to copy any embedded buffers that are found since we use unsafe buffers
	}
}
|
|
1144
|
+
}
|
|
1145
|
+
// if caching class overrides putSync, don't want to double call the caching code
|
|
1146
|
+
const putSync = LMDBStore.prototype.putSync
|
|
1147
|
+
const removeSync = LMDBStore.prototype.removeSync
|
|
1148
|
+
return options.cache ?
|
|
1149
|
+
new (CachingStore(LMDBStore))(options.name || null, options) :
|
|
1150
|
+
new LMDBStore(options.name || null, options)
|
|
1151
|
+
// Centralized recovery for LMDB errors: aborts any open write txn, resets
// read txns on fatal MDB_* errors, grows the map on MDB_MAP_FULL/RESIZED
// and retries via `retry()`, otherwise rethrows with the database name
// prefixed. NOTE(review): mutates module-level writeTxn/readTxn state.
function handleError(error, store, txn, retry) {
	try {
		if (writeTxn)
			writeTxn.abort() // best-effort; the txn may already be closed
	} catch(error) {}
	if (writeTxn)
		writeTxn = null

	// Fatal MDB_* errors — excluding the expected KEYEXIST/NOTFOUND results —
	// and closed-transaction errors invalidate the shared read transaction.
	if (error.message.startsWith('MDB_') &&
			!(error.message.startsWith('MDB_KEYEXIST') || error.message.startsWith('MDB_NOTFOUND')) ||
			error.message == 'The transaction is already closed.') {
		resetReadTxn() // separate out cursor-based read txns
		try {
			if (readTxn) {
				readTxn.abort()
				readTxn.isAborted = true
			}
		} catch(error) {}
		readTxn = null
	}
	if (error.message.startsWith('MDB_PROBLEM'))
		console.error(error)
	//if (error.message == 'The transaction is already closed.')
	//	return handleError(error, store, null, retry)
	if (error.message.startsWith('MDB_MAP_FULL') || error.message.startsWith('MDB_MAP_RESIZED')) {
		const oldSize = env.info().mapSize
		const newSize = error.message.startsWith('MDB_MAP_FULL') ?
			Math.floor(((1.08 + 3000 / Math.sqrt(oldSize)) * oldSize) / 0x100000) * 0x100000 : // increase size, more rapidly at first, and round to nearest 1 MB
			oldSize + 0x2000//Math.pow(2, (Math.round(Math.log2(oldSize)) + 1)) // for resized notifications, we try to align to doubling each time
		// let every store react to the remap before resizing the environment
		for (const store of stores) {
			store.emit('remap')
		}
		try {
			env.resize(newSize)
		} catch(error) {
			throw new Error(error.message + ' trying to set map size to ' + newSize)
		}
		return retry()
	}
	error.message = 'In database ' + name + ': ' + error.message
	throw error
}
|
|
1193
|
+
}
|
|
1194
|
+
|
|
1195
|
+
// Version-match predicate: a truthy previous version matches only an equal
// (loosely compared) ifVersion; a falsy previous version matches only a
// falsy ifVersion.
function matches(previousVersion, ifVersion){
	if (previousVersion)
		return ifVersion ? previousVersion == ifVersion : false
	return !ifVersion
}
|
|
1208
|
+
|
|
1209
|
+
// Compares two keys with the same type ordering used by ordered-binary,
// so in-memory comparisons agree with the stored key order.
function compareKey(a, b) {
	if (typeof a == 'object') {
		if (!a)
			return b == null ? 0 : -1
		if (a.compare) {
			// buffer-like key: defer to its own compare when the other side has one
			if (b == null)
				return 1
			return b.compare ? a.compare(b) : -1
		}
		if (b instanceof Array) {
			// element-wise comparison of two array keys
			let index = 0
			let elementComparison
			do {
				elementComparison = compareKey(a[index], b[index])
			} while (elementComparison == 0 && index++ <= a.length)
			return elementComparison
		}
		// array (or array-like) vs. scalar: compare the head element; a longer
		// array with an equal head sorts after the scalar
		const headComparison = compareKey(a[0], b)
		if (headComparison == 0 && a.length > 1)
			return 1
		return headComparison
	}
	if (typeof a == typeof b) {
		if (typeof a === 'symbol') {
			// registered symbols compare by their global key string
			a = Symbol.keyFor(a)
			b = Symbol.keyFor(b)
		}
		if (a < b)
			return -1
		return a === b ? 0 : 1
	}
	if (typeof b == 'object') {
		// `a` is a primitive here; arrays compare via the reversed call
		return b instanceof Array ? -compareKey(b, a) : 1
	}
	// differing primitive types: rank by the module's typeOrder table
	return typeOrder[typeof a] < typeOrder[typeof b] ? -1 : 1
}
|
|
1251
|
+
// Lightweight holder for a value read from the store, the version it was
// read at, and the database it came from.
class Entry {
	constructor(value, version, db) {
		Object.assign(this, { value, version, db })
	}
	// Stub: conditional put-if-unchanged; currently a no-op returning undefined.
	ifSamePut() {

	}
	// Stub: conditional remove-if-unchanged; currently a no-op returning undefined.
	ifSameRemove() {

	}
}
|
|
1264
|
+
// Public export of the key comparator defined above.
exports.compareKey = compareKey
// Relative ordering of primitive key types used by compareKey when the two
// keys have different primitive types; lower ranks sort first.
const typeOrder = {
	symbol: 0,
	undefined: 1,
	boolean: 2,
	number: 3,
	string: 4
}
// Returns `lastSize` — presumably the byte size of the most recently read
// entry; `lastSize` is assigned elsewhere in this module (not in this chunk).
exports.getLastEntrySize = function() {
	return lastSize
}
|