harperdb 3.2.1 → 3.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +20 -11
- package/bin/BinObjects.jsc +0 -0
- package/bin/harperdb.jsc +0 -0
- package/bin/install.jsc +0 -0
- package/bin/register.jsc +0 -0
- package/bin/run.jsc +0 -0
- package/bin/stop.jsc +0 -0
- package/bin/upgrade.jsc +0 -0
- package/bin/utility.jsc +0 -0
- package/bin/version.jsc +0 -0
- package/coverage/lcov.info +7142 -6704
- package/data_layer/CreateAttributeObject.jsc +0 -0
- package/data_layer/CreateTableObject.jsc +0 -0
- package/data_layer/DataLayerObjects.jsc +0 -0
- package/data_layer/DeleteBeforeObject.jsc +0 -0
- package/data_layer/DeleteObject.jsc +0 -0
- package/data_layer/DropAttributeObject.jsc +0 -0
- package/data_layer/InsertObject.jsc +0 -0
- package/data_layer/ReadTransactionLogObject.jsc +0 -0
- package/data_layer/SQLSearch.jsc +0 -0
- package/data_layer/SearchByConditionsObject.jsc +0 -0
- package/data_layer/SearchByHashObject.jsc +0 -0
- package/data_layer/SearchObject.jsc +0 -0
- package/data_layer/SqlSearchObject.jsc +0 -0
- package/data_layer/UpdateObject.jsc +0 -0
- package/data_layer/UpsertObject.jsc +0 -0
- package/data_layer/bulkLoad.jsc +0 -0
- package/data_layer/data_objects/BulkLoadObjects.jsc +0 -0
- package/data_layer/data_objects/UpsertObject.jsc +0 -0
- package/data_layer/delete.jsc +0 -0
- package/data_layer/export.jsc +0 -0
- package/data_layer/harperBridge/BridgeMethods.jsc +0 -0
- package/data_layer/harperBridge/bridgeUtility/checkForNewAttr.jsc +0 -0
- package/data_layer/harperBridge/bridgeUtility/convertOperationToTransaction.jsc +0 -0
- package/data_layer/harperBridge/bridgeUtility/evaluateTableGetAttributes.jsc +0 -0
- package/data_layer/harperBridge/bridgeUtility/insertUpdateReturnObj.jsc +0 -0
- package/data_layer/harperBridge/bridgeUtility/insertUpdateValidate.jsc +0 -0
- package/data_layer/harperBridge/harperBridge.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/LMDBBridge.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/DeleteTransactionsBeforeResults.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/lmdbCreateAttribute.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/lmdbCreateRecords.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/lmdbCreateSchema.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/lmdbCreateTable.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/lmdbDeleteRecords.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/lmdbDeleteRecordsBefore.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/lmdbDeleteTransactionLogsBefore.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/lmdbDropAttribute.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/lmdbDropSchema.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/lmdbDropTable.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/lmdbGetDataByHash.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/lmdbGetDataByValue.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/lmdbReadTransactionLog.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/lmdbSearchByConditions.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/lmdbSearchByHash.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/lmdbSearchByValue.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/lmdbUpdateRecords.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbMethods/lmdbUpsertRecords.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbUtility/LMDBCreateAttributeObject.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbUtility/LMDBDeleteTransactionObject.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbUtility/LMDBInsertTransactionObject.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbUtility/LMDBTransactionObject.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbUtility/LMDBUpdateTransactionObject.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbUtility/LMDBUpsertTransactionObject.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbUtility/TableSizeObject.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbUtility/ThreadSearchObject.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbUtility/initializeHashSearch.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbUtility/initializePaths.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbUtility/lmdbCheckForNewAttributes.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbUtility/lmdbCreateTransactionsEnvironment.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbUtility/lmdbDropAllAttributes.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbUtility/lmdbGetTableSize.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbUtility/lmdbProcessRows.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbUtility/lmdbSearch.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbUtility/lmdbThreadSearch.jsc +0 -0
- package/data_layer/harperBridge/lmdbBridge/lmdbUtility/lmdbWriteTransaction.jsc +0 -0
- package/data_layer/hdbInfoController.jsc +0 -0
- package/data_layer/insert.jsc +0 -0
- package/data_layer/readTransactionLog.jsc +0 -0
- package/data_layer/schema.jsc +0 -0
- package/data_layer/schemaDescribe.jsc +0 -0
- package/data_layer/search.jsc +0 -0
- package/data_layer/update.jsc +0 -0
- package/events/ClusterStatusEmitter.jsc +0 -0
- package/events/SioServerStoppedEvent.jsc +0 -0
- package/events/SocketClusterStatusEmitter.jsc +0 -0
- package/license/LICENSE +91 -1
- package/node_modules/@msgpackr-extract/msgpackr-extract-linux-x64/README.md +1 -0
- package/node_modules/{node-addon-api/src/nothing.c → @msgpackr-extract/msgpackr-extract-linux-x64/index.js} +0 -0
- package/node_modules/@msgpackr-extract/msgpackr-extract-linux-x64/node.abi93.glibc.node +0 -0
- package/node_modules/@msgpackr-extract/msgpackr-extract-linux-x64/node.abi93.musl.node +0 -0
- package/node_modules/@msgpackr-extract/msgpackr-extract-linux-x64/node.napi.glibc.node +0 -0
- package/node_modules/@msgpackr-extract/msgpackr-extract-linux-x64/node.napi.musl.node +0 -0
- package/node_modules/@msgpackr-extract/msgpackr-extract-linux-x64/package.json +53 -0
- package/node_modules/{msgpackr-extract → lmdb-store}/.github/workflows/prebuild.yml +9 -10
- package/node_modules/lmdb-store/.idea/lmdb-store.iml +12 -0
- package/node_modules/lmdb-store/.idea/misc.xml +6 -0
- package/node_modules/lmdb-store/.idea/modules.xml +8 -0
- package/node_modules/lmdb-store/.idea/workspace.xml +4 -0
- package/node_modules/lmdb-store/README.md +393 -388
- package/node_modules/lmdb-store/benchmark/index.js +162 -162
- package/node_modules/lmdb-store/binding.gyp +79 -88
- package/node_modules/lmdb-store/caching.js +113 -113
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/COPYRIGHT +20 -20
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/Doxyfile +1631 -1631
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/LICENSE +47 -47
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/chacha8.c +183 -183
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/chacha8.h +14 -14
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/crypto.c +121 -121
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/intro.doc +192 -192
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mdb.c +12125 -12125
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mdb_copy.1 +74 -74
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mdb_copy.c +106 -106
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mdb_drop.1 +53 -53
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mdb_drop.c +154 -154
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mdb_dump.1 +94 -94
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mdb_dump.c +333 -333
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mdb_load.1 +97 -97
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mdb_load.c +530 -530
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mdb_stat.1 +83 -83
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mdb_stat.c +276 -276
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/midl.c +452 -452
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/midl.h +208 -208
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/module.c +101 -101
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/module.h +16 -16
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mtest.c +178 -178
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mtest2.c +124 -124
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mtest3.c +133 -133
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mtest4.c +168 -168
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mtest5.c +135 -135
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mtest6.c +141 -141
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mtest_enc.c +190 -190
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mtest_enc2.c +189 -189
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/mtest_remap.c +177 -177
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/sample-bdb.txt +73 -73
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/sample-mdb.txt +62 -62
- package/node_modules/lmdb-store/dependencies/lmdb/libraries/liblmdb/tooltag +27 -27
- package/node_modules/lmdb-store/dependencies/lz4/LICENSE +11 -11
- package/node_modules/lmdb-store/dependencies/lz4/lib/README.md +137 -137
- package/node_modules/lmdb-store/dependencies/lz4/lib/dll/example/README.md +69 -69
- package/node_modules/lmdb-store/dependencies/lz4/lib/lz4frame.c +1899 -1899
- package/node_modules/lmdb-store/dependencies/lz4/lib/xxhash.c +1030 -1030
- package/node_modules/lmdb-store/dependencies/lz4/lib/xxhash.h +328 -328
- package/node_modules/lmdb-store/dist/index.cjs +2591 -0
- package/node_modules/lmdb-store/dist/index.cjs.map +1 -0
- package/node_modules/lmdb-store/index.d.ts +323 -323
- package/node_modules/lmdb-store/index.js +1274 -1274
- package/node_modules/lmdb-store/index.mjs +3 -3
- package/node_modules/lmdb-store/package.json +16 -11
- package/node_modules/lmdb-store/prebuilds/darwin-arm64/electron.abi98.node +0 -0
- package/node_modules/lmdb-store/prebuilds/darwin-arm64/node.abi83.node +0 -0
- package/node_modules/lmdb-store/prebuilds/darwin-arm64/node.abi93.node +0 -0
- package/node_modules/lmdb-store/prebuilds/darwin-x64/electron.abi98.node +0 -0
- package/node_modules/lmdb-store/prebuilds/darwin-x64/node.abi83.node +0 -0
- package/node_modules/lmdb-store/prebuilds/darwin-x64/node.abi93.node +0 -0
- package/node_modules/lmdb-store/prebuilds/linux-arm64/electron.abi98.node +0 -0
- package/node_modules/lmdb-store/prebuilds/linux-arm64/node.abi83.node +0 -0
- package/node_modules/lmdb-store/prebuilds/linux-arm64/node.abi93.node +0 -0
- package/node_modules/lmdb-store/prebuilds/linux-x64/electron.abi98.node +0 -0
- package/node_modules/lmdb-store/prebuilds/linux-x64/node.abi83.musl.node +0 -0
- package/node_modules/lmdb-store/prebuilds/linux-x64/node.abi83.node +0 -0
- package/node_modules/lmdb-store/prebuilds/linux-x64/node.abi93.musl.node +0 -0
- package/node_modules/lmdb-store/prebuilds/linux-x64/node.abi93.node +0 -0
- package/node_modules/lmdb-store/prebuilds/win32-x64/electron.abi98.node +0 -0
- package/node_modules/lmdb-store/prebuilds/win32-x64/node.abi83.node +0 -0
- package/node_modules/lmdb-store/prebuilds/win32-x64/node.abi93.node +0 -0
- package/node_modules/lmdb-store/src/compression.cpp +181 -181
- package/node_modules/lmdb-store/src/cursor.cpp +407 -407
- package/node_modules/lmdb-store/src/dbi.cpp +354 -354
- package/node_modules/lmdb-store/src/env.cpp +1134 -1134
- package/node_modules/lmdb-store/src/misc.cpp +528 -528
- package/node_modules/lmdb-store/src/node-lmdb.cpp +44 -44
- package/node_modules/lmdb-store/src/node-lmdb.h +965 -965
- package/node_modules/lmdb-store/src/ordered-binary.cpp +337 -337
- package/node_modules/lmdb-store/src/txn.cpp +513 -513
- package/node_modules/lmdb-store/src/v8-fast-api-calls.h +419 -419
- package/node_modules/lmdb-store/src/windows.c +30 -30
- package/node_modules/lmdb-store/test/index.test.js +584 -584
- package/node_modules/lmdb-store/test/node-lmdb.test.js +1525 -1525
- package/node_modules/lmdb-store/test/threads.js +100 -100
- package/node_modules/lmdb-store/util/ArrayLikeIterable.js +136 -136
- package/node_modules/lmdb-store/util/WeakValueMap.js +40 -40
- package/node_modules/lmdb-store/util/upgrade-lmdb.js +46 -46
- package/node_modules/lmdb-store/util/when.js +8 -8
- package/node_modules/microtime/.github/workflows/release.yml +76 -0
- package/node_modules/microtime/.github/workflows/test.yml +46 -0
- package/node_modules/microtime/README.md +0 -2
- package/node_modules/microtime/binding.gyp +28 -10
- package/node_modules/microtime/package.json +25 -20
- package/node_modules/microtime/prebuilds/darwin-x64+arm64/electron.napi.node +0 -0
- package/node_modules/microtime/prebuilds/darwin-x64+arm64/node.napi.node +0 -0
- package/node_modules/microtime/prebuilds/linux-arm/node.napi.armv7.node +0 -0
- package/node_modules/microtime/prebuilds/linux-arm64/node.napi.armv8.node +0 -0
- package/node_modules/microtime/prebuilds/linux-x64/electron.napi.node +0 -0
- package/node_modules/microtime/prebuilds/linux-x64/node.napi.node +0 -0
- package/node_modules/microtime/prebuilds/win32-ia32/electron.napi.node +0 -0
- package/node_modules/microtime/prebuilds/win32-ia32/node.napi.node +0 -0
- package/node_modules/microtime/prebuilds/win32-x64/electron.napi.node +0 -0
- package/node_modules/microtime/prebuilds/win32-x64/node.napi.node +0 -0
- package/node_modules/msgpackr/dist/index.js +1929 -1917
- package/node_modules/msgpackr/dist/index.min.js +67 -68
- package/node_modules/msgpackr/dist/node.cjs +1994 -1980
- package/node_modules/msgpackr/dist/test.js +683 -1235
- package/node_modules/msgpackr/index.d.ts +23 -12
- package/node_modules/msgpackr/node-index.js +23 -21
- package/node_modules/msgpackr/pack.js +935 -931
- package/node_modules/msgpackr/package.json +24 -12
- package/node_modules/msgpackr/unpack.d.ts +52 -50
- package/node_modules/msgpackr/unpack.js +1061 -1053
- package/node_modules/msgpackr-extract/bin/download-prebuilds.js +11 -0
- package/node_modules/msgpackr-extract/binding.gyp +22 -5
- package/node_modules/msgpackr-extract/index.js +1 -1
- package/node_modules/msgpackr-extract/package.json +46 -21
- package/node_modules/msgpackr-extract/src/.vs/ProjectSettings.json +3 -0
- package/node_modules/msgpackr-extract/src/.vs/VSWorkspaceState.json +7 -0
- package/node_modules/msgpackr-extract/src/.vs/slnx.sqlite +0 -0
- package/node_modules/msgpackr-extract/src/.vs/src/v16/.suo +0 -0
- package/node_modules/msgpackr-extract/src/.vs/src/v16/Browse.VC.db +0 -0
- package/node_modules/msgpackr-extract/{prebuilds/darwin-x64/node.abi72.node → src/.vs/src/v16/Browse.VC.db-shm} +0 -0
- package/node_modules/msgpackr-extract/src/.vs/src/v16/Browse.VC.db-wal +0 -0
- package/node_modules/msgpackr-extract/src/extract.cpp +272 -269
- package/node_modules/nan/package.json +0 -1
- package/node_modules/node-addon-api/README.md +146 -53
- package/node_modules/node-addon-api/common.gypi +21 -0
- package/node_modules/node-addon-api/except.gypi +25 -0
- package/node_modules/node-addon-api/index.js +7 -41
- package/node_modules/node-addon-api/napi-inl.deprecated.h +8 -8
- package/node_modules/node-addon-api/napi-inl.h +2795 -633
- package/node_modules/node-addon-api/napi.h +1547 -597
- package/node_modules/node-addon-api/node_api.gyp +9 -0
- package/node_modules/node-addon-api/noexcept.gypi +26 -0
- package/node_modules/node-addon-api/nothing.c +0 -0
- package/node_modules/node-addon-api/package-support.json +21 -0
- package/node_modules/node-addon-api/package.json +203 -13
- package/node_modules/node-addon-api/tools/README.md +12 -6
- package/node_modules/node-addon-api/tools/clang-format.js +71 -0
- package/node_modules/node-addon-api/tools/conversion.js +4 -8
- package/node_modules/node-addon-api/tools/eslint-format.js +71 -0
- package/node_modules/node-gyp-build/README.md +17 -14
- package/node_modules/node-gyp-build/bin.js +28 -15
- package/node_modules/node-gyp-build/index.js +145 -34
- package/node_modules/node-gyp-build/package.json +18 -15
- package/node_modules/{lmdb-store/node_modules/node-gyp-build → node-gyp-build-optional-packages}/LICENSE +0 -0
- package/node_modules/{lmdb-store/node_modules/node-gyp-build → node-gyp-build-optional-packages}/README.md +0 -0
- package/node_modules/{lmdb-store/node_modules/node-gyp-build → node-gyp-build-optional-packages}/bin.js +1 -1
- package/node_modules/{lmdb-store/node_modules/node-gyp-build → node-gyp-build-optional-packages}/build-test.js +0 -0
- package/node_modules/{lmdb-store/node_modules/node-gyp-build → node-gyp-build-optional-packages}/index.js +17 -11
- package/node_modules/{lmdb-store/node_modules/node-gyp-build → node-gyp-build-optional-packages}/optional.js +0 -0
- package/node_modules/{msgpackr-extract/node_modules/node-gyp-build → node-gyp-build-optional-packages}/package.json +17 -17
- package/package.json +12 -11
- package/security/JWTObjects.jsc +0 -0
- package/security/auth.jsc +0 -0
- package/security/cryptoHash.jsc +0 -0
- package/security/data_objects/PermissionAttributeResponseObject.jsc +0 -0
- package/security/data_objects/PermissionResponseObject.jsc +0 -0
- package/security/data_objects/PermissionTableResponseObject.jsc +0 -0
- package/security/permissionsTranslator.jsc +0 -0
- package/security/role.jsc +0 -0
- package/security/tokenAuthentication.jsc +0 -0
- package/security/user.jsc +0 -0
- package/server/ClusteringOriginObject.jsc +0 -0
- package/server/JobObject.jsc +0 -0
- package/server/clustering/ClusterStatusObject.jsc +0 -0
- package/server/clustering/NodeObject.jsc +0 -0
- package/server/clustering/clusterUtilities.jsc +0 -0
- package/server/configuration.jsc +0 -0
- package/server/customFunctions/customFunctionsServer.jsc +0 -0
- package/server/customFunctions/helpers/getCORSOptions.jsc +0 -0
- package/server/customFunctions/helpers/getHeaderTimeoutConfig.jsc +0 -0
- package/server/customFunctions/helpers/getServerOptions.jsc +0 -0
- package/server/customFunctions/operations.jsc +0 -0
- package/server/customFunctions/operationsValidation.jsc +0 -0
- package/server/harperdb/hdbServer.jsc +0 -0
- package/server/ipc/IPCClient.jsc +0 -0
- package/server/ipc/hdbIpcServer.jsc +0 -0
- package/server/ipc/serverHandlers.jsc +0 -0
- package/server/ipc/utility/IPCEventObject.jsc +0 -0
- package/server/ipc/utility/ipcUtils.jsc +0 -0
- package/server/jobRunner.jsc +0 -0
- package/server/jobThread.jsc +0 -0
- package/server/jobs.jsc +0 -0
- package/server/serverHelpers/OperationFunctionObject.jsc +0 -0
- package/server/serverHelpers/requestTimePlugin.jsc +0 -0
- package/server/serverHelpers/serverHandlers.jsc +0 -0
- package/server/serverHelpers/serverUtilities.jsc +0 -0
- package/server/socketcluster/Server.jsc +0 -0
- package/server/socketcluster/broker.jsc +0 -0
- package/server/socketcluster/connector/HDBSocketConnector.jsc +0 -0
- package/server/socketcluster/connector/InterNodeSocketConnector.jsc +0 -0
- package/server/socketcluster/connector/SocketConnector.jsc +0 -0
- package/server/socketcluster/connector/spawnSCConnection.jsc +0 -0
- package/server/socketcluster/decisionMatrix/CoreDecisionMatrix.jsc +0 -0
- package/server/socketcluster/decisionMatrix/DecisionMatrixIF.jsc +0 -0
- package/server/socketcluster/decisionMatrix/rules/AssignToHdbChildWorkerRule.jsc +0 -0
- package/server/socketcluster/decisionMatrix/rules/CallRoomMsgHandlerRule.jsc +0 -0
- package/server/socketcluster/decisionMatrix/rules/CleanDataObjectRule.jsc +0 -0
- package/server/socketcluster/decisionMatrix/rules/CommandCollection.jsc +0 -0
- package/server/socketcluster/decisionMatrix/rules/DummyRule.jsc +0 -0
- package/server/socketcluster/decisionMatrix/rules/RulesIF.jsc +0 -0
- package/server/socketcluster/decisionMatrix/rules/StripHdbHeaderRule.jsc +0 -0
- package/server/socketcluster/decisionMatrix/rules/TestRule.jsc +0 -0
- package/server/socketcluster/handlers/NodeConnectionsHandler.jsc +0 -0
- package/server/socketcluster/handlers/SCServer.jsc +0 -0
- package/server/socketcluster/handlers/ServerSocket.jsc +0 -0
- package/server/socketcluster/interNodeConnectionLauncher.jsc +0 -0
- package/server/socketcluster/messageQueue/MessageQueueIF.jsc +0 -0
- package/server/socketcluster/middleware/AuthMiddleware.jsc +0 -0
- package/server/socketcluster/middleware/ConnectionNameCheckMiddleware.jsc +0 -0
- package/server/socketcluster/middleware/GenericMiddleware.jsc +0 -0
- package/server/socketcluster/middleware/MessagePrepMiddleware.jsc +0 -0
- package/server/socketcluster/middleware/MiddlewareFactory.jsc +0 -0
- package/server/socketcluster/middleware/MiddlewareIF.jsc +0 -0
- package/server/socketcluster/middleware/OriginatorCheckMiddleware.jsc +0 -0
- package/server/socketcluster/middleware/RequestDataValidMiddleware.jsc +0 -0
- package/server/socketcluster/middleware/StampOriginatorMiddleware.jsc +0 -0
- package/server/socketcluster/middleware/StampRequestMiddleware.jsc +0 -0
- package/server/socketcluster/observer/EventableIF.jsc +0 -0
- package/server/socketcluster/room/AddUserRoom.jsc +0 -0
- package/server/socketcluster/room/AlterUserRoom.jsc +0 -0
- package/server/socketcluster/room/CoreRoom.jsc +0 -0
- package/server/socketcluster/room/CreateAttributeRoom.jsc +0 -0
- package/server/socketcluster/room/CreateSchemaRoom.jsc +0 -0
- package/server/socketcluster/room/CreateTableRoom.jsc +0 -0
- package/server/socketcluster/room/DropUserRoom.jsc +0 -0
- package/server/socketcluster/room/HDBNodeRoom.jsc +0 -0
- package/server/socketcluster/room/RoomIF.jsc +0 -0
- package/server/socketcluster/room/RoomMessageObjects.jsc +0 -0
- package/server/socketcluster/room/UsersRoom.jsc +0 -0
- package/server/socketcluster/room/WatchHDBWorkersRoom.jsc +0 -0
- package/server/socketcluster/room/WorkerRoom.jsc +0 -0
- package/server/socketcluster/room/roomFactory.jsc +0 -0
- package/server/socketcluster/socketClusterObjects.jsc +0 -0
- package/server/socketcluster/types.jsc +0 -0
- package/server/socketcluster/util/clusterData.jsc +0 -0
- package/server/socketcluster/util/socketClusterUtils.jsc +0 -0
- package/server/socketcluster/worker/ClusterWorker.jsc +0 -0
- package/server/socketcluster/worker/WorkerIF.jsc +0 -0
- package/server/socketcluster/worker/WorkerObjects.jsc +0 -0
- package/server/transactToClusteringUtilities.jsc +0 -0
- package/sqlTranslator/SelectValidator.jsc +0 -0
- package/sqlTranslator/alasqlFunctionImporter.jsc +0 -0
- package/sqlTranslator/conditionPatterns.jsc +0 -0
- package/sqlTranslator/deleteTranslator.jsc +0 -0
- package/sqlTranslator/index.jsc +0 -0
- package/sqlTranslator/sql_statement_bucket.jsc +0 -0
- package/upgrade/EnvironmentVariable.jsc +0 -0
- package/upgrade/UpgradeDirective.jsc +0 -0
- package/upgrade/UpgradeObjects.jsc +0 -0
- package/upgrade/directives/3-0-0.jsc +0 -0
- package/upgrade/directives/3-1-0.jsc +0 -0
- package/upgrade/directives/directivesController.jsc +0 -0
- package/upgrade/directives/upgrade_scripts/3_0_0_reindex_script.jsc +0 -0
- package/upgrade/directivesManager.jsc +0 -0
- package/upgrade/lmdb/nodeLMDB/DBIDefinition.jsc +0 -0
- package/upgrade/lmdb/nodeLMDB/OpenDBIObject.jsc +0 -0
- package/upgrade/lmdb/nodeLMDB/OpenEnvironmentObject.jsc +0 -0
- package/upgrade/lmdb/nodeLMDB/commonErrors.jsc +0 -0
- package/upgrade/lmdb/nodeLMDB/commonUtility.jsc +0 -0
- package/upgrade/lmdb/nodeLMDB/environmentUtility.jsc +0 -0
- package/upgrade/lmdb/nodeLMDB/terms.jsc +0 -0
- package/upgrade/upgradePrompt.jsc +0 -0
- package/upgrade/upgradeUtilities.jsc +0 -0
- package/utility/AWS/AWSConnector.jsc +0 -0
- package/utility/OperationFunctionCaller.jsc +0 -0
- package/utility/common_utils.jsc +0 -0
- package/utility/environment/SystemInformationObject.jsc +0 -0
- package/utility/environment/SystemInformationOperation.jsc +0 -0
- package/utility/environment/environmentManager.jsc +0 -0
- package/utility/environment/systemInformation.jsc +0 -0
- package/utility/errors/commonErrors.jsc +0 -0
- package/utility/errors/hdbError.jsc +0 -0
- package/utility/functions/date/dateFunctions.jsc +0 -0
- package/utility/functions/geo.jsc +0 -0
- package/utility/functions/math/avg.jsc +0 -0
- package/utility/functions/math/count.jsc +0 -0
- package/utility/functions/sql/alaSQLExtension.jsc +0 -0
- package/utility/functions/string/compare.jsc +0 -0
- package/utility/globalSchema.jsc +0 -0
- package/utility/hdbTerms.jsc +0 -0
- package/utility/install/checkJWTTokensExist.jsc +0 -0
- package/utility/install/installer.jsc +0 -0
- package/utility/install_user_permission.jsc +0 -0
- package/utility/lmdb/DBIDefinition.jsc +0 -0
- package/utility/lmdb/DeleteRecordsResponseObject.jsc +0 -0
- package/utility/lmdb/InsertRecordsResponseObject.jsc +0 -0
- package/utility/lmdb/OpenDBIObject.jsc +0 -0
- package/utility/lmdb/OpenEnvironmentObject.jsc +0 -0
- package/utility/lmdb/UpdateRecordsResponseObject.jsc +0 -0
- package/utility/lmdb/UpsertRecordsResponseObject.jsc +0 -0
- package/utility/lmdb/cleanLMDBMap.jsc +0 -0
- package/utility/lmdb/commonUtility.jsc +0 -0
- package/utility/lmdb/deleteUtility.jsc +0 -0
- package/utility/lmdb/environmentUtility.jsc +0 -0
- package/utility/lmdb/searchCursorFunctions.jsc +0 -0
- package/utility/lmdb/searchUtility.jsc +0 -0
- package/utility/lmdb/terms.jsc +0 -0
- package/utility/lmdb/writeUtility.jsc +0 -0
- package/utility/logging/harper_logger.jsc +0 -0
- package/utility/mount_hdb.jsc +0 -0
- package/utility/npmUtilities.jsc +0 -0
- package/utility/operation_authorization.jsc +0 -0
- package/utility/password.jsc +0 -0
- package/utility/pm2/servicesConfig.jsc +0 -0
- package/utility/pm2/utilityFunctions.jsc +0 -0
- package/utility/psList.jsc +0 -0
- package/utility/registration/hdb_license.jsc +0 -0
- package/utility/registration/licenseObjects.jsc +0 -0
- package/utility/registration/registrationHandler.jsc +0 -0
- package/utility/scripts/restartHdb.jsc +0 -0
- package/utility/signalling.jsc +0 -0
- package/utility/system_info.jsc +0 -0
- package/validation/bulkDeleteValidator.jsc +0 -0
- package/validation/check_permissions.jsc +0 -0
- package/validation/clustering/configureValidator.jsc +0 -0
- package/validation/common_validators.jsc +0 -0
- package/validation/conditionalDeleteValidator.jsc +0 -0
- package/validation/deleteValidator.jsc +0 -0
- package/validation/fileLoadValidator.jsc +0 -0
- package/validation/insertValidator.jsc +0 -0
- package/validation/nodeSubscriptionValidator.jsc +0 -0
- package/validation/nodeValidator.jsc +0 -0
- package/validation/readLogValidator.jsc +0 -0
- package/validation/registration/license_key_object.jsc +0 -0
- package/validation/role_validation.jsc +0 -0
- package/validation/schemaMetadataValidator.jsc +0 -0
- package/validation/schema_validator.jsc +0 -0
- package/validation/searchValidator.jsc +0 -0
- package/validation/user_validation.jsc +0 -0
- package/validation/validationWrapper.jsc +0 -0
- package/node_modules/lmdb-store/build/Makefile +0 -324
- package/node_modules/lmdb-store/build/Release/.deps/Release/lmdb-store.node.d +0 -1
- package/node_modules/lmdb-store/build/Release/.deps/Release/obj.target/lmdb-store/dependencies/lmdb/libraries/liblmdb/chacha8.o.d +0 -6
- package/node_modules/lmdb-store/build/Release/.deps/Release/obj.target/lmdb-store/dependencies/lmdb/libraries/liblmdb/mdb.o.d +0 -8
- package/node_modules/lmdb-store/build/Release/.deps/Release/obj.target/lmdb-store/dependencies/lmdb/libraries/liblmdb/midl.o.d +0 -8
- package/node_modules/lmdb-store/build/Release/.deps/Release/obj.target/lmdb-store/dependencies/lz4/lib/lz4.o.d +0 -5
- package/node_modules/lmdb-store/build/Release/.deps/Release/obj.target/lmdb-store/src/compression.o.d +0 -72
- package/node_modules/lmdb-store/build/Release/.deps/Release/obj.target/lmdb-store/src/cursor.o.d +0 -73
- package/node_modules/lmdb-store/build/Release/.deps/Release/obj.target/lmdb-store/src/dbi.o.d +0 -73
- package/node_modules/lmdb-store/build/Release/.deps/Release/obj.target/lmdb-store/src/env.o.d +0 -73
- package/node_modules/lmdb-store/build/Release/.deps/Release/obj.target/lmdb-store/src/misc.o.d +0 -73
- package/node_modules/lmdb-store/build/Release/.deps/Release/obj.target/lmdb-store/src/node-lmdb.o.d +0 -73
- package/node_modules/lmdb-store/build/Release/.deps/Release/obj.target/lmdb-store/src/ordered-binary.o.d +0 -73
- package/node_modules/lmdb-store/build/Release/.deps/Release/obj.target/lmdb-store/src/txn.o.d +0 -73
- package/node_modules/lmdb-store/build/Release/.deps/Release/obj.target/lmdb-store/src/windows.o.d +0 -3
- package/node_modules/lmdb-store/build/Release/.deps/Release/obj.target/lmdb-store.node.d +0 -1
- package/node_modules/lmdb-store/build/Release/lmdb-store.node +0 -0
- package/node_modules/lmdb-store/build/Release/obj.target/lmdb-store/dependencies/lmdb/libraries/liblmdb/chacha8.o +0 -0
- package/node_modules/lmdb-store/build/Release/obj.target/lmdb-store/dependencies/lmdb/libraries/liblmdb/mdb.o +0 -0
- package/node_modules/lmdb-store/build/Release/obj.target/lmdb-store/dependencies/lmdb/libraries/liblmdb/midl.o +0 -0
- package/node_modules/lmdb-store/build/Release/obj.target/lmdb-store/dependencies/lz4/lib/lz4.o +0 -0
- package/node_modules/lmdb-store/build/Release/obj.target/lmdb-store/src/compression.o +0 -0
- package/node_modules/lmdb-store/build/Release/obj.target/lmdb-store/src/cursor.o +0 -0
- package/node_modules/lmdb-store/build/Release/obj.target/lmdb-store/src/dbi.o +0 -0
- package/node_modules/lmdb-store/build/Release/obj.target/lmdb-store/src/env.o +0 -0
- package/node_modules/lmdb-store/build/Release/obj.target/lmdb-store/src/misc.o +0 -0
- package/node_modules/lmdb-store/build/Release/obj.target/lmdb-store/src/node-lmdb.o +0 -0
- package/node_modules/lmdb-store/build/Release/obj.target/lmdb-store/src/ordered-binary.o +0 -0
- package/node_modules/lmdb-store/build/Release/obj.target/lmdb-store/src/txn.o +0 -0
- package/node_modules/lmdb-store/build/Release/obj.target/lmdb-store/src/windows.o +0 -0
- package/node_modules/lmdb-store/build/Release/obj.target/lmdb-store.node +0 -0
- package/node_modules/lmdb-store/build/binding.Makefile +0 -6
- package/node_modules/lmdb-store/build/config.gypi +0 -426
- package/node_modules/lmdb-store/build/lmdb-store.target.mk +0 -206
- package/node_modules/lmdb-store/node_modules/node-gyp-build/package.json +0 -60
- package/node_modules/lmdb-store/prebuilds/darwin-x64/electron.abi87.node +0 -0
- package/node_modules/lmdb-store/prebuilds/darwin-x64/node.abi72.node +0 -0
- package/node_modules/lmdb-store/prebuilds/darwin-x64/node.abi88.node +0 -0
- package/node_modules/lmdb-store/prebuilds/win32-x64/electron.abi87.node +0 -0
- package/node_modules/lmdb-store/prebuilds/win32-x64/node.abi72.node +0 -0
- package/node_modules/lmdb-store/prebuilds/win32-x64/node.abi88.node +0 -0
- package/node_modules/microtime/prebuilds/darwin-x64/electron-napi.node +0 -0
- package/node_modules/microtime/prebuilds/darwin-x64/node-napi.node +0 -0
- package/node_modules/microtime/prebuilds/linux-arm/electron-napi.node +0 -0
- package/node_modules/microtime/prebuilds/linux-arm/node-napi.node +0 -0
- package/node_modules/microtime/prebuilds/linux-x64/electron-napi.node +0 -0
- package/node_modules/microtime/prebuilds/linux-x64/node-napi.node +0 -0
- package/node_modules/microtime/prebuilds/win32-x64/electron-napi.node +0 -0
- package/node_modules/microtime/prebuilds/win32-x64/node-napi.node +0 -0
- package/node_modules/msgpackr-extract/.circleci/config.yml +0 -19
- package/node_modules/msgpackr-extract/.travis.yml +0 -30
- package/node_modules/msgpackr-extract/node_modules/node-gyp-build/LICENSE +0 -21
- package/node_modules/msgpackr-extract/node_modules/node-gyp-build/README.md +0 -58
- package/node_modules/msgpackr-extract/node_modules/node-gyp-build/bin.js +0 -77
- package/node_modules/msgpackr-extract/node_modules/node-gyp-build/build-test.js +0 -19
- package/node_modules/msgpackr-extract/node_modules/node-gyp-build/index.js +0 -202
- package/node_modules/msgpackr-extract/node_modules/node-gyp-build/optional.js +0 -7
- package/node_modules/msgpackr-extract/prebuilds/darwin-arm64/electron.abi98.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/darwin-arm64/node.abi102.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/darwin-arm64/node.abi83.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/darwin-arm64/node.abi93.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/darwin-x64/electron.abi98.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/darwin-x64/node.abi102.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/darwin-x64/node.abi83.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/darwin-x64/node.abi88.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/darwin-x64/node.abi93.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/linux-arm64/electron.abi98.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/linux-arm64/node.abi102.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/linux-arm64/node.abi83.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/linux-arm64/node.abi93.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/linux-x64/electron.abi98.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/linux-x64/node.abi102.musl.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/linux-x64/node.abi102.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/linux-x64/node.abi72.musl.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/linux-x64/node.abi72.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/linux-x64/node.abi83.musl.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/linux-x64/node.abi83.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/linux-x64/node.abi88.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/linux-x64/node.abi93.musl.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/linux-x64/node.abi93.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/win32-x64/electron.abi98.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/win32-x64/node.abi102.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/win32-x64/node.abi72.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/win32-x64/node.abi83.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/win32-x64/node.abi88.node +0 -0
- package/node_modules/msgpackr-extract/prebuilds/win32-x64/node.abi93.node +0 -0
- package/node_modules/node-addon-api/.editorconfig +0 -8
- package/node_modules/node-addon-api/.travis.yml +0 -65
- package/node_modules/node-addon-api/CHANGELOG.md +0 -325
- package/node_modules/node-addon-api/CODE_OF_CONDUCT.md +0 -4
- package/node_modules/node-addon-api/CONTRIBUTING.md +0 -66
- package/node_modules/node-addon-api/appveyor.yml +0 -48
- package/node_modules/node-addon-api/doc/Doxyfile +0 -2450
- package/node_modules/node-addon-api/doc/array_buffer.md +0 -129
- package/node_modules/node-addon-api/doc/async_context.md +0 -76
- package/node_modules/node-addon-api/doc/async_operations.md +0 -31
- package/node_modules/node-addon-api/doc/async_worker.md +0 -397
- package/node_modules/node-addon-api/doc/basic_types.md +0 -415
- package/node_modules/node-addon-api/doc/bigint.md +0 -92
- package/node_modules/node-addon-api/doc/boolean.md +0 -64
- package/node_modules/node-addon-api/doc/buffer.md +0 -140
- package/node_modules/node-addon-api/doc/callback_scope.md +0 -54
- package/node_modules/node-addon-api/doc/callbackinfo.md +0 -97
- package/node_modules/node-addon-api/doc/checker-tool.md +0 -32
- package/node_modules/node-addon-api/doc/class_property_descriptor.md +0 -118
- package/node_modules/node-addon-api/doc/cmake-js.md +0 -19
- package/node_modules/node-addon-api/doc/conversion-tool.md +0 -28
- package/node_modules/node-addon-api/doc/creating_a_release.md +0 -62
- package/node_modules/node-addon-api/doc/dataview.md +0 -244
- package/node_modules/node-addon-api/doc/env.md +0 -63
- package/node_modules/node-addon-api/doc/error.md +0 -115
- package/node_modules/node-addon-api/doc/error_handling.md +0 -186
- package/node_modules/node-addon-api/doc/escapable_handle_scope.md +0 -82
- package/node_modules/node-addon-api/doc/external.md +0 -59
- package/node_modules/node-addon-api/doc/function.md +0 -294
- package/node_modules/node-addon-api/doc/function_reference.md +0 -238
- package/node_modules/node-addon-api/doc/generator.md +0 -13
- package/node_modules/node-addon-api/doc/handle_scope.md +0 -65
- package/node_modules/node-addon-api/doc/memory_management.md +0 -27
- package/node_modules/node-addon-api/doc/node-gyp.md +0 -82
- package/node_modules/node-addon-api/doc/number.md +0 -163
- package/node_modules/node-addon-api/doc/object.md +0 -202
- package/node_modules/node-addon-api/doc/object_lifetime_management.md +0 -83
- package/node_modules/node-addon-api/doc/object_reference.md +0 -117
- package/node_modules/node-addon-api/doc/object_wrap.md +0 -546
- package/node_modules/node-addon-api/doc/prebuild_tools.md +0 -16
- package/node_modules/node-addon-api/doc/promises.md +0 -74
- package/node_modules/node-addon-api/doc/property_descriptor.md +0 -231
- package/node_modules/node-addon-api/doc/range_error.md +0 -59
- package/node_modules/node-addon-api/doc/reference.md +0 -111
- package/node_modules/node-addon-api/doc/setup.md +0 -82
- package/node_modules/node-addon-api/doc/string.md +0 -89
- package/node_modules/node-addon-api/doc/symbol.md +0 -44
- package/node_modules/node-addon-api/doc/threadsafe_function.md +0 -303
- package/node_modules/node-addon-api/doc/type_error.md +0 -59
- package/node_modules/node-addon-api/doc/typed_array.md +0 -74
- package/node_modules/node-addon-api/doc/typed_array_of.md +0 -133
- package/node_modules/node-addon-api/doc/value.md +0 -269
- package/node_modules/node-addon-api/doc/version_management.md +0 -43
- package/node_modules/node-addon-api/doc/working_with_javascript_values.md +0 -14
- package/node_modules/node-addon-api/external-napi/node_api.h +0 -7
- package/node_modules/node-addon-api/src/node_api.cc +0 -3655
- package/node_modules/node-addon-api/src/node_api.gyp +0 -21
- package/node_modules/node-addon-api/src/node_api.h +0 -588
- package/node_modules/node-addon-api/src/node_api_types.h +0 -115
- package/node_modules/node-addon-api/src/node_internals.cc +0 -142
- package/node_modules/node-addon-api/src/node_internals.h +0 -157
- package/node_modules/node-addon-api/src/util-inl.h +0 -38
- package/node_modules/node-addon-api/src/util.h +0 -7
|
@@ -0,0 +1,2591 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
Object.defineProperty(exports, '__esModule', { value: true });
|
|
4
|
+
|
|
5
|
+
var module$1 = require('module');
|
|
6
|
+
var pathModule = require('path');
|
|
7
|
+
var url = require('url');
|
|
8
|
+
var loadNAPI = require('node-gyp-build-optional-packages');
|
|
9
|
+
var v8 = require('v8');
|
|
10
|
+
var events = require('events');
|
|
11
|
+
var os$1 = require('os');
|
|
12
|
+
var fs$1 = require('fs');
|
|
13
|
+
var msgpackr = require('msgpackr');
|
|
14
|
+
var weakLruCache = require('weak-lru-cache');
|
|
15
|
+
var orderedBinary$1 = require('ordered-binary');
|
|
16
|
+
|
|
17
|
+
function _interopDefaultLegacy (e) {
	// CommonJS/ESM interop shim: a value that already looks like an ES module
	// namespace (non-null object carrying a `default` key) passes through
	// untouched; anything else is wrapped as its own default export.
	if (e && typeof e === 'object' && 'default' in e)
		return e;
	return { 'default': e };
}
|
|
18
|
+
|
|
19
|
+
function _interopNamespace(e) {
	// Already an ES module namespace: return it unchanged.
	if (e && e.__esModule) return e;
	// Build a frozen, prototype-less namespace object whose properties are
	// live views onto the CommonJS export object.
	var n = Object.create(null);
	if (e) {
		for (const k of Object.keys(e)) {
			if (k === 'default') continue;
			const d = Object.getOwnPropertyDescriptor(e, k);
			// Reuse accessor descriptors as-is; wrap data properties in a
			// getter so later mutations of `e` remain visible through `n`.
			Object.defineProperty(n, k, d.get ? d : {
				enumerable: true,
				get: () => e[k]
			});
		}
	}
	// The whole CJS export object becomes the namespace's default export.
	n["default"] = e;
	return Object.freeze(n);
}
|
|
36
|
+
|
|
37
|
+
// Interop views over the CommonJS requires above: *__default exposes the
// module under a `.default` key, *__namespace builds a frozen live namespace.
var pathModule__default = /*#__PURE__*/_interopDefaultLegacy(pathModule);
var loadNAPI__default = /*#__PURE__*/_interopDefaultLegacy(loadNAPI);
var fs__default = /*#__PURE__*/_interopDefaultLegacy(fs$1);
var orderedBinary__namespace = /*#__PURE__*/_interopNamespace(orderedBinary$1);
|
|
41
|
+
|
|
42
|
+
// Native binding handles and shared module-level state; populated by
// setNativeFunctions() / setExternals() below once the addon is loaded.
let Env, Txn, Dbi, Compression, Cursor, getAddress; exports.clearKeptObjects = void 0; let globalBuffer, setGlobalBuffer, arch, fs, os, tmpdir, lmdbError, path, EventEmitter, orderedBinary, MsgpackrEncoder, WeakLRUCache, getByBinary, write, position, iterate, prefetch, resetTxn, getCurrentValue, getStringByBinary, getSharedBuffer, compress;

path = pathModule__default["default"];
// Resolve the directory used to locate the native prebuilds: prefer CJS
// __dirname when present; otherwise derive it from the module URL (browser
// bundlers substitute the document.currentScript branch). The trailing
// 'dist' segment is stripped so lookup starts at the package root.
let dirName = (typeof __dirname == 'string' ? __dirname : // for bun, which doesn't have fileURLToPath
	pathModule.dirname(url.fileURLToPath((typeof document === 'undefined' ? new (require('u' + 'rl').URL)('file:' + __filename).href : (document.currentScript && document.currentScript.src || new URL('index.cjs', document.baseURI).href))))).replace(/dist$/, ''); // for node, which doesn't have __dirname in ESM
// Load the platform-specific native addon (prebuilt binary or local build).
let nativeAddon = loadNAPI__default["default"](dirName);
|
|
48
|
+
|
|
49
|
+
// Under Bun, replace selected N-API entry points with direct FFI calls into
// the same native functions (via the *Ptr addresses the addon exposes),
// which avoids N-API call overhead on these hot paths.
// NOTE(review): argument/return FFI types here must mirror the native
// signatures exactly — f64 arguments appear to carry 64-bit addresses.
if (process.isBun) {
	const { linkSymbols, FFIType } = require('bun:ffi');
	let lmdbLib = linkSymbols({
		getByBinary: {
			args: [FFIType.f64, FFIType.u32],
			returns: FFIType.u32,
			ptr: nativeAddon.getByBinaryPtr
		},
		iterate: {
			args: [FFIType.f64],
			returns: FFIType.i32,
			ptr: nativeAddon.iteratePtr,
		},
		position: {
			args: [FFIType.f64, FFIType.u32, FFIType.u32, FFIType.u32, FFIType.f64],
			returns: FFIType.i32,
			ptr: nativeAddon.positionPtr,
		},
		write: {
			args: [FFIType.f64, FFIType.f64],
			returns: FFIType.i32,
			ptr: nativeAddon.writePtr,
		},
		resetTxn: {
			args: [FFIType.f64],
			returns: FFIType.void,
			ptr: nativeAddon.resetTxnPtr,
		}
	});
	// Overwrite the N-API exports with the linked native FFI functions.
	for (let key in lmdbLib.symbols) {
		nativeAddon[key] = lmdbLib.symbols[key].native;
	}
}
// Publish the (possibly FFI-patched) addon functions into module state.
setNativeFunctions(nativeAddon);
|
|
83
|
+
|
|
84
|
+
// Copy the native addon's classes and functions into the module-level
// bindings declared above, so the rest of the module can call them directly.
function setNativeFunctions(externals) {
	({ Env, Txn, Dbi, Compression, getAddress, getByBinary, setGlobalBuffer,
		globalBuffer, getSharedBuffer, prefetch, iterate, position, resetTxn,
		getCurrentValue, getStringByBinary, write, compress, Cursor,
		lmdbError } = externals);
	// clearKeptObjects may be absent on older runtimes; fall back to a no-op.
	exports.clearKeptObjects = externals.clearKeptObjects || function() {};
	// Only take the addon's tmpdir when it actually provides one.
	if (externals.tmpdir)
		tmpdir = externals.tmpdir;
}
|
|
108
|
+
// Inject the host-environment dependencies (fs, os, event emitter, encoders,
// cache implementation) into the module-level bindings declared above.
function setExternals(externals) {
	({ arch, fs, EventEmitter, orderedBinary, MsgpackrEncoder,
		WeakLRUCache, tmpdir, os } = externals);
}
|
|
118
|
+
|
|
119
|
+
// Run `callback` on a value that may or may not be a thenable. Plain values
// are handled synchronously; thenables are chained, with `errback` attached
// as the rejection handler when supplied.
function when(promise, callback, errback) {
	if (!(promise && promise.then))
		return callback(promise);
	return errback ? promise.then(callback, errback) : promise.then(callback);
}
|
|
127
|
+
|
|
128
|
+
// Shared Int32Array used with Atomics.wait to throttle writers when too many
// operations are outstanding; lazily allocated on first backpressure event.
var backpressureArray;

// Status-word flags shared with the native write thread (upper bits of the
// per-instruction 32-bit flag slot):
const WAITING_OPERATION = 0x2000000;   // write thread is parked, needs a wake-up
const BACKPRESSURE_THRESHOLD = 100000; // outstanding writes before callers block
const TXN_DELIMITER = 0x8000000;       // instruction starts a new transaction
const TXN_COMMITTED = 0x10000000;      // transaction has committed
const TXN_FLUSHED = 0x20000000;        // transaction has been flushed to disk
const TXN_FAILED = 0x40000000;         // transaction failed
const FAILED_CONDITION = 0x4000000;    // conditional write's condition not met
// Encoder buffer modes passed to msgpackr-style encoders:
const REUSE_BUFFER_MODE = 512;
const RESET_BUFFER_MODE = 1024;
// Resolution-object flags (JS-side bookkeeping):
const NO_RESOLVE = 16;
const HAS_TXN = 8;
// NOTE(review): these two conditional-write flags share the value 0x800 —
// looks intentional upstream, but confirm against the native flag table.
const CONDITIONAL_VERSION_LESS_THAN = 0x800;
const CONDITIONAL_ALLOW_NOTFOUND = 0x800;

// Pre-resolved promises reused for synchronous completions, marked with
// isSync so callers can detect they resolved without a round trip.
const SYNC_PROMISE_SUCCESS = Promise.resolve(true);
const SYNC_PROMISE_FAIL = Promise.resolve(false);
SYNC_PROMISE_SUCCESS.isSync = true;
SYNC_PROMISE_FAIL.isSync = true;
const PROMISE_SUCCESS = Promise.resolve(true);
// Sentinel objects/values used by the write path:
const ABORT = {};                          // returned by txn callbacks to abort
const IF_EXISTS = 3.542694326329068e-103;  // magic version meaning "if exists"
const CALLBACK_THREW = {};                 // marks a txn callback that threw
const LocalSharedArrayBuffer = typeof Deno != 'undefined' ? ArrayBuffer : SharedArrayBuffer; // Deno can't handle SharedArrayBuffer as an FFI argument due to https://github.com/denoland/deno/issues/12678
// Buffer view constructor over a (Shared)ArrayBuffer, per runtime.
const ByteArray = typeof Buffer != 'undefined' ? function(buffer) { return Buffer.from(buffer) } : Uint8Array;
// Soonest-available macrotask scheduler for the current runtime.
const queueTask = typeof setImmediate != 'undefined' ? setImmediate : setTimeout; // TODO: Or queueMicrotask?
//let debugLog = []
// Size (bytes) of each shared write-instruction buffer.
const WRITE_BUFFER_SIZE = 0x10000;
|
|
157
|
+
function addWriteMethods(LMDBStore, { env, fixedBuffer, resetReadTxn, useWritemap, maxKeySize,
|
|
158
|
+
eventTurnBatching, txnStartThreshold, batchStartThreshold, overlappingSync, commitDelay, separateFlushed, maxFlushDelay }) {
|
|
159
|
+
// stands for write instructions
|
|
160
|
+
var dynamicBytes;
|
|
161
|
+
function allocateInstructionBuffer() {
|
|
162
|
+
// Must use a shared buffer on older node in order to use Atomics, and it is also more correct since we are
|
|
163
|
+
// indeed accessing and modifying it from another thread (in C). However, Deno can't handle it for
|
|
164
|
+
// FFI so aliased above
|
|
165
|
+
let buffer = new LocalSharedArrayBuffer(WRITE_BUFFER_SIZE);
|
|
166
|
+
dynamicBytes = new ByteArray(buffer);
|
|
167
|
+
let uint32 = dynamicBytes.uint32 = new Uint32Array(buffer, 0, WRITE_BUFFER_SIZE >> 2);
|
|
168
|
+
uint32[0] = 0;
|
|
169
|
+
dynamicBytes.float64 = new Float64Array(buffer, 0, WRITE_BUFFER_SIZE >> 3);
|
|
170
|
+
buffer.address = getAddress(dynamicBytes);
|
|
171
|
+
uint32.address = buffer.address + uint32.byteOffset;
|
|
172
|
+
dynamicBytes.position = 0;
|
|
173
|
+
return dynamicBytes;
|
|
174
|
+
}
|
|
175
|
+
var newBufferThreshold = (WRITE_BUFFER_SIZE - maxKeySize - 64) >> 3; // need to reserve more room if we do inline values
|
|
176
|
+
var outstandingWriteCount = 0;
|
|
177
|
+
var startAddress = 0;
|
|
178
|
+
var writeTxn = null;
|
|
179
|
+
var committed;
|
|
180
|
+
var abortedNonChildTransactionWarn;
|
|
181
|
+
var nextTxnCallbacks = [];
|
|
182
|
+
var commitPromise, flushPromise, flushResolvers = [];
|
|
183
|
+
commitDelay = commitDelay || 0;
|
|
184
|
+
eventTurnBatching = eventTurnBatching === false ? false : true;
|
|
185
|
+
var enqueuedCommit;
|
|
186
|
+
var afterCommitCallbacks = [];
|
|
187
|
+
var beforeCommitCallbacks = [];
|
|
188
|
+
var enqueuedEventTurnBatch;
|
|
189
|
+
var batchDepth = 0;
|
|
190
|
+
var lastWritePromise;
|
|
191
|
+
var writeBatchStart, outstandingBatchCount, lastSyncTxnFlush, lastFlushTimeout, lastFlushCallback;
|
|
192
|
+
txnStartThreshold = txnStartThreshold || 5;
|
|
193
|
+
batchStartThreshold = batchStartThreshold || 1000;
|
|
194
|
+
maxFlushDelay = maxFlushDelay || 500;
|
|
195
|
+
|
|
196
|
+
allocateInstructionBuffer();
|
|
197
|
+
dynamicBytes.uint32[0] = TXN_DELIMITER | TXN_COMMITTED | TXN_FLUSHED;
|
|
198
|
+
var txnResolution, lastQueuedResolution, nextResolution = {
|
|
199
|
+
uint32: dynamicBytes.uint32, flagPosition: 0, flag: 0, valueBuffer: null, next: null, meta: null };
|
|
200
|
+
var uncommittedResolution = {
|
|
201
|
+
uint32: null, flagPosition: 0, flag: 0, valueBuffer: null, next: nextResolution, meta: null };
|
|
202
|
+
var unwrittenResolution = nextResolution;
|
|
203
|
+
var lastPromisedResolution = uncommittedResolution;
|
|
204
|
+
let lastValue, valueBuffer;
|
|
205
|
+
function writeInstructions(flags, store, key, value, version, ifVersion) {
|
|
206
|
+
let writeStatus;
|
|
207
|
+
let targetBytes, position, encoder;
|
|
208
|
+
let valueSize, valueBufferStart;
|
|
209
|
+
if (flags & 2) {
|
|
210
|
+
// encode first in case we have to write a shared structure
|
|
211
|
+
encoder = store.encoder;
|
|
212
|
+
if (typeof value !== 'object' && value && value === lastValue && !store.compression) ; else if (value && value['\x10binary-data\x02'])
|
|
213
|
+
valueBuffer = value['\x10binary-data\x02'];
|
|
214
|
+
else if (encoder) {
|
|
215
|
+
if (encoder.copyBuffers) // use this as indicator for support buffer reuse for now
|
|
216
|
+
valueBuffer = encoder.encode(value, REUSE_BUFFER_MODE | (writeTxn ? RESET_BUFFER_MODE : 0)); // in addition, if we are writing sync, after using, we can immediately reset the encoder's position to reuse that space, which can improve performance
|
|
217
|
+
else { // various other encoders, including JSON.stringify, that might serialize to a string
|
|
218
|
+
valueBuffer = encoder.encode(value);
|
|
219
|
+
if (typeof valueBuffer == 'string')
|
|
220
|
+
valueBuffer = Buffer.from(valueBuffer); // TODO: Would be nice to write strings inline in the instructions
|
|
221
|
+
}
|
|
222
|
+
} else if (typeof value == 'string') {
|
|
223
|
+
valueBuffer = Buffer.from(value); // TODO: Would be nice to write strings inline in the instructions
|
|
224
|
+
} else if (value instanceof Uint8Array)
|
|
225
|
+
valueBuffer = value;
|
|
226
|
+
else
|
|
227
|
+
throw new Error('Invalid value to put in database ' + value + ' (' + (typeof value) +'), consider using encoder');
|
|
228
|
+
lastValue = (writeTxn || store.compression) ? null : value; // can't reuse values from write txns because we reset the buffer
|
|
229
|
+
valueBufferStart = valueBuffer.start;
|
|
230
|
+
if (valueBufferStart > -1) // if we have buffers with start/end position
|
|
231
|
+
valueSize = valueBuffer.end - valueBufferStart; // size
|
|
232
|
+
else
|
|
233
|
+
valueSize = valueBuffer.length;
|
|
234
|
+
if (store.dupSort && valueSize > maxKeySize)
|
|
235
|
+
throw new Error('The value is larger than the maximum size (' + maxKeySize + ') for a value in a dupSort database');
|
|
236
|
+
} else
|
|
237
|
+
valueSize = 0;
|
|
238
|
+
if (writeTxn) {
|
|
239
|
+
targetBytes = fixedBuffer;
|
|
240
|
+
position = 0;
|
|
241
|
+
} else {
|
|
242
|
+
if (eventTurnBatching && !enqueuedEventTurnBatch && batchDepth == 0) {
|
|
243
|
+
enqueuedEventTurnBatch = queueTask(() => {
|
|
244
|
+
try {
|
|
245
|
+
for (let i = 0, l = beforeCommitCallbacks.length; i < l; i++) {
|
|
246
|
+
beforeCommitCallbacks[i]();
|
|
247
|
+
}
|
|
248
|
+
} catch(error) {
|
|
249
|
+
console.error(error);
|
|
250
|
+
}
|
|
251
|
+
enqueuedEventTurnBatch = null;
|
|
252
|
+
batchDepth--;
|
|
253
|
+
finishBatch();
|
|
254
|
+
if (writeBatchStart)
|
|
255
|
+
writeBatchStart(); // TODO: When we support delay start of batch, optionally don't delay this
|
|
256
|
+
});
|
|
257
|
+
commitPromise = null; // reset the commit promise, can't know if it is really a new transaction prior to finishWrite being called
|
|
258
|
+
flushPromise = null;
|
|
259
|
+
writeBatchStart = writeInstructions(1, store);
|
|
260
|
+
outstandingBatchCount = 0;
|
|
261
|
+
batchDepth++;
|
|
262
|
+
}
|
|
263
|
+
targetBytes = dynamicBytes;
|
|
264
|
+
position = targetBytes.position;
|
|
265
|
+
}
|
|
266
|
+
let uint32 = targetBytes.uint32, float64 = targetBytes.float64;
|
|
267
|
+
let flagPosition = position << 1; // flagPosition is the 32-bit word starting position
|
|
268
|
+
|
|
269
|
+
// don't increment position until we are sure we don't have any key writing errors
|
|
270
|
+
if (!uint32) {
|
|
271
|
+
throw new Error('Internal buffers have been corrupted');
|
|
272
|
+
}
|
|
273
|
+
uint32[flagPosition + 1] = store.db.dbi;
|
|
274
|
+
if (flags & 4) {
|
|
275
|
+
let keyStartPosition = (position << 3) + 12;
|
|
276
|
+
let endPosition;
|
|
277
|
+
try {
|
|
278
|
+
endPosition = store.writeKey(key, targetBytes, keyStartPosition);
|
|
279
|
+
if (!(keyStartPosition < endPosition) && (flags & 0xf) != 12)
|
|
280
|
+
throw new Error('Invalid key or zero length key is not allowed in LMDB')
|
|
281
|
+
} catch(error) {
|
|
282
|
+
targetBytes.fill(0, keyStartPosition);
|
|
283
|
+
if (error.name == 'RangeError')
|
|
284
|
+
error = new Error('Key size is larger than the maximum key size (' + maxKeySize + ')');
|
|
285
|
+
throw error;
|
|
286
|
+
}
|
|
287
|
+
let keySize = endPosition - keyStartPosition;
|
|
288
|
+
if (keySize > maxKeySize) {
|
|
289
|
+
targetBytes.fill(0, keyStartPosition); // restore zeros
|
|
290
|
+
throw new Error('Key size is larger than the maximum key size (' + maxKeySize + ')');
|
|
291
|
+
}
|
|
292
|
+
uint32[flagPosition + 2] = keySize;
|
|
293
|
+
position = (endPosition + 16) >> 3;
|
|
294
|
+
if (flags & 2) {
|
|
295
|
+
let mustCompress;
|
|
296
|
+
if (valueBufferStart > -1) { // if we have buffers with start/end position
|
|
297
|
+
// record pointer to value buffer
|
|
298
|
+
float64[position] = (valueBuffer.address ||
|
|
299
|
+
(valueBuffer.address = getAddress(valueBuffer))) + valueBufferStart;
|
|
300
|
+
mustCompress = valueBuffer[valueBufferStart] >= 250; // this is the compression indicator, so we must compress
|
|
301
|
+
} else {
|
|
302
|
+
let valueArrayBuffer = valueBuffer.buffer;
|
|
303
|
+
// record pointer to value buffer
|
|
304
|
+
let address = (valueArrayBuffer.address ||
|
|
305
|
+
(valueBuffer.length === 0 ? 0 : // externally allocated buffers of zero-length with the same non-null-pointer can crash node, #161
|
|
306
|
+
(valueArrayBuffer.address = (getAddress(valueBuffer) - valueBuffer.byteOffset))))
|
|
307
|
+
+ valueBuffer.byteOffset;
|
|
308
|
+
if (address <= 0 && valueBuffer.length > 0)
|
|
309
|
+
console.error('Supplied buffer had an invalid address', address);
|
|
310
|
+
float64[position] = address;
|
|
311
|
+
mustCompress = valueBuffer[0] >= 250; // this is the compression indicator, so we must compress
|
|
312
|
+
}
|
|
313
|
+
uint32[(position++ << 1) - 1] = valueSize;
|
|
314
|
+
if (store.compression && (valueSize >= store.compression.threshold || mustCompress)) {
|
|
315
|
+
flags |= 0x100000;
|
|
316
|
+
float64[position] = store.compression.address;
|
|
317
|
+
if (!writeTxn)
|
|
318
|
+
compress(env.address, uint32.address + (position << 3), () => {
|
|
319
|
+
// this is never actually called in NodeJS, just use to pin the buffer in memory until it is finished
|
|
320
|
+
// and is a no-op in Deno
|
|
321
|
+
if (!float64)
|
|
322
|
+
throw new Error('No float64 available');
|
|
323
|
+
});
|
|
324
|
+
position++;
|
|
325
|
+
}
|
|
326
|
+
}
|
|
327
|
+
if (ifVersion !== undefined) {
|
|
328
|
+
if (ifVersion === null)
|
|
329
|
+
flags |= 0x10; // if it does not exist, MDB_NOOVERWRITE
|
|
330
|
+
else {
|
|
331
|
+
flags |= 0x100;
|
|
332
|
+
float64[position++] = ifVersion;
|
|
333
|
+
}
|
|
334
|
+
}
|
|
335
|
+
if (version !== undefined) {
|
|
336
|
+
flags |= 0x200;
|
|
337
|
+
float64[position++] = version || 0;
|
|
338
|
+
}
|
|
339
|
+
} else
|
|
340
|
+
position++;
|
|
341
|
+
targetBytes.position = position;
|
|
342
|
+
if (writeTxn) {
|
|
343
|
+
uint32[0] = flags;
|
|
344
|
+
write(env.address, uint32.address);
|
|
345
|
+
return () => (uint32[0] & FAILED_CONDITION) ? SYNC_PROMISE_FAIL : SYNC_PROMISE_SUCCESS;
|
|
346
|
+
}
|
|
347
|
+
// if we ever use buffers that haven't been zero'ed, need to clear out the next slot like this:
|
|
348
|
+
// uint32[position << 1] = 0 // clear out the next slot
|
|
349
|
+
let nextUint32;
|
|
350
|
+
if (position > newBufferThreshold) {
|
|
351
|
+
// make new buffer and make pointer to it
|
|
352
|
+
let lastPosition = position;
|
|
353
|
+
targetBytes = allocateInstructionBuffer();
|
|
354
|
+
position = targetBytes.position;
|
|
355
|
+
float64[lastPosition + 1] = targetBytes.uint32.address + position;
|
|
356
|
+
uint32[lastPosition << 1] = 3; // pointer instruction
|
|
357
|
+
nextUint32 = targetBytes.uint32;
|
|
358
|
+
} else
|
|
359
|
+
nextUint32 = uint32;
|
|
360
|
+
let resolution = nextResolution;
|
|
361
|
+
// create the placeholder next resolution
|
|
362
|
+
nextResolution = resolution.next = { // we try keep resolutions exactly the same object type
|
|
363
|
+
uint32: nextUint32,
|
|
364
|
+
flagPosition: position << 1,
|
|
365
|
+
flag: 0, // TODO: eventually eliminate this, as we can probably signify HAS_TXN/NO_RESOLVE/FAILED_CONDITION in upper bits
|
|
366
|
+
valueBuffer: fixedBuffer, // these are all just placeholders so that we have the right hidden class initially allocated
|
|
367
|
+
next: null,
|
|
368
|
+
meta: null,
|
|
369
|
+
};
|
|
370
|
+
let writtenBatchDepth = batchDepth;
|
|
371
|
+
|
|
372
|
+
return (callback) => {
|
|
373
|
+
if (writtenBatchDepth) {
|
|
374
|
+
// if we are in a batch, the transaction can't close, so we do the faster,
|
|
375
|
+
// but non-deterministic updates, knowing that the write thread can
|
|
376
|
+
// just poll for the status change if we miss a status update
|
|
377
|
+
writeStatus = uint32[flagPosition];
|
|
378
|
+
uint32[flagPosition] = flags;
|
|
379
|
+
//writeStatus = Atomics.or(uint32, flagPosition, flags)
|
|
380
|
+
if (writeBatchStart && !writeStatus) {
|
|
381
|
+
outstandingBatchCount += 1 + (valueSize >> 12);
|
|
382
|
+
if (outstandingBatchCount > batchStartThreshold) {
|
|
383
|
+
outstandingBatchCount = 0;
|
|
384
|
+
writeBatchStart();
|
|
385
|
+
writeBatchStart = null;
|
|
386
|
+
}
|
|
387
|
+
}
|
|
388
|
+
} else // otherwise the transaction could end at any time and we need to know the
|
|
389
|
+
// deterministically if it is ending, so we can reset the commit promise
|
|
390
|
+
// so we use the slower atomic operation
|
|
391
|
+
writeStatus = Atomics.or(uint32, flagPosition, flags);
|
|
392
|
+
|
|
393
|
+
outstandingWriteCount++;
|
|
394
|
+
if (writeStatus & TXN_DELIMITER) {
|
|
395
|
+
commitPromise = null; // TODO: Don't reset these if this comes from the batch start operation on an event turn batch
|
|
396
|
+
flushPromise = null;
|
|
397
|
+
queueCommitResolution(resolution);
|
|
398
|
+
if (!startAddress) {
|
|
399
|
+
startAddress = uint32.address + (flagPosition << 2);
|
|
400
|
+
}
|
|
401
|
+
}
|
|
402
|
+
if (!flushPromise && overlappingSync)
|
|
403
|
+
flushPromise = new Promise(resolve => flushResolvers.push(resolve));
|
|
404
|
+
if (writeStatus & WAITING_OPERATION) { // write thread is waiting
|
|
405
|
+
write(env.address, 0);
|
|
406
|
+
}
|
|
407
|
+
if (outstandingWriteCount > BACKPRESSURE_THRESHOLD && !writeBatchStart) {
|
|
408
|
+
if (!backpressureArray)
|
|
409
|
+
backpressureArray = new Int32Array(new SharedArrayBuffer(4), 0, 1);
|
|
410
|
+
Atomics.wait(backpressureArray, 0, 0, Math.round(outstandingWriteCount / BACKPRESSURE_THRESHOLD));
|
|
411
|
+
}
|
|
412
|
+
if (startAddress) {
|
|
413
|
+
if (eventTurnBatching)
|
|
414
|
+
startWriting(); // start writing immediately because this has already been batched/queued
|
|
415
|
+
else if (!enqueuedCommit && txnStartThreshold) {
|
|
416
|
+
enqueuedCommit = (commitDelay == 0 && typeof setImmediate != 'undefined') ? setImmediate(() => startWriting()) : setTimeout(() => startWriting(), commitDelay);
|
|
417
|
+
} else if (outstandingWriteCount > txnStartThreshold)
|
|
418
|
+
startWriting();
|
|
419
|
+
}
|
|
420
|
+
|
|
421
|
+
if ((outstandingWriteCount & 7) === 0)
|
|
422
|
+
resolveWrites();
|
|
423
|
+
|
|
424
|
+
if (store.cache) {
|
|
425
|
+
resolution.meta = {
|
|
426
|
+
key,
|
|
427
|
+
store,
|
|
428
|
+
valueSize: valueBuffer ? valueBuffer.length : 0,
|
|
429
|
+
};
|
|
430
|
+
}
|
|
431
|
+
resolution.valueBuffer = valueBuffer;
|
|
432
|
+
lastQueuedResolution = resolution;
|
|
433
|
+
|
|
434
|
+
if (callback) {
|
|
435
|
+
if (callback === IF_EXISTS)
|
|
436
|
+
ifVersion = IF_EXISTS;
|
|
437
|
+
else {
|
|
438
|
+
let meta = resolution.meta || (resolution.meta = {});
|
|
439
|
+
meta.reject = callback;
|
|
440
|
+
meta.resolve = (value) => callback(null, value);
|
|
441
|
+
return;
|
|
442
|
+
}
|
|
443
|
+
}
|
|
444
|
+
if (ifVersion === undefined) {
|
|
445
|
+
if (writtenBatchDepth > 1) {
|
|
446
|
+
if (!resolution.flag && !store.cache)
|
|
447
|
+
resolution.flag = NO_RESOLVE;
|
|
448
|
+
return PROMISE_SUCCESS; // or return undefined?
|
|
449
|
+
}
|
|
450
|
+
if (commitPromise) {
|
|
451
|
+
if (!resolution.flag)
|
|
452
|
+
resolution.flag = NO_RESOLVE;
|
|
453
|
+
} else {
|
|
454
|
+
commitPromise = new Promise((resolve, reject) => {
|
|
455
|
+
let meta = resolution.meta || (resolution.meta = {});
|
|
456
|
+
meta.resolve = resolve;
|
|
457
|
+
resolve.unconditional = true;
|
|
458
|
+
meta.reject = reject;
|
|
459
|
+
});
|
|
460
|
+
if (separateFlushed)
|
|
461
|
+
commitPromise.flushed = overlappingSync ? flushPromise : commitPromise;
|
|
462
|
+
}
|
|
463
|
+
return commitPromise;
|
|
464
|
+
}
|
|
465
|
+
lastWritePromise = new Promise((resolve, reject) => {
|
|
466
|
+
let meta = resolution.meta || (resolution.meta = {});
|
|
467
|
+
meta.resolve = resolve;
|
|
468
|
+
meta.reject = reject;
|
|
469
|
+
});
|
|
470
|
+
if (separateFlushed)
|
|
471
|
+
lastWritePromise.flushed = overlappingSync ? flushPromise : lastWritePromise;
|
|
472
|
+
return lastWritePromise;
|
|
473
|
+
};
|
|
474
|
+
}
|
|
475
|
+
let committedFlushResolvers, lastSync = Promise.resolve();
|
|
476
|
+
function startWriting() {
|
|
477
|
+
if (enqueuedCommit) {
|
|
478
|
+
clearImmediate(enqueuedCommit);
|
|
479
|
+
enqueuedCommit = null;
|
|
480
|
+
}
|
|
481
|
+
let resolvers = flushResolvers;
|
|
482
|
+
flushResolvers = [];
|
|
483
|
+
let start = Date.now();
|
|
484
|
+
env.startWriting(startAddress, (status) => {
|
|
485
|
+
if (dynamicBytes.uint32[dynamicBytes.position << 1] & TXN_DELIMITER)
|
|
486
|
+
queueCommitResolution(nextResolution);
|
|
487
|
+
|
|
488
|
+
resolveWrites(true);
|
|
489
|
+
switch (status) {
|
|
490
|
+
case 0:
|
|
491
|
+
if (resolvers.length > 0) {
|
|
492
|
+
let delay = Date.now() - start;
|
|
493
|
+
scheduleFlush(resolvers, Math.min((flushPromise && flushPromise.hasCallbacks ? delay >> 1 : delay) + 1, maxFlushDelay));
|
|
494
|
+
}
|
|
495
|
+
case 1:
|
|
496
|
+
break;
|
|
497
|
+
case 2:
|
|
498
|
+
executeTxnCallbacks();
|
|
499
|
+
break;
|
|
500
|
+
default:
|
|
501
|
+
console.error(status);
|
|
502
|
+
if (commitRejectPromise) {
|
|
503
|
+
commitRejectPromise.reject(status);
|
|
504
|
+
commitRejectPromise = null;
|
|
505
|
+
}
|
|
506
|
+
}
|
|
507
|
+
});
|
|
508
|
+
startAddress = 0;
|
|
509
|
+
}
|
|
510
|
+
// Schedule a disk sync after `delay` ms, resolving `resolvers` once it
// completes. Coalesces: if a flush is already pending, the resolvers simply
// join it.
function scheduleFlush(resolvers, delay) {
	if (committedFlushResolvers) {
		committedFlushResolvers.push(...resolvers);
		return;
	}
	committedFlushResolvers = resolvers;
	lastFlushCallback = () => {
		lastFlushTimeout = null;
		// chain on the previous sync so env.sync calls never overlap
		lastSync.then(() => {
			const pending = committedFlushResolvers || [];
			committedFlushResolvers = null;
			lastSync = new Promise((resolve) => {
				env.sync(() => {
					for (let index = 0; index < pending.length; index++)
						pending[index]();
					resolve();
				});
			});
		});
	};
	lastFlushTimeout = setTimeout(lastFlushCallback, delay || 0);
}
|
|
531
|
+
// If a flush timer is pending, fire it immediately instead of waiting.
function expediteFlush() {
	if (!lastFlushTimeout)
		return;
	clearTimeout(lastFlushTimeout);
	lastFlushCallback();
}
|
|
537
|
+
|
|
538
|
+
// Mark `resolution` as carrying a transaction boundary (at most once) and
// link it into the chain of pending transaction resolutions.
function queueCommitResolution(resolution) {
	if (resolution.flag & HAS_TXN)
		return; // already queued as a txn boundary
	resolution.flag = HAS_TXN;
	if (txnResolution) {
		txnResolution.nextTxn = resolution;
		//outstandingWriteCount = 0
	} else {
		txnResolution = resolution;
	}
}
|
|
549
|
+
// A transaction is "done" when the native side has set either outcome bit.
var TXN_DONE = TXN_COMMITTED | TXN_FAILED;
// Walk the resolution linked list, releasing memory for instructions the
// write thread has finished, then settle any transactions that completed.
// `async` is forwarded to resolveCommit to pick how the read txn is reset.
function resolveWrites(async) {
	// clean up finished instructions
	let instructionStatus;
	// 0x1000000 appears to be the "instruction written" bit — set by the native
	// writer (not visible here; inferred from usage)
	while ((instructionStatus = unwrittenResolution.uint32[unwrittenResolution.flagPosition])
			& 0x1000000) {
		if (unwrittenResolution.callbacks) {
			// transaction callbacks get deferred to the next executeTxnCallbacks run
			nextTxnCallbacks.push(unwrittenResolution.callbacks);
			unwrittenResolution.callbacks = null;
		}
		outstandingWriteCount--;
		if (unwrittenResolution.flag !== HAS_TXN) {
			if (unwrittenResolution.flag === NO_RESOLVE && !unwrittenResolution.store) {
				// in this case we can completely remove from the linked list, clearing more memory
				lastPromisedResolution.next = unwrittenResolution = unwrittenResolution.next;
				continue;
			}
			unwrittenResolution.uint32 = null;
		}
		// drop the value buffer reference so it can be garbage collected
		unwrittenResolution.valueBuffer = null;
		unwrittenResolution.flag = instructionStatus;
		lastPromisedResolution = unwrittenResolution;
		unwrittenResolution = unwrittenResolution.next;
	}
	// settle each transaction whose outcome bits are now set; resolveCommit /
	// rejectCommit advance txnResolution to the next queued transaction
	while (txnResolution &&
		(instructionStatus = txnResolution.uint32[txnResolution.flagPosition] & TXN_DONE)) {
		if (instructionStatus & TXN_FAILED)
			rejectCommit();
		else
			resolveCommit(async);
	}
}
|
|
581
|
+
|
|
582
|
+
// Resolve the promises of every write in the just-committed transaction span
// (uncommittedResolution up to, but excluding, txnResolution).
function resolveCommit(async) {
	// the slot after the flag holds the committed txn id (passed to listeners)
	afterCommit(txnResolution.uint32[txnResolution.flagPosition + 1]);
	if (async)
		resetReadTxn();
	else
		queueMicrotask(resetReadTxn); // TODO: only do this if there are actually committed writes?
	do {
		if (uncommittedResolution.meta && uncommittedResolution.meta.resolve) {
			let resolve = uncommittedResolution.meta.resolve;
			// conditional writes resolve false on a failed condition, unless the
			// resolver was marked unconditional (see the commit-promise setup)
			if (uncommittedResolution.flag & FAILED_CONDITION && !resolve.unconditional)
				resolve(false);
			else
				resolve(true);
		}
	} while((uncommittedResolution = uncommittedResolution.next) && uncommittedResolution != txnResolution)
	// advance to the next queued transaction boundary
	txnResolution = txnResolution.nextTxn;
}
|
|
599
|
+
// Shared promise carrying the eventual native failure status; its `reject`
// function is invoked by the write-status handler in startWriting.
var commitRejectPromise;
// Reject the promises of every write in the failed transaction span
// (uncommittedResolution up to, but excluding, txnResolution).
function rejectCommit() {
	afterCommit();
	if (!commitRejectPromise) {
		let rejectFunction;
		commitRejectPromise = new Promise((resolve, reject) => rejectFunction = reject);
		commitRejectPromise.reject = rejectFunction;
	}
	do {
		if (uncommittedResolution.meta && uncommittedResolution.meta.reject) {
			// NOTE: removed a no-op expression statement here
			// (`uncommittedResolution.flag & 0xf;`) that computed a value and
			// discarded it — dead code with no effect
			let error = new Error("Commit failed (see commitError for details)");
			error.commitError = commitRejectPromise;
			uncommittedResolution.meta.reject(error);
		}
	} while((uncommittedResolution = uncommittedResolution.next) && uncommittedResolution != txnResolution)
	// advance to the next queued transaction boundary
	txnResolution = txnResolution.nextTxn;
}
|
|
617
|
+
// Store `newStatus` into the shared status word and return the previous value.
// Uses a plain (non-atomic) exchange while inside a batch, an atomic OR
// otherwise.
function atomicStatus(uint32, flagPosition, newStatus) {
	if (batchDepth) {
		// if we are in a batch, the transaction can't close, so we do the faster,
		// but non-deterministic updates, knowing that the write thread can
		// just poll for the status change if we miss a status update
		const previous = uint32[flagPosition];
		uint32[flagPosition] = newStatus;
		return previous;
		//return Atomics.or(uint32, flagPosition, newStatus)
	}
	// otherwise the transaction could end at any time and we need to know
	// deterministically if it is ending, so we can reset the commit promise —
	// so we use the slower atomic operation
	return Atomics.or(uint32, flagPosition, newStatus);
}
|
|
631
|
+
// Notify every registered 'aftercommit' listener of the committed span.
// The length is captured up front so listeners registered during the loop are
// not invoked for this commit.
function afterCommit(txnId) {
	const count = afterCommitCallbacks.length;
	for (let index = 0; index < count; index++)
		afterCommitCallbacks[index]({ next: uncommittedResolution, last: unwrittenResolution, txnId });
}
|
|
636
|
+
// Run the user transaction callbacks that were queued (via transactionAsync)
// for the write txn the native thread has opened. Child-txn callbacks run in
// their own abortable sub-transaction; plain callbacks run inline and may be
// async (their promises are awaited before any child txn and at the end).
async function executeTxnCallbacks() {
	env.writeTxn = writeTxn = { write: true };
	//this.emit('begin-transaction');
	let promises;
	let txnCallbacks;
	for (let i = 0, l = nextTxnCallbacks.length; i < l; i++) {
		txnCallbacks = nextTxnCallbacks[i];
		// note: inner loop deliberately shadows i/l — txnError below closes over
		// the inner i via its parameter instead
		for (let i = 0, l = txnCallbacks.length; i < l; i++) {
			let userTxnCallback = txnCallbacks[i];
			let asChild = userTxnCallback.asChild;
			if (asChild) {
				if (promises) {
					// must complete any outstanding transactions before proceeding
					await Promise.all(promises);
					promises = null;
				}
				env.beginTxn(1); // abortable
				let parentTxn = writeTxn;
				env.writeTxn = writeTxn = { write: true };
				try {
					let result = userTxnCallback.callback();
					if (result && result.then) {
						await result;
					}
					// ABORT sentinel rolls back the child txn; anything else commits
					if (result === ABORT)
						env.abortTxn();
					else
						env.commitTxn();
					clearWriteTxn(parentTxn);
					// the callback slot is reused to carry the result back to the
					// caller of transactionAsync
					txnCallbacks[i] = result;
				} catch(error) {
					clearWriteTxn(parentTxn);
					env.abortTxn();
					txnError(error, i);
				}
			} else {
				try {
					let result = userTxnCallback();
					txnCallbacks[i] = result;
					if (result && result.then) {
						if (!promises)
							promises = [];
						// rejection is recorded via the result slot; swallow here so
						// Promise.all below doesn't short-circuit the batch
						promises.push(result.catch(() => {}));
					}
				} catch(error) {
					txnError(error, i);
				}
			}
		}
	}
	nextTxnCallbacks = [];
	if (promises) { // finish any outstanding commit functions
		await Promise.all(promises);
	}
	clearWriteTxn(null);
	// Record a thrown error for callback `i`; transactionAsync rethrows it when
	// it sees the CALLBACK_THREW sentinel in the result slot.
	function txnError(error, i) {
		(txnCallbacks.errors || (txnCallbacks.errors = []))[i] = error;
		txnCallbacks[i] = CALLBACK_THREW;
	}
}
|
|
696
|
+
// Terminate the current batch in the shared instruction buffer and, if the
// write thread is waiting on us, kick it.
function finishBatch() {
	dynamicBytes.uint32[(dynamicBytes.position + 1) << 1] = 0; // clear out the next slot
	let writeStatus = atomicStatus(dynamicBytes.uint32, (dynamicBytes.position++) << 1, 2); // atomically write the end block
	nextResolution.flagPosition += 2;
	if (writeStatus & WAITING_OPERATION) {
		// presumably wakes the blocked native writer (flag name suggests so —
		// the wait protocol itself lives on the native side)
		write(env.address, 0);
	}
}
|
|
704
|
+
// Tear down the current write txn object, restoring `parentTxn` (or null)
// as the active write transaction.
function clearWriteTxn(parentTxn) {
	// TODO: We might actually want to track cursors in a write txn and manually
	// close them.
	const current = writeTxn;
	if (current.refCount > 0)
		current.isDone = true; // still referenced elsewhere; flag as finished
	writeTxn = parentTxn || null;
	env.writeTxn = writeTxn;
}
|
|
711
|
+
Object.assign(LMDBStore.prototype, {
|
|
712
|
+
put(key, value, versionOrOptions, ifVersion) {
|
|
713
|
+
let callback, flags = 15, type = typeof versionOrOptions;
|
|
714
|
+
if (type == 'object') {
|
|
715
|
+
if (versionOrOptions.noOverwrite)
|
|
716
|
+
flags |= 0x10;
|
|
717
|
+
if (versionOrOptions.noDupData)
|
|
718
|
+
flags |= 0x20;
|
|
719
|
+
if (versionOrOptions.append)
|
|
720
|
+
flags |= 0x20000;
|
|
721
|
+
if (versionOrOptions.ifVersion != undefined)
|
|
722
|
+
ifVersion = versionsOrOptions.ifVersion;
|
|
723
|
+
versionOrOptions = versionOrOptions.version;
|
|
724
|
+
if (typeof ifVersion == 'function')
|
|
725
|
+
callback = ifVersion;
|
|
726
|
+
} else if (type == 'function') {
|
|
727
|
+
callback = versionOrOptions;
|
|
728
|
+
}
|
|
729
|
+
return writeInstructions(flags, this, key, value, this.useVersions ? versionOrOptions || 0 : undefined, ifVersion)(callback);
|
|
730
|
+
},
|
|
731
|
+
remove(key, ifVersionOrValue, callback) {
|
|
732
|
+
let flags = 13;
|
|
733
|
+
let ifVersion, value;
|
|
734
|
+
if (ifVersionOrValue !== undefined) {
|
|
735
|
+
if (typeof ifVersionOrValue == 'function')
|
|
736
|
+
callback = ifVersionOrValue;
|
|
737
|
+
else if (ifVersionOrValue === IF_EXISTS && !callback)
|
|
738
|
+
// we have a handler for IF_EXISTS in the callback handler for remove
|
|
739
|
+
callback = ifVersionOrValue;
|
|
740
|
+
else if (this.useVersions)
|
|
741
|
+
ifVersion = ifVersionOrValue;
|
|
742
|
+
else {
|
|
743
|
+
flags = 14;
|
|
744
|
+
value = ifVersionOrValue;
|
|
745
|
+
}
|
|
746
|
+
}
|
|
747
|
+
return writeInstructions(flags, this, key, value, undefined, ifVersion)(callback);
|
|
748
|
+
},
|
|
749
|
+
// Alias for remove() — presumably kept for LevelUp-style API compatibility
// (TODO confirm against the public API docs).
del(key, options, callback) {
	return this.remove(key, options, callback);
},
|
|
752
|
+
// Run `callback` only if `key` has no existing entry, implemented as an
// ifVersion check against a null version.
ifNoExists(key, callback) {
	return this.ifVersion(key, null, callback);
},
|
|
755
|
+
// Conditionally execute a group of writes: the operations queued by
// `callback` (a function, or an array of { type, key, value } records) only
// take effect if `key` currently has `version`. With no callback, returns a
// Batch that collects operations and re-invokes ifVersion on write().
ifVersion(key, version, callback, options) {
	if (!callback) {
		return new Batch((operations, callback) => {
			let promise = this.ifVersion(key, version, operations, options);
			if (callback)
				promise.then(callback);
			return promise;
		});
	}
	if (writeTxn) {
		// inside a synchronous write txn: evaluate the condition directly
		if (version === undefined || this.doesExist(key, version)) {
			callback();
			return SYNC_PROMISE_SUCCESS;
		}
		return SYNC_PROMISE_FAIL;
	}
	// 1: unconditional block start; 4: version-conditioned block start
	let flags = key === undefined || version === undefined ? 1 : 4;
	if (options?.ifLessThan)
		flags |= CONDITIONAL_VERSION_LESS_THAN;
	if (options?.allowNotFound)
		flags |= CONDITIONAL_ALLOW_NOTFOUND;
	let finishStartWrite = writeInstructions(flags, this, key, undefined, undefined, version);
	let promise;
	// batchDepth steps by 2 here (nested ifVersion adds 2 more), so > 2 means
	// we are already inside an outer conditional batch
	batchDepth += 2;
	if (batchDepth > 2)
		promise = finishStartWrite();
	else {
		// defer the block-start write; writeBatchStart is invoked elsewhere if
		// the batch must be started before this function finishes
		writeBatchStart = () => {
			promise = finishStartWrite();
		};
		outstandingBatchCount = 0;
	}
	try {
		if (typeof callback === 'function') {
			callback();
		} else {
			// array form: replay recorded operations through this store
			for (let i = 0, l = callback.length; i < l; i++) {
				let operation = callback[i];
				this[operation.type](operation.key, operation.value);
			}
		}
	} finally {
		if (!promise) {
			finishBatch();
			batchDepth -= 2;
			promise = finishStartWrite(); // finish write once all the operations have been written (and it hasn't been written prematurely)
			writeBatchStart = null;
		} else {
			batchDepth -= 2;
			finishBatch();
		}
	}
	return promise;
},
|
|
809
|
+
// Execute a set of operations (a callback queuing writes, or an operations
// array) as one batch — an ifVersion with no key/version condition.
batch(callbackOrOperations) {
	return this.ifVersion(undefined, undefined, callbackOrOperations);
},
|
|
812
|
+
// Asynchronously delete this database. Uses the clearAsync instruction (12)
// plus the 1024 bit — presumably "delete the db itself", not just its
// contents (TODO confirm flag meaning in the native layer).
drop(callback) {
	return writeInstructions(1024 + 12, this, undefined, undefined, undefined, undefined)(callback);
},
|
|
815
|
+
clearAsync(callback) {
|
|
816
|
+
if (this.encoder) {
|
|
817
|
+
if (this.encoder.clearSharedData)
|
|
818
|
+
this.encoder.clearSharedData();
|
|
819
|
+
else if (this.encoder.structures)
|
|
820
|
+
this.encoder.structures = [];
|
|
821
|
+
}
|
|
822
|
+
return writeInstructions(12, this, undefined, undefined, undefined, undefined)(callback);
|
|
823
|
+
},
|
|
824
|
+
// NOTE(review): deliberately finishes a batch outside any batch context —
// name suggests an internal/test hook for provoking the error path; confirm
// before relying on it.
_triggerError() {
	finishBatch();
},
|
|
827
|
+
|
|
828
|
+
putSync(key, value, versionOrOptions, ifVersion) {
|
|
829
|
+
if (writeTxn)
|
|
830
|
+
return this.put(key, value, versionOrOptions, ifVersion);
|
|
831
|
+
else
|
|
832
|
+
return this.transactionSync(() =>
|
|
833
|
+
this.put(key, value, versionOrOptions, ifVersion) == SYNC_PROMISE_SUCCESS, overlappingSync? 0x10002 : 2); // non-abortable, async flush
|
|
834
|
+
},
|
|
835
|
+
removeSync(key, ifVersionOrValue) {
|
|
836
|
+
if (writeTxn)
|
|
837
|
+
return this.remove(key, ifVersionOrValue);
|
|
838
|
+
else
|
|
839
|
+
return this.transactionSync(() =>
|
|
840
|
+
this.remove(key, ifVersionOrValue) == SYNC_PROMISE_SUCCESS, overlappingSync? 0x10002 : 2); // non-abortable, async flush
|
|
841
|
+
},
|
|
842
|
+
transaction(callback) {
|
|
843
|
+
if (writeTxn) {
|
|
844
|
+
// already nested in a transaction, just execute and return
|
|
845
|
+
return callback();
|
|
846
|
+
}
|
|
847
|
+
return this.transactionAsync(callback);
|
|
848
|
+
},
|
|
849
|
+
// Run `callback` in an abortable child transaction nested in the current
// write txn (beginning one synchronously if already in a write txn, else
// queueing via transactionAsync). Returning ABORT rolls the child back;
// throwing also aborts, with care not to double-abort once the callback's
// result handler has already settled the txn.
childTransaction(callback) {
	if (useWritemap)
		throw new Error('Child transactions are not supported in writemap mode');
	if (writeTxn) {
		let parentTxn = writeTxn;
		env.writeTxn = writeTxn = { write: true };
		env.beginTxn(1); // abortable
		let callbackDone;
		try {
			return when(callback(), (result) => {
				callbackDone = true;
				if (result === ABORT)
					env.abortTxn();
				else
					env.commitTxn();
				clearWriteTxn(parentTxn);
				return result;
			}, (error) => {
				env.abortTxn();
				clearWriteTxn(parentTxn);
				throw error;
			});
		} catch(error) {
			// only abort here if the when() handlers above didn't already settle it
			if (!callbackDone)
				env.abortTxn();
			clearWriteTxn(parentTxn);
			throw error;
		}
	}
	return this.transactionAsync(callback, true);
},
|
|
880
|
+
// Queue `callback` to run inside the next asynchronous write transaction
// (executed later by executeTxnCallbacks). Multiple calls before the txn
// starts share one callbacks array; each caller gets a promise for its own
// callback's result, rethrowing if it threw (CALLBACK_THREW sentinel).
transactionAsync(callback, asChild) {
	let txnIndex;
	let txnCallbacks;
	if (!nextResolution.callbacks) {
		// first callback for this resolution: create the array and issue the
		// txn instruction (8), optionally with strict ordering (0x100000)
		txnCallbacks = [asChild ? { callback, asChild } : callback];
		nextResolution.callbacks = txnCallbacks;
		txnCallbacks.results = writeInstructions(8 | (this.strictAsyncOrder ? 0x100000 : 0), this)();
		txnIndex = 0;
	} else {
		// join the already-queued transaction
		txnCallbacks = lastQueuedResolution.callbacks;
		txnIndex = txnCallbacks.push(asChild ? { callback, asChild } : callback) - 1;
	}
	return txnCallbacks.results.then((results) => {
		// executeTxnCallbacks stores each callback's result back into its slot
		let result = txnCallbacks[txnIndex];
		if (result === CALLBACK_THREW)
			throw txnCallbacks.errors[txnIndex];
		return result;
	});
},
|
|
899
|
+
// Run `callback` inside a synchronous write transaction. When already nested,
// delegates to a child transaction if possible, otherwise runs inline (ABORT
// is then ignored, with a one-time warning). `flags` are passed to
// env.beginTxn (default 3); 0x10000 appears to mean no-sync, which in
// overlapping-sync mode requires scheduling a flush to mark persistence.
transactionSync(callback, flags) {
	if (writeTxn) {
		if (!useWritemap && !this.isCaching) // can't use child transactions in write maps or caching stores
			// already nested in a transaction, execute as child transaction (if possible) and return
			return this.childTransaction(callback);
		let result = callback(); // else just run in current transaction
		if (result == ABORT && !abortedNonChildTransactionWarn) {
			console.warn('Can not abort a transaction inside another transaction with ' + (this.cache ? 'caching enabled' : 'useWritemap enabled'));
			abortedNonChildTransactionWarn = true;
		}
		return result;
	}
	let callbackDone;
	this.transactions++;
	env.beginTxn(flags == undefined ? 3 : flags);
	writeTxn = env.writeTxn = { write: true };
	try {
		this.emit('begin-transaction');
		return when(callback(), (result) => {
			try {
				callbackDone = true;
				if (result === ABORT)
					env.abortTxn();
				else {
					env.commitTxn();
					resetReadTxn();
					if ((flags & 0x10000) && overlappingSync) // if it is no-sync in overlapping-sync mode, need to schedule flush for it to be marked as persisted
						lastSyncTxnFlush = new Promise(resolve => scheduleFlush([resolve]));
				}
				return result;
			} finally {
				clearWriteTxn(null);
			}
		}, (error) => {
			// abort may itself fail if the txn is already gone — ignore
			try { env.abortTxn(); } catch(e) {}
			clearWriteTxn(null);
			throw error;
		});
	} catch(error) {
		// only abort if the success handler hadn't already settled the txn
		if (!callbackDone)
			try { env.abortTxn(); } catch(e) {}
		clearWriteTxn(null);
		throw error;
	}
},
|
|
944
|
+
// Synchronous transaction with explicit beginTxn flags 0 (rather than the
// default 3 used by transactionSync).
transactionSyncStart(callback) {
	return this.transactionSync(callback, 0);
},
|
|
947
|
+
// make the db a thenable/promise-like for when the last commit is committed
|
|
948
|
+
// Thenable resolving when the most recent commit has committed. Also assigned
// to the outer `committed` variable so `flushed` below can fall back to it.
committed: committed = {
	then(onfulfilled, onrejected) {
		if (commitPromise)
			return commitPromise.then(onfulfilled, onrejected);
		if (lastWritePromise) // always resolve to true
			return lastWritePromise.then(() => onfulfilled(true), onrejected);
		// nothing outstanding — resolve immediately
		return SYNC_PROMISE_SUCCESS.then(onfulfilled, onrejected);
	}
},
|
|
957
|
+
flushed: {
	// make this a thenable for when the commit is flushed to disk
	then(onfulfilled, onrejected) {
		if (flushPromise)
			// signal that flush timing should be tightened (see scheduleFlush delay)
			flushPromise.hasCallbacks = true;
		return Promise.all([flushPromise || committed, lastSyncTxnFlush]).then(onfulfilled, onrejected);
	}
},
|
|
965
|
+
// Shut down writing: disable all write entry points, then recursively wait
// until no new commit/flush promises appear before disabling env.sync.
// The resolved* parameters carry the promises already awaited by the previous
// recursion step, so we only recurse when something new showed up.
_endWrites(resolvedPromise, resolvedSyncPromise) {
	this.put = this.remove = this.del = this.batch = this.removeSync = this.putSync = this.transactionAsync = this.drop = this.clearAsync = () => { throw new Error('Database is closed') };
	// wait for all txns to finish, checking again after the current txn is done
	let finalPromise = flushPromise || commitPromise || lastWritePromise;
	if (flushPromise)
		flushPromise.hasCallbacks = true;
	if (lastFlushTimeout)
		expediteFlush();
	let finalSyncPromise = lastSyncTxnFlush;
	if (finalPromise && resolvedPromise != finalPromise ||
		finalSyncPromise && resolvedSyncPromise != finalSyncPromise) {
		// new work appeared since the last check — wait and re-check
		return Promise.all([finalPromise, finalSyncPromise]).then(() => this._endWrites(finalPromise, finalSyncPromise), () => this._endWrites(finalPromise, finalSyncPromise));
	}
	// quiescent: prevent any further syncs on this environment
	Object.defineProperty(env, 'sync', { value: null });
},
|
|
980
|
+
// Intercept 'beforecommit'/'aftercommit' listener registration (kept in local
// callback lists; beforecommit also forces event-turn batching); all other
// events go to the inherited EventEmitter implementation. Must remain a
// shorthand method so `super.on` binds correctly.
on(event, callback) {
	if (event == 'beforecommit') {
		eventTurnBatching = true;
		beforeCommitCallbacks.push(callback);
	} else if (event == 'aftercommit')
		afterCommitCallbacks.push(callback);
	else
		super.on(event, callback);
}
|
|
989
|
+
});
|
|
990
|
+
}
|
|
991
|
+
|
|
992
|
+
// An array of queued { type, key, value } operations that can be handed to a
// writer callback as a single batch (see ifVersion's no-callback form).
class Batch extends Array {
	constructor(callback) {
		super();
		this.callback = callback;
	}
	// Queue an insertion/overwrite of `key` with `value`.
	put(key, value) {
		this.push({ type: 'put', key, value });
	}
	// Queue a deletion of `key`.
	del(key) {
		this.push({ type: 'del', key });
	}
	// Drop all queued operations.
	clear() {
		this.length = 0;
	}
	// Submit the queued operations through the writer callback supplied at
	// construction time.
	write(callback) {
		return this.callback(this, callback);
	}
}
|
|
1010
|
+
// Wrap a pre-encoded buffer in a marker object so the serializer passes the
// bytes through verbatim instead of re-encoding them.
function asBinary(buffer) {
	const wrapper = {};
	wrapper['\x10binary-data\x02'] = buffer;
	return wrapper;
}
|
|
1015
|
+
|
|
1016
|
+
// Sentinel returned by map/filter callbacks (via RangeIterable) to omit an
// element from the iteration.
const SKIP = {};
// Polyfill Symbol.asyncIterator for engines that predate it.
if (!Symbol.asyncIterator) {
	Symbol.asyncIterator = Symbol.for('Symbol.asyncIterator');
}
|
|
1020
|
+
|
|
1021
|
+
// Lazily-evaluated iterable used for range query results. Subclasses/users
// assign `iterate` (optionally async-aware); map/filter/concat compose new
// RangeIterables without materializing results. `asArray` memoizes either the
// materialized array or a promise for it.
class RangeIterable {
	constructor(sourceArray) {
		if (sourceArray) {
			// adapt a plain array (or any sync iterable) as the source
			this.iterate = sourceArray[Symbol.iterator].bind(sourceArray);
		}
	}
	map(func) {
		let source = this;
		let result = new RangeIterable();
		result.iterate = (async) => {
			let iterator = source[Symbol.iterator](async);
			return {
				// NOTE: shorthand method — `this` inside refers to this iterator
				// object, so this.next() recursion continues the same iteration.
				next(resolvedResult) {
					let result;
					do {
						let iteratorResult;
						if (resolvedResult) {
							iteratorResult = resolvedResult;
							resolvedResult = null; // don't go in this branch on next iteration
						} else {
							iteratorResult = iterator.next();
							if (iteratorResult.then) {
								// async source: resume once the underlying result settles
								return iteratorResult.then(iteratorResult => this.next(iteratorResult));
							}
						}
						if (iteratorResult.done === true) {
							this.done = true;
							return iteratorResult;
						}
						result = func(iteratorResult.value);
						if (result && result.then) {
							// async mapper: SKIP results re-enter the loop via this.next()
							return result.then(result =>
								result == SKIP ?
									this.next() :
									{
										value: result
									});
						}
					} while(result == SKIP)
					return {
						value: result
					};
				},
				return() {
					return iterator.return();
				},
				throw() {
					return iterator.throw();
				}
			};
		};
		return result;
	}
	[Symbol.asyncIterator]() {
		return this.iterator = this.iterate();
	}
	[Symbol.iterator]() {
		return this.iterator = this.iterate();
	}
	filter(func) {
		// implemented on top of map via the SKIP sentinel
		return this.map(element => func(element) ? element : SKIP);
	}

	// Synchronously iterate, invoking `callback` per element. NOTE(review):
	// does not handle promise-valued results, so presumably sync sources only.
	forEach(callback) {
		let iterator = this.iterator = this.iterate();
		let result;
		while ((result = iterator.next()).done !== true) {
			callback(result.value);
		}
	}
	// Lazily chain a second iterable after this one.
	concat(secondIterable) {
		let concatIterable = new RangeIterable();
		concatIterable.iterate = (async) => {
			let iterator = this.iterator = this.iterate();
			let isFirst = true;
			let concatIterator = {
				next() {
					let result = iterator.next();
					if (isFirst && result.done) {
						// switch to the second iterable exactly once
						isFirst = false;
						iterator = secondIterable[Symbol.iterator](async);
						return iterator.next();
					}
					return result;
				},
				return() {
					return iterator.return();
				},
				throw() {
					return iterator.throw();
				}
			};
			return concatIterator;
		};
		return concatIterable;
	}
	next() {
		// allow direct next() calls without an explicit [Symbol.iterator]()
		if (!this.iterator)
			this.iterator = this.iterate();
		return this.iterator.next();
	}
	toJSON() {
		// only serializable once asArray has resolved to a real array
		if (this.asArray && this.asArray.forEach) {
			return this.asArray;
		}
		throw new Error('Can not serialize async iteratables without first calling resolveJSON');
		//return Array.from(this)
	}
	get asArray() {
		if (this._asArray)
			return this._asArray;
		let promise = new Promise((resolve, reject) => {
			let iterator = this.iterate();
			let array = [];
			let iterable = this;
			Object.defineProperty(array, 'iterable', { value: iterable });
			// drain the iterator, recursing through any promise-valued results
			function next(result) {
				while (result.done !== true) {
					if (result.then) {
						return result.then(next);
					} else {
						array.push(result.value);
					}
					result = iterator.next();
				}
				// memoize the finished array (replacing the pending promise)
				resolve(iterable._asArray = array);
			}
			next(iterator.next());
		});
		promise.iterable = this;
		// _asArray may already be the finished array if iteration was synchronous
		return this._asArray || (this._asArray = promise);
	}
	resolveData() {
		return this.asArray;
	}
}
|
|
1157
|
+
|
|
1158
|
+
// Write a uint32 key into `target` at `start` (little-endian) and return the
// end offset. A DataView is lazily created and cached on the target.
const writeUint32Key = (key, target, start) => {
	let view = target.dataView;
	if (!view)
		view = target.dataView = new DataView(target.buffer, 0, target.length);
	view.setUint32(start, key, true);
	return start + 4;
};
|
|
1162
|
+
// Read a little-endian uint32 key from `target` at `start`, lazily creating
// and caching a DataView on the target.
const readUint32Key = (target, start) => {
	let view = target.dataView;
	if (!view)
		view = target.dataView = new DataView(target.buffer, 0, target.length);
	return view.getUint32(start, true);
};
|
|
1165
|
+
// Copy raw key bytes into `target` at `start`; returns the end offset.
const writeBufferKey = (key, target, start) => {
	target.set(key, start);
	return start + key.length;
};
|
|
1169
|
+
// Cached uncurried slice so a key can be copied out of a borrowed buffer even
// if the buffer is a Node Buffer (whose own slice does not copy).
const Uint8ArraySlice$1 = Uint8Array.prototype.slice;
// Copy the bytes [start, end) of `target` into a fresh array.
const readBufferKey = (target, start, end) => Uint8ArraySlice$1.call(target, start, end);
|
|
1173
|
+
|
|
1174
|
+
// Wire up the store's key/value (de)serialization hooks based on its
// configured encodings, defaulting to ordered-binary keys.
function applyKeyHandling(store) {
	if (store.encoding == 'ordered-binary') {
		const keyCodec = {
			writeKey: orderedBinary.writeKey,
			readKey: orderedBinary.readKey,
		};
		store.encoder = keyCodec;
		store.decoder = keyCodec;
	}
	const { encoder, decoder } = store;
	if (encoder && encoder.writeKey && !encoder.encode) {
		// must stay a function expression: `this` is the encoder at call time
		encoder.encode = function(value) {
			return saveKey(value, this.writeKey, false, store.maxKeySize);
		};
	}
	if (decoder && decoder.readKey && !decoder.decode) {
		decoder.decode = function(buffer) { return this.readKey(buffer, 0, buffer.length); };
		store.decoderCopies = true;
	}
	if (store.keyIsUint32 || store.keyEncoding == 'uint32') {
		store.writeKey = writeUint32Key;
		store.readKey = readUint32Key;
	} else if (store.keyIsBuffer || store.keyEncoding == 'binary') {
		store.writeKey = writeBufferKey;
		store.readKey = readBufferKey;
	} else if (store.keyEncoder) {
		store.writeKey = store.keyEncoder.writeKey;
		store.readKey = store.keyEncoder.readKey;
	} else {
		store.writeKey = orderedBinary.writeKey;
		store.readKey = orderedBinary.readKey;
	}
}
|
|
1204
|
+
|
|
1205
|
+
// Scratch buffer state for serializing keys before handing them to the native
// layer. saveDataView starts as a no-op stub so allocateSaveBuffer can write
// its back-pointer before any real buffer exists.
let saveBuffer, saveDataView = { setFloat64() {}, setUint32() {} }, saveDataAddress;
// Write cursor within saveBuffer; starts past the 7800 threshold so the first
// saveKey call allocates a real buffer.
let savePosition = 8000;
let DYNAMIC_KEY_BUFFER_SIZE = 8192;
|
|
1208
|
+
// Replace the key scratch buffer with a fresh one, recording its native
// address and leaving a forwarding pointer in the old buffer.
function allocateSaveBuffer() {
	saveBuffer = typeof Buffer != 'undefined' ? Buffer.alloc(DYNAMIC_KEY_BUFFER_SIZE) : new Uint8Array(DYNAMIC_KEY_BUFFER_SIZE);
	saveBuffer.buffer.address = getAddress(saveBuffer);
	saveDataAddress = saveBuffer.buffer.address;
	// TODO: Conditionally only do this for key sequences?
	// 0xffffffff marks the old buffer's tail as a continuation record
	saveDataView.setUint32(savePosition, 0xffffffff);
	saveDataView.setFloat64(savePosition + 4, saveDataAddress, true); // save a pointer from the old buffer to the new address for the sake of the prefetch sequences
	saveBuffer.dataView = saveDataView = new DataView(saveBuffer.buffer, saveBuffer.byteOffset, saveBuffer.byteLength);
	savePosition = 0;
}
|
|
1218
|
+
// Serialize `key` (via `writeKey`) into the shared scratch buffer, reserving
// a 4-byte length prefix. With `saveTo` set, writes the length, records the
// buffer on saveTo, and returns the entry's native address; otherwise returns
// the buffer annotated with start/end offsets. Recurses with a fresh buffer
// when the current one runs out of room.
function saveKey(key, writeKey, saveTo, maxKeySize, skip) {
	if (savePosition > 7800) {
		allocateSaveBuffer();
	}
	if (skip > 0)
		savePosition += skip;
	let start = savePosition;
	try {
		// an undefined key reserves only the 4-byte prefix (zero-length key)
		savePosition = key === undefined ? start + 4 :
			writeKey(key, saveBuffer, start + 4);
	} catch (error) {
		saveBuffer.fill(0, start + 4); // restore zeros
		if (error.name == 'RangeError') {
			// ran off the end of the buffer: retry with a fresh buffer if a key
			// of maxKeySize could not have fit from `start`
			if (8180 - start < maxKeySize) {
				allocateSaveBuffer(); // try again:
				return saveKey(key, writeKey, saveTo, maxKeySize);
			}
			throw new Error('Key was too large, max key size is ' + maxKeySize);
		} else
			throw error;
	}
	let length = savePosition - start - 4;
	if (length > maxKeySize) {
		throw new Error('Key of size ' + length + ' was too large, max key size is ' + maxKeySize);
	}
	if (savePosition >= 8160) { // need to reserve enough room at the end for pointers
		savePosition = start; // reset position
		allocateSaveBuffer(); // try again:
		return saveKey(key, writeKey, saveTo, maxKeySize);
	}
	if (saveTo) {
		saveDataView.setUint32(start, length, true); // save the length
		saveTo.saveBuffer = saveBuffer;
		// advance past this entry with 4-byte alignment (mask 0xfffffc)
		savePosition = (savePosition + 12) & 0xfffffc;
		return start + saveDataAddress;
	} else {
		saveBuffer.start = start + 4;
		saveBuffer.end = savePosition;
		savePosition = (savePosition + 7) & 0xfffff8; // full 64-bit word alignment since these are usually copied
		return saveBuffer;
	}
}
|
|
1260
|
+
|
|
1261
|
+
// Shared terminal iterator result, reused to avoid per-iteration allocation.
const ITERATOR_DONE = { done: true, value: undefined };
const Uint8ArraySlice = Uint8Array.prototype.slice;
// Preferred typed-array constructor: unpooled Buffers on Node, Uint8Array elsewhere.
const Uint8A = typeof Buffer != 'undefined' ? Buffer.allocUnsafeSlow : Uint8Array;
let getValueBytes = globalBuffer;
if (!getValueBytes.maxLength) {
	// remember the buffer's full capacity, and make `length` writable so reads
	// can shrink the visible window without reallocating
	getValueBytes.maxLength = getValueBytes.length;
	Object.defineProperty(getValueBytes, 'length', { value: getValueBytes.length, writable: true, configurable: true });
}
const START_ADDRESS_POSITION = 4064;
const NEW_BUFFER_THRESHOLD = 0x8000;
// Sentinel returned by reads when the stored value's txn id matched ifNotTxnId.
const UNMODIFIED = {};
|
|
1273
|
+
function addReadMethods(LMDBStore, {
|
|
1274
|
+
maxKeySize, env, keyBytes, keyBytesView, getLastVersion, getLastTxnId
|
|
1275
|
+
}) {
|
|
1276
|
+
let readTxn, readTxnRenewed, asSafeBuffer = false;
|
|
1277
|
+
let renewId = 1;
|
|
1278
|
+
let mmaps = [];
|
|
1279
|
+
Object.assign(LMDBStore.prototype, {
|
|
1280
|
+
// Read the value for `id` directly as a string (no decoder involved).
// Returns undefined-ish falsy when not found; sets this.lastSize on success.
getString(id) {
	// ensure a usable transaction (active write txn, else renewed shared read txn)
	(env.writeTxn || (readTxnRenewed ? readTxn : renewReadTxn(this)));
	let string = getStringByBinary(this.dbAddress, this.writeKey(id, keyBytes, 0));
	if (typeof string === 'number') { // indicates the buffer wasn't large enough
		// a number result is the required size: grow the shared get buffer
		this._allocateGetBuffer(string);
		// and then try again
		string = getStringByBinary(this.dbAddress, this.writeKey(id, keyBytes, 0));
	}
	if (string)
		this.lastSize = string.length;
	return string;
},
|
|
1292
|
+
// Fast binary get: returns the SHARED value buffer (valid only until the next
// get), undefined when not found, or UNMODIFIED when options.ifNotTxnId matches.
// Negative return codes from the native layer encode both errors and special cases.
getBinaryFast(id, options) {
	let rc;
	if (options?.txn?.address)
		// caller supplied an explicit transaction
		rc = this.lastSize = getByBinary(this.dbAddress, this.writeKey(id, keyBytes, 0), options.ifNotTxnId || 0, options.txn.address);
	else {
		// otherwise use the active write txn or the renewed shared read txn
		(env.writeTxn || (readTxnRenewed ? readTxn : renewReadTxn(this)));
		rc = this.lastSize = getByBinary(this.dbAddress, this.writeKey(id, keyBytes, 0), options?.ifNotTxnId || 0, 0);
	}
	if (rc < 0) {
		if (rc == -30798) // MDB_NOTFOUND
			return; // undefined
		if (rc == -30004) // txn id matched: entry unmodified since options.ifNotTxnId
			return UNMODIFIED;
		if (rc == -30781 /*MDB_BAD_VALSIZE*/ && this.writeKey(id, keyBytes, 0) == 0)
			// zero-length keys are rejected by LMDB; distinguish a missing key
			throw new Error(id === undefined ?
				'A key is required for get, but is undefined' :
				'Zero length key is not allowed in LMDB');
		if (rc == -30000) // int32 overflow, read uint32
			rc = this.lastSize = keyBytesView.getUint32(0, true);
		else if (rc == -30001) {// shared buffer: value lives in a memory-mapped region
			this.lastSize = keyBytesView.getUint32(0, true);
			let bufferId = keyBytesView.getUint32(4, true);
			return getMMapBuffer(bufferId, this.lastSize);
		} else
			throw lmdbError(rc);
	}
	let compression = this.compression;
	let bytes = compression ? compression.getValueBytes : getValueBytes;
	if (rc > bytes.maxLength) {
		// this means the target buffer wasn't big enough, so the get failed to copy all the data from the database, need to either grow or use special buffer
		return this._returnLargeBuffer(
			() => getByBinary(this.dbAddress, this.writeKey(id, keyBytes, 0), 0, 0));
	}
	// shrink the reported length of the shared buffer to the value size
	bytes.length = this.lastSize;
	return bytes;
},
|
|
1328
|
+
// NOTE(review): appears to be an unfinished stub — the key is serialized via
// saveKey but no asynchronous get is issued and `callback`/`options` are never
// used. Also `writeKey` here is a bare identifier (not this.writeKey) —
// presumably a module-level key writer; confirm intent before relying on this.
getBFAsync(id, callback, options) {
	saveKey(id, writeKey, {});
},
|
|
1331
|
+
// Make a buffer returned by a fast get safe to hold past the next operation:
// shared-memory buffers are retained by pinning their read txn; global
// (reusable) buffers and write-txn reads must be copied out instead.
retainBinary(buffer) {
	if (!buffer)
		return
	if (!buffer.isGlobal && !env.writeTxn) {
		// pin the read txn so the mapped memory stays valid for this buffer
		buffer.txn = readTxn;
		readTxn.refCount = (readTxn.refCount || 0) + 1;
		return buffer;
	} else {
		// shared/reusable memory: copy the valid bytes out
		return Uint8ArraySlice.call(buffer, 0, this.lastSize);
	}
},
|
|
1342
|
+
// Handle a get whose value exceeds the current shared buffer. Either retries
// into a dedicated single-use buffer (for large "safe" binary reads) or grows
// the shared buffer and retries. `getFast` re-issues the native get.
_returnLargeBuffer(getFast) {
	let bytes;
	let compression = this.compression;
	if (asSafeBuffer && this.lastSize > NEW_BUFFER_THRESHOLD) {
		// used by getBinary to indicate it should create a dedicated buffer to receive this
		let bytesToRestore;
		try {
			if (compression) {
				// temporarily point the compression target at a one-off buffer
				bytesToRestore = compression.getValueBytes;
				let dictionary = compression.dictionary || [];
				let dictLength = (dictionary.length >> 3) << 3;// make sure it is word-aligned
				bytes = makeReusableBuffer(this.lastSize);
				compression.setBuffer(bytes, this.lastSize, dictionary, dictLength);
				compression.getValueBytes = bytes;
			} else {
				// temporarily swap the global shared buffer for a one-off buffer
				bytesToRestore = getValueBytes;
				setGlobalBuffer(bytes = getValueBytes = makeReusableBuffer(this.lastSize));
			}
			getFast();
		} finally {
			// always restore the original shared buffer, even if the retry threw
			if (compression) {
				let dictLength = (compression.dictionary.length >> 3) << 3;
				compression.setBuffer(bytesToRestore, bytesToRestore.maxLength, compression.dictionary, dictLength);
				compression.getValueBytes = bytesToRestore;
			} else {
				setGlobalBuffer(bytesToRestore);
				getValueBytes = bytesToRestore;
			}
		}
		return bytes;
	}
	// grow our shared/static buffer to accomodate the size of the data
	bytes = this._allocateGetBuffer(this.lastSize);
	// and try again
	getFast();
	bytes.length = this.lastSize;
	return bytes;
},
|
|
1380
|
+
_allocateGetBuffer(lastSize) {
|
|
1381
|
+
let newLength = Math.min(Math.max(lastSize * 2, 0x1000), 0xfffffff8);
|
|
1382
|
+
let bytes;
|
|
1383
|
+
if (this.compression) {
|
|
1384
|
+
let dictionary = this.compression.dictionary || new Uint8A(0);
|
|
1385
|
+
let dictLength = (dictionary.length >> 3) << 3;// make sure it is word-aligned
|
|
1386
|
+
bytes = new Uint8A(newLength + dictLength);
|
|
1387
|
+
bytes.set(dictionary); // copy dictionary into start
|
|
1388
|
+
// the section after the dictionary is the target area for get values
|
|
1389
|
+
bytes = bytes.subarray(dictLength);
|
|
1390
|
+
this.compression.setBuffer(bytes, newLength, dictionary, dictLength);
|
|
1391
|
+
bytes.maxLength = newLength;
|
|
1392
|
+
Object.defineProperty(bytes, 'length', { value: newLength, writable: true, configurable: true });
|
|
1393
|
+
this.compression.getValueBytes = bytes;
|
|
1394
|
+
} else {
|
|
1395
|
+
console.log('should not get here', newLength);
|
|
1396
|
+
bytes = makeReusableBuffer(newLength);
|
|
1397
|
+
setGlobalBuffer(getValueBytes = bytes);
|
|
1398
|
+
}
|
|
1399
|
+
bytes.isGlobal = true;
|
|
1400
|
+
return bytes;
|
|
1401
|
+
},
|
|
1402
|
+
getBinary(id) {
|
|
1403
|
+
try {
|
|
1404
|
+
asSafeBuffer = true;
|
|
1405
|
+
let fastBuffer = this.getBinaryFast(id);
|
|
1406
|
+
return fastBuffer && (fastBuffer.isGlobal ? Uint8ArraySlice.call(fastBuffer, 0, this.lastSize) : fastBuffer);
|
|
1407
|
+
} finally {
|
|
1408
|
+
asSafeBuffer = false;
|
|
1409
|
+
}
|
|
1410
|
+
},
|
|
1411
|
+
getSharedBinary(id) {
|
|
1412
|
+
let fastBuffer = this.getBinaryFast(id);
|
|
1413
|
+
if (fastBuffer) {
|
|
1414
|
+
if (fastBuffer.isGlobal || writeTxn)
|
|
1415
|
+
return Uint8ArraySlice.call(fastBuffer, 0, this.lastSize)
|
|
1416
|
+
fastBuffer.txn = readTxn;
|
|
1417
|
+
readTxn.refCount = (readTxn.refCount || 0) + 1;
|
|
1418
|
+
return fastBuffer;
|
|
1419
|
+
}
|
|
1420
|
+
},
|
|
1421
|
+
// Decoded get. Chooses the retrieval strategy based on the store's encoding:
// copying decoders can reuse the shared buffer, stable-buffer decoders need a
// safe copy, 'binary' returns raw bytes, and string encodings go through
// getString (with JSON.parse for 'json').
get(id, options) {
	if (this.decoderCopies) {
		// the decoder copies any data, so we can use the fast binary retrieval that overwrites the same buffer space
		let bytes = this.getBinaryFast(id, options);
		return bytes && (bytes == UNMODIFIED ? UNMODIFIED : this.decoder.decode(bytes));
	}
	if (this.encoding == 'binary')
		return this.getBinary(id, options);
	if (this.decoder) {
		// the decoder potentially uses the data from the buffer in the future and needs a stable buffer
		let bytes = this.getBinary(id);
		return bytes && (bytes == UNMODIFIED ? UNMODIFIED : this.decoder.decode(bytes));
	}

	// string-based encodings
	let result = this.getString(id);
	if (result) {
		if (this.encoding == 'json')
			return JSON.parse(result);
	}
	return result;
},
|
|
1442
|
+
getEntry(id) {
|
|
1443
|
+
let value = this.get(id);
|
|
1444
|
+
if (value !== undefined) {
|
|
1445
|
+
if (this.useVersions)
|
|
1446
|
+
return {
|
|
1447
|
+
value,
|
|
1448
|
+
version: getLastVersion(),
|
|
1449
|
+
//size: this.lastSize
|
|
1450
|
+
};
|
|
1451
|
+
else
|
|
1452
|
+
return {
|
|
1453
|
+
value,
|
|
1454
|
+
//size: this.lastSize
|
|
1455
|
+
};
|
|
1456
|
+
}
|
|
1457
|
+
},
|
|
1458
|
+
// Public wrapper around the closure-scoped resetReadTxn helper (bumps the
// renew id and retires or recycles the shared read transaction).
resetReadTxn() {
	resetReadTxn();
},
|
|
1461
|
+
// Commit and discard the shared read transaction and all cached references to
// it, forcing the next read to start a fresh transaction.
_commitReadTxn() {
	if (readTxn) {
		readTxn.isCommitted = true;
		readTxn.commit();
	}
	lastReadTxnRef = null;
	readTxnRenewed = null;
	readTxn = null;
},
|
|
1470
|
+
// Make sure a usable read transaction exists (no-op inside a write txn or
// when the shared read txn is already renewed).
ensureReadTxn() {
	if (!env.writeTxn && !readTxnRenewed)
		renewReadTxn(this);
},
|
|
1474
|
+
doesExist(key, versionOrValue) {
|
|
1475
|
+
if (!env.writeTxn)
|
|
1476
|
+
readTxnRenewed ? readTxn : renewReadTxn(this);
|
|
1477
|
+
if (versionOrValue == null) {
|
|
1478
|
+
// undefined means the entry exists, null is used specifically to check for the entry *not* existing
|
|
1479
|
+
return (this.getBinaryFast(key) === undefined) == (versionOrValue === null);
|
|
1480
|
+
}
|
|
1481
|
+
else if (this.useVersions) {
|
|
1482
|
+
this.getBinaryFast(key);
|
|
1483
|
+
return this.getBinaryFast(key) !== undefined && getLastVersion() === versionOrValue;
|
|
1484
|
+
}
|
|
1485
|
+
else {
|
|
1486
|
+
if (versionOrValue && versionOrValue['\x10binary-data\x02'])
|
|
1487
|
+
versionOrValue = versionOrValue['\x10binary-data\x02'];
|
|
1488
|
+
else if (this.encoder)
|
|
1489
|
+
versionOrValue = this.encoder.encode(versionOrValue);
|
|
1490
|
+
if (typeof versionOrValue == 'string')
|
|
1491
|
+
versionOrValue = Buffer.from(versionOrValue);
|
|
1492
|
+
return this.getValuesCount(key, { start: versionOrValue, exactMatch: true}) > 0;
|
|
1493
|
+
}
|
|
1494
|
+
},
|
|
1495
|
+
getValues(key, options) {
|
|
1496
|
+
let defaultOptions = {
|
|
1497
|
+
key,
|
|
1498
|
+
valuesForKey: true
|
|
1499
|
+
};
|
|
1500
|
+
if (options && options.snapshot === false)
|
|
1501
|
+
throw new Error('Can not disable snapshots for getValues');
|
|
1502
|
+
return this.getRange(options ? Object.assign(defaultOptions, options) : defaultOptions);
|
|
1503
|
+
},
|
|
1504
|
+
getKeys(options) {
|
|
1505
|
+
if (!options)
|
|
1506
|
+
options = {};
|
|
1507
|
+
options.values = false;
|
|
1508
|
+
return this.getRange(options);
|
|
1509
|
+
},
|
|
1510
|
+
getCount(options) {
|
|
1511
|
+
if (!options)
|
|
1512
|
+
options = {};
|
|
1513
|
+
options.onlyCount = true;
|
|
1514
|
+
return this.getRange(options).iterate();
|
|
1515
|
+
},
|
|
1516
|
+
getKeysCount(options) {
|
|
1517
|
+
if (!options)
|
|
1518
|
+
options = {};
|
|
1519
|
+
options.onlyCount = true;
|
|
1520
|
+
options.values = false;
|
|
1521
|
+
return this.getRange(options).iterate();
|
|
1522
|
+
},
|
|
1523
|
+
getValuesCount(key, options) {
|
|
1524
|
+
if (!options)
|
|
1525
|
+
options = {};
|
|
1526
|
+
options.key = key;
|
|
1527
|
+
options.valuesForKey = true;
|
|
1528
|
+
options.onlyCount = true;
|
|
1529
|
+
return this.getRange(options).iterate();
|
|
1530
|
+
},
|
|
1531
|
+
// Build a lazy RangeIterable over a key range (or over the values of one key
// when valuesForKey). Cursor, transaction, and key/value buffers are all
// managed imperatively and shared with the native layer, so statement order
// here is significant.
getRange(options) {
	let iterable = new RangeIterable();
	if (!options)
		options = {};
	let includeValues = options.values !== false;
	let includeVersions = options.versions;
	let valuesForKey = options.valuesForKey;
	let limit = options.limit;
	let db = this.db;
	let snapshot = options.snapshot;
	let compression = this.compression;
	iterable.iterate = () => {
		let currentKey = valuesForKey ? options.key : options.start;
		const reverse = options.reverse;
		let count = 0;
		let cursor, cursorRenewId, cursorAddress;
		let txn;
		// bit flags passed to the native cursor position/iterate calls
		let flags = (includeValues ? 0x100 : 0) | (reverse ? 0x400 : 0) |
			(valuesForKey ? 0x800 : 0) | (options.exactMatch ? 0x4000 : 0) |
			(options.inclusiveEnd ? 0x8000 : 0) |
			(options.exclusiveStart ? 0x10000 : 0);
		let store = this;
		// (Re)acquire a cursor and transaction; reuses a pooled cursor when possible.
		function resetCursor() {
			try {
				if (cursor)
					finishCursor();
				let writeTxn = env.writeTxn;
				if (writeTxn)
					snapshot = false; // write txns always see current data
				txn = writeTxn || (readTxnRenewed ? readTxn : renewReadTxn(store));
				cursor = !writeTxn && db.availableCursor;
				if (cursor) {
					db.availableCursor = null;
					flags |= 0x2000; // tell native side the cursor is being reused
				} else {
					cursor = new Cursor(db);
				}
				cursorAddress = cursor.address;
				txn.refCount = (txn.refCount || 0) + 1; // track transaction so we always use the same one
				if (snapshot === false) {
					cursorRenewId = renewId; // use shared read transaction
					txn.renewingrefCount = (txn.renewingrefCount || 0) + 1; // need to know how many are renewing cursors
				}
			} catch(error) {
				if (cursor) {
					try {
						cursor.close();
					} catch(error) { }
				}
				throw error;
			}
		}
		resetCursor();
		if (options.onlyCount) {
			flags |= 0x1000; // count-only mode: position() returns the count
			let count = position$1(options.offset);
			if (count < 0)
				lmdbError(count);
			finishCursor();
			return count;
		}
		// Position the cursor at the start of the range; serializes the current
		// key into keyBytes and start/end bounds into the save buffer.
		function position$1(offset) {
			let keySize = currentKey === undefined ? 0 : store.writeKey(currentKey, keyBytes, 0);
			let endAddress;
			if (valuesForKey) {
				if (options.start === undefined && options.end === undefined)
					endAddress = 0; // unbounded within the key
				else {
					let startAddress;
					if (store.encoder.writeKey) {
						startAddress = saveKey(options.start, store.encoder.writeKey, iterable, maxKeySize);
						keyBytesView.setFloat64(START_ADDRESS_POSITION, startAddress, true);
						endAddress = saveKey(options.end, store.encoder.writeKey, iterable, maxKeySize);
					} else if ((!options.start || options.start instanceof Uint8Array) && (!options.end || options.end instanceof Uint8Array)) {
						startAddress = saveKey(options.start, orderedBinary.writeKey, iterable, maxKeySize);
						keyBytesView.setFloat64(START_ADDRESS_POSITION, startAddress, true);
						endAddress = saveKey(options.end, orderedBinary.writeKey, iterable, maxKeySize);
					} else {
						throw new Error('Only key-based encoding is supported for start/end values');
					}
				}
			} else
				endAddress = saveKey(options.end, store.writeKey, iterable, maxKeySize);
			return position(cursorAddress, flags, offset || 0, keySize, endAddress);
		}

		// Release the cursor/txn pair, pooling the cursor for reuse when safe.
		function finishCursor() {
			if (txn.isDone)
				return;
			if (cursorRenewId)
				txn.renewingrefCount--;
			if (--txn.refCount <= 0 && txn.onlyCursor) {
				cursor.close();
				txn.abort(); // this is no longer main read txn, abort it now that we are done
				txn.isDone = true;
			} else {
				if (db.availableCursor || txn != readTxn) {
					cursor.close();
				} else { // try to reuse it
					db.availableCursor = cursor;
					db.cursorTxn = txn;
				}
			}
		}
		return {
			next() {
				let keySize, lastSize;
				// shared read txn was recycled underneath us: re-seek to currentKey
				if (cursorRenewId && (cursorRenewId != renewId || txn.isDone)) {
					resetCursor();
					keySize = position$1(0);
				}
				if (count === 0) { // && includeValues) // on first entry, get current value if we need to
					keySize = position$1(options.offset);
				} else
					keySize = iterate(cursorAddress);
				if (keySize <= 0 ||
						(count++ >= limit)) {
					if (count < 0)
						lmdbError(count);
					finishCursor();
					return ITERATOR_DONE;
				}
				if (!valuesForKey || snapshot === false) {
					// sanity-check the key size reported by the native layer
					if (keySize > 20000) {
						if (keySize > 0x1000000)
							lmdbError(keySize - 0x100000000); // encoded negative error code
						throw new Error('Invalid key size ' + keySize.toString(16))
					}
					currentKey = store.readKey(keyBytes, 32, keySize + 32);
				}
				if (includeValues) {
					let value;
					// native side writes value size at bytes 0-3 and mmap buffer id at 4-7
					lastSize = keyBytesView.getUint32(0, true);
					let bufferId = keyBytesView.getUint32(4, true);
					let bytes;
					if (bufferId) {
						bytes = getMMapBuffer(bufferId, lastSize);
					} else {
						bytes = compression ? compression.getValueBytes : getValueBytes;
						if (lastSize > bytes.maxLength) {
							// value exceeds the shared buffer: grow or use a dedicated one
							store.lastSize = lastSize;
							asSafeBuffer = store.encoding == 'binary';
							try {
								bytes = store._returnLargeBuffer(() => getCurrentValue(cursorAddress));
							} finally {
								asSafeBuffer = false;
							}
						} else
							bytes.length = lastSize;
					}
					if (store.decoder) {
						value = store.decoder.decode(bytes, lastSize);
					} else if (store.encoding == 'binary')
						value = bytes.isGlobal ? Uint8ArraySlice.call(bytes, 0, lastSize) : bytes;
					else {
						value = bytes.toString('utf8', 0, lastSize);
						if (store.encoding == 'json' && value)
							value = JSON.parse(value);
					}
					if (includeVersions)
						return {
							value: {
								key: currentKey,
								value,
								version: getLastVersion()
							}
						};
					else if (valuesForKey)
						return {
							value
						};
					else
						return {
							value: {
								key: currentKey,
								value,
							}
						};
				} else if (includeVersions) {
					return {
						value: {
							key: currentKey,
							version: getLastVersion()
						}
					};
				} else {
					return {
						value: currentKey
					};
				}
			},
			return() {
				finishCursor();
				return ITERATOR_DONE;
			},
			throw() {
				finishCursor();
				return ITERATOR_DONE;
			}
		};
	};
	return iterable;
},
|
|
1734
|
+
|
|
1735
|
+
getMany(keys, callback) {
	// this is an asynchronous get for multiple keys. It actually works by prefetching asynchronously,
	// allowing a separate thread to absorb the potentially largest cost: hard page faults (and disk I/O).
	// And then we just do standard sync gets (to deserialized data) to fulfil the callback/promise
	// once the prefetch occurs
	let promise = callback ? undefined : new Promise(resolve => callback = (error, results) => resolve(results));
	this.prefetch(keys, () => {
		let results = new Array(keys.length);
		for (let i = 0, l = keys.length; i < l; i++) {
			// `get` is the captured (uncached) prototype get, bypassing cache overrides
			results[i] = get.call(this, keys[i]);
		}
		callback(null, results);
	});
	return promise;
},
|
|
1750
|
+
// NOTE(review): legacy/compatibility path — returns the size/result of a
// shared-buffer get rather than a buffer object. `keyIsCompatibility` and
// txn.getBinaryShared are not defined in this chunk; confirm their contract
// before changing anything here.
getSharedBufferForGet(id) {
	let txn = (env.writeTxn || (readTxnRenewed ? readTxn : renewReadTxn(this)));
	this.lastSize = this.keyIsCompatibility ? txn.getBinaryShared(id) : this.db.get(this.writeKey(id, keyBytes, 0));
	if (this.lastSize === -30798) { // not found code
		return; //undefined
	}
	return this.lastSize;
},
|
|
1758
|
+
// Asynchronously fault the pages for `keys` into memory (native thread does
// the touching). Serializes all keys into chained save buffers, terminated by
// an undefined key, then invokes `callback` (or resolves a promise) when done.
prefetch(keys, callback) {
	if (!keys)
		throw new Error('An array of keys must be provided');
	if (!keys.length) {
		if (callback) {
			callback(null);
			return;
		} else
			return Promise.resolve();
	}
	let buffers = [];
	let startPosition;
	let bufferHolder = {};
	let lastBuffer;
	for (let key of keys) {
		let position = saveKey(key, this.writeKey, bufferHolder, maxKeySize);
		if (!startPosition)
			startPosition = position;
		// saveKey may have rolled over to a new save buffer; remember each one
		if (bufferHolder.saveBuffer != lastBuffer) {
			buffers.push(bufferHolder);
			lastBuffer = bufferHolder.saveBuffer;
			bufferHolder = { saveBuffer: lastBuffer };
		}
	}
	// terminator entry so the native side knows where the key list ends
	saveKey(undefined, this.writeKey, bufferHolder, maxKeySize);
	prefetch(this.dbAddress, startPosition, (error) => {
		if (error)
			console.error('Error with prefetch', buffers, bufferHolder); // partly exists to keep the buffers pinned in memory
		else
			callback(null);
	});
	if (!callback)
		return new Promise(resolve => callback = resolve);
},
|
|
1792
|
+
// Pin the shared read transaction (renewing it if needed) by incrementing its
// reference count; pairs with Txn.prototype.done to release.
useReadTxn() {
	let txn = readTxnRenewed ? readTxn : renewReadTxn(this);
	txn.refCount = (txn.refCount || 0) + 1;
},
|
|
1796
|
+
// Close this store (and, for the root store, the whole environment) after any
// pending writes finish. Returns a promise; `callback` is also supported.
close(callback) {
	this.status = 'closing';
	if (this.isRoot) {
		// the root store owns the shared read transaction
		if (readTxn) {
			try {
				readTxn.abort();
			} catch(error) {}
		}
		// poison the read txn so any later read fails loudly instead of crashing natively
		readTxn = {
			renew() {
				throw new Error('Can not read from a closed database');
			}
		};
		readTxnRenewed = null;
	}
	let txnPromise = this._endWrites();
	const doClose = () => {
		if (this.isRoot)
			env.close();
		else
			this.db.close();
		this.status = 'closed';
		if (callback)
			callback();
	};
	if (txnPromise)
		return txnPromise.then(doClose);
	else {
		doClose();
		return Promise.resolve();
	}
},
|
|
1828
|
+
// Collect database statistics: per-db stat plus environment-level root stat,
// info, and freelist stat, attached as root/env/free properties.
getStats() {
	// ensure a transaction exists for the stat calls
	(env.writeTxn || (readTxnRenewed ? readTxn : renewReadTxn(this)));
	let dbStats = this.db.stat();
	dbStats.root = env.stat();
	dbStats.env = env.info();
	dbStats.free = env.freeStat();
	return dbStats;
},
|
|
1836
|
+
});
|
|
1837
|
+
// Capture the plain (uncached) prototype get for getMany's direct reads.
let get = LMDBStore.prototype.get;
// WeakRef to the most recently retired read txn, candidate for reuse in renewReadTxn.
let lastReadTxnRef;
|
|
1839
|
+
// Resolve a value that lives in a shared memory-mapped region. Caches the
// underlying ArrayBuffer per bufferId; the byte offset within the mapping is
// written by the preceding native get at bytes 8-11 of keyBytesView.
function getMMapBuffer(bufferId, size) {
	let buffer = mmaps[bufferId];
	if (!buffer) {
		buffer = mmaps[bufferId] = getSharedBuffer(bufferId, env.address);
	}
	let offset = keyBytesView.getUint32(8, true);
	return new Uint8Array(buffer, offset, size);
}
|
|
1847
|
+
// Obtain the shared read transaction, creating one if needed. Tries to revive
// the most recently retired txn (via lastReadTxnRef); on transient native
// failures ("temporarily...") it backs off with Atomics.wait and retries.
// NOTE(review): 0x20000 is presumably the read-only txn flag — confirm against
// the native binding.
function renewReadTxn(store) {
	if (!readTxn) {
		let retries = 0;
		let waitArray;
		do {
			try {
				let lastReadTxn = lastReadTxnRef && lastReadTxnRef.deref();
				readTxn = new Txn(env, 0x20000, lastReadTxn && !lastReadTxn.isDone && lastReadTxn);
				if (readTxn.address == 0) {
					// native side reused the previous txn; promote it back to main read txn
					readTxn = lastReadTxn;
					if (readTxn.onlyCursor)
						readTxn.onlyCursor = false;
				}
				break;
			} catch (error) {
				if (error.message.includes('temporarily')) {
					// synchronous backoff: Atomics.wait on a throwaway shared array
					if (!waitArray)
						waitArray = new Int32Array(new SharedArrayBuffer(4), 0, 1);
					Atomics.wait(waitArray, 0, 0, retries * 2);
				} else
					throw error;
			}
		} while (retries++ < 100);
	}
	// we actually don't renew here, we let the renew take place in the next
	// lmdb native read/call so as to avoid an extra native call
	readTxnRenewed = setTimeout(resetReadTxn, 0);
	store.emit('begin-transaction');
	return readTxn;
}
|
|
1877
|
+
// Retire or recycle the shared read transaction at the end of an event turn.
// Bumps renewId so open cursors know to re-seek. If outstanding (non-renewing)
// references exist, the txn is handed over to them as cursor-only and a new
// one will be created later; otherwise the txn is reset in place for reuse.
// NOTE(review): the `hardReset` parameter is unused here — confirm whether any
// caller depends on it.
function resetReadTxn(hardReset) {
	renewId++;
	if (readTxnRenewed) {
		readTxnRenewed = null;
		if (readTxn.refCount - (readTxn.renewingrefCount || 0) > 0) {
			readTxn.onlyCursor = true;
			lastReadTxnRef = new WeakRef(readTxn);
			readTxn = null;
		} else
			resetTxn(readTxn.address);
	}
}
|
|
1889
|
+
}
|
|
1890
|
+
// Allocate a zero-filled buffer whose reported `length` native reads can
// shrink/grow, while `maxLength` records the true capacity.
function makeReusableBuffer(size) {
	let buffer;
	if (typeof Buffer != 'undefined')
		buffer = Buffer.alloc(size);
	else
		buffer = new Uint8Array(size);
	buffer.maxLength = size; // actual capacity, independent of the writable length below
	Object.defineProperty(buffer, 'length', { value: size, writable: true, configurable: true });
	return buffer;
}
|
|
1896
|
+
|
|
1897
|
+
// Release one reference to this transaction; when it is only being kept alive
// for cursors and the count reaches zero, abort it for good.
Txn.prototype.done = function() {
	this.refCount--;
	if (this.refCount == 0 && this.onlyCursor) {
		this.abort();
		this.isDone = true;
	}
};
|
|
1904
|
+
|
|
1905
|
+
// Native accessors injected via setGetLastVersion below.
let getLastVersion$1, getLastTxnId$1;
// Direct Map.prototype.get, bypassing any get() override on the cache subclass.
const mapGet = Map.prototype.get;
|
|
1907
|
+
// Mixin factory: wraps a store class with a WeakLRUCache of decoded values.
// In `validated` mode cached entries carry the txn id they were read at and
// are revalidated against the database on each get (via ifNotTxnId).
// Fix: putSync previously tested `id !== 'object'` (true for every key except
// the literal string 'object'), inconsistent with put()/getEntry() which use
// `typeof id !== 'object'`; object keys were therefore incorrectly cached on
// the sync path. Now only primitive keys are cached, matching put().
const CachingStore = (Store, env) => {
	// ids modified inside the current child transaction, for rollback invalidation
	let childTxnChanges;
	return class extends Store {
		constructor(dbName, options) {
			super(dbName, options);
			if (!env.cacheCommitter) {
				env.cacheCommitter = true;
				// After each batch commits, re-prioritize (or drop) cache entries
				// that were pinned while their writes were in flight.
				this.on('aftercommit', ({ next, last, txnId }) => {
					do {
						let meta = next.meta;
						let store = meta && meta.store;
						if (store) {
							if (next.flag & FAILED_CONDITION)
								store.cache.delete(meta.key); // just delete it from the map
							else {
								let expirationPriority = meta.valueSize >> 10;
								let cache = store.cache;
								let entry = mapGet.call(cache, meta.key);
								if (entry) {
									entry.txnId = txnId;
									cache.used(entry, expirationPriority + 4); // this will enter it into the LRFU (with a little lower priority than a read)
								}
							}
						}
					} while (next != last && (next = next.next))
				});
			}
			this.db.cachingDb = this;
			if (options.cache.clearKeptInterval)
				options.cache.clearKeptObjects = exports.clearKeptObjects;
			this.cache = new WeakLRUCache(options.cache);
			if (options.cache.validated)
				this.cache.validated = true;
		}
		get isCaching() {
			return true;
		}
		// Cached get. Validated entries are checked against the db with
		// ifNotTxnId; UNMODIFIED means the cached value is still current.
		get(id, cacheMode) {
			let value;
			if (this.cache.validated) {
				let entry = this.cache.get(id);
				if (entry) {
					let cachedValue = entry.value;
					if (entry.txnId) {
						value = super.get(id, { ifNotTxnId: entry.txnId });
						if (value === UNMODIFIED)
							return cachedValue;
					} else // with no txn id we do not validate; this is the state of a cached value after a write before it transacts
						value = cachedValue;
				} else
					value = super.get(id);
			} else {
				value = this.cache.getValue(id);
				if (value !== undefined) {
					return value;
				}
				value = super.get(id);
			}
			// only cache decoded objects under primitive keys (cacheMode skips caching)
			if (value && typeof value === 'object' && !cacheMode && typeof id !== 'object') {
				let entry = this.cache.setValue(id, value, this.lastSize >> 10);
				if (this.useVersions) {
					entry.version = getLastVersion$1();
				}
				if (this.cache.validated)
					entry.txnId = getLastTxnId$1();
			}
			return value;
		}
		// Entry-shaped cached get ({ value, version? }); undefined when absent.
		getEntry(id, cacheMode) {
			let entry = this.cache.get(id);
			if (entry)
				return entry;
			let value = super.get(id);
			if (value === undefined)
				return;
			if (value && typeof value === 'object' && !cacheMode && typeof id !== 'object') {
				entry = this.cache.setValue(id, value, this.lastSize >> 10);
			} else {
				entry = { value };
			}
			if (this.useVersions) {
				entry.version = getLastVersion$1();
			}
			return entry;
		}
		// Store a prebuilt entry; async writes are pinned in the cache until committed.
		putEntry(id, entry, ifVersion) {
			let result = super.put(id, entry.value, entry.version, ifVersion);
			if (typeof id === 'object')
				return result;
			if (result && result.then)
				this.cache.setManually(id, entry); // set manually so we can keep it pinned in memory until it is committed
			else // sync operation, immediately add to cache
				this.cache.set(id, entry);
		}
		put(id, value, version, ifVersion) {
			let result = super.put(id, value, version, ifVersion);
			if (typeof id !== 'object') {
				if (value && value['\x10binary-data\x02']) {
					// don't cache binary data, since it will be decoded on get
					this.cache.delete(id);
					return result;
				}
				// sync operation, immediately add to cache, otherwise keep it pinned in memory until it is committed
				let entry = this.cache.setValue(id, value, !result || result.isSync ? 0 : -1);
				if (childTxnChanges)
					childTxnChanges.add(id);
				if (version !== undefined)
					entry.version = typeof version === 'object' ? version.version : version;
			}
			return result;
		}
		putSync(id, value, version, ifVersion) {
			// only primitive keys are cached (consistent with put()/getEntry())
			if (typeof id !== 'object') {
				// sync operation, immediately add to cache, otherwise keep it pinned in memory until it is committed
				if (value && typeof value === 'object') {
					let entry = this.cache.setValue(id, value);
					if (childTxnChanges)
						childTxnChanges.add(id);
					if (version !== undefined) {
						entry.version = typeof version === 'object' ? version.version : version;
					}
				} else // it is possible that a value used to exist here
					this.cache.delete(id);
			}
			return super.putSync(id, value, version, ifVersion);
		}
		remove(id, ifVersion) {
			this.cache.delete(id);
			return super.remove(id, ifVersion);
		}
		removeSync(id, ifVersion) {
			this.cache.delete(id);
			return super.removeSync(id, ifVersion);
		}
		clearAsync(callback) {
			this.cache.clear();
			return super.clearAsync(callback);
		}
		clearSync() {
			this.cache.clear();
			super.clearSync();
		}
		// Child transaction wrapper that invalidates cached ids on abort/error.
		childTransaction(callback) {
			return super.childTransaction(() => {
				let cache = this.cache;
				let previousChanges = childTxnChanges;
				try {
					childTxnChanges = new Set();
					return when(callback(), (result) => {
						if (result === ABORT)
							return abort();
						childTxnChanges = previousChanges;
						return result;
					}, abort);
				} catch(error) {
					abort(error);
				}
				function abort(error) {
					// if the transaction was aborted, remove all affected entries from cache
					for (let id of childTxnChanges)
						cache.delete(id);
					childTxnChanges = previousChanges;
					if (error)
						throw error;
				}
			});
		}
	};
};
|
|
2076
|
+
function setGetLastVersion(get, getTxnId) {
|
|
2077
|
+
getLastVersion$1 = get;
|
|
2078
|
+
getLastTxnId$1 = getTxnId;
|
|
2079
|
+
}
|
|
2080
|
+
|
|
2081
|
+
let moduleRequire = typeof require == 'function' && require;
|
|
2082
|
+
function setRequire(require) {
|
|
2083
|
+
moduleRequire = require;
|
|
2084
|
+
}
|
|
2085
|
+
|
|
2086
|
+
setGetLastVersion(getLastVersion, getLastTxnId);
|
|
2087
|
+
let keyBytes, keyBytesView;
|
|
2088
|
+
const { onExit, getEnvsPointer, setEnvsPointer, getEnvFlags, setJSFlags } = nativeAddon;
|
|
2089
|
+
if (globalThis.__lmdb_envs__)
|
|
2090
|
+
setEnvsPointer(globalThis.__lmdb_envs__);
|
|
2091
|
+
else
|
|
2092
|
+
globalThis.__lmdb_envs__ = getEnvsPointer();
|
|
2093
|
+
|
|
2094
|
+
// this is hard coded as an upper limit because it is important assumption of the fixed buffers in writing instructions
|
|
2095
|
+
// this corresponds to the max key size for 8KB pages
|
|
2096
|
+
const MAX_KEY_SIZE = 4026;
|
|
2097
|
+
// this is used as the key size by default because default page size is OS page size, which is usually
|
|
2098
|
+
// 4KB (but is 16KB on M-series MacOS), and this keeps a consistent max key size when no page size specified.
|
|
2099
|
+
const DEFAULT_MAX_KEY_SIZE = 1978;
|
|
2100
|
+
const DEFAULT_COMMIT_DELAY = 0;
|
|
2101
|
+
|
|
2102
|
+
const allDbs = new Map();
|
|
2103
|
+
let defaultCompression;
|
|
2104
|
+
let hasRegisteredOnExit;
|
|
2105
|
+
function open(path$1, options) {
|
|
2106
|
+
if (!keyBytes) // TODO: Consolidate get buffer and key buffer (don't think we need both)
|
|
2107
|
+
allocateFixedBuffer();
|
|
2108
|
+
if (typeof path$1 == 'object' && !options) {
|
|
2109
|
+
options = path$1;
|
|
2110
|
+
path$1 = options.path;
|
|
2111
|
+
}
|
|
2112
|
+
options = options || {};
|
|
2113
|
+
let userOptions = options;
|
|
2114
|
+
if (!path$1) {
|
|
2115
|
+
options = Object.assign({
|
|
2116
|
+
deleteOnClose: true,
|
|
2117
|
+
noSync: true,
|
|
2118
|
+
}, options);
|
|
2119
|
+
path$1 = tmpdir() + '/' + Math.floor(Math.random() * 2821109907455).toString(36) + '.mdb';
|
|
2120
|
+
} else if (!options)
|
|
2121
|
+
options = {};
|
|
2122
|
+
let extension = path.extname(path$1);
|
|
2123
|
+
let name = path.basename(path$1, extension);
|
|
2124
|
+
let is32Bit = arch().endsWith('32');
|
|
2125
|
+
let remapChunks = options.remapChunks || options.encryptionKey || (options.mapSize ?
|
|
2126
|
+
(is32Bit && options.mapSize > 0x100000000) : // larger than fits in address space, must use dynamic maps
|
|
2127
|
+
is32Bit); // without a known map size, we default to being able to handle large data correctly/well*/
|
|
2128
|
+
options = Object.assign({
|
|
2129
|
+
path: path$1,
|
|
2130
|
+
noSubdir: Boolean(extension),
|
|
2131
|
+
isRoot: true,
|
|
2132
|
+
maxDbs: 12,
|
|
2133
|
+
remapChunks,
|
|
2134
|
+
keyBytes,
|
|
2135
|
+
overlappingSync: (options.noSync || options.readOnly) ? false : (os != 'win32'),
|
|
2136
|
+
// default map size limit of 4 exabytes when using remapChunks, since it is not preallocated and we can
|
|
2137
|
+
// make it super huge.
|
|
2138
|
+
mapSize: remapChunks ? 0x10000000000000 :
|
|
2139
|
+
0x20000, // Otherwise we start small with 128KB
|
|
2140
|
+
safeRestore: process.env.LMDB_RESTORE == 'safe',
|
|
2141
|
+
}, options);
|
|
2142
|
+
if (options.asyncTransactionOrder == 'strict') {
|
|
2143
|
+
options.strictAsyncOrder = true;
|
|
2144
|
+
}
|
|
2145
|
+
|
|
2146
|
+
if (!exists(options.noSubdir ? path.dirname(path$1) : path$1))
|
|
2147
|
+
fs.mkdirSync(options.noSubdir ? path.dirname(path$1) : path$1, { recursive: true }
|
|
2148
|
+
);
|
|
2149
|
+
function makeCompression(compressionOptions) {
|
|
2150
|
+
if (compressionOptions instanceof Compression)
|
|
2151
|
+
return compressionOptions;
|
|
2152
|
+
let useDefault = typeof compressionOptions != 'object';
|
|
2153
|
+
if (useDefault && defaultCompression)
|
|
2154
|
+
return defaultCompression;
|
|
2155
|
+
compressionOptions = Object.assign({
|
|
2156
|
+
threshold: 1000,
|
|
2157
|
+
dictionary: fs.readFileSync(new URL('./dict/dict.txt', (typeof document === 'undefined' ? new (require('u' + 'rl').URL)('file:' + __filename).href : (document.currentScript && document.currentScript.src || new URL('index.cjs', document.baseURI).href)).replace(/dist[\\\/]index.cjs$/, ''))),
|
|
2158
|
+
getValueBytes: makeReusableBuffer(0),
|
|
2159
|
+
}, compressionOptions);
|
|
2160
|
+
let compression = Object.assign(new Compression(compressionOptions), compressionOptions);
|
|
2161
|
+
if (useDefault)
|
|
2162
|
+
defaultCompression = compression;
|
|
2163
|
+
return compression;
|
|
2164
|
+
}
|
|
2165
|
+
|
|
2166
|
+
if (options.compression)
|
|
2167
|
+
options.compression = makeCompression(options.compression);
|
|
2168
|
+
let flags =
|
|
2169
|
+
(options.overlappingSync ? 0x1000 : 0) |
|
|
2170
|
+
(options.noSubdir ? 0x4000 : 0) |
|
|
2171
|
+
(options.noSync ? 0x10000 : 0) |
|
|
2172
|
+
(options.readOnly ? 0x20000 : 0) |
|
|
2173
|
+
(options.noMetaSync ? 0x40000 : 0) |
|
|
2174
|
+
(options.useWritemap ? 0x80000 : 0) |
|
|
2175
|
+
(options.mapAsync ? 0x100000 : 0) |
|
|
2176
|
+
(options.noReadAhead ? 0x800000 : 0) |
|
|
2177
|
+
(options.noMemInit ? 0x1000000 : 0) |
|
|
2178
|
+
(options.usePreviousSnapshot ? 0x2000000 : 0) |
|
|
2179
|
+
(options.remapChunks ? 0x4000000 : 0) |
|
|
2180
|
+
(options.safeRestore ? 0x8000000 : 0);
|
|
2181
|
+
|
|
2182
|
+
let env = new Env();
|
|
2183
|
+
let jsFlags = (options.overlappingSync ? 0x1000 : 0) |
|
|
2184
|
+
(options.separateFlushed ? 1 : 0) |
|
|
2185
|
+
(options.deleteOnClose ? 2 : 0);
|
|
2186
|
+
let rc = env.open(options, flags, jsFlags);
|
|
2187
|
+
if (rc)
|
|
2188
|
+
lmdbError(rc);
|
|
2189
|
+
delete options.keyBytes; // no longer needed, don't copy to stores
|
|
2190
|
+
let maxKeySize = env.getMaxKeySize();
|
|
2191
|
+
maxKeySize = Math.min(maxKeySize, options.pageSize ? MAX_KEY_SIZE : DEFAULT_MAX_KEY_SIZE);
|
|
2192
|
+
flags = getEnvFlags(env.address); // re-retrieve them, they are not necessarily the same if we are connecting to an existing env
|
|
2193
|
+
if (flags & 0x1000) {
|
|
2194
|
+
if (userOptions.noSync) {
|
|
2195
|
+
env.close();
|
|
2196
|
+
throw new Error('Can not set noSync on a database that was opened with overlappingSync');
|
|
2197
|
+
}
|
|
2198
|
+
} else if (options.overlappingSync) {
|
|
2199
|
+
if (userOptions.overlappingSync) {
|
|
2200
|
+
env.close();
|
|
2201
|
+
throw new Error('Can not enable overlappingSync on a database that was opened without this flag');
|
|
2202
|
+
}
|
|
2203
|
+
options.overlappingSync = false;
|
|
2204
|
+
jsFlags = jsFlags & 0xff; // clear overlapping sync
|
|
2205
|
+
setJSFlags(env.address, jsFlags);
|
|
2206
|
+
}
|
|
2207
|
+
|
|
2208
|
+
env.readerCheck(); // clear out any stale entries
|
|
2209
|
+
if ((options.overlappingSync || options.deleteOnClose) && !hasRegisteredOnExit && process.on) {
|
|
2210
|
+
hasRegisteredOnExit = true;
|
|
2211
|
+
process.on('exit', onExit);
|
|
2212
|
+
}
|
|
2213
|
+
|
|
2214
|
+
class LMDBStore extends EventEmitter {
|
|
2215
|
+
constructor(dbName, dbOptions) {
|
|
2216
|
+
super();
|
|
2217
|
+
if (dbName === undefined)
|
|
2218
|
+
throw new Error('Database name must be supplied in name property (may be null for root database)');
|
|
2219
|
+
|
|
2220
|
+
if (options.compression && dbOptions.compression !== false && typeof dbOptions.compression != 'object')
|
|
2221
|
+
dbOptions.compression = options.compression; // use the parent compression if available
|
|
2222
|
+
else if (dbOptions.compression)
|
|
2223
|
+
dbOptions.compression = makeCompression(dbOptions.compression);
|
|
2224
|
+
|
|
2225
|
+
if (dbOptions.dupSort && (dbOptions.useVersions || dbOptions.cache)) {
|
|
2226
|
+
throw new Error('The dupSort flag can not be combined with versions or caching');
|
|
2227
|
+
}
|
|
2228
|
+
let keyIsBuffer = dbOptions.keyIsBuffer;
|
|
2229
|
+
if (dbOptions.keyEncoding == 'uint32') {
|
|
2230
|
+
dbOptions.keyIsUint32 = true;
|
|
2231
|
+
} else if (dbOptions.keyEncoder) {
|
|
2232
|
+
if (dbOptions.keyEncoder.enableNullTermination) {
|
|
2233
|
+
dbOptions.keyEncoder.enableNullTermination();
|
|
2234
|
+
} else
|
|
2235
|
+
keyIsBuffer = true;
|
|
2236
|
+
} else if (dbOptions.keyEncoding == 'binary') {
|
|
2237
|
+
keyIsBuffer = true;
|
|
2238
|
+
}
|
|
2239
|
+
let flags = (dbOptions.reverseKey ? 0x02 : 0) |
|
|
2240
|
+
(dbOptions.dupSort ? 0x04 : 0) |
|
|
2241
|
+
(dbOptions.dupFixed ? 0x10 : 0) |
|
|
2242
|
+
(dbOptions.integerDup ? 0x20 : 0) |
|
|
2243
|
+
(dbOptions.reverseDup ? 0x40 : 0) |
|
|
2244
|
+
(!options.readOnly && dbOptions.create !== false ? 0x40000 : 0) |
|
|
2245
|
+
(dbOptions.useVersions ? 0x100 : 0);
|
|
2246
|
+
let keyType = (dbOptions.keyIsUint32 || dbOptions.keyEncoding == 'uint32') ? 2 : keyIsBuffer ? 3 : 0;
|
|
2247
|
+
if (keyType == 2)
|
|
2248
|
+
flags |= 0x08; // integer key
|
|
2249
|
+
|
|
2250
|
+
if (options.readOnly) {
|
|
2251
|
+
// in read-only mode we use a read-only txn to open the database
|
|
2252
|
+
// TODO: LMDB is actually not entirely thread-safe when it comes to opening databases with
|
|
2253
|
+
// read-only transactions since there is a race condition on setting the update dbis that
|
|
2254
|
+
// occurs outside the lock
|
|
2255
|
+
// make sure we are using a fresh read txn, so we don't want to share with a cursor txn
|
|
2256
|
+
this.resetReadTxn();
|
|
2257
|
+
this.ensureReadTxn();
|
|
2258
|
+
this.db = new Dbi(env, flags, dbName, keyType, dbOptions.compression);
|
|
2259
|
+
} else {
|
|
2260
|
+
this.transactionSync(() => {
|
|
2261
|
+
this.db = new Dbi(env, flags, dbName, keyType, dbOptions.compression);
|
|
2262
|
+
}, options.overlappingSync ? 0x10002 : 2); // no flush-sync, but synchronously commit
|
|
2263
|
+
}
|
|
2264
|
+
this._commitReadTxn(); // current read transaction becomes invalid after opening another db
|
|
2265
|
+
if (!this.db || this.db.dbi == 0xffffffff) {// not found
|
|
2266
|
+
throw new Error('Database not found')
|
|
2267
|
+
}
|
|
2268
|
+
this.dbAddress = this.db.address;
|
|
2269
|
+
this.db.name = dbName || null;
|
|
2270
|
+
this.name = dbName;
|
|
2271
|
+
this.status = 'open';
|
|
2272
|
+
this.env = env;
|
|
2273
|
+
this.reads = 0;
|
|
2274
|
+
this.writes = 0;
|
|
2275
|
+
this.transactions = 0;
|
|
2276
|
+
this.averageTransactionTime = 5;
|
|
2277
|
+
if (dbOptions.syncBatchThreshold)
|
|
2278
|
+
console.warn('syncBatchThreshold is no longer supported');
|
|
2279
|
+
if (dbOptions.immediateBatchThreshold)
|
|
2280
|
+
console.warn('immediateBatchThreshold is no longer supported');
|
|
2281
|
+
this.commitDelay = DEFAULT_COMMIT_DELAY;
|
|
2282
|
+
Object.assign(this, { // these are the options that are inherited
|
|
2283
|
+
path: options.path,
|
|
2284
|
+
encoding: options.encoding,
|
|
2285
|
+
strictAsyncOrder: options.strictAsyncOrder,
|
|
2286
|
+
}, dbOptions);
|
|
2287
|
+
let Encoder;
|
|
2288
|
+
if (this.encoder && this.encoder.Encoder) {
|
|
2289
|
+
Encoder = this.encoder.Encoder;
|
|
2290
|
+
this.encoder = null; // don't copy everything from the module
|
|
2291
|
+
}
|
|
2292
|
+
if (!Encoder && !(this.encoder && this.encoder.encode) && (!this.encoding || this.encoding == 'msgpack' || this.encoding == 'cbor')) {
|
|
2293
|
+
Encoder = (this.encoding == 'cbor' ? moduleRequire('cbor-x').Encoder : MsgpackrEncoder);
|
|
2294
|
+
}
|
|
2295
|
+
if (Encoder) {
|
|
2296
|
+
this.encoder = new Encoder(Object.assign(
|
|
2297
|
+
assignConstrainedProperties(['copyBuffers', 'getStructures', 'saveStructures', 'useFloat32', 'useRecords', 'structuredClone', 'variableMapSize', 'useTimestamp32', 'largeBigIntToFloat', 'encodeUndefinedAsNil', 'int64AsNumber', 'onInvalidDate', 'mapsAsObjects', 'useTag259ForMaps', 'pack', 'maxSharedStructures', 'shouldShareStructure'],
|
|
2298
|
+
this.sharedStructuresKey ? this.setupSharedStructures() : {
|
|
2299
|
+
copyBuffers: true, // need to copy any embedded buffers that are found since we use unsafe buffers
|
|
2300
|
+
}, options, dbOptions), this.encoder));
|
|
2301
|
+
}
|
|
2302
|
+
if (this.encoding == 'json') {
|
|
2303
|
+
this.encoder = {
|
|
2304
|
+
encode: JSON.stringify,
|
|
2305
|
+
};
|
|
2306
|
+
} else if (this.encoder) {
|
|
2307
|
+
this.decoder = this.encoder;
|
|
2308
|
+
this.decoderCopies = !this.encoder.needsStableBuffer;
|
|
2309
|
+
}
|
|
2310
|
+
this.maxKeySize = maxKeySize;
|
|
2311
|
+
applyKeyHandling(this);
|
|
2312
|
+
allDbs.set(dbName ? name + '-' + dbName : name, this);
|
|
2313
|
+
}
|
|
2314
|
+
openDB(dbName, dbOptions) {
|
|
2315
|
+
if (this.dupSort && this.name == null)
|
|
2316
|
+
throw new Error('Can not open named databases if the main database is dupSort')
|
|
2317
|
+
if (typeof dbName == 'object' && !dbOptions) {
|
|
2318
|
+
dbOptions = dbName;
|
|
2319
|
+
dbName = dbOptions.name;
|
|
2320
|
+
} else
|
|
2321
|
+
dbOptions = dbOptions || {};
|
|
2322
|
+
try {
|
|
2323
|
+
return dbOptions.cache ?
|
|
2324
|
+
new (CachingStore(LMDBStore, env))(dbName, dbOptions) :
|
|
2325
|
+
new LMDBStore(dbName, dbOptions);
|
|
2326
|
+
} catch(error) {
|
|
2327
|
+
if (error.message == 'Database not found')
|
|
2328
|
+
return; // return undefined to indicate db not found
|
|
2329
|
+
if (error.message.indexOf('MDB_DBS_FULL') > -1) {
|
|
2330
|
+
error.message += ' (increase your maxDbs option)';
|
|
2331
|
+
}
|
|
2332
|
+
throw error;
|
|
2333
|
+
}
|
|
2334
|
+
}
|
|
2335
|
+
open(dbOptions, callback) {
|
|
2336
|
+
let db = this.openDB(dbOptions);
|
|
2337
|
+
if (callback)
|
|
2338
|
+
callback(null, db);
|
|
2339
|
+
return db;
|
|
2340
|
+
}
|
|
2341
|
+
backup(path$1, compact) {
|
|
2342
|
+
fs.mkdirSync(path.dirname(path$1), { recursive: true });
|
|
2343
|
+
return new Promise((resolve, reject) => env.copy(path$1, compact, (error) => {
|
|
2344
|
+
if (error) {
|
|
2345
|
+
reject(error);
|
|
2346
|
+
} else {
|
|
2347
|
+
resolve();
|
|
2348
|
+
}
|
|
2349
|
+
}));
|
|
2350
|
+
}
|
|
2351
|
+
isOperational() {
|
|
2352
|
+
return this.status == 'open';
|
|
2353
|
+
}
|
|
2354
|
+
sync(callback) {
|
|
2355
|
+
return env.sync(callback || function(error) {
|
|
2356
|
+
if (error) {
|
|
2357
|
+
console.error(error);
|
|
2358
|
+
}
|
|
2359
|
+
});
|
|
2360
|
+
}
|
|
2361
|
+
deleteDB() {
|
|
2362
|
+
console.warn('deleteDB() is deprecated, use drop or dropSync instead');
|
|
2363
|
+
return this.dropSync();
|
|
2364
|
+
}
|
|
2365
|
+
dropSync() {
|
|
2366
|
+
this.transactionSync(() =>
|
|
2367
|
+
this.db.drop({
|
|
2368
|
+
justFreePages: false
|
|
2369
|
+
}), options.overlappingSync ? 0x10002 : 2);
|
|
2370
|
+
}
|
|
2371
|
+
clear(callback) {
|
|
2372
|
+
if (typeof callback == 'function')
|
|
2373
|
+
return this.clearAsync(callback);
|
|
2374
|
+
console.warn('clear() is deprecated, use clearAsync or clearSync instead');
|
|
2375
|
+
this.clearSync();
|
|
2376
|
+
}
|
|
2377
|
+
clearSync() {
|
|
2378
|
+
if (this.encoder) {
|
|
2379
|
+
if (this.encoder.clearSharedData)
|
|
2380
|
+
this.encoder.clearSharedData();
|
|
2381
|
+
else if (this.encoder.structures)
|
|
2382
|
+
this.encoder.structures = [];
|
|
2383
|
+
}
|
|
2384
|
+
this.transactionSync(() =>
|
|
2385
|
+
this.db.drop({
|
|
2386
|
+
justFreePages: true
|
|
2387
|
+
}), options.overlappingSync ? 0x10002 : 2);
|
|
2388
|
+
}
|
|
2389
|
+
readerCheck() {
|
|
2390
|
+
return env.readerCheck();
|
|
2391
|
+
}
|
|
2392
|
+
readerList() {
|
|
2393
|
+
return env.readerList().join('');
|
|
2394
|
+
}
|
|
2395
|
+
setupSharedStructures() {
|
|
2396
|
+
const getStructures = () => {
|
|
2397
|
+
let lastVersion; // because we are doing a read here, we may need to save and restore the lastVersion from the last read
|
|
2398
|
+
if (this.useVersions)
|
|
2399
|
+
lastVersion = getLastVersion();
|
|
2400
|
+
let buffer = this.getBinary(this.sharedStructuresKey);
|
|
2401
|
+
if (this.useVersions)
|
|
2402
|
+
setLastVersion(lastVersion);
|
|
2403
|
+
return buffer && this.decoder.decode(buffer);
|
|
2404
|
+
};
|
|
2405
|
+
return {
|
|
2406
|
+
saveStructures: (structures, isCompatible) => {
|
|
2407
|
+
return this.transactionSync(() => {
|
|
2408
|
+
let existingStructuresBuffer = this.getBinary(this.sharedStructuresKey);
|
|
2409
|
+
let existingStructures = existingStructuresBuffer && this.decoder.decode(existingStructuresBuffer);
|
|
2410
|
+
if (typeof isCompatible == 'function' ?
|
|
2411
|
+
!isCompatible(existingStructures) :
|
|
2412
|
+
(existingStructures && existingStructures.length != isCompatible))
|
|
2413
|
+
return false; // it changed, we need to indicate that we couldn't update
|
|
2414
|
+
this.put(this.sharedStructuresKey, structures);
|
|
2415
|
+
}, options.overlappingSync ? 0x10000 : 0);
|
|
2416
|
+
},
|
|
2417
|
+
getStructures,
|
|
2418
|
+
copyBuffers: true, // need to copy any embedded buffers that are found since we use unsafe buffers
|
|
2419
|
+
};
|
|
2420
|
+
}
|
|
2421
|
+
}
|
|
2422
|
+
// if caching class overrides putSync, don't want to double call the caching code
|
|
2423
|
+
LMDBStore.prototype.putSync;
|
|
2424
|
+
LMDBStore.prototype.removeSync;
|
|
2425
|
+
addReadMethods(LMDBStore, { env, maxKeySize, keyBytes, keyBytesView, getLastVersion });
|
|
2426
|
+
if (!options.readOnly)
|
|
2427
|
+
addWriteMethods(LMDBStore, { env, maxKeySize, fixedBuffer: keyBytes,
|
|
2428
|
+
resetReadTxn: LMDBStore.prototype.resetReadTxn, ...options });
|
|
2429
|
+
LMDBStore.prototype.supports = {
|
|
2430
|
+
permanence: true,
|
|
2431
|
+
bufferKeys: true,
|
|
2432
|
+
promises: true,
|
|
2433
|
+
snapshots: true,
|
|
2434
|
+
clear: true,
|
|
2435
|
+
status: true,
|
|
2436
|
+
deferredOpen: true,
|
|
2437
|
+
openCallback: true,
|
|
2438
|
+
};
|
|
2439
|
+
let Class = options.cache ? CachingStore(LMDBStore, env) : LMDBStore;
|
|
2440
|
+
return options.asClass ? Class : new Class(options.name || null, options);
|
|
2441
|
+
}
|
|
2442
|
+
function openAsClass(path, options) {
|
|
2443
|
+
if (typeof path == 'object' && !options) {
|
|
2444
|
+
options = path;
|
|
2445
|
+
path = options.path;
|
|
2446
|
+
}
|
|
2447
|
+
options = options || {};
|
|
2448
|
+
options.asClass = true;
|
|
2449
|
+
return open(path, options);
|
|
2450
|
+
}
|
|
2451
|
+
|
|
2452
|
+
function getLastVersion() {
|
|
2453
|
+
return keyBytesView.getFloat64(16, true);
|
|
2454
|
+
}
|
|
2455
|
+
function setLastVersion(version) {
|
|
2456
|
+
return keyBytesView.setFloat64(16, version, true);
|
|
2457
|
+
}
|
|
2458
|
+
|
|
2459
|
+
function getLastTxnId() {
|
|
2460
|
+
return keyBytesView.getUint32(32, true);
|
|
2461
|
+
}
|
|
2462
|
+
|
|
2463
|
+
const KEY_BUFFER_SIZE = 4096;
|
|
2464
|
+
function allocateFixedBuffer() {
|
|
2465
|
+
keyBytes = typeof Buffer != 'undefined' ? Buffer.allocUnsafeSlow(KEY_BUFFER_SIZE) : new Uint8Array(KEY_BUFFER_SIZE);
|
|
2466
|
+
const keyBuffer = keyBytes.buffer;
|
|
2467
|
+
keyBytesView = keyBytes.dataView || (keyBytes.dataView = new DataView(keyBytes.buffer, 0, KEY_BUFFER_SIZE)); // max key size is actually 4026
|
|
2468
|
+
keyBytes.uint32 = new Uint32Array(keyBuffer, 0, KEY_BUFFER_SIZE >> 2);
|
|
2469
|
+
keyBytes.float64 = new Float64Array(keyBuffer, 0, KEY_BUFFER_SIZE >> 3);
|
|
2470
|
+
keyBytes.uint32.address = keyBytes.address = keyBuffer.address = getAddress(keyBytes);
|
|
2471
|
+
}
|
|
2472
|
+
|
|
2473
|
+
function exists(path) {
|
|
2474
|
+
if (fs.existsSync)
|
|
2475
|
+
return fs.existsSync(path);
|
|
2476
|
+
try {
|
|
2477
|
+
return fs.statSync(path);
|
|
2478
|
+
} catch (error) {
|
|
2479
|
+
return false
|
|
2480
|
+
}
|
|
2481
|
+
}
|
|
2482
|
+
|
|
2483
|
+
function assignConstrainedProperties(allowedProperties, target) {
|
|
2484
|
+
for (let i = 2; i < arguments.length; i++) {
|
|
2485
|
+
let source = arguments[i];
|
|
2486
|
+
for (let key in source) {
|
|
2487
|
+
if (allowedProperties.includes(key))
|
|
2488
|
+
target[key] = source[key];
|
|
2489
|
+
}
|
|
2490
|
+
}
|
|
2491
|
+
return target;
|
|
2492
|
+
}
|
|
2493
|
+
|
|
2494
|
+
function levelup(store) {
|
|
2495
|
+
return Object.assign(Object.create(store), {
|
|
2496
|
+
get(key, options, callback) {
|
|
2497
|
+
let result = store.get(key);
|
|
2498
|
+
if (typeof options == 'function')
|
|
2499
|
+
callback = options;
|
|
2500
|
+
if (callback) {
|
|
2501
|
+
if (result === undefined)
|
|
2502
|
+
callback(new NotFoundError());
|
|
2503
|
+
else
|
|
2504
|
+
callback(null, result);
|
|
2505
|
+
} else {
|
|
2506
|
+
if (result === undefined)
|
|
2507
|
+
return Promise.reject(new NotFoundError());
|
|
2508
|
+
else
|
|
2509
|
+
return Promise.resolve(result);
|
|
2510
|
+
}
|
|
2511
|
+
},
|
|
2512
|
+
});
|
|
2513
|
+
}
|
|
2514
|
+
class NotFoundError extends Error {
|
|
2515
|
+
constructor(message) {
|
|
2516
|
+
super(message);
|
|
2517
|
+
this.name = 'NotFoundError';
|
|
2518
|
+
this.notFound = true;
|
|
2519
|
+
}
|
|
2520
|
+
}
|
|
2521
|
+
|
|
2522
|
+
orderedBinary__namespace.enableNullTermination();
|
|
2523
|
+
setExternals({
|
|
2524
|
+
arch: os$1.arch, fs: fs__default["default"], tmpdir: os$1.tmpdir, MsgpackrEncoder: msgpackr.Encoder, WeakLRUCache: weakLruCache.WeakLRUCache, orderedBinary: orderedBinary__namespace,
|
|
2525
|
+
EventEmitter: events.EventEmitter, os: os$1.platform(), onExit(callback) {
|
|
2526
|
+
if (process.getMaxListeners() < process.listenerCount('exit') + 8)
|
|
2527
|
+
process.setMaxListeners(process.listenerCount('exit') + 8);
|
|
2528
|
+
process.on('exit', callback);
|
|
2529
|
+
},
|
|
2530
|
+
});
|
|
2531
|
+
let { noop } = nativeAddon;
|
|
2532
|
+
const TransactionFlags = {
|
|
2533
|
+
ABORTABLE: 1,
|
|
2534
|
+
SYNCHRONOUS_COMMIT: 2,
|
|
2535
|
+
NO_SYNC_FLUSH: 0x10000,
|
|
2536
|
+
};
|
|
2537
|
+
var index = {
|
|
2538
|
+
open, openAsClass, getLastVersion, compareKey: orderedBinary$1.compareKeys, keyValueToBuffer: orderedBinary$1.toBufferKey, bufferToKeyValue: orderedBinary$1.fromBufferKey, ABORT, IF_EXISTS, asBinary, levelup, TransactionFlags
|
|
2539
|
+
};
|
|
2540
|
+
|
|
2541
|
+
setRequire(module$1.createRequire((typeof document === 'undefined' ? new (require('u' + 'rl').URL)('file:' + __filename).href : (document.currentScript && document.currentScript.src || new URL('index.cjs', document.baseURI).href))));
|
|
2542
|
+
exports.v8AccelerationEnabled = false;
|
|
2543
|
+
|
|
2544
|
+
let versions = process.versions;
|
|
2545
|
+
let [ majorVersion, minorVersion ] = versions.node.split('.');
|
|
2546
|
+
|
|
2547
|
+
if (versions.v8 && +majorVersion == nativeAddon.version.nodeCompiledVersion) {
|
|
2548
|
+
let v8Funcs = {};
|
|
2549
|
+
let fastApiCalls = (majorVersion == 17 || majorVersion == 18 || majorVersion == 16 && minorVersion > 6) && !process.env.DISABLE_TURBO_CALLS;
|
|
2550
|
+
if (fastApiCalls)
|
|
2551
|
+
v8.setFlagsFromString('--turbo-fast-api-calls');
|
|
2552
|
+
nativeAddon.enableDirectV8(v8Funcs, fastApiCalls);
|
|
2553
|
+
Object.assign(nativeAddon, v8Funcs);
|
|
2554
|
+
exports.v8AccelerationEnabled = true;
|
|
2555
|
+
} else if (majorVersion == 14) {
|
|
2556
|
+
// node v14 only has ABI compatibility with node v16 for zero-arg clearKeptObjects
|
|
2557
|
+
let v8Funcs = {};
|
|
2558
|
+
nativeAddon.enableDirectV8(v8Funcs, false);
|
|
2559
|
+
nativeAddon.clearKeptObjects = v8Funcs.clearKeptObjects;
|
|
2560
|
+
}
|
|
2561
|
+
setNativeFunctions(nativeAddon);
|
|
2562
|
+
|
|
2563
|
+
Object.defineProperty(exports, 'bufferToKeyValue', {
|
|
2564
|
+
enumerable: true,
|
|
2565
|
+
get: function () { return orderedBinary$1.fromBufferKey; }
|
|
2566
|
+
});
|
|
2567
|
+
Object.defineProperty(exports, 'compareKey', {
|
|
2568
|
+
enumerable: true,
|
|
2569
|
+
get: function () { return orderedBinary$1.compareKeys; }
|
|
2570
|
+
});
|
|
2571
|
+
Object.defineProperty(exports, 'compareKeys', {
|
|
2572
|
+
enumerable: true,
|
|
2573
|
+
get: function () { return orderedBinary$1.compareKeys; }
|
|
2574
|
+
});
|
|
2575
|
+
Object.defineProperty(exports, 'keyValueToBuffer', {
|
|
2576
|
+
enumerable: true,
|
|
2577
|
+
get: function () { return orderedBinary$1.toBufferKey; }
|
|
2578
|
+
});
|
|
2579
|
+
exports.ABORT = ABORT;
|
|
2580
|
+
exports.IF_EXISTS = IF_EXISTS;
|
|
2581
|
+
exports.TransactionFlags = TransactionFlags;
|
|
2582
|
+
exports.allDbs = allDbs;
|
|
2583
|
+
exports.asBinary = asBinary;
|
|
2584
|
+
exports["default"] = index;
|
|
2585
|
+
exports.getLastVersion = getLastVersion;
|
|
2586
|
+
exports.levelup = levelup;
|
|
2587
|
+
exports.nativeAddon = nativeAddon;
|
|
2588
|
+
exports.noop = noop;
|
|
2589
|
+
exports.open = open;
|
|
2590
|
+
exports.openAsClass = openAsClass;
|
|
2591
|
+
//# sourceMappingURL=index.cjs.map
|