@harperfast/harper 5.0.0-alpha.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CODE_OF_CONDUCT.md +83 -0
- package/LICENSE +201 -0
- package/README.md +54 -0
- package/SECURITY.md +18 -0
- package/SUPPORT.md +26 -0
- package/dist/bin/BinObjects.d.ts +9 -0
- package/dist/bin/BinObjects.js +15 -0
- package/dist/bin/BinObjects.js.map +1 -0
- package/dist/bin/cliOperations.d.ts +12 -0
- package/dist/bin/cliOperations.js +156 -0
- package/dist/bin/cliOperations.js.map +1 -0
- package/dist/bin/copyDb.d.ts +2 -0
- package/dist/bin/copyDb.js +298 -0
- package/dist/bin/copyDb.js.map +1 -0
- package/dist/bin/harper.d.ts +2 -0
- package/dist/bin/harper.js +148 -0
- package/dist/bin/harper.js.map +1 -0
- package/dist/bin/install.d.ts +2 -0
- package/dist/bin/install.js +16 -0
- package/dist/bin/install.js.map +1 -0
- package/dist/bin/lite.d.ts +1 -0
- package/dist/bin/lite.js +6 -0
- package/dist/bin/lite.js.map +1 -0
- package/dist/bin/restart.d.ts +13 -0
- package/dist/bin/restart.js +194 -0
- package/dist/bin/restart.js.map +1 -0
- package/dist/bin/run.d.ts +21 -0
- package/dist/bin/run.js +365 -0
- package/dist/bin/run.js.map +1 -0
- package/dist/bin/status.d.ts +2 -0
- package/dist/bin/status.js +56 -0
- package/dist/bin/status.js.map +1 -0
- package/dist/bin/stop.d.ts +2 -0
- package/dist/bin/stop.js +17 -0
- package/dist/bin/stop.js.map +1 -0
- package/dist/bin/upgrade.d.ts +7 -0
- package/dist/bin/upgrade.js +111 -0
- package/dist/bin/upgrade.js.map +1 -0
- package/dist/components/Application.d.ts +110 -0
- package/dist/components/Application.js +509 -0
- package/dist/components/Application.js.map +1 -0
- package/dist/components/ApplicationScope.d.ts +28 -0
- package/dist/components/ApplicationScope.js +81 -0
- package/dist/components/ApplicationScope.js.map +1 -0
- package/dist/components/Component.d.ts +21 -0
- package/dist/components/Component.js +43 -0
- package/dist/components/Component.js.map +1 -0
- package/dist/components/ComponentV1.d.ts +69 -0
- package/dist/components/ComponentV1.js +263 -0
- package/dist/components/ComponentV1.js.map +1 -0
- package/dist/components/DEFAULT_CONFIG.d.ts +18 -0
- package/dist/components/DEFAULT_CONFIG.js +22 -0
- package/dist/components/DEFAULT_CONFIG.js.map +1 -0
- package/dist/components/EntryHandler.d.ts +61 -0
- package/dist/components/EntryHandler.js +148 -0
- package/dist/components/EntryHandler.js.map +1 -0
- package/dist/components/OptionsWatcher.d.ts +75 -0
- package/dist/components/OptionsWatcher.js +281 -0
- package/dist/components/OptionsWatcher.js.map +1 -0
- package/dist/components/PluginModule.d.ts +5 -0
- package/dist/components/PluginModule.js +3 -0
- package/dist/components/PluginModule.js.map +1 -0
- package/dist/components/Scope.d.ts +49 -0
- package/dist/components/Scope.js +262 -0
- package/dist/components/Scope.js.map +1 -0
- package/dist/components/componentLoader.js +498 -0
- package/dist/components/componentLoader.js.map +1 -0
- package/dist/components/deriveCommonPatternBase.d.ts +1 -0
- package/dist/components/deriveCommonPatternBase.js +34 -0
- package/dist/components/deriveCommonPatternBase.js.map +1 -0
- package/dist/components/deriveGlobOptions.d.ts +13 -0
- package/dist/components/deriveGlobOptions.js +30 -0
- package/dist/components/deriveGlobOptions.js.map +1 -0
- package/dist/components/deriveURLPath.d.ts +3 -0
- package/dist/components/deriveURLPath.js +55 -0
- package/dist/components/deriveURLPath.js.map +1 -0
- package/dist/components/operations.d.ts +97 -0
- package/dist/components/operations.js +556 -0
- package/dist/components/operations.js.map +1 -0
- package/dist/components/operationsValidation.d.ts +44 -0
- package/dist/components/operationsValidation.js +221 -0
- package/dist/components/operationsValidation.js.map +1 -0
- package/dist/components/packageComponent.d.ts +8 -0
- package/dist/components/packageComponent.js +42 -0
- package/dist/components/packageComponent.js.map +1 -0
- package/dist/components/requestRestart.d.ts +3 -0
- package/dist/components/requestRestart.js +27 -0
- package/dist/components/requestRestart.js.map +1 -0
- package/dist/components/resolveBaseURLPath.d.ts +15 -0
- package/dist/components/resolveBaseURLPath.js +38 -0
- package/dist/components/resolveBaseURLPath.js.map +1 -0
- package/dist/components/status/ComponentStatus.d.ts +61 -0
- package/dist/components/status/ComponentStatus.js +102 -0
- package/dist/components/status/ComponentStatus.js.map +1 -0
- package/dist/components/status/ComponentStatusRegistry.d.ts +89 -0
- package/dist/components/status/ComponentStatusRegistry.js +195 -0
- package/dist/components/status/ComponentStatusRegistry.js.map +1 -0
- package/dist/components/status/api.d.ts +104 -0
- package/dist/components/status/api.js +137 -0
- package/dist/components/status/api.js.map +1 -0
- package/dist/components/status/crossThread.d.ts +62 -0
- package/dist/components/status/crossThread.js +343 -0
- package/dist/components/status/crossThread.js.map +1 -0
- package/dist/components/status/errors.d.ts +68 -0
- package/dist/components/status/errors.js +123 -0
- package/dist/components/status/errors.js.map +1 -0
- package/dist/components/status/index.d.ts +35 -0
- package/dist/components/status/index.js +75 -0
- package/dist/components/status/index.js.map +1 -0
- package/dist/components/status/internal.d.ts +40 -0
- package/dist/components/status/internal.js +76 -0
- package/dist/components/status/internal.js.map +1 -0
- package/dist/components/status/registry.d.ts +10 -0
- package/dist/components/status/registry.js +14 -0
- package/dist/components/status/registry.js.map +1 -0
- package/dist/components/status/types.d.ts +94 -0
- package/dist/components/status/types.js +20 -0
- package/dist/components/status/types.js.map +1 -0
- package/dist/config/RootConfigWatcher.d.ts +10 -0
- package/dist/config/RootConfigWatcher.js +59 -0
- package/dist/config/RootConfigWatcher.js.map +1 -0
- package/dist/config/configHelpers.d.ts +6 -0
- package/dist/config/configHelpers.js +47 -0
- package/dist/config/configHelpers.js.map +1 -0
- package/dist/config/configUtils.d.ts +85 -0
- package/dist/config/configUtils.js +801 -0
- package/dist/config/configUtils.js.map +1 -0
- package/dist/config/harperConfigEnvVars.d.ts +46 -0
- package/dist/config/harperConfigEnvVars.js +527 -0
- package/dist/config/harperConfigEnvVars.js.map +1 -0
- package/dist/dataLayer/CreateAttributeObject.d.ts +19 -0
- package/dist/dataLayer/CreateAttributeObject.js +23 -0
- package/dist/dataLayer/CreateAttributeObject.js.map +1 -0
- package/dist/dataLayer/CreateTableObject.d.ts +7 -0
- package/dist/dataLayer/CreateTableObject.js +10 -0
- package/dist/dataLayer/CreateTableObject.js.map +1 -0
- package/dist/dataLayer/DataLayerObjects.d.ts +22 -0
- package/dist/dataLayer/DataLayerObjects.js +33 -0
- package/dist/dataLayer/DataLayerObjects.js.map +1 -0
- package/dist/dataLayer/DeleteBeforeObject.d.ts +18 -0
- package/dist/dataLayer/DeleteBeforeObject.js +21 -0
- package/dist/dataLayer/DeleteBeforeObject.js.map +1 -0
- package/dist/dataLayer/DeleteObject.d.ts +19 -0
- package/dist/dataLayer/DeleteObject.js +23 -0
- package/dist/dataLayer/DeleteObject.js.map +1 -0
- package/dist/dataLayer/DropAttributeObject.d.ts +7 -0
- package/dist/dataLayer/DropAttributeObject.js +10 -0
- package/dist/dataLayer/DropAttributeObject.js.map +1 -0
- package/dist/dataLayer/GetBackupObject.d.ts +16 -0
- package/dist/dataLayer/GetBackupObject.js +20 -0
- package/dist/dataLayer/GetBackupObject.js.map +1 -0
- package/dist/dataLayer/InsertObject.d.ts +20 -0
- package/dist/dataLayer/InsertObject.js +24 -0
- package/dist/dataLayer/InsertObject.js.map +1 -0
- package/dist/dataLayer/ReadAuditLogObject.d.ts +18 -0
- package/dist/dataLayer/ReadAuditLogObject.js +22 -0
- package/dist/dataLayer/ReadAuditLogObject.js.map +1 -0
- package/dist/dataLayer/SQLSearch.d.ts +171 -0
- package/dist/dataLayer/SQLSearch.js +1168 -0
- package/dist/dataLayer/SQLSearch.js.map +1 -0
- package/dist/dataLayer/SearchByConditionsObject.d.ts +85 -0
- package/dist/dataLayer/SearchByConditionsObject.js +57 -0
- package/dist/dataLayer/SearchByConditionsObject.js.map +1 -0
- package/dist/dataLayer/SearchByHashObject.d.ts +17 -0
- package/dist/dataLayer/SearchByHashObject.js +20 -0
- package/dist/dataLayer/SearchByHashObject.js.map +1 -0
- package/dist/dataLayer/SearchObject.d.ts +30 -0
- package/dist/dataLayer/SearchObject.js +33 -0
- package/dist/dataLayer/SearchObject.js.map +1 -0
- package/dist/dataLayer/SqlSearchObject.d.ts +10 -0
- package/dist/dataLayer/SqlSearchObject.js +13 -0
- package/dist/dataLayer/SqlSearchObject.js.map +1 -0
- package/dist/dataLayer/UpdateObject.d.ts +18 -0
- package/dist/dataLayer/UpdateObject.js +22 -0
- package/dist/dataLayer/UpdateObject.js.map +1 -0
- package/dist/dataLayer/UpsertObject.d.ts +18 -0
- package/dist/dataLayer/UpsertObject.js +22 -0
- package/dist/dataLayer/UpsertObject.js.map +1 -0
- package/dist/dataLayer/bulkLoad.d.ts +28 -0
- package/dist/dataLayer/bulkLoad.js +624 -0
- package/dist/dataLayer/bulkLoad.js.map +1 -0
- package/dist/dataLayer/dataObjects/BulkLoadObjects.d.ts +17 -0
- package/dist/dataLayer/dataObjects/BulkLoadObjects.js +25 -0
- package/dist/dataLayer/dataObjects/BulkLoadObjects.js.map +1 -0
- package/dist/dataLayer/dataObjects/UpsertObject.d.ts +18 -0
- package/dist/dataLayer/dataObjects/UpsertObject.js +22 -0
- package/dist/dataLayer/dataObjects/UpsertObject.js.map +1 -0
- package/dist/dataLayer/delete.d.ts +22 -0
- package/dist/dataLayer/delete.js +111 -0
- package/dist/dataLayer/delete.js.map +1 -0
- package/dist/dataLayer/export.d.ts +15 -0
- package/dist/dataLayer/export.js +302 -0
- package/dist/dataLayer/export.js.map +1 -0
- package/dist/dataLayer/getBackup.d.ts +8 -0
- package/dist/dataLayer/getBackup.js +28 -0
- package/dist/dataLayer/getBackup.js.map +1 -0
- package/dist/dataLayer/harperBridge/BridgeMethods.d.ts +24 -0
- package/dist/dataLayer/harperBridge/BridgeMethods.js +62 -0
- package/dist/dataLayer/harperBridge/BridgeMethods.js.map +1 -0
- package/dist/dataLayer/harperBridge/ResourceBridge.d.ts +104 -0
- package/dist/dataLayer/harperBridge/ResourceBridge.js +630 -0
- package/dist/dataLayer/harperBridge/ResourceBridge.js.map +1 -0
- package/dist/dataLayer/harperBridge/bridgeUtility/insertUpdateReturnObj.d.ts +14 -0
- package/dist/dataLayer/harperBridge/bridgeUtility/insertUpdateReturnObj.js +24 -0
- package/dist/dataLayer/harperBridge/bridgeUtility/insertUpdateReturnObj.js.map +1 -0
- package/dist/dataLayer/harperBridge/bridgeUtility/insertUpdateValidate.d.ts +11 -0
- package/dist/dataLayer/harperBridge/bridgeUtility/insertUpdateValidate.js +68 -0
- package/dist/dataLayer/harperBridge/bridgeUtility/insertUpdateValidate.js.map +1 -0
- package/dist/dataLayer/harperBridge/harperBridge.d.ts +2 -0
- package/dist/dataLayer/harperBridge/harperBridge.js +18 -0
- package/dist/dataLayer/harperBridge/harperBridge.js.map +1 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/LMDBBridge.d.ts +47 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/LMDBBridge.js +99 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/LMDBBridge.js.map +1 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/DeleteAuditLogsBeforeResults.d.ts +15 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/DeleteAuditLogsBeforeResults.js +18 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/DeleteAuditLogsBeforeResults.js.map +1 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbCreateAttribute.d.ts +12 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbCreateAttribute.js +74 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbCreateAttribute.js.map +1 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbCreateRecords.d.ts +13 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbCreateRecords.js +54 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbCreateRecords.js.map +1 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbCreateSchema.d.ts +6 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbCreateSchema.js +23 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbCreateSchema.js.map +1 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbCreateTable.d.ts +8 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbCreateTable.js +62 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbCreateTable.js.map +1 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbDeleteAuditLogsBefore.d.ts +8 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbDeleteAuditLogsBefore.js +82 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbDeleteAuditLogsBefore.js.map +1 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbDeleteRecords.d.ts +11 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbDeleteRecords.js +76 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbDeleteRecords.js.map +1 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbDropAttribute.d.ts +9 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbDropAttribute.js +83 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbDropAttribute.js.map +1 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbDropSchema.d.ts +6 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbDropSchema.js +73 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbDropSchema.js.map +1 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbDropTable.d.ts +6 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbDropTable.js +110 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbDropTable.js.map +1 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbFlush.d.ts +14 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbFlush.js +33 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbFlush.js.map +1 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbGetBackup.d.ts +7 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbGetBackup.js +110 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbGetBackup.js.map +1 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbGetDataByHash.d.ts +6 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbGetDataByHash.js +21 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbGetDataByHash.js.map +1 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbGetDataByValue.d.ts +14 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbGetDataByValue.js +25 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbGetDataByValue.js.map +1 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbReadAuditLog.d.ts +7 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbReadAuditLog.js +180 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbReadAuditLog.js.map +1 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbSearchByConditions.d.ts +8 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbSearchByConditions.js +134 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbSearchByConditions.js.map +1 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbSearchByHash.d.ts +6 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbSearchByHash.js +14 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbSearchByHash.js.map +1 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbSearchByValue.d.ts +15 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbSearchByValue.js +26 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbSearchByValue.js.map +1 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbTransaction.d.ts +8 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbTransaction.js +17 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbTransaction.js.map +1 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbUpdateRecords.d.ts +12 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbUpdateRecords.js +52 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbUpdateRecords.js.map +1 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbUpsertRecords.d.ts +15 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbUpsertRecords.js +56 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbMethods/lmdbUpsertRecords.js.map +1 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/LMDBCreateAttributeObject.d.ts +16 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/LMDBCreateAttributeObject.js +20 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/LMDBCreateAttributeObject.js.map +1 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/LMDBDeleteTransactionObject.d.ts +16 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/LMDBDeleteTransactionObject.js +22 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/LMDBDeleteTransactionObject.js.map +1 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/LMDBInsertTransactionObject.d.ts +16 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/LMDBInsertTransactionObject.js +21 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/LMDBInsertTransactionObject.js.map +1 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/LMDBTransactionObject.d.ts +19 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/LMDBTransactionObject.js +22 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/LMDBTransactionObject.js.map +1 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/LMDBUpdateTransactionObject.d.ts +18 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/LMDBUpdateTransactionObject.js +23 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/LMDBUpdateTransactionObject.js.map +1 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/LMDBUpsertTransactionObject.d.ts +18 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/LMDBUpsertTransactionObject.js +23 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/LMDBUpsertTransactionObject.js.map +1 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/TableSizeObject.d.ts +21 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/TableSizeObject.js +24 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/TableSizeObject.js.map +1 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/initializeHashSearch.d.ts +7 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/initializeHashSearch.js +19 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/initializeHashSearch.js.map +1 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/initializePaths.d.ts +22 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/initializePaths.js +137 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/initializePaths.js.map +1 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/lmdbCheckForNewAttributes.d.ts +9 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/lmdbCheckForNewAttributes.js +73 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/lmdbCheckForNewAttributes.js.map +1 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/lmdbCreateTransactionsAuditEnvironment.d.ts +8 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/lmdbCreateTransactionsAuditEnvironment.js +38 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/lmdbCreateTransactionsAuditEnvironment.js.map +1 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/lmdbGetTableSize.d.ts +8 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/lmdbGetTableSize.js +29 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/lmdbGetTableSize.js.map +1 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/lmdbProcessRows.d.ts +17 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/lmdbProcessRows.js +63 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/lmdbProcessRows.js.map +1 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/lmdbSearch.d.ts +106 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/lmdbSearch.js +251 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/lmdbSearch.js.map +1 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/lmdbWriteTransaction.d.ts +8 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/lmdbWriteTransaction.js +66 -0
- package/dist/dataLayer/harperBridge/lmdbBridge/lmdbUtility/lmdbWriteTransaction.js.map +1 -0
- package/dist/dataLayer/hdbInfoController.d.ts +28 -0
- package/dist/dataLayer/hdbInfoController.js +203 -0
- package/dist/dataLayer/hdbInfoController.js.map +1 -0
- package/dist/dataLayer/insert.d.ts +44 -0
- package/dist/dataLayer/insert.js +192 -0
- package/dist/dataLayer/insert.js.map +1 -0
- package/dist/dataLayer/readAuditLog.d.ts +8 -0
- package/dist/dataLayer/readAuditLog.js +37 -0
- package/dist/dataLayer/readAuditLog.js.map +1 -0
- package/dist/dataLayer/schema.d.ts +24 -0
- package/dist/dataLayer/schema.js +225 -0
- package/dist/dataLayer/schema.js.map +1 -0
- package/dist/dataLayer/schemaDescribe.d.ts +26 -0
- package/dist/dataLayer/schemaDescribe.js +265 -0
- package/dist/dataLayer/schemaDescribe.js.map +1 -0
- package/dist/dataLayer/search.d.ts +4 -0
- package/dist/dataLayer/search.js +56 -0
- package/dist/dataLayer/search.js.map +1 -0
- package/dist/dataLayer/transaction.d.ts +8 -0
- package/dist/dataLayer/transaction.js +16 -0
- package/dist/dataLayer/transaction.js.map +1 -0
- package/dist/dataLayer/update.d.ts +15 -0
- package/dist/dataLayer/update.js +107 -0
- package/dist/dataLayer/update.js.map +1 -0
- package/dist/globals.d.ts +7 -0
- package/dist/globals.js +12 -0
- package/dist/globals.js.map +1 -0
- package/dist/index.d.ts +7 -0
- package/dist/index.js +40 -0
- package/dist/index.js.map +1 -0
- package/dist/json/systemSchema.json +373 -0
- package/dist/launchServiceScripts/utility/checkNodeVersion.d.ts +4 -0
- package/dist/launchServiceScripts/utility/checkNodeVersion.js +13 -0
- package/dist/launchServiceScripts/utility/checkNodeVersion.js.map +1 -0
- package/dist/resources/DatabaseTransaction.d.ts +96 -0
- package/dist/resources/DatabaseTransaction.js +354 -0
- package/dist/resources/DatabaseTransaction.js.map +1 -0
- package/dist/resources/ErrorResource.d.ts +26 -0
- package/dist/resources/ErrorResource.js +60 -0
- package/dist/resources/ErrorResource.js.map +1 -0
- package/dist/resources/IterableEventQueue.d.ts +19 -0
- package/dist/resources/IterableEventQueue.js +103 -0
- package/dist/resources/IterableEventQueue.js.map +1 -0
- package/dist/resources/LMDBTransaction.d.ts +43 -0
- package/dist/resources/LMDBTransaction.js +371 -0
- package/dist/resources/LMDBTransaction.js.map +1 -0
- package/dist/resources/RecordEncoder.d.ts +65 -0
- package/dist/resources/RecordEncoder.js +700 -0
- package/dist/resources/RecordEncoder.js.map +1 -0
- package/dist/resources/RequestTarget.d.ts +57 -0
- package/dist/resources/RequestTarget.js +91 -0
- package/dist/resources/RequestTarget.js.map +1 -0
- package/dist/resources/Resource.d.ts +152 -0
- package/dist/resources/Resource.js +727 -0
- package/dist/resources/Resource.js.map +1 -0
- package/dist/resources/ResourceInterface.d.ts +162 -0
- package/dist/resources/ResourceInterface.js +3 -0
- package/dist/resources/ResourceInterface.js.map +1 -0
- package/dist/resources/ResourceInterfaceV2.d.ts +21 -0
- package/dist/resources/ResourceInterfaceV2.js +3 -0
- package/dist/resources/ResourceInterfaceV2.js.map +1 -0
- package/dist/resources/ResourceV2.d.ts +30 -0
- package/dist/resources/ResourceV2.js +27 -0
- package/dist/resources/ResourceV2.js.map +1 -0
- package/dist/resources/Resources.d.ts +36 -0
- package/dist/resources/Resources.js +155 -0
- package/dist/resources/Resources.js.map +1 -0
- package/dist/resources/RocksIndexStore.d.ts +24 -0
- package/dist/resources/RocksIndexStore.js +54 -0
- package/dist/resources/RocksIndexStore.js.map +1 -0
- package/dist/resources/RocksTransactionLogStore.d.ts +60 -0
- package/dist/resources/RocksTransactionLogStore.js +344 -0
- package/dist/resources/RocksTransactionLogStore.js.map +1 -0
- package/dist/resources/Table.d.ts +854 -0
- package/dist/resources/Table.js +4350 -0
- package/dist/resources/Table.js.map +1 -0
- package/dist/resources/analytics/hostnames.d.ts +16 -0
- package/dist/resources/analytics/hostnames.js +72 -0
- package/dist/resources/analytics/hostnames.js.map +1 -0
- package/dist/resources/analytics/metadata.d.ts +9 -0
- package/dist/resources/analytics/metadata.js +12 -0
- package/dist/resources/analytics/metadata.js.map +1 -0
- package/dist/resources/analytics/profile.d.ts +2 -0
- package/dist/resources/analytics/profile.js +144 -0
- package/dist/resources/analytics/profile.js.map +1 -0
- package/dist/resources/analytics/read.d.ts +41 -0
- package/dist/resources/analytics/read.js +189 -0
- package/dist/resources/analytics/read.js.map +1 -0
- package/dist/resources/analytics/write.d.ts +131 -0
- package/dist/resources/analytics/write.js +822 -0
- package/dist/resources/analytics/write.js.map +1 -0
- package/dist/resources/auditStore.d.ts +81 -0
- package/dist/resources/auditStore.js +572 -0
- package/dist/resources/auditStore.js.map +1 -0
- package/dist/resources/blob.d.ts +137 -0
- package/dist/resources/blob.js +1273 -0
- package/dist/resources/blob.js.map +1 -0
- package/dist/resources/crdt.d.ts +19 -0
- package/dist/resources/crdt.js +146 -0
- package/dist/resources/crdt.js.map +1 -0
- package/dist/resources/dataLoader.d.ts +98 -0
- package/dist/resources/dataLoader.js +461 -0
- package/dist/resources/dataLoader.js.map +1 -0
- package/dist/resources/databases.d.ts +131 -0
- package/dist/resources/databases.js +1220 -0
- package/dist/resources/databases.js.map +1 -0
- package/dist/resources/graphql.d.ts +19 -0
- package/dist/resources/graphql.js +223 -0
- package/dist/resources/graphql.js.map +1 -0
- package/dist/resources/indexes/HierarchicalNavigableSmallWorld.d.ts +87 -0
- package/dist/resources/indexes/HierarchicalNavigableSmallWorld.js +598 -0
- package/dist/resources/indexes/HierarchicalNavigableSmallWorld.js.map +1 -0
- package/dist/resources/indexes/customIndexes.d.ts +4 -0
- package/dist/resources/indexes/customIndexes.js +10 -0
- package/dist/resources/indexes/customIndexes.js.map +1 -0
- package/dist/resources/indexes/vector.d.ts +2 -0
- package/dist/resources/indexes/vector.js +40 -0
- package/dist/resources/indexes/vector.js.map +1 -0
- package/dist/resources/jsResource.d.ts +24 -0
- package/dist/resources/jsResource.js +82 -0
- package/dist/resources/jsResource.js.map +1 -0
- package/dist/resources/loadEnv.d.ts +5 -0
- package/dist/resources/loadEnv.js +28 -0
- package/dist/resources/loadEnv.js.map +1 -0
- package/dist/resources/login.d.ts +3 -0
- package/dist/resources/login.js +22 -0
- package/dist/resources/login.js.map +1 -0
- package/dist/resources/openApi.d.ts +27 -0
- package/dist/resources/openApi.js +327 -0
- package/dist/resources/openApi.js.map +1 -0
- package/dist/resources/registrationDeprecated.d.ts +4 -0
- package/dist/resources/registrationDeprecated.js +11 -0
- package/dist/resources/registrationDeprecated.js.map +1 -0
- package/dist/resources/replayLogs.d.ts +2 -0
- package/dist/resources/replayLogs.js +170 -0
- package/dist/resources/replayLogs.js.map +1 -0
- package/dist/resources/roles.d.ts +11 -0
- package/dist/resources/roles.js +102 -0
- package/dist/resources/roles.js.map +1 -0
- package/dist/resources/search.d.ts +39 -0
- package/dist/resources/search.js +1333 -0
- package/dist/resources/search.js.map +1 -0
- package/dist/resources/tracked.d.ts +49 -0
- package/dist/resources/tracked.js +665 -0
- package/dist/resources/tracked.js.map +1 -0
- package/dist/resources/transaction.d.ts +9 -0
- package/dist/resources/transaction.js +89 -0
- package/dist/resources/transaction.js.map +1 -0
- package/dist/resources/transactionBroadcast.d.ts +38 -0
- package/dist/resources/transactionBroadcast.js +263 -0
- package/dist/resources/transactionBroadcast.js.map +1 -0
- package/dist/security/auth.d.ts +9 -0
- package/dist/security/auth.js +408 -0
- package/dist/security/auth.js.map +1 -0
- package/dist/security/certificateVerification/certificateVerificationSource.d.ts +18 -0
- package/dist/security/certificateVerification/certificateVerificationSource.js +78 -0
- package/dist/security/certificateVerification/certificateVerificationSource.js.map +1 -0
- package/dist/security/certificateVerification/configValidation.d.ts +14 -0
- package/dist/security/certificateVerification/configValidation.js +101 -0
- package/dist/security/certificateVerification/configValidation.js.map +1 -0
- package/dist/security/certificateVerification/crlVerification.d.ts +29 -0
- package/dist/security/certificateVerification/crlVerification.js +564 -0
- package/dist/security/certificateVerification/crlVerification.js.map +1 -0
- package/dist/security/certificateVerification/index.d.ts +31 -0
- package/dist/security/certificateVerification/index.js +111 -0
- package/dist/security/certificateVerification/index.js.map +1 -0
- package/dist/security/certificateVerification/ocspVerification.d.ts +23 -0
- package/dist/security/certificateVerification/ocspVerification.js +117 -0
- package/dist/security/certificateVerification/ocspVerification.js.map +1 -0
- package/dist/security/certificateVerification/pkijs-ed25519-patch.d.ts +14 -0
- package/dist/security/certificateVerification/pkijs-ed25519-patch.js +183 -0
- package/dist/security/certificateVerification/pkijs-ed25519-patch.js.map +1 -0
- package/dist/security/certificateVerification/types.d.ts +105 -0
- package/dist/security/certificateVerification/types.js +6 -0
- package/dist/security/certificateVerification/types.js.map +1 -0
- package/dist/security/certificateVerification/verificationConfig.d.ts +29 -0
- package/dist/security/certificateVerification/verificationConfig.js +121 -0
- package/dist/security/certificateVerification/verificationConfig.js.map +1 -0
- package/dist/security/certificateVerification/verificationUtils.d.ts +79 -0
- package/dist/security/certificateVerification/verificationUtils.js +441 -0
- package/dist/security/certificateVerification/verificationUtils.js.map +1 -0
- package/dist/security/cryptoHash.d.ts +2 -0
- package/dist/security/cryptoHash.js +35 -0
- package/dist/security/cryptoHash.js.map +1 -0
- package/dist/security/data_objects/PermissionAttributeResponseObject.d.ts +11 -0
- package/dist/security/data_objects/PermissionAttributeResponseObject.js +14 -0
- package/dist/security/data_objects/PermissionAttributeResponseObject.js.map +1 -0
- package/dist/security/data_objects/PermissionResponseObject.d.ts +57 -0
- package/dist/security/data_objects/PermissionResponseObject.js +105 -0
- package/dist/security/data_objects/PermissionResponseObject.js.map +1 -0
- package/dist/security/data_objects/PermissionTableResponseObject.d.ts +16 -0
- package/dist/security/data_objects/PermissionTableResponseObject.js +19 -0
- package/dist/security/data_objects/PermissionTableResponseObject.js.map +1 -0
- package/dist/security/fastifyAuth.d.ts +2 -0
- package/dist/security/fastifyAuth.js +135 -0
- package/dist/security/fastifyAuth.js.map +1 -0
- package/dist/security/impersonation.d.ts +11 -0
- package/dist/security/impersonation.js +139 -0
- package/dist/security/impersonation.js.map +1 -0
- package/dist/security/jsLoader.d.ts +9 -0
- package/dist/security/jsLoader.js +522 -0
- package/dist/security/jsLoader.js.map +1 -0
- package/dist/security/keys.d.ts +119 -0
- package/dist/security/keys.js +866 -0
- package/dist/security/keys.js.map +1 -0
- package/dist/security/permissionsTranslator.d.ts +9 -0
- package/dist/security/permissionsTranslator.js +269 -0
- package/dist/security/permissionsTranslator.js.map +1 -0
- package/dist/security/role.d.ts +5 -0
- package/dist/security/role.js +160 -0
- package/dist/security/role.js.map +1 -0
- package/dist/security/tokenAuthentication.d.ts +38 -0
- package/dist/security/tokenAuthentication.js +205 -0
- package/dist/security/tokenAuthentication.js.map +1 -0
- package/dist/security/user.d.ts +77 -0
- package/dist/security/user.js +349 -0
- package/dist/security/user.js.map +1 -0
- package/dist/server/DurableSubscriptionsSession.d.ts +74 -0
- package/dist/server/DurableSubscriptionsSession.js +511 -0
- package/dist/server/DurableSubscriptionsSession.js.map +1 -0
- package/dist/server/REST.d.ts +16 -0
- package/dist/server/REST.js +423 -0
- package/dist/server/REST.js.map +1 -0
- package/dist/server/Server.d.ts +62 -0
- package/dist/server/Server.js +27 -0
- package/dist/server/Server.js.map +1 -0
- package/dist/server/fastifyRoutes/helpers/getCORSOptions.d.ts +11 -0
- package/dist/server/fastifyRoutes/helpers/getCORSOptions.js +32 -0
- package/dist/server/fastifyRoutes/helpers/getCORSOptions.js.map +1 -0
- package/dist/server/fastifyRoutes/helpers/getHeaderTimeoutConfig.d.ts +6 -0
- package/dist/server/fastifyRoutes/helpers/getHeaderTimeoutConfig.js +13 -0
- package/dist/server/fastifyRoutes/helpers/getHeaderTimeoutConfig.js.map +1 -0
- package/dist/server/fastifyRoutes/helpers/getServerOptions.d.ts +12 -0
- package/dist/server/fastifyRoutes/helpers/getServerOptions.js +30 -0
- package/dist/server/fastifyRoutes/helpers/getServerOptions.js.map +1 -0
- package/dist/server/fastifyRoutes/plugins/hdbCore.d.ts +2 -0
- package/dist/server/fastifyRoutes/plugins/hdbCore.js +31 -0
- package/dist/server/fastifyRoutes/plugins/hdbCore.js.map +1 -0
- package/dist/server/fastifyRoutes.d.ts +25 -0
- package/dist/server/fastifyRoutes.js +235 -0
- package/dist/server/fastifyRoutes.js.map +1 -0
- package/dist/server/graphqlQuerying.d.ts +1 -0
- package/dist/server/graphqlQuerying.js +630 -0
- package/dist/server/graphqlQuerying.js.map +1 -0
- package/dist/server/http.d.ts +15 -0
- package/dist/server/http.js +650 -0
- package/dist/server/http.js.map +1 -0
- package/dist/server/itc/serverHandlers.d.ts +10 -0
- package/dist/server/itc/serverHandlers.js +153 -0
- package/dist/server/itc/serverHandlers.js.map +1 -0
- package/dist/server/itc/utility/ITCEventObject.d.ts +6 -0
- package/dist/server/itc/utility/ITCEventObject.js +9 -0
- package/dist/server/itc/utility/ITCEventObject.js.map +1 -0
- package/dist/server/jobs/JobObject.d.ts +15 -0
- package/dist/server/jobs/JobObject.js +22 -0
- package/dist/server/jobs/JobObject.js.map +1 -0
- package/dist/server/jobs/jobProcess.d.ts +1 -0
- package/dist/server/jobs/jobProcess.js +66 -0
- package/dist/server/jobs/jobProcess.js.map +1 -0
- package/dist/server/jobs/jobRunner.d.ts +11 -0
- package/dist/server/jobs/jobRunner.js +160 -0
- package/dist/server/jobs/jobRunner.js.map +1 -0
- package/dist/server/jobs/jobs.d.ts +20 -0
- package/dist/server/jobs/jobs.js +267 -0
- package/dist/server/jobs/jobs.js.map +1 -0
- package/dist/server/loadRootComponents.d.ts +5 -0
- package/dist/server/loadRootComponents.js +45 -0
- package/dist/server/loadRootComponents.js.map +1 -0
- package/dist/server/mqtt.d.ts +9 -0
- package/dist/server/mqtt.js +466 -0
- package/dist/server/mqtt.js.map +1 -0
- package/dist/server/nodeName.d.ts +5 -0
- package/dist/server/nodeName.js +84 -0
- package/dist/server/nodeName.js.map +1 -0
- package/dist/server/operationsServer.d.ts +48 -0
- package/dist/server/operationsServer.js +265 -0
- package/dist/server/operationsServer.js.map +1 -0
- package/dist/server/serverHelpers/Headers.d.ts +20 -0
- package/dist/server/serverHelpers/Headers.js +134 -0
- package/dist/server/serverHelpers/Headers.js.map +1 -0
- package/dist/server/serverHelpers/JSONStream.d.ts +14 -0
- package/dist/server/serverHelpers/JSONStream.js +322 -0
- package/dist/server/serverHelpers/JSONStream.js.map +1 -0
- package/dist/server/serverHelpers/OperationFunctionObject.d.ts +9 -0
- package/dist/server/serverHelpers/OperationFunctionObject.js +17 -0
- package/dist/server/serverHelpers/OperationFunctionObject.js.map +1 -0
- package/dist/server/serverHelpers/Request.d.ts +69 -0
- package/dist/server/serverHelpers/Request.js +141 -0
- package/dist/server/serverHelpers/Request.js.map +1 -0
- package/dist/server/serverHelpers/contentTypes.d.ts +57 -0
- package/dist/server/serverHelpers/contentTypes.js +639 -0
- package/dist/server/serverHelpers/contentTypes.js.map +1 -0
- package/dist/server/serverHelpers/requestTimePlugin.d.ts +2 -0
- package/dist/server/serverHelpers/requestTimePlugin.js +56 -0
- package/dist/server/serverHelpers/requestTimePlugin.js.map +1 -0
- package/dist/server/serverHelpers/serverHandlers.d.ts +6 -0
- package/dist/server/serverHelpers/serverHandlers.js +130 -0
- package/dist/server/serverHelpers/serverHandlers.js.map +1 -0
- package/dist/server/serverHelpers/serverUtilities.d.ts +29 -0
- package/dist/server/serverHelpers/serverUtilities.js +356 -0
- package/dist/server/serverHelpers/serverUtilities.js.map +1 -0
- package/dist/server/serverRegistry.d.ts +3 -0
- package/dist/server/serverRegistry.js +11 -0
- package/dist/server/serverRegistry.js.map +1 -0
- package/dist/server/static.d.ts +16 -0
- package/dist/server/static.js +164 -0
- package/dist/server/static.js.map +1 -0
- package/dist/server/status/definitions.d.ts +27 -0
- package/dist/server/status/definitions.js +22 -0
- package/dist/server/status/definitions.js.map +1 -0
- package/dist/server/status/index.d.ts +26 -0
- package/dist/server/status/index.js +89 -0
- package/dist/server/status/index.js.map +1 -0
- package/dist/server/storageReclamation.d.ts +18 -0
- package/dist/server/storageReclamation.js +96 -0
- package/dist/server/storageReclamation.js.map +1 -0
- package/dist/server/threads/itc.d.ts +53 -0
- package/dist/server/threads/itc.js +81 -0
- package/dist/server/threads/itc.js.map +1 -0
- package/dist/server/threads/manageThreads.d.ts +30 -0
- package/dist/server/threads/manageThreads.js +579 -0
- package/dist/server/threads/manageThreads.js.map +1 -0
- package/dist/server/threads/socketRouter.d.ts +6 -0
- package/dist/server/threads/socketRouter.js +395 -0
- package/dist/server/threads/socketRouter.js.map +1 -0
- package/dist/server/threads/threadServer.d.ts +5 -0
- package/dist/server/threads/threadServer.js +288 -0
- package/dist/server/threads/threadServer.js.map +1 -0
- package/dist/server/throttle.d.ts +7 -0
- package/dist/server/throttle.js +71 -0
- package/dist/server/throttle.js.map +1 -0
- package/dist/sqlTranslator/SelectValidator.d.ts +79 -0
- package/dist/sqlTranslator/SelectValidator.js +274 -0
- package/dist/sqlTranslator/SelectValidator.js.map +1 -0
- package/dist/sqlTranslator/alasqlFunctionImporter.d.ts +2 -0
- package/dist/sqlTranslator/alasqlFunctionImporter.js +55 -0
- package/dist/sqlTranslator/alasqlFunctionImporter.js.map +1 -0
- package/dist/sqlTranslator/deleteTranslator.d.ts +2 -0
- package/dist/sqlTranslator/deleteTranslator.js +56 -0
- package/dist/sqlTranslator/deleteTranslator.js.map +1 -0
- package/dist/sqlTranslator/index.d.ts +16 -0
- package/dist/sqlTranslator/index.js +215 -0
- package/dist/sqlTranslator/index.js.map +1 -0
- package/dist/sqlTranslator/sql_statement_bucket.d.ts +46 -0
- package/dist/sqlTranslator/sql_statement_bucket.js +430 -0
- package/dist/sqlTranslator/sql_statement_bucket.js.map +1 -0
- package/dist/upgrade/UpgradeObjects.d.ts +5 -0
- package/dist/upgrade/UpgradeObjects.js +12 -0
- package/dist/upgrade/UpgradeObjects.js.map +1 -0
- package/dist/upgrade/directives/directivesController.d.ts +30 -0
- package/dist/upgrade/directives/directivesController.js +76 -0
- package/dist/upgrade/directives/directivesController.js.map +1 -0
- package/dist/upgrade/directivesManager.d.ts +7 -0
- package/dist/upgrade/directivesManager.js +125 -0
- package/dist/upgrade/directivesManager.js.map +1 -0
- package/dist/upgrade/upgradePrompt.d.ts +13 -0
- package/dist/upgrade/upgradePrompt.js +102 -0
- package/dist/upgrade/upgradePrompt.js.map +1 -0
- package/dist/upgrade/upgradeUtilities.d.ts +10 -0
- package/dist/upgrade/upgradeUtilities.js +26 -0
- package/dist/upgrade/upgradeUtilities.js.map +1 -0
- package/dist/utility/AWS/AWSConnector.d.ts +2 -0
- package/dist/utility/AWS/AWSConnector.js +26 -0
- package/dist/utility/AWS/AWSConnector.js.map +1 -0
- package/dist/utility/OperationFunctionCaller.d.ts +9 -0
- package/dist/utility/OperationFunctionCaller.js +58 -0
- package/dist/utility/OperationFunctionCaller.js.map +1 -0
- package/dist/utility/assignCmdEnvVariables.d.ts +10 -0
- package/dist/utility/assignCmdEnvVariables.js +55 -0
- package/dist/utility/assignCmdEnvVariables.js.map +1 -0
- package/dist/utility/common_utils.d.ts +264 -0
- package/dist/utility/common_utils.js +806 -0
- package/dist/utility/common_utils.js.map +1 -0
- package/dist/utility/environment/environmentManager.d.ts +41 -0
- package/dist/utility/environment/environmentManager.js +179 -0
- package/dist/utility/environment/environmentManager.js.map +1 -0
- package/dist/utility/environment/systemInformation.d.ts +67 -0
- package/dist/utility/environment/systemInformation.js +326 -0
- package/dist/utility/environment/systemInformation.js.map +1 -0
- package/dist/utility/errors/commonErrors.d.ts +171 -0
- package/dist/utility/errors/commonErrors.js +230 -0
- package/dist/utility/errors/commonErrors.js.map +1 -0
- package/dist/utility/errors/hdbError.d.ts +76 -0
- package/dist/utility/errors/hdbError.js +128 -0
- package/dist/utility/errors/hdbError.js.map +1 -0
- package/dist/utility/functions/date/dateFunctions.d.ts +11 -0
- package/dist/utility/functions/date/dateFunctions.js +64 -0
- package/dist/utility/functions/date/dateFunctions.js.map +1 -0
- package/dist/utility/functions/geo.d.ts +74 -0
- package/dist/utility/functions/geo.js +311 -0
- package/dist/utility/functions/geo.js.map +1 -0
- package/dist/utility/functions/sql/alaSQLExtension.d.ts +13 -0
- package/dist/utility/functions/sql/alaSQLExtension.js +96 -0
- package/dist/utility/functions/sql/alaSQLExtension.js.map +1 -0
- package/dist/utility/globalSchema.d.ts +151 -0
- package/dist/utility/globalSchema.js +34 -0
- package/dist/utility/globalSchema.js.map +1 -0
- package/dist/utility/hdbTerms.d.ts +737 -0
- package/dist/utility/hdbTerms.js +756 -0
- package/dist/utility/hdbTerms.js.map +1 -0
- package/dist/utility/install/checkJWTTokensExist.d.ts +5 -0
- package/dist/utility/install/checkJWTTokensExist.js +53 -0
- package/dist/utility/install/checkJWTTokensExist.js.map +1 -0
- package/dist/utility/install/installer.d.ts +17 -0
- package/dist/utility/install/installer.js +569 -0
- package/dist/utility/install/installer.js.map +1 -0
- package/dist/utility/installation.d.ts +12 -0
- package/dist/utility/installation.js +64 -0
- package/dist/utility/installation.js.map +1 -0
- package/dist/utility/lmdb/DBIDefinition.d.ts +16 -0
- package/dist/utility/lmdb/DBIDefinition.js +19 -0
- package/dist/utility/lmdb/DBIDefinition.js.map +1 -0
- package/dist/utility/lmdb/DeleteRecordsResponseObject.d.ts +21 -0
- package/dist/utility/lmdb/DeleteRecordsResponseObject.js +24 -0
- package/dist/utility/lmdb/DeleteRecordsResponseObject.js.map +1 -0
- package/dist/utility/lmdb/InsertRecordsResponseObject.d.ts +18 -0
- package/dist/utility/lmdb/InsertRecordsResponseObject.js +21 -0
- package/dist/utility/lmdb/InsertRecordsResponseObject.js.map +1 -0
- package/dist/utility/lmdb/OpenDBIObject.d.ts +23 -0
- package/dist/utility/lmdb/OpenDBIObject.js +29 -0
- package/dist/utility/lmdb/OpenDBIObject.js.map +1 -0
- package/dist/utility/lmdb/OpenEnvironmentObject.d.ts +22 -0
- package/dist/utility/lmdb/OpenEnvironmentObject.js +40 -0
- package/dist/utility/lmdb/OpenEnvironmentObject.js.map +1 -0
- package/dist/utility/lmdb/UpdateRecordsResponseObject.d.ts +21 -0
- package/dist/utility/lmdb/UpdateRecordsResponseObject.js +24 -0
- package/dist/utility/lmdb/UpdateRecordsResponseObject.js.map +1 -0
- package/dist/utility/lmdb/UpsertRecordsResponseObject.d.ts +18 -0
- package/dist/utility/lmdb/UpsertRecordsResponseObject.js +21 -0
- package/dist/utility/lmdb/UpsertRecordsResponseObject.js.map +1 -0
- package/dist/utility/lmdb/cleanLMDBMap.d.ts +6 -0
- package/dist/utility/lmdb/cleanLMDBMap.js +63 -0
- package/dist/utility/lmdb/cleanLMDBMap.js.map +1 -0
- package/dist/utility/lmdb/commonUtility.d.ts +28 -0
- package/dist/utility/lmdb/commonUtility.js +120 -0
- package/dist/utility/lmdb/commonUtility.js.map +1 -0
- package/dist/utility/lmdb/deleteUtility.d.ts +10 -0
- package/dist/utility/lmdb/deleteUtility.js +115 -0
- package/dist/utility/lmdb/deleteUtility.js.map +1 -0
- package/dist/utility/lmdb/environmentUtility.d.ts +81 -0
- package/dist/utility/lmdb/environmentUtility.js +432 -0
- package/dist/utility/lmdb/environmentUtility.js.map +1 -0
- package/dist/utility/lmdb/searchCursorFunctions.d.ts +93 -0
- package/dist/utility/lmdb/searchCursorFunctions.js +174 -0
- package/dist/utility/lmdb/searchCursorFunctions.js.map +1 -0
- package/dist/utility/lmdb/searchUtility.d.ts +204 -0
- package/dist/utility/lmdb/searchUtility.js +724 -0
- package/dist/utility/lmdb/searchUtility.js.map +1 -0
- package/dist/utility/lmdb/terms.d.ts +34 -0
- package/dist/utility/lmdb/terms.js +52 -0
- package/dist/utility/lmdb/terms.js.map +1 -0
- package/dist/utility/lmdb/writeUtility.d.ts +32 -0
- package/dist/utility/lmdb/writeUtility.js +360 -0
- package/dist/utility/lmdb/writeUtility.js.map +1 -0
- package/dist/utility/logging/harper_logger.d.ts +141 -0
- package/dist/utility/logging/harper_logger.js +862 -0
- package/dist/utility/logging/harper_logger.js.map +1 -0
- package/dist/utility/logging/logRotator.d.ts +19 -0
- package/dist/utility/logging/logRotator.js +146 -0
- package/dist/utility/logging/logRotator.js.map +1 -0
- package/dist/utility/logging/logger.d.ts +11 -0
- package/dist/utility/logging/logger.js +19 -0
- package/dist/utility/logging/logger.js.map +1 -0
- package/dist/utility/logging/readLog.d.ts +8 -0
- package/dist/utility/logging/readLog.js +339 -0
- package/dist/utility/logging/readLog.js.map +1 -0
- package/dist/utility/logging/transactionLog.d.ts +8 -0
- package/dist/utility/logging/transactionLog.js +46 -0
- package/dist/utility/logging/transactionLog.js.map +1 -0
- package/dist/utility/mount_hdb.d.ts +2 -0
- package/dist/utility/mount_hdb.js +51 -0
- package/dist/utility/mount_hdb.js.map +1 -0
- package/dist/utility/npmUtilities.d.ts +6 -0
- package/dist/utility/npmUtilities.js +91 -0
- package/dist/utility/npmUtilities.js.map +1 -0
- package/dist/utility/operationPermissions.d.ts +36 -0
- package/dist/utility/operationPermissions.js +116 -0
- package/dist/utility/operationPermissions.js.map +1 -0
- package/dist/utility/operation_authorization.d.ts +18 -0
- package/dist/utility/operation_authorization.js +667 -0
- package/dist/utility/operation_authorization.js.map +1 -0
- package/dist/utility/packageUtils.d.ts +9 -0
- package/dist/utility/packageUtils.js +52 -0
- package/dist/utility/packageUtils.js.map +1 -0
- package/dist/utility/password.d.ts +20 -0
- package/dist/utility/password.js +119 -0
- package/dist/utility/password.js.map +1 -0
- package/dist/utility/processManagement/processManagement.d.ts +35 -0
- package/dist/utility/processManagement/processManagement.js +188 -0
- package/dist/utility/processManagement/processManagement.js.map +1 -0
- package/dist/utility/processManagement/servicesConfig.d.ts +29 -0
- package/dist/utility/processManagement/servicesConfig.js +52 -0
- package/dist/utility/processManagement/servicesConfig.js.map +1 -0
- package/dist/utility/scripts/restartHdb.d.ts +1 -0
- package/dist/utility/scripts/restartHdb.js +23 -0
- package/dist/utility/scripts/restartHdb.js.map +1 -0
- package/dist/utility/signalling.d.ts +2 -0
- package/dist/utility/signalling.js +35 -0
- package/dist/utility/signalling.js.map +1 -0
- package/dist/utility/terms/certificates.d.ts +46 -0
- package/dist/utility/terms/certificates.js +65 -0
- package/dist/utility/terms/certificates.js.map +1 -0
- package/dist/utility/when.d.ts +3 -0
- package/dist/utility/when.js +18 -0
- package/dist/utility/when.js.map +1 -0
- package/dist/validation/bulkDeleteValidator.d.ts +2 -0
- package/dist/validation/bulkDeleteValidator.js +21 -0
- package/dist/validation/bulkDeleteValidator.js.map +1 -0
- package/dist/validation/check_permissions.d.ts +2 -0
- package/dist/validation/check_permissions.js +20 -0
- package/dist/validation/check_permissions.js.map +1 -0
- package/dist/validation/common_validators.d.ts +19 -0
- package/dist/validation/common_validators.js +76 -0
- package/dist/validation/common_validators.js.map +1 -0
- package/dist/validation/configValidator.d.ts +8 -0
- package/dist/validation/configValidator.js +292 -0
- package/dist/validation/configValidator.js.map +1 -0
- package/dist/validation/deleteValidator.d.ts +2 -0
- package/dist/validation/deleteValidator.js +15 -0
- package/dist/validation/deleteValidator.js.map +1 -0
- package/dist/validation/fileLoadValidator.d.ts +4 -0
- package/dist/validation/fileLoadValidator.js +138 -0
- package/dist/validation/fileLoadValidator.js.map +1 -0
- package/dist/validation/insertValidator.d.ts +2 -0
- package/dist/validation/insertValidator.js +38 -0
- package/dist/validation/insertValidator.js.map +1 -0
- package/dist/validation/installValidator.d.ts +7 -0
- package/dist/validation/installValidator.js +28 -0
- package/dist/validation/installValidator.js.map +1 -0
- package/dist/validation/readLogValidator.d.ts +2 -0
- package/dist/validation/readLogValidator.js +48 -0
- package/dist/validation/readLogValidator.js.map +1 -0
- package/dist/validation/role_validation.d.ts +3 -0
- package/dist/validation/role_validation.js +284 -0
- package/dist/validation/role_validation.js.map +1 -0
- package/dist/validation/schemaMetadataValidator.d.ts +16 -0
- package/dist/validation/schemaMetadataValidator.js +38 -0
- package/dist/validation/schemaMetadataValidator.js.map +1 -0
- package/dist/validation/searchValidator.d.ts +2 -0
- package/dist/validation/searchValidator.js +141 -0
- package/dist/validation/searchValidator.js.map +1 -0
- package/dist/validation/statusValidator.d.ts +19 -0
- package/dist/validation/statusValidator.js +95 -0
- package/dist/validation/statusValidator.js.map +1 -0
- package/dist/validation/transactionLogValidator.d.ts +2 -0
- package/dist/validation/transactionLogValidator.js +28 -0
- package/dist/validation/transactionLogValidator.js.map +1 -0
- package/dist/validation/user_validation.d.ts +3 -0
- package/dist/validation/user_validation.js +52 -0
- package/dist/validation/user_validation.js.map +1 -0
- package/dist/validation/validationWrapper.d.ts +15 -0
- package/dist/validation/validationWrapper.js +95 -0
- package/dist/validation/validationWrapper.js.map +1 -0
- package/package.json +225 -0
- package/static/README.md +13 -0
- package/static/ascii_logo.txt +21 -0
- package/static/defaultConfig.yaml +75 -0
|
@@ -0,0 +1,1273 @@
|
|
|
1
|
+
"use strict";
/**
 * This module provides a Blob class that can be used to store binary data in the database, and can be used to store large binary data in a file
 * on the server. The Blob class is a subclass of the global Blob class, and can be used in the same way.
 * The Blob-backed files begin with an 8-byte header:
 * - The first 2 bytes indicate the type of storage:
 *   - 0: Uncompressed
 *   - 1: Compressed with deflate
 *   - 0xff: Error state (followed by error message). A record can be saved prior to an error in saving a blob, so we must be capable of tracking and even replicating that state
 * - The next 6 bytes are the size of the content
 *   - While the file is being written, 0xffffffffffff is used as a placeholder to indicate that the file is not finished being written (this nicely matches the logic that if the written content size is less than the indicated content size, it is not finished)
 *   - Note that for compressed data, the size is the uncompressed size, and the compressed size in the file
 */
Object.defineProperty(exports, "__esModule", { value: true });
// Named exports; the function bindings are hoisted declarations defined later in this module.
exports.databasePaths = exports.blobsWereEncoded = exports.Blob = void 0;
exports.deleteBlob = deleteBlob;
exports.setDeletionDelay = setDeletionDelay;
exports.saveBlob = saveBlob;
exports.getFileId = getFileId;
exports.isSaving = isSaving;
exports.getFilePathForBlob = getFilePathForBlob;
exports.getRootBlobPathsForDB = getRootBlobPathsForDB;
exports.deleteRootBlobPathsForDB = deleteRootBlobPathsForDB;
exports.encodeBlobsWithFilePath = encodeBlobsWithFilePath;
exports.encodeBlobsAsBuffers = encodeBlobsAsBuffers;
exports.decodeBlobsWithWrites = decodeBlobsWithWrites;
exports.decodeWithBlobCallback = decodeWithBlobCallback;
exports.decodeFromDatabase = decodeFromDatabase;
exports.deleteBlobsInObject = deleteBlobsInObject;
exports.findBlobsInObject = findBlobsInObject;
exports.startPreCommitBlobsForRecord = startPreCommitBlobsForRecord;
exports.cleanupOrphans = cleanupOrphans;
// External and project-local dependencies.
const msgpackr_1 = require("msgpackr");
const promises_1 = require("node:fs/promises");
const node_fs_1 = require("node:fs");
const node_zlib_1 = require("node:zlib");
const node_stream_1 = require("node:stream");
const fs_extra_1 = require("fs-extra");
const environmentManager_js_1 = require("../utility/environment/environmentManager.js");
const hdbTerms_ts_1 = require("../utility/hdbTerms.js");
const path_1 = require("path");
const logger_ts_1 = require("../utility/logging/logger.js");
const contentTypes_ts_1 = require("../server/serverHelpers/contentTypes.js");
const auditStore_ts_1 = require("./auditStore.js");
const node_v8_1 = require("node:v8");
const promises_2 = require("node:timers/promises");
const FILE_STORAGE_THRESHOLD = 8192; // if the file is below this size, we will store it in memory, or within the record itself, otherwise we will store it in a file
// We want to keep the file path private (but accessible to the extension)
const HEADER_SIZE = 8;
// Storage-type values stored in the header's type bytes (see module header doc above).
const UNCOMPRESSED_TYPE = 0;
const DEFLATE_TYPE = 1;
const ERROR_TYPE = 0xff;
// Prebuilt headers: [0, type] followed by the 6-byte UNKNOWN_SIZE placeholder.
const DEFAULT_HEADER = new Uint8Array([0, UNCOMPRESSED_TYPE, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]);
const COMPRESS_HEADER = new Uint8Array([0, DEFLATE_TYPE, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]);
const UNKNOWN_SIZE = 0xffffffffffff; // 6-byte size placeholder meaning "write not finished"
// Private per-Blob storage metadata (file id, cached buffers, byte ranges), keyed weakly
// so metadata is collected with the blob.
const storageInfoForBlob = new WeakMap();
let currentBlobCallback;
exports.Blob = global.Blob || polyfillBlob(); // use the global Blob class if it exists (it doesn't on Node v16)
let encodeForStorageForRecordId = undefined; // only enable encoding of the file path if we are saving to the DB, not for serialization to external clients, and only for one record
let promisedWrites;
let currentStore; // the root store of the database we are currently encoding for
exports.blobsWereEncoded = false; // keep track of whether blobs were encoded with file paths
// the header is 8 bytes
// this is a reusable buffer for reading and writing to the header (without having to create new allocations)
const HEADER = new Uint8Array(8);
const headerView = new DataView(HEADER.buffer);
const FILE_READ_TIMEOUT = 60000;
|
|
68
|
+
// We want FileBackedBlob instances to be an instanceof Blob, but we don't want to actually extend the class and call Blob's constructor, which is quite expensive because it has to set it up as a transferrable.
// The empty function below gets Blob's prototype assigned, so subclassing it makes
// `instanceof Blob` hold without ever executing Blob's constructor.
function InstanceOfBlobWithNoConstructor() { }
InstanceOfBlobWithNoConstructor.prototype = exports.Blob.prototype;
|
|
71
|
+
// @ts-ignore
|
|
72
|
+
/**
|
|
73
|
+
* A blob that is backed by a file, and can be saved to the database as a reference
|
|
74
|
+
* Note that this is used instead of the native Blob class for a few reasons:
|
|
75
|
+
* 1. This has the built-in functionality for reading from the file-based storage
|
|
76
|
+
* 2. This support for streams and asynchronous access to data that may not have a known size ahead of time
|
|
77
|
+
* 3. This also avoids the Blob constructor which is expensive due to the transferred setup
|
|
78
|
+
* Harper still supports saving native Blobs, but when they blobs are retrieved from storage, they always use this class.
|
|
79
|
+
*/
|
|
80
|
+
class FileBackedBlob extends InstanceOfBlobWithNoConstructor {
|
|
81
|
+
type = '';
|
|
82
|
+
size;
|
|
83
|
+
#onError;
|
|
84
|
+
#onSize;
|
|
85
|
+
constructor(options) {
|
|
86
|
+
super();
|
|
87
|
+
if (options?.type)
|
|
88
|
+
this.type = options.type;
|
|
89
|
+
if (options?.size != undefined)
|
|
90
|
+
this.size = options.size;
|
|
91
|
+
if (options?.saveBeforeCommit != undefined)
|
|
92
|
+
this.saveBeforeCommit = options.saveBeforeCommit;
|
|
93
|
+
}
|
|
94
|
+
on(type, callback) {
|
|
95
|
+
if (type === 'error') {
|
|
96
|
+
this.#onError ??= [];
|
|
97
|
+
this.#onError.push(callback);
|
|
98
|
+
}
|
|
99
|
+
else if (type === 'size') {
|
|
100
|
+
this.#onSize ??= [];
|
|
101
|
+
this.#onSize.push(callback);
|
|
102
|
+
}
|
|
103
|
+
else
|
|
104
|
+
throw new Error("Only 'error' and 'size' events are supported");
|
|
105
|
+
}
|
|
106
|
+
    /**
     * JSON-serialization hook.
     * For "text/*" blobs, returns the string content when it is already in memory
     * (cached string, or a content buffer which is then converted and cached).
     * When the content is not yet loaded and the active serializer supports async
     * completion, the byte read is started and its promise is handed to the
     * serializer (presumably so serialization can be retried once the buffer is
     * cached — confirm against contentTypes' asyncSerialization contract); a
     * placeholder description string is returned in the meantime.
     * Non-text blobs return a descriptive object instead of raw bytes.
     */
    toJSON() {
        if (this.type?.startsWith('text')) {
            const storageInfo = storageInfoForBlob.get(this);
            let { start, end, contentBuffer, asString } = storageInfo;
            // Fast path: string form already cached.
            if (asString) {
                return asString;
            }
            // Narrow the buffer to the blob's byte range when one is set.
            if (contentBuffer && (end !== undefined || start !== undefined)) {
                contentBuffer = contentBuffer.subarray(start ?? 0, end ?? storageInfo.contentBuffer.length);
            }
            // if we have a content buffer we can return
            if (contentBuffer) {
                // Cache the string form for subsequent serializations.
                storageInfo.asString = contentBuffer.toString();
                return storageInfo.asString;
            }
            // Content not in memory: start the read and cache the buffer for a later pass.
            if ((0, contentTypes_ts_1.hasAsyncSerialization)())
                (0, contentTypes_ts_1.asyncSerialization)(this.bytes().then((buffer) => (storageInfo.contentBuffer = buffer)));
            return `[blob: ${this.type}, ${this.size} bytes]`;
        }
        return {
            description: 'Blobs that are not of type text/* can not be directly serialized as JSON, use as the body of a response or convert to another type',
        };
    }
|
|
129
|
+
async text() {
|
|
130
|
+
return (await this.bytes()).toString();
|
|
131
|
+
}
|
|
132
|
+
bytes() {
|
|
133
|
+
const storageInfo = storageInfoForBlob.get(this);
|
|
134
|
+
let { start, end, contentBuffer } = storageInfo;
|
|
135
|
+
if (contentBuffer) {
|
|
136
|
+
if (end !== undefined || start !== undefined) {
|
|
137
|
+
contentBuffer = contentBuffer.subarray(start ?? 0, end ?? storageInfo.contentBuffer.length);
|
|
138
|
+
}
|
|
139
|
+
return Promise.resolve(contentBuffer);
|
|
140
|
+
}
|
|
141
|
+
const filePath = getFilePath(storageInfo);
|
|
142
|
+
let writeFinished;
|
|
143
|
+
const readContents = async () => {
|
|
144
|
+
let rawBytes;
|
|
145
|
+
let size = HEADER_SIZE;
|
|
146
|
+
try {
|
|
147
|
+
rawBytes = await (0, promises_1.readFile)(filePath);
|
|
148
|
+
if (rawBytes.length >= HEADER_SIZE) {
|
|
149
|
+
rawBytes.copy(HEADER, 0, 0, HEADER_SIZE);
|
|
150
|
+
const headerValue = headerView.getBigUint64(0);
|
|
151
|
+
if (Number(headerValue >> 48n) === ERROR_TYPE) {
|
|
152
|
+
throw new Error('Error in blob: ' + rawBytes.subarray(HEADER_SIZE));
|
|
153
|
+
}
|
|
154
|
+
size = Number(headerValue & 0xffffffffffffn);
|
|
155
|
+
if (size < end)
|
|
156
|
+
size = end;
|
|
157
|
+
if (size < UNKNOWN_SIZE) {
|
|
158
|
+
this.size = size;
|
|
159
|
+
if (this.#onSize) {
|
|
160
|
+
for (const callback of this.#onSize)
|
|
161
|
+
callback(size);
|
|
162
|
+
}
|
|
163
|
+
}
|
|
164
|
+
}
|
|
165
|
+
}
|
|
166
|
+
catch (error) {
|
|
167
|
+
if (error.code !== 'ENOENT')
|
|
168
|
+
throw error;
|
|
169
|
+
rawBytes = Buffer.alloc(0);
|
|
170
|
+
}
|
|
171
|
+
function checkCompletion(rawBytes) {
|
|
172
|
+
if (size > rawBytes.length) {
|
|
173
|
+
// the file is not finished being written, wait for the write lock to complete
|
|
174
|
+
const store = storageInfo.store;
|
|
175
|
+
const lockKey = storageInfo.fileId + ':blob';
|
|
176
|
+
if (writeFinished) {
|
|
177
|
+
throw new Error(`Incomplete blob for ${filePath}`);
|
|
178
|
+
}
|
|
179
|
+
return new Promise((resolve) => {
|
|
180
|
+
const callback = () => {
|
|
181
|
+
writeFinished = true;
|
|
182
|
+
return resolve(readContents());
|
|
183
|
+
};
|
|
184
|
+
const lockAcquired = store.tryLock(lockKey, callback);
|
|
185
|
+
if (lockAcquired) {
|
|
186
|
+
writeFinished = true;
|
|
187
|
+
store.unlock(lockKey);
|
|
188
|
+
return resolve(readContents());
|
|
189
|
+
}
|
|
190
|
+
});
|
|
191
|
+
}
|
|
192
|
+
if (end != undefined || start != undefined) {
|
|
193
|
+
rawBytes = rawBytes.subarray(start ?? 0, end ?? rawBytes.length);
|
|
194
|
+
}
|
|
195
|
+
return rawBytes;
|
|
196
|
+
}
|
|
197
|
+
if (rawBytes[1] === DEFLATE_TYPE) {
|
|
198
|
+
return new Promise((resolve, reject) => {
|
|
199
|
+
(0, node_zlib_1.deflate)(rawBytes.subarray(HEADER_SIZE), (error, result) => {
|
|
200
|
+
if (error)
|
|
201
|
+
reject(error);
|
|
202
|
+
else
|
|
203
|
+
resolve(checkCompletion(result));
|
|
204
|
+
});
|
|
205
|
+
});
|
|
206
|
+
}
|
|
207
|
+
return checkCompletion(rawBytes.subarray(HEADER_SIZE));
|
|
208
|
+
};
|
|
209
|
+
return readContents();
|
|
210
|
+
}
|
|
211
|
+
async arrayBuffer() {
|
|
212
|
+
const bytes = await this.bytes();
|
|
213
|
+
const arrayBuffer = new ArrayBuffer(bytes.length);
|
|
214
|
+
const bufferUint8 = new Uint8Array(arrayBuffer);
|
|
215
|
+
bufferUint8.set(bytes);
|
|
216
|
+
return arrayBuffer;
|
|
217
|
+
}
|
|
218
|
+
    /**
     * Returns a ReadableStream over this blob's content. In-memory blobs are
     * served from their buffer in a single chunk; file-backed blobs are read
     * incrementally from disk, cooperating with a possibly concurrent writer
     * via open/read retries, fs.watch, and a write-lock probe
     * (checkIfIsBeingWritten). Honors slice bounds (start/end) recorded in the
     * blob's storage info.
     */
    stream() {
        const storageInfo = storageInfoForBlob.get(this);
        let { contentBuffer, start, end } = storageInfo;
        if (contentBuffer) {
            // in-memory blob: apply any slice bounds, then emit one chunk and close
            if (end != undefined || start != undefined) {
                contentBuffer = contentBuffer.subarray(start ?? 0, end ?? storageInfo.contentBuffer.length);
            }
            return new ReadableStream({
                pull(controller) {
                    controller.enqueue(contentBuffer);
                    controller.close();
                },
            });
        }
        const filePath = getFilePath(storageInfo);
        let fd; // descriptor of the backing file once opened
        let position = 0; // absolute file offset of the next read (includes the header)
        let totalContentRead = 0; // content bytes consumed so far (header excluded)
        let watcher; // fs.watch handle while waiting for the writer to append
        let timer; // watchdog for the watcher
        let isBeingWritten; // tri-state: undefined = not yet checked, then true/false
        let previouslyFinishedWriting = false; // set once we observed the writer's lock released
        const blob = this;
        return new ReadableStream({
            start() {
                let retries = 1000;
                const openFile = (resolve, reject) => {
                    (0, node_fs_1.open)(filePath, 'r', (error, openedFd) => {
                        if (error) {
                            // ENOENT while the writer may still be working: poll until the file appears
                            if (error.code === 'ENOENT' && isBeingWritten !== false) {
                                logger_ts_1.logger.debug?.('File does not exist yet, waiting for it to be created', filePath, retries);
                                // the file doesn't exist, so we need to wait for it to be created
                                if (retries-- > 0)
                                    return setTimeout(() => {
                                        checkIfIsBeingWritten();
                                        openFile(resolve, reject);
                                    }, 20).unref();
                            }
                            reject(error);
                            blob.#onError?.forEach((callback) => callback(error));
                        }
                        else {
                            fd = openedFd;
                            resolve(openedFd);
                        }
                    });
                };
                return new Promise(openFile);
            },
            pull: (controller) => {
                let size = 0; // content size declared by the file header
                let retries = 100;
                return new Promise(function readMore(resolve, reject) {
                    // fail the stream: release fd/watcher/timer and notify error listeners
                    function onError(error) {
                        (0, node_fs_1.close)(fd);
                        clearTimeout(timer);
                        if (watcher)
                            watcher.close();
                        reject(error);
                        blob.#onError?.forEach((callback) => callback(error));
                    }
                    // allocate a buffer for reading. Note that we could do a stat to get the size, but that is a little more complicated, and might be a little extra overhead
                    const buffer = Buffer.allocUnsafe(0x40000);
                    (0, node_fs_1.read)(fd, buffer, 0, buffer.length, position, (error, bytesRead, buffer) => {
                        // TODO: Implement support for decompression
                        totalContentRead += bytesRead;
                        if (error)
                            return onError(error);
                        if (position === 0) {
                            // for the first read, we need to read the header and skip it for the data
                            // but first check to see if we read anything
                            if (bytesRead < HEADER_SIZE) {
                                // didn't read any bytes, have to try again
                                if (retries-- > 0 && isBeingWritten !== false) {
                                    checkIfIsBeingWritten();
                                    logger_ts_1.logger.debug?.('File was empty, waiting for data to be written', filePath, retries);
                                    setTimeout(() => readMore(resolve, reject), 20).unref();
                                }
                                else {
                                    logger_ts_1.logger.debug?.('File was empty, throwing error', filePath, retries);
                                    onError(new Error(`Blob ${storageInfo.fileId} was empty`));
                                }
                                // else throw new Error();
                                return;
                            }
                            buffer.copy(HEADER, 0, 0, HEADER_SIZE);
                            const headerValue = headerView.getBigUint64(0);
                            // top 16 bits of the header are the payload type; ERROR_TYPE means the writer recorded a failure message as the payload
                            if (Number(headerValue >> 48n) === ERROR_TYPE) {
                                return onError(new Error('Error in blob: ' + buffer.subarray(HEADER_SIZE, bytesRead)));
                            }
                            // low 48 bits of the header carry the declared content size
                            size = Number(headerValue & 0xffffffffffffn);
                            if (size < UNKNOWN_SIZE && blob.size !== size) {
                                blob.size = size;
                                if (blob.#onSize) {
                                    for (const callback of blob.#onSize)
                                        callback(size);
                                }
                            }
                            buffer = buffer.subarray(HEADER_SIZE, bytesRead);
                            totalContentRead -= HEADER_SIZE;
                        }
                        else if (bytesRead === 0) {
                            // hit EOF mid-stream: re-read the header, the writer may have patched in the real size by now
                            const buffer = Buffer.allocUnsafe(8);
                            return (0, node_fs_1.read)(fd, buffer, 0, HEADER_SIZE, 0, (error) => {
                                if (error)
                                    return onError(error);
                                HEADER.set(buffer);
                                size = Number(headerView.getBigUint64(0) & 0xffffffffffffn);
                                if (size > totalContentRead) {
                                    if (checkIfIsBeingWritten()) {
                                        // the file is not finished being written, watch the file for changes to resume reading
                                        // set up a watcher to be notified of file changes
                                        watcher = (0, node_fs_1.watch)(filePath, { persistent: false }, () => {
                                            watcher.close();
                                            watcher = null;
                                            clearTimeout(timer); // clear it
                                            readMore(resolve, reject);
                                        });
                                        // immediately try to read again in case there was a change before we started watching,
                                        // readSync should be fine here, the data should be in memory
                                        if ((0, node_fs_1.readSync)(fd, buffer, 0, buffer.length, position) > 0) {
                                            // never mind with the watcher, let's read more data
                                            watcher.close();
                                            watcher = null;
                                            readMore(resolve, reject);
                                        }
                                        else {
                                            // set a timer for the watcher too
                                            timer = setTimeout(() => {
                                                onError(new Error(`File read timed out reading from ${filePath}`));
                                            }, FILE_READ_TIMEOUT).unref();
                                        }
                                    }
                                    else {
                                        if (previouslyFinishedWriting) {
                                            // we verified that the blob was finished writing before the last read, we can confidently say it is incomplete
                                            onError(new Error('Blob is incomplete'));
                                        }
                                        else {
                                            previouslyFinishedWriting = true;
                                            readMore(resolve, reject); // try again (possibly for the last time) now that we know the status of the file writing
                                        }
                                        // do NOT close the controller, or the error won't propagate to the stream
                                    }
                                    return;
                                }
                                // declared size fully delivered: finish the stream
                                (0, node_fs_1.close)(fd);
                                controller.close();
                                resolve();
                            });
                        }
                        else {
                            buffer = buffer.subarray(0, bytesRead);
                        }
                        if (start !== undefined || end !== undefined) {
                            // apply slice bounds; start/end are content offsets, position/totalContentRead track progress
                            if (start && totalContentRead < start) {
                                // we are before the start of the slice, so we need to read more
                                position += bytesRead;
                                return readMore(resolve, reject);
                            }
                            if (end && totalContentRead >= end) {
                                // we are past or reached the end of the slice, so we have reached the end, indicate
                                if (totalContentRead > end)
                                    buffer = buffer.subarray(0, end - position);
                                totalContentRead = size = end;
                            }
                            if (start && start > position) {
                                // we need to skip ahead to the start of the slice
                                buffer = buffer.subarray(start - position);
                            }
                        }
                        position += bytesRead;
                        try {
                            controller.enqueue(buffer);
                        }
                        catch (error) {
                            // we need to catch the error here, because if the controller is closed, it will throw an error
                            // but we still want to resolve the promise
                            logger_ts_1.logger.debug?.('Error enqueuing chunk', error);
                            return resolve();
                        }
                        if (totalContentRead === size) {
                            // delivered exactly the declared size: close out the stream
                            (0, node_fs_1.close)(fd);
                            controller.close();
                        }
                        resolve();
                    });
                });
            },
            cancel() {
                // consumer aborted: release all resources
                (0, node_fs_1.close)(fd);
                clearTimeout(timer);
                if (watcher)
                    watcher.close();
            },
        });
        // Returns true while another thread/process holds the write lock for this
        // blob's file. Caches the answer; registers a callback so the cached value
        // flips to false when the writer releases the lock.
        function checkIfIsBeingWritten() {
            if (isBeingWritten === undefined) {
                const store = storageInfo.store;
                const lockKey = storageInfo.fileId + ':blob';
                isBeingWritten = !store.tryLock(lockKey, () => {
                    isBeingWritten = false;
                });
                if (!isBeingWritten)
                    store.unlock(lockKey);
            }
            return isBeingWritten;
        }
    }
|
|
427
|
+
slice(start, end, type) {
|
|
428
|
+
const sourceStorageInfo = storageInfoForBlob.get(this);
|
|
429
|
+
const slicedBlob = new FileBackedBlob(type && { type });
|
|
430
|
+
if (sourceStorageInfo?.fileId) {
|
|
431
|
+
const slicedStorageInfo = {
|
|
432
|
+
...sourceStorageInfo,
|
|
433
|
+
start,
|
|
434
|
+
end,
|
|
435
|
+
};
|
|
436
|
+
storageInfoForBlob.set(slicedBlob, slicedStorageInfo);
|
|
437
|
+
if (this.size != undefined)
|
|
438
|
+
slicedBlob.size = (end == undefined ? this.size : Math.min(end, this.size)) - (start ?? 0);
|
|
439
|
+
}
|
|
440
|
+
else if (sourceStorageInfo?.contentBuffer && !sourceStorageInfo.storageBuffer) {
|
|
441
|
+
const slicedStorageInfo = {
|
|
442
|
+
...sourceStorageInfo,
|
|
443
|
+
contentBuffer: sourceStorageInfo.contentBuffer.subarray(start, end),
|
|
444
|
+
};
|
|
445
|
+
storageInfoForBlob.set(slicedBlob, slicedStorageInfo);
|
|
446
|
+
slicedBlob.size = (end ?? this.size) - start;
|
|
447
|
+
}
|
|
448
|
+
else {
|
|
449
|
+
// TODO: Implement this
|
|
450
|
+
throw new Error('Can not slice a streaming blob that is not backed by a file');
|
|
451
|
+
}
|
|
452
|
+
return slicedBlob;
|
|
453
|
+
}
|
|
454
|
+
get written() {
|
|
455
|
+
return storageInfoForBlob.get(this)?.saving ?? Promise.resolve();
|
|
456
|
+
}
|
|
457
|
+
}
|
|
458
|
+
// Delay (ms) before a deleted blob's backing file is actually unlinked; adjustable via setDeletionDelay
let deletionDelay = 500;
|
|
459
|
+
/**
 * Schedule deletion of the backing file for a blob; a no-op for blobs that
 * have no backing file. The unlink is deferred by `deletionDelay` to give
 * in-flight readers some time to finish.
 * @param blob
 */
function deleteBlob(blob) {
    // do we even need to check for completion here?
    const filePath = getFilePathForBlob(blob);
    if (!filePath)
        return;
    setTimeout(() => {
        // TODO: we need to determine when any read transaction are done with the file, and then delete it, this is a hack to just give it some time for that
        (0, node_fs_1.unlink)(filePath, (error) => {
            if (error)
                logger_ts_1.logger.debug?.('Error trying to remove blob file', error);
        });
    }, deletionDelay);
}
|
|
477
|
+
/**
 * Override the delay (in ms) before a deleted blob's file is unlinked.
 * @param delay new delay in milliseconds
 */
function setDeletionDelay(delay) {
    deletionDelay = delay;
}
|
|
480
|
+
/**
 * Create a blob from a readable stream or a buffer by creating a file in the blob storage path with a new unique internal id, that
 * can be saved/stored.
 * Accepted sources: Uint8Array/Buffer, Node Readable, string, or any
 * (async) iterable; anything else throws.
 * @param source
 */
global.createBlob = function (source, options) {
    const blob = new FileBackedBlob(options);
    const storageInfo = {
        storageIndex: 0,
        fileId: null,
        flush: options?.flush,
        compress: options?.compress,
    };
    storageInfoForBlob.set(blob, storageInfo);
    // note: order matters — Uint8Array and Readable are checked before the
    // generic iterable branch, and strings before iterables (strings iterate)
    if (source instanceof Uint8Array) {
        blob.size = source.length;
        storageInfo.contentBuffer = source;
    }
    else if (source instanceof node_stream_1.Readable) {
        storageInfo.source = source;
    }
    else if (typeof source === 'string') {
        storageInfo.contentBuffer = Buffer.from(source);
    }
    else if (source?.[Symbol.asyncIterator] || source?.[Symbol.iterator]) {
        storageInfo.source = node_stream_1.Readable.from(source);
    }
    else {
        throw new Error('Invalid source type');
    }
    return blob;
};
|
|
509
|
+
/**
 * Ensure a blob has storage info bound to the current store and start writing
 * its content to a blob file (unless it is flagged to be stored inline in the
 * record). Idempotent: a blob that already has a fileId is treated as already
 * saving/saved and its info is returned unchanged.
 * @param blob the blob to persist
 * @param deleteOnFailure when true, remove the partially-written file if the write fails
 * @returns the blob's storage info (carries a `saving` promise when a write was started)
 */
function saveBlob(blob, deleteOnFailure = false) {
    let storageInfo = storageInfoForBlob.get(blob);
    if (!storageInfo) {
        // blob was never registered; bind it to the current store
        storageInfo = { storageIndex: 0, fileId: null, store: currentStore };
        storageInfoForBlob.set(blob, storageInfo);
    }
    else {
        if (storageInfo.fileId)
            return storageInfo; // if there is any file id, we are already saving and can return the info
        storageInfo.store = currentStore;
    }
    storageInfo.deleteOnFailure = deleteOnFailure;
    if (blob.saveInRecord) {
        if (!storageInfo.contentBuffer) {
            // TODO: Add support for this: https://github.com/HarperFast/harper/issues/141
            throw new Error('Cannot publish a message with a streamed blob');
        }
        return storageInfo; // nothing more to do if it supposed to be saved in the record
    }
    // assign a file id/path, then write from whichever source the blob has
    generateFilePath(storageInfo);
    if (storageInfo.source)
        writeBlobWithStream(blob, storageInfo.source, storageInfo);
    else if (storageInfo.contentBuffer)
        writeBlobWithBuffer(blob, storageInfo);
    else {
        // for native blobs, we have to read them from the stream
        writeBlobWithStream(blob, node_stream_1.Readable.from(blob.stream()), storageInfo);
    }
    return storageInfo;
}
|
|
539
|
+
/**
 * Create a blob from a readable stream.
 * Pipes `stream` into the blob's backing file, preceded by an 8-byte header
 * (high 16 bits = compression type, low 48 bits = content size). When the size
 * is known up front the real header is written immediately; otherwise a
 * placeholder header is written and patched with the actual size after the
 * stream finishes. The write is guarded by a `<fileId>:blob` lock so readers
 * can detect an in-progress write. Sets `storageInfo.saving` to a promise for
 * the whole write.
 * @param blob blob being persisted (its `size` is updated when discovered)
 * @param stream readable stream of the blob's content
 * @param storageInfo holds filePath/fileId/store/compress/flush for the write
 * @returns the blob
 */
function writeBlobWithStream(blob, stream, storageInfo) {
    const { filePath, fileId, store, compress, flush } = storageInfo;
    storageInfo.saving = new Promise((resolve, reject) => {
        // pipe the stream to the file
        const lockKey = fileId + ':blob';
        if (!store.tryLock(lockKey)) {
            throw new Error(`Unable to get lock for blob file ${fileId}`);
        }
        // autoClose:false — the fd is closed manually in finished() so it can be reused to patch the header
        const writeStream = (0, node_fs_1.createWriteStream)(filePath, { autoClose: false, flags: 'w' });
        let wroteSize = false;
        if (blob.size !== undefined) {
            // if we know the size, we can write the header immediately
            writeStream.write(createHeader(blob.size)); // write the default header
            wroteSize = true;
        }
        let compressedStream;
        if (compress) {
            if (!wroteSize)
                writeStream.write(COMPRESS_HEADER); // write the default header to the file
            compressedStream = (0, node_zlib_1.createDeflate)();
            (0, node_stream_1.pipeline)(stream, compressedStream, writeStream, finished);
        }
        else {
            if (!wroteSize)
                writeStream.write(DEFAULT_HEADER); // write the default header to the file
            (0, node_stream_1.pipeline)(stream, writeStream, finished);
        }
        // build the 8-byte header: low 48 bits = size, high 16 bits = compression type
        function createHeader(size) {
            let headerValue = BigInt(size);
            const header = new Uint8Array(HEADER_SIZE);
            const headerView = new DataView(header.buffer);
            headerValue |= BigInt(compress ? DEFLATE_TYPE : UNCOMPRESSED_TYPE) << 48n;
            headerView.setBigInt64(0, headerValue);
            return header;
        }
        // when the stream is finished, we may need to flush, and then close the handle and resolve the promise
        function finished(error) {
            const fd = writeStream.fd;
            if (error) {
                store.unlock(lockKey);
                if (fd) {
                    (0, node_fs_1.close)(fd);
                    writeStream.fd = null; // do not close the same fd twice, that is very dangerous because it might represent a new fd
                }
                if (storageInfo.deleteOnFailure) {
                    (0, node_fs_1.unlink)(filePath, (error) => {
                        if (error)
                            logger_ts_1.logger.debug?.('Error while deleting aborted blob file', error);
                    });
                }
                else {
                    try {
                        if ((0, node_fs_1.statSync)(filePath).size === 0) {
                            // if there was an error in the stream, nothing may have been written, so we can write the error message instead
                            const errorBuffer = Buffer.from(error.toString());
                            // the 0xff000000000000n addend sets the high type bits so readers see an ERROR_TYPE header
                            (0, node_fs_1.writeFile)(filePath, Buffer.concat([createHeader(BigInt(errorBuffer.length) + 0xff000000000000n), errorBuffer]), (error) => {
                                if (error)
                                    logger_ts_1.logger.debug?.('Error write error message to blob file', error);
                            });
                        }
                    }
                    catch (error) {
                        logger_ts_1.logger.debug?.('Error checking blob file after abort', error);
                    }
                }
                reject(error);
            }
            else {
                if (!wroteSize) {
                    wroteSize = true;
                    // patch the placeholder header with the now-known content size,
                    // then re-enter finished() once that write completes
                    const size = compressedStream ? compressedStream.bytesWritten : writeStream.bytesWritten - HEADER_SIZE;
                    blob.size = size;
                    (0, node_fs_1.write)(fd, createHeader(size), 0, HEADER_SIZE, 0, finished);
                    return; // not finished yet, wait for this write and then we are finished
                }
                store.unlock(lockKey);
                if (flush) {
                    // we just use fdatasync because we really aren't that concerned with flushing file metadata
                    (0, node_fs_1.fdatasync)(fd, (error) => {
                        if (error)
                            reject(error);
                        resolve();
                        (0, node_fs_1.close)(fd);
                        writeStream.fd = null; // do not close the same fd twice, that is very dangerous because it might represent a new fd
                    });
                }
                else {
                    resolve();
                    (0, node_fs_1.close)(fd);
                    writeStream.fd = null; // do not close the same fd twice, that is very dangerous because it might represent a new fd
                }
            }
        }
    });
    return blob;
}
|
|
638
|
+
/** Returns the internal file id backing this blob, if one has been assigned. */
function getFileId(blob) {
    const info = storageInfoForBlob.get(blob);
    return info?.fileId;
}
|
|
641
|
+
/** Returns the pending save promise for this blob, or undefined when no save is in progress. */
function isSaving(blob) {
    const info = storageInfoForBlob.get(blob);
    return info?.saving;
}
|
|
644
|
+
/** Resolve the on-disk path of a blob's backing file; falsy when the blob has no file id. */
function getFilePathForBlob(blob) {
    const info = storageInfoForBlob.get(blob);
    return info?.fileId && getFilePath(info);
}
|
|
648
|
+
// Cache of blob storage root paths per store (store -> string[]); populated lazily by getRootBlobPathsForDB
exports.databasePaths = new Map();
|
|
649
|
+
/**
 * Resolve (and cache) the root blob storage directories for a store's
 * database. Uses the configured STORAGE_BLOBPATHS when present, otherwise a
 * single `<hdb base>/blobs/<databaseName>` directory.
 * @param store store whose databaseName selects the per-database subdirectory
 * @returns array of root paths (empty when the store has no database name)
 */
function getRootBlobPathsForDB(store) {
    if (!store) {
        throw new Error('No store specified, can not determine blob storage path');
    }
    const cached = exports.databasePaths.get(store);
    if (cached)
        return cached;
    if (!store.databaseName) {
        logger_ts_1.logger.warn?.('No database name specified, can not determine blob storage path');
        return [];
    }
    const configuredPaths = (0, environmentManager_js_1.get)(hdbTerms_ts_1.CONFIG_PARAMS.STORAGE_BLOBPATHS);
    const paths = configuredPaths
        ? configuredPaths.map((path) => (0, path_1.join)(path, store.databaseName))
        : [(0, path_1.join)((0, environmentManager_js_1.getHdbBasePath)(), 'blobs', store.databaseName)];
    exports.databasePaths.set(store, paths);
    return paths;
}
|
|
670
|
+
/**
 * Remove every root blob storage directory belonging to a store's database,
 * deleting their contents gradually via rimrafSteadily.
 * @param store
 */
async function deleteRootBlobPathsForDB(store) {
    const paths = getRootBlobPathsForDB(store);
    if (!paths)
        return;
    await Promise.all(paths.map((path) => rimrafSteadily(path)));
}
|
|
676
|
+
/**
 * recursively delete a directory and all of its contents, but do it one at a time, so that we don't run out of memory and hog resources
 * Failures on individual entries are logged and skipped rather than aborting.
 * @param path
 */
async function rimrafSteadily(path) {
    if (!(0, node_fs_1.existsSync)(path))
        return;
    const entries = await (0, promises_1.readdir)(path, { withFileTypes: true });
    for (const entry of entries) {
        const entryPath = (0, path_1.join)(path, entry.name);
        if (entry.isDirectory()) {
            await rimrafSteadily(entryPath);
            continue;
        }
        try {
            await (0, promises_1.unlink)(entryPath);
        }
        catch (error) {
            logger_ts_1.logger.warn?.('Error deleting file', error);
        }
    }
    try {
        await (0, promises_1.rmdir)(path);
    }
    catch (error) {
        logger_ts_1.logger.warn?.('Error deleting directory', error);
    }
}
|
|
703
|
+
/**
 * Map a blob's storage info to its on-disk file path. File ids are hex
 * strings; the last 9 hex characters are split into three 3-character
 * segments forming a dir/dir/file hierarchy, and any characters beyond the
 * last 9 are prepended to the filename.
 * @returns absolute path of the blob's backing file
 */
function getFilePath({ storageIndex, fileId, store }) {
    const blobStoragePaths = getRootBlobPathsForDB(store);
    return (0, path_1.join)(
    // Use a hierarchy of directories to store the file by id, to avoid to many entries in a single directory. This uses 4096 files or directories per parent directory
    blobStoragePaths[storageIndex], fileId.slice(-9, -6) || '0', fileId.slice(-6, -3) || '0', fileId.length <= 9 ? fileId.slice(-3) : fileId.slice(0, -9) + fileId.slice(-3) // after 68 billion entries, we effectively wrap around and start reusing directories again, assuming the most the entries have been deleted
    );
}
|
|
710
|
+
/**
 * Create a blob from a buffer
 * Small buffers (below FILE_STORAGE_THRESHOLD) stay in memory and nothing is
 * written; larger ones are streamed to the blob's backing file.
 * @param blob
 * @param storageInfo storage info whose contentBuffer holds the data
 */
function writeBlobWithBuffer(blob, storageInfo) {
    // we know the size, so we can create the header immediately
    const { contentBuffer } = storageInfo;
    const size = contentBuffer.length;
    if (size < FILE_STORAGE_THRESHOLD) {
        // if the buffer is small enough, just store it in memory
        return;
    }
    blob.size = size;
    return writeBlobWithStream(blob, node_stream_1.Readable.from([contentBuffer]), storageInfo);
}
|
|
725
|
+
/**
 * Create a blob that is backed by a *new* file with a new unique internal id, so it can be filled with data and saved to the database
 * Picks a storage root (round-robin/space weighted when multiple are
 * configured), records storageIndex/fileId/filePath on the storage info, and
 * ensures the directory hierarchy exists.
 * @param storageInfo
 */
function generateFilePath(storageInfo) {
    const blobStoragePaths = getRootBlobPathsForDB(storageInfo.store);
    const id = getNextFileId();
    // get the storage index, which is the index of the blob storage path to use, distributed round-robin based on the id
    storageInfo.storageIndex = blobStoragePaths?.length > 1 ? getNextStorageIndex(blobStoragePaths, id) : 0;
    storageInfo.fileId = id.toString(16); // get the next file id
    const filePath = getFilePath(storageInfo);
    const fileDir = (0, path_1.dirname)(filePath);
    // ensure the directory structure exists
    if (!(0, node_fs_1.existsSync)(fileDir))
        (0, fs_extra_1.ensureDirSync)(fileDir);
    storageInfo.filePath = filePath;
}
|
|
743
|
+
// Per-store shared BigInt64Array counters used to atomically allocate blob file ids across threads
const idIncrementers = new Map();
|
|
744
|
+
/**
 * Atomically allocate the next blob file id for the current store, shared
 * across all threads via a shared-buffer counter. On first use the counter is
 * seeded by walking the 3-level blob directory hierarchy to find the highest
 * id already on disk.
 * @returns the next numeric file id
 */
function getNextFileId() {
    // all threads will use a shared buffer to atomically increment the id
    // first, we create our proposed incrementer buffer that will be used if we are the first thread to get here
    // and initialize it with the starting id
    let idIncrementer = idIncrementers.get(currentStore);
    if (!idIncrementer) {
        // get the last id by checking the highest id in all the blob storage paths
        let highestId = 0;
        const blobStoragePaths = getRootBlobPathsForDB(currentStore);
        for (let path of blobStoragePaths) {
            let id = 0;
            // we need to get the highest id in the directory structure, so we need to iterate through all the directories to find the highest byte sequence
            for (let i = 0; i < 3; i++) {
                id = id * 0x1000; // each hierarchy level encodes 12 bits (3 hex characters)
                let highest = 0;
                if ((0, node_fs_1.existsSync)(path)) {
                    for (const entry of (0, node_fs_1.readdirSync)(path)) {
                        let n = parseInt(entry, 16);
                        if (i === 2 && entry.length > 3) {
                            // the last iteration is filenames, and if they are longer than 3 characters then the last 3 characters of the id, and the preceding characters are the highest value
                            n = parseInt(entry.slice(-3), 16);
                            n += parseInt(entry.slice(0, -3), 16) * 0x1000000000;
                        }
                        if (n > highest) {
                            highest = n;
                        }
                    }
                }
                id += highest;
                // descend into the highest subdirectory for the next level
                path = (0, path_1.join)(path, highest.toString(16));
            }
            highestId = Math.max(highestId, id);
        }
        idIncrementer = new BigInt64Array([BigInt(highestId) + 1n]);
        // now get the selected incrementer buffer, this is the shared buffer was first registered and that all threads will use
        idIncrementer = new BigInt64Array(currentStore.getUserSharedBuffer('blob-file-id', idIncrementer.buffer));
        idIncrementers.set(currentStore, idIncrementer);
    }
    return Number(Atomics.add(idIncrementer, 0, 1n));
}
|
|
784
|
+
// Number of slots in the storage-path selection table; file ids index into it modulo this size
const FREQUENCY_TABLE_SIZE = 128;
|
|
785
|
+
/**
 * Select the next index from the storage paths, where the frequency of selecting each storage path is (mostly) proportional to the available space (which is occasionally updated)
 * The table is initialized to an even round-robin distribution and refreshed
 * at most once a minute from the paths' free space.
 * @param blobStoragePaths
 * @param fileId numeric id used to pick a table slot deterministically
 */
function getNextStorageIndex(blobStoragePaths, fileId) {
    const now = Date.now();
    if (!blobStoragePaths.frequencyTable) {
        blobStoragePaths.lastUpdated = 0;
        // setup default frequency table with even distribution
        blobStoragePaths.frequencyTable = Array.from({ length: FREQUENCY_TABLE_SIZE }, (_, slot) => slot % blobStoragePaths.length);
    }
    if ((blobStoragePaths.lastUpdated ?? 0) + 60000 < now) {
        blobStoragePaths.lastUpdated = now;
        // create a new frequency table based on the available space
        createFrequencyTableForStoragePaths(blobStoragePaths);
    }
    return blobStoragePaths.frequencyTable[fileId % FREQUENCY_TABLE_SIZE];
}
|
|
808
|
+
/**
 * Create a frequency table for the storage paths, based on the available space, that allocates storage paths with more space more often
 * and can be assigned quickly and consistently across threads (all threads will usually incrementally assign ids to the same alternating set of storage paths)
 * Free space is measured with statfs (creating missing paths on ENOENT) and
 * dampened with a 0.8 power so large differences don't dominate. The table is
 * assigned asynchronously; callers keep using the previous table until then.
 * @param blobStoragePaths
 */
async function createFrequencyTableForStoragePaths(blobStoragePaths) {
    if (!promises_1.statfs)
        return; // statfs is not available on all older node versions
    const availableSpaces = await Promise.all(blobStoragePaths.map(async (path) => {
        let stats;
        try {
            stats = await (0, promises_1.statfs)(path);
        }
        catch (error) {
            if (error.code !== 'ENOENT')
                throw error;
            // if the path doesn't exist, go ahead and create it
            (0, fs_extra_1.ensureDirSync)(path);
            // try again after the path is created
            stats = await (0, promises_1.statfs)(path);
        }
        const availableSpace = stats.bavail * stats.bsize;
        return Math.pow(availableSpace, 0.8); // we don't want this to be quite linear, so we use a power function to reduce the impact of large differences in available space
    }));
    const frequencyTable = new Array(FREQUENCY_TABLE_SIZE);
    // each path's "period" is the reciprocal of its weight; paths with more
    // space accumulate period more slowly and therefore get picked more often
    const pathPeriods = availableSpaces.map((space) => 1 / space);
    for (let slot = 0; slot < FREQUENCY_TABLE_SIZE; slot++) {
        let nextScore = Infinity;
        let nextIndex = 0;
        // find the next storage path to use, based on the lowest remaining period for each path
        // (inner index renamed from `i` — it previously shadowed the outer loop variable)
        for (let pathIndex = 0; pathIndex < pathPeriods.length; pathIndex++) {
            if (pathPeriods[pathIndex] < nextScore) {
                nextIndex = pathIndex;
                nextScore = pathPeriods[pathIndex];
            }
        }
        // increment the period that we used, inversely proportional to the available space
        pathPeriods[nextIndex] += 1 / availableSpaces[nextIndex];
        frequencyTable[slot] = nextIndex;
    }
    blobStoragePaths.frequencyTable = frequencyTable;
}
|
|
850
|
+
/**
 * Encode blobs with file paths, so that they can be saved to the database
 * Runs `callback` with the record id and store installed in module-level
 * encoding state; state is always restored afterwards, even on throw.
 * @param callback
 * @param encodingId record id the encoding is performed for
 * @param store store used to resolve blob storage during encoding
 */
function encodeBlobsWithFilePath(callback, encodingId, store) {
    encodeForStorageForRecordId = encodingId;
    currentStore = store;
    // reset; presumably set to true elsewhere when a blob is actually encoded — confirm
    exports.blobsWereEncoded = false;
    try {
        return callback();
    }
    finally {
        // always clear module-level state, even if the callback throws
        encodeForStorageForRecordId = undefined;
        currentStore = undefined;
    }
}
|
|
868
|
+
/**
 * Encode blobs as buffers, so they can be transferred remotely
 * Runs `callback` while collecting any promised blob writes; if writes were
 * started, waits for them and re-runs the callback so the second pass encodes
 * the now-persisted blobs.
 * NOTE(review): `callback` may therefore run twice — it must be safe to repeat.
 * @param callback
 */
function encodeBlobsAsBuffers(callback) {
    promisedWrites = [];
    let result;
    try {
        result = callback();
    }
    catch (error) {
        // if anything throws, we want to make sure we clear the promise aggregator
        promisedWrites = undefined;
        throw error;
    }
    // 0 writes -> undefined, 1 write -> that promise, many -> Promise.all
    const finished = promisedWrites.length < 2 ? promisedWrites[0] : Promise.all(promisedWrites);
    promisedWrites = undefined;
    return finished ? finished.then(() => callback()) : result;
}
|
|
889
|
+
/**
 * Decode blobs, creating local storage to hold the blogs and returning a promise that resolves when all the blobs are written to disk
 * @param callback decode function to run
 * @param store store used for newly created blob storage
 * @param blobCallback invoked for each blob encountered during decoding
 * @returns a promise for all collected blob writes, or undefined when there were none
 */
function decodeBlobsWithWrites(callback, store, blobCallback) {
    try {
        promisedWrites = [];
        currentBlobCallback = blobCallback;
        currentStore = store;
        callback();
    }
    catch (error) {
        // if anything throws, we want to make sure we clear the promise aggregator
        currentBlobCallback = undefined;
        promisedWrites = undefined;
        throw error;
    }
    currentBlobCallback = undefined;
    // NOTE(review): currentStore is left set on the success path (unlike encodeBlobsWithFilePath) — confirm callers reset it
    // 0 writes -> undefined, 1 write -> that promise, many -> Promise.all
    const finished = promisedWrites.length < 2 ? promisedWrites[0] : Promise.all(promisedWrites);
    promisedWrites = undefined;
    return finished;
}
|
|
911
|
+
/**
 * Decode with a callback for when blobs are encountered, allowing for detecting of blobs
 * The blob callback is always cleared afterwards; currentStore intentionally
 * remains set (matching decodeFromDatabase's recursive usage).
 * @param callback
 * @param blobCallback invoked for each blob encountered during decoding
 * @param rootStore store the decode resolves blobs against
 */
function decodeWithBlobCallback(callback, blobCallback, rootStore) {
    currentStore = rootStore;
    try {
        currentBlobCallback = blobCallback;
        return callback();
    }
    finally {
        currentBlobCallback = undefined;
    }
}
|
|
925
|
+
/**
 * Decode with a callback for when blobs are encountered, allowing for detecting of blobs
 * Sets the current store so decoded blob references resolve against the right
 * database, then runs the decode.
 * @param callback
 * @param rootStore store to decode against
 */
function decodeFromDatabase(callback, rootStore) {
    // note that this is actually called recursively (but always the same root store), so we don't clear afterwards
    currentStore = rootStore;
    return callback();
}
|
|
934
|
+
/**
 * Delete blobs in an object, recursively searching for blobs
 * Each blob found has its backing file scheduled for deletion.
 * @param object
 */
function deleteBlobsInObject(object) {
    findBlobsInObject(object, (blob) => deleteBlob(blob));
}
|
|
943
|
+
/**
 * Find all blobs in an object, recursively searching for Blob instances
 * Guards against circular references with a visited set, so cyclic object
 * graphs no longer overflow the stack (each container object/array is visited
 * at most once).
 * @param object
 * @param callback invoked once per Blob instance encountered
 * @param seen internal visited-set for cycle protection (callers omit it)
 */
function findBlobsInObject(object, callback, seen = new WeakSet()) {
    if (object instanceof exports.Blob) {
        // eslint-disable-next-line
        // @ts-ignore
        callback(object);
    }
    else if (Array.isArray(object)) {
        if (seen.has(object))
            return; // already visited — avoid infinite recursion on cycles
        seen.add(object);
        for (const value of object) {
            if (typeof value === 'object' && value)
                findBlobsInObject(value, callback, seen);
        }
    }
    else if (object && typeof object === 'object' && !object[Symbol.iterator]) {
        // try to find plain objects, excluding things like buffers and typed arrays
        if (seen.has(object))
            return; // already visited — avoid infinite recursion on cycles
        seen.add(object);
        for (const key in object) {
            const value = object[key];
            if (typeof value === 'object' && value)
                findBlobsInObject(object[key], callback, seen);
        }
    }
}
|
|
969
|
+
/**
 * Do a shallow/fast search for blobs on the record and prepare to save any
 * that should be persisted before a commit.
 * @param record record whose top-level properties are scanned for blobs
 * @param store store the blobs belong to
 * @param saveInRecord when true, every file-backed blob is marked to be saved
 *   inline in the record (not just those flagged with saveBeforeCommit)
 * @returns a function that starts saving the collected blobs and resolves when
 *   all are saved, or undefined when the record holds no qualifying blobs
 */
function startPreCommitBlobsForRecord(record, store, saveInRecord) {
    const pendingBlobs = [];
    for (const key in record) {
        const candidate = record[key];
        if (!(candidate instanceof FileBackedBlob))
            continue;
        if (!saveInRecord && !candidate.saveBeforeCommit)
            continue;
        currentStore = store;
        if (saveInRecord) {
            candidate.saveInRecord = true;
        }
        pendingBlobs.push(candidate);
    }
    if (pendingBlobs.length === 0)
        return;
    // we do have blobs; saving starts only once the returned function is called
    return () => {
        currentStore = store;
        return Promise.all(pendingBlobs.map((blob) => saveBlob(blob, true).saving ?? Promise.resolve()));
    };
}
|
|
996
|
+
// unpacker used for blob metadata; copyBuffers so the decoded content buffer
// does not alias the (reusable) source buffer
const copyingUnpacker = new msgpackr_1.Packr({ copyBuffers: true, mapsAsObjects: true });
// msgpackr extension (type 11) for encoding/decoding Blobs.
// Two wire forms are used:
//   [options, storageIndex, fileId]  - a reference to a blob stored on disk
//   [options, contentBuffer]        - blob content embedded inline in the record
(0, msgpackr_1.addExtension)({
    Class: exports.Blob,
    type: 11,
    unpack: function (buffer) {
        const blobInfo = copyingUnpacker.unpack(buffer);
        const blob = new FileBackedBlob();
        Object.assign(blob, blobInfo[0]); // copy any properties
        if (typeof blobInfo[1] !== 'object') {
            // this is a reference, not followed by any buffer
            storageInfoForBlob.set(blob, {
                storageIndex: blobInfo[1],
                fileId: blobInfo[2],
                store: currentStore,
            });
            // give the active decode hook a chance to observe/replace the blob
            if (currentBlobCallback)
                return currentBlobCallback(blob) ?? blob;
            // a reference can only be resolved relative to a store
            if (!currentStore) {
                throw new Error('No store specified, cannot load blob from storage');
            }
        }
        else {
            // inline form: keep both the raw extension buffer (for cheap
            // re-encoding) and the decoded content buffer
            storageInfoForBlob.set(blob, {
                storageIndex: 0,
                fileId: null,
                storageBuffer: buffer,
                contentBuffer: blobInfo[1],
            });
            blob.size = blobInfo[1]?.length;
        }
        return blob;
    },
    pack: function (blob) {
        let storageInfo = storageInfoForBlob.get(blob);
        // encodeForStorageForRecordId is set while encoding a record for storage;
        // a blob may only belong to one record
        if (encodeForStorageForRecordId !== undefined) {
            exports.blobsWereEncoded = true;
            if (storageInfo?.recordId !== undefined && storageInfo.recordId !== encodeForStorageForRecordId) {
                throw new Error('Cannot use the same blob in two different records');
            }
        }
        // copy own properties, then type/size explicitly (presumably because they
        // may be accessor properties not captured by the spread - TODO confirm)
        const options = { ...blob };
        if (blob.type)
            options.type = blob.type;
        if (blob.size !== undefined)
            options.size = blob.size;
        if (storageInfo) {
            // already have the raw extension encoding; reuse it verbatim
            if (storageInfo.storageBuffer) {
                return storageInfo.storageBuffer;
            }
            // small content (or explicitly save-in-record) is embedded inline
            if (storageInfo.contentBuffer &&
                (storageInfo.contentBuffer?.length < FILE_STORAGE_THRESHOLD || blob.saveInRecord)) {
                options.size = storageInfo.contentBuffer.length;
                return (0, msgpackr_1.pack)([options, storageInfo.contentBuffer]);
            }
        }
        if (encodeForStorageForRecordId !== undefined) {
            // persisting a record: ensure the blob is saved to file storage and
            // encode a reference to it
            storageInfo = saveBlob(blob);
            if (!storageInfo.fileId) {
                throw new Error('Unable to save blob without file id');
            }
            storageInfo.recordId = encodeForStorageForRecordId;
            return (0, msgpackr_1.pack)([options, storageInfo.storageIndex, storageInfo.fileId]);
        }
        if (storageInfo) {
            if (currentBlobCallback) {
                currentBlobCallback(blob);
                return (0, msgpackr_1.pack)([options, storageInfo.storageIndex, storageInfo.fileId]);
            }
            // if we want to encode as binary (necessary for replication), we need to encode as a buffer, not sure if we should always do that
            // also, for replication, we would presume that this is most likely in OS cache, and sync will be fast. For other situations, a large sync call could be
            // unpleasant
            // we include the headers, as the receiving end will need them, and this differentiates from a reference
            try {
                const buffer = (0, node_fs_1.readFileSync)(getFilePath(storageInfo));
                if (buffer.length >= HEADER_SIZE) {
                    buffer.copy(HEADER, 0, 0, HEADER_SIZE);
                    // low 48 bits of the first header word hold the content size
                    const size = Number(headerView.getBigUint64(0) & 0xffffffffffffn);
                    if (size === buffer.length - HEADER_SIZE) {
                        // the file is there and complete, we can return the encoding
                        return Buffer.concat([(0, msgpackr_1.pack)([options]), buffer]);
                    }
                }
                // file exists but is incomplete (still being written):
                // declare it as being fulfilled
                if (promisedWrites)
                    promisedWrites.push(blob.bytes());
                else {
                    throw new Error('Incomplete blob');
                }
                return Buffer.alloc(0);
            }
            catch (error) {
                // file not written yet: defer via promisedWrites when available,
                // otherwise propagate
                if (error.code === 'ENOENT' && promisedWrites) {
                    promisedWrites.push(blob.bytes());
                    return Buffer.alloc(0);
                }
                else
                    throw error;
            }
        }
        else {
            throw new Error('Blob has no storage info or buffer attached to it');
        }
    },
});
|
|
1100
|
+
// with Blobs, it is easy to forget to await the creation, make sure that the blob is created before continuing:
// registering Promise as an extension that throws on pack turns an accidental
// un-awaited promise in a record into an immediate, descriptive error
(0, msgpackr_1.addExtension)({
    Class: Promise,
    type: 12, // not actually used, but we need to define a type
    pack() {
        throw new Error('Cannot encode a promise');
    },
});
|
|
1108
|
+
/**
 * Polyfill Blob for older Node versions; it has just enough behavior to
 * handle a single Buffer of content (the only way it is constructed here).
 * @returns a minimal Blob-compatible class
 */
function polyfillBlob() {
    return class Blob {
        content;
        constructor(contents) {
            // only the single-buffer form is supported
            this.content = contents[0];
        }
        stream() {
            // capture the content first: inside the underlying-source object
            // literal, `this` refers to that object, not the Blob, so reading
            // `this.content` there (as the original code did) always enqueued
            // undefined
            const content = this.content;
            return new ReadableStream({
                start(controller) {
                    controller.enqueue(content);
                    controller.close();
                },
            });
        }
        text() {
            return Promise.resolve(this.content.toString());
        }
        arrayBuffer() {
            // slice to this Buffer's own view: pooled Buffers share a larger
            // underlying ArrayBuffer, so returning `.buffer` directly would
            // report the wrong byteLength and expose unrelated pooled bytes
            const { buffer, byteOffset, byteLength } = this.content;
            return Promise.resolve(buffer.slice(byteOffset, byteOffset + byteLength));
        }
        get size() {
            return this.content.length;
        }
        slice() {
            throw new Error('Not implemented');
        }
        bytes() {
            return Promise.resolve(this.content);
        }
        get type() {
            return '';
        }
    };
}
|
|
1143
|
+
/**
 * Scans for blobs on the file system and then checks to verify they are referenced
 * from the database (current records and audit log), and if not, deletes them.
 * @param database map of table name -> table; all tables share one root store
 * @param databaseName optional name, used only in log messages
 * @returns the number of orphaned blob files that were deleted (attempted)
 */
async function cleanupOrphans(database, databaseName) {
    let store;
    let auditStore;
    let orphansDeleted = 0;
    // all tables share the same root store; grab it, and an audit store from
    // the first table that has one (auditStore may remain undefined)
    for (const tableName in database) {
        const table = database[tableName];
        store = table.primaryStore.rootStore;
        auditStore = table.auditStore;
        if (auditStore)
            break;
    }
    const pathsToCheck = new Set();
    const rootPaths = getRootBlobPathsForDB(store);
    if (rootPaths) {
        // search all the root paths
        for (const rootPath of rootPaths) {
            await searchPath(rootPath);
        }
    }
    // remove all remaining paths that are not referenced
    await removePathsThatAreNotReferenced();
    // (fixed: the original message template had a stray trailing ')')
    logger_ts_1.logger.warn?.(`Cleaned Orphan Blobs from ${databaseName ?? 'database'}, deleted ${orphansDeleted} blobs`);
    return orphansDeleted;
    // recursively collect every blob file path under `path` into pathsToCheck,
    // flushing early (via a reference scan) if memory pressure gets high
    async function searchPath(path) {
        try {
            if (!(0, node_fs_1.existsSync)(path))
                return;
            for (const entry of await (0, promises_1.readdir)(path, { withFileTypes: true })) {
                const entryPath = (0, path_1.join)(path, entry.name);
                if (entry.isDirectory()) {
                    // keep recursively searching
                    await searchPath(entryPath);
                }
                else {
                    if (pathsToCheck.size % 1_000_000 === 0)
                        logger_ts_1.logger.info?.('Finding all blobs for orphan check, paths accumulated', pathsToCheck.size);
                    pathsToCheck.add(entryPath);
                    if (pathsToCheck.size % 2000 === 0) {
                        // this might be a bit expensive, so only check occasionally
                        const stats = (0, node_v8_1.getHeapStatistics)();
                        // The maximum size of a Set is 16,777,216, so we limit the size of the set and we try to limit memory usage (starting at 80%, but gradually going down as the Set gets bigger)
                        if (stats.used_heap_size > stats.heap_size_limit * (0.8 - pathsToCheck.size / 16_000_000)) {
                            // if our array gets too big and we are running out of space, we can start the db search for references before running out of memory
                            await removePathsThatAreNotReferenced();
                        }
                    }
                }
            }
        }
        catch (error) {
            logger_ts_1.logger.error?.('Error searching path for blobs', path, error);
        }
    }
    // scan every table and the audit log for blob references, pruning each
    // referenced path from pathsToCheck, then delete whatever paths remain
    async function removePathsThatAreNotReferenced() {
        let i = 0;
        const perMS = Math.floor(((0, environmentManager_js_1.get)(hdbTerms_ts_1.CONFIG_PARAMS.STORAGE_BLOBCLEANUPSPEED) ?? 10000) / 1000 + 1);
        // search all the tables for references
        for (const tableName in database) {
            logger_ts_1.logger.warn?.('Checking for references to potential orphaned blobs in table', tableName);
            const table = database[tableName];
            for (const entry of table.primaryStore.getRange({ versions: true, snapshot: false, lazy: true })) {
                try {
                    if (entry.metadataFlags & auditStore_ts_1.HAS_BLOBS && entry.value) {
                        checkObjectForReferences(entry.value);
                    }
                    // slow this down a bit to reduce excessive load, this runs approximately at 10k per second
                    if (i++ % perMS === 0)
                        await (0, promises_2.setTimeout)(1); // one millisecond delay to avoid overloading the system
                    else
                        await (0, promises_2.setImmediate)();
                }
                catch (error) {
                    logger_ts_1.logger.error?.('Error searching table', tableName, ' for references to potential orphaned blobs failed', error);
                }
            }
        }
        // guard added: when no table has an audit store, auditStore is undefined
        // and the original code would throw a TypeError here instead of finishing
        if (auditStore) {
            logger_ts_1.logger.warn?.('Checking for references to potential orphaned blobs in the audit log');
            // search the audit store for references
            for (const auditRecord of auditStore.getRange({ start: 1, snapshot: false, lazy: true })) {
                try {
                    const primaryStore = auditStore.tableStores[auditRecord.tableId];
                    if (!primaryStore)
                        continue;
                    const entry = primaryStore?.getEntry(auditRecord.recordId);
                    // only scan audit values not already covered by the live record scan
                    if (!entry || entry.version !== auditRecord.version || !entry.value) {
                        checkObjectForReferences(auditRecord.getValue(primaryStore));
                    }
                    // slow this down a bit to reduce excessive load, this runs approximately at 10k per second
                    if (i++ % perMS === 0)
                        await (0, promises_2.setTimeout)(1); // one millisecond delay to avoid overloading the system
                    else
                        await (0, promises_2.setImmediate)();
                }
                catch (error) {
                    logger_ts_1.logger.error?.('Error searching audit log for references to potential orphaned blobs failed', error);
                }
            }
        }
        logger_ts_1.logger.warn?.('Deleting', pathsToCheck.size, 'orphaned blobs');
        // note: counts attempted deletions; individual unlink failures are only logged
        orphansDeleted += pathsToCheck.size;
        for (const path of pathsToCheck) {
            try {
                await (0, promises_1.unlink)(path);
            }
            catch (error) {
                logger_ts_1.logger.debug?.('Error deleting file', error);
            }
        }
        logger_ts_1.logger.warn?.('Finished deleting', pathsToCheck.size, 'orphaned blobs');
        pathsToCheck.clear();
    }
    // check each object for any blob references and remove them from the paths to check if found
    function checkObjectForReferences(value) {
        findBlobsInObject(value, (blob) => {
            if (blob instanceof FileBackedBlob) {
                const storageInfo = storageInfoForBlob.get(blob);
                if (storageInfo.fileId != null) {
                    const path = getFilePath(storageInfo);
                    if (pathsToCheck.has(path)) {
                        pathsToCheck.delete(path);
                    }
                }
            }
        });
    }
}
|
|
1273
|
+
//# sourceMappingURL=blob.js.map
|