@joystick.js/db-canary 0.0.0-canary.2209
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.build/getFilesToBuild.js +26 -0
- package/.build/getPlatformSafeFilePath.js +6 -0
- package/.build/getPlatformSafePath.js +6 -0
- package/.build/index.js +88 -0
- package/.build/isWindows.js +3 -0
- package/API_KEY +1 -0
- package/README.md +1821 -0
- package/data/data.mdb +0 -0
- package/data/lock.mdb +0 -0
- package/dist/client/database.js +1 -0
- package/dist/client/index.js +1 -0
- package/dist/server/cluster/index.js +1 -0
- package/dist/server/cluster/master.js +20 -0
- package/dist/server/cluster/worker.js +1 -0
- package/dist/server/index.js +1 -0
- package/dist/server/lib/api_key_manager.js +9 -0
- package/dist/server/lib/auth_manager.js +1 -0
- package/dist/server/lib/auto_index_manager.js +1 -0
- package/dist/server/lib/backup_manager.js +1 -0
- package/dist/server/lib/connection_manager.js +1 -0
- package/dist/server/lib/disk_utils.js +2 -0
- package/dist/server/lib/http_server.js +405 -0
- package/dist/server/lib/index_manager.js +1 -0
- package/dist/server/lib/load_settings.js +1 -0
- package/dist/server/lib/logger.js +1 -0
- package/dist/server/lib/op_types.js +1 -0
- package/dist/server/lib/operation_dispatcher.js +1 -0
- package/dist/server/lib/operations/admin.js +1 -0
- package/dist/server/lib/operations/bulk_write.js +1 -0
- package/dist/server/lib/operations/create_index.js +1 -0
- package/dist/server/lib/operations/delete_one.js +1 -0
- package/dist/server/lib/operations/drop_index.js +1 -0
- package/dist/server/lib/operations/find.js +1 -0
- package/dist/server/lib/operations/find_one.js +1 -0
- package/dist/server/lib/operations/get_indexes.js +1 -0
- package/dist/server/lib/operations/insert_one.js +1 -0
- package/dist/server/lib/operations/update_one.js +1 -0
- package/dist/server/lib/performance_monitor.js +1 -0
- package/dist/server/lib/query_engine.js +1 -0
- package/dist/server/lib/recovery_manager.js +1 -0
- package/dist/server/lib/replication_manager.js +1 -0
- package/dist/server/lib/safe_json_parse.js +1 -0
- package/dist/server/lib/send_response.js +1 -0
- package/dist/server/lib/tcp_protocol.js +1 -0
- package/dist/server/lib/write_forwarder.js +1 -0
- package/dist/server/lib/write_queue.js +1 -0
- package/increment_version.js +3 -0
- package/logs/.013e15b54597d05db4b4b53ecc37b10c92a72927-audit.json +20 -0
- package/logs/.02de550a67ea0f5961faa2dfd458a4d06f59ebd1-audit.json +20 -0
- package/logs/.03494ba24eb3c72214b4068a77d54b8993bee651-audit.json +20 -0
- package/logs/.06309ec60b339be1259a7993dd09c732f8907fbc-audit.json +20 -0
- package/logs/.0663a04dcfa17285661e5e1b8cfa51f41523b210-audit.json +20 -0
- package/logs/.0f06e6c4c9b824622729e13927587479e5060391-audit.json +20 -0
- package/logs/.16ccf58682ecb22b3e3ec63f0da1b7fe9be56528-audit.json +20 -0
- package/logs/.1fa1a5d02f496474b1ab473524c65c984146a9ad-audit.json +20 -0
- package/logs/.2223c0ae3bea6f0d62c62b1d319cc8634856abb7-audit.json +20 -0
- package/logs/.23dc79ffda3e083665e6f5993f59397adcbf4a46-audit.json +20 -0
- package/logs/.28104f49b03906b189eefd1cd462cb46c3c0af22-audit.json +20 -0
- package/logs/.29cdbf13808abe6a0ce70ee2f2efdd680ce3fd8e-audit.json +20 -0
- package/logs/.2a9889afd071f77f41f5170d08703a0afca866b7-audit.json +20 -0
- package/logs/.2acec3d1940a2bbed487528b703ee5948959a599-audit.json +20 -0
- package/logs/.2fb60ff326338c02bfedbcd0e936444e4a216750-audit.json +20 -0
- package/logs/.318fc7a19530d76a345f030f7cad00dda15300e7-audit.json +20 -0
- package/logs/.3cf27043e19085f908cedc7701e6d013463208ee-audit.json +25 -0
- package/logs/.3d90d785415817fc443402843b7c95f8371adc9b-audit.json +20 -0
- package/logs/.4074bca620375f72966fc52dfd439577727671e5-audit.json +20 -0
- package/logs/.40eecf018417ea80a70ea8ec7a3cc9406bc6334b-audit.json +20 -0
- package/logs/.50e974f1ef7c365fca6a1251b2e2c2252914cb5e-audit.json +20 -0
- package/logs/.52cb7d9e4223cf26ba36006ac26b949a97c7923c-audit.json +20 -0
- package/logs/.54befcdb84c15aad980705a31bcc9f555c3577ab-audit.json +20 -0
- package/logs/.57dfb70e22eddb84db2e3c0ceeefac5c0b9baffa-audit.json +20 -0
- package/logs/.5f0b24705a1eaad4eca4968f2d86f91b3f9be683-audit.json +20 -0
- package/logs/.61ba98fdda7db58576b382fee07904e5db1169d6-audit.json +20 -0
- package/logs/.6235017727ef6b199d569a99d6aa8c8e80a1b475-audit.json +20 -0
- package/logs/.63db16193699219489d218a1ddea5dde3750cae4-audit.json +20 -0
- package/logs/.64fb67dfe14149c9eef728d79bf30a54da809c60-audit.json +20 -0
- package/logs/.669137453368987c1f311b5345342527afb54e50-audit.json +20 -0
- package/logs/.7a71f8c89ea28ae266d356aeff6306e876a30fbb-audit.json +20 -0
- package/logs/.7afbaa90fe9dc3a7d682676f9bb79f9a1b1fd9a6-audit.json +20 -0
- package/logs/.7ca29e322cd05327035de850099e7610864f2347-audit.json +20 -0
- package/logs/.83335ab3347e449dae03455a110aaf7f120d4802-audit.json +20 -0
- package/logs/.8c2487b5fd445d2c8e5c483c80b9fa99bbf1ca58-audit.json +20 -0
- package/logs/.8c8b9dc386922c9f3b4c13251af7052aac1d24c0-audit.json +20 -0
- package/logs/.8d6155d94640c4863301ae0fee5e4e7372a21446-audit.json +20 -0
- package/logs/.944a3119a243deea7c8270d5d9e582bb1d0eaa10-audit.json +20 -0
- package/logs/.9816a845c30fb2909f3b26a23eeb3538ebcad5db-audit.json +20 -0
- package/logs/.9dc08784e38b865488177c26d4af5934555e0323-audit.json +20 -0
- package/logs/.9dd27d2e0e454ac0a37600206d1cac5493b0d7ee-audit.json +20 -0
- package/logs/.a3d486feeac7654c59b547de96600e8849a06d4f-audit.json +20 -0
- package/logs/.a5b811f4def22250f86cc18870d7c4573625df22-audit.json +20 -0
- package/logs/.a61648eb5f830e0b6f508ac35e4f8f629d2ad4c7-audit.json +20 -0
- package/logs/.a89016d507045771b4b5a65656944a9c0f1e528b-audit.json +20 -0
- package/logs/.a99bee160a1c590be959af46bacc02724803f691-audit.json +20 -0
- package/logs/.ada7906d6243fd7da802f03d86c4ae5dd9df6236-audit.json +20 -0
- package/logs/.b518339ee942143b6af983af167f5bbb6983b4de-audit.json +20 -0
- package/logs/.b51b124b166d53c9519017856ea610d61d65fabe-audit.json +20 -0
- package/logs/.b7a6aee19f58e55633d5e4a3709041c47dfff975-audit.json +20 -0
- package/logs/.bd7a8a6ba9c55d557a4867ab53f02e3ec2e1553d-audit.json +20 -0
- package/logs/.c1435dafe453b169d6392b25065f3cf4ab6fbb21-audit.json +20 -0
- package/logs/.c17e1ce043109f77dc2f0e2aa290a9d1ed842c03-audit.json +20 -0
- package/logs/.ca62637ce9540e5a38a2fbedb2115febb6ad308a-audit.json +15 -0
- package/logs/.ccee67b9c176967f8977071409a41f5cb5cd6ad4-audit.json +20 -0
- package/logs/.db24043417ea79a6f14cd947476399e53930b48d-audit.json +20 -0
- package/logs/.e0f12acccb57829f5f33712bb2e2607ecd808147-audit.json +20 -0
- package/logs/.e9b6cc33d0bbd2e644c4e2bf44d177f850016557-audit.json +20 -0
- package/logs/.f15291d434808e3bdca7963ccd2e73893be027e6-audit.json +20 -0
- package/logs/.f4bdf9e21ef84f8a3fae3ffb32bbc39275991351-audit.json +15 -0
- package/logs/.fbac3aefac1e81b4230df5aa50667cb90d51024f-audit.json +20 -0
- package/logs/.fcfd495c0a9169db243f4a4f21878ee02b76413c-audit.json +20 -0
- package/logs/admin-2025-09-12.log +580 -0
- package/logs/admin-2025-09-15.log +283 -0
- package/logs/admin-error-2025-09-12.log +22 -0
- package/logs/admin-error-2025-09-15.log +10 -0
- package/logs/api_key_manager-2025-09-12.log +658 -0
- package/logs/api_key_manager-2025-09-15.log +295 -0
- package/logs/api_key_manager-error-2025-09-12.log +0 -0
- package/logs/api_key_manager-error-2025-09-15.log +0 -0
- package/logs/auth_manager-2025-09-12.log +4432 -0
- package/logs/auth_manager-2025-09-15.log +2000 -0
- package/logs/auth_manager-error-2025-09-12.log +11 -0
- package/logs/auth_manager-error-2025-09-15.log +5 -0
- package/logs/auto_index_manager-2025-09-12.log +84 -0
- package/logs/auto_index_manager-2025-09-15.log +45 -0
- package/logs/auto_index_manager-error-2025-09-12.log +6 -0
- package/logs/auto_index_manager-error-2025-09-15.log +0 -0
- package/logs/backup_manager-2025-09-12.log +198 -0
- package/logs/backup_manager-2025-09-15.log +90 -0
- package/logs/backup_manager-error-2025-09-12.log +198 -0
- package/logs/backup_manager-error-2025-09-15.log +90 -0
- package/logs/bulk_write-2025-09-12.log +66 -0
- package/logs/bulk_write-2025-09-15.log +38 -0
- package/logs/bulk_write-error-2025-09-12.log +0 -0
- package/logs/bulk_write-error-2025-09-15.log +0 -0
- package/logs/connection_manager-2025-09-12.log +2412 -0
- package/logs/connection_manager-2025-09-15.log +1132 -0
- package/logs/connection_manager-error-2025-09-12.log +0 -0
- package/logs/connection_manager-error-2025-09-15.log +0 -0
- package/logs/create_index-2025-09-12.log +302 -0
- package/logs/create_index-2025-09-15.log +158 -0
- package/logs/create_index-error-2025-09-12.log +30 -0
- package/logs/create_index-error-2025-09-15.log +13 -0
- package/logs/delete_one-2025-09-12.log +73 -0
- package/logs/delete_one-2025-09-15.log +43 -0
- package/logs/delete_one-error-2025-09-12.log +0 -0
- package/logs/delete_one-error-2025-09-15.log +0 -0
- package/logs/disk_utils-2025-09-12.log +4954 -0
- package/logs/disk_utils-2025-09-15.log +2446 -0
- package/logs/disk_utils-error-2025-09-12.log +0 -0
- package/logs/disk_utils-error-2025-09-15.log +0 -0
- package/logs/drop_index-2025-09-12.log +41 -0
- package/logs/drop_index-2025-09-15.log +23 -0
- package/logs/drop_index-error-2025-09-12.log +11 -0
- package/logs/drop_index-error-2025-09-15.log +5 -0
- package/logs/find-2025-09-12.log +1050 -0
- package/logs/find-2025-09-15.log +592 -0
- package/logs/find-error-2025-09-12.log +1 -0
- package/logs/find-error-2025-09-15.log +0 -0
- package/logs/find_one-2025-09-12.log +425 -0
- package/logs/find_one-2025-09-15.log +264 -0
- package/logs/find_one-error-2025-09-12.log +5 -0
- package/logs/find_one-error-2025-09-15.log +0 -0
- package/logs/get_indexes-2025-09-12.log +84 -0
- package/logs/get_indexes-2025-09-15.log +56 -0
- package/logs/get_indexes-error-2025-09-12.log +6 -0
- package/logs/get_indexes-error-2025-09-15.log +0 -0
- package/logs/http_server-2025-09-12.log +2772 -0
- package/logs/http_server-2025-09-15.log +1276 -0
- package/logs/http_server-error-2025-09-12.log +212 -0
- package/logs/http_server-error-2025-09-15.log +44 -0
- package/logs/index_manager-2025-09-12.log +5031 -0
- package/logs/index_manager-2025-09-15.log +2909 -0
- package/logs/index_manager-error-2025-09-12.log +80 -0
- package/logs/index_manager-error-2025-09-15.log +38 -0
- package/logs/insert_one-2025-09-12.log +2181 -0
- package/logs/insert_one-2025-09-15.log +1293 -0
- package/logs/insert_one-error-2025-09-12.log +0 -0
- package/logs/insert_one-error-2025-09-15.log +0 -0
- package/logs/master-2025-09-12.log +1882 -0
- package/logs/master-2025-09-15.log +910 -0
- package/logs/master-error-2025-09-12.log +80 -0
- package/logs/master-error-2025-09-15.log +0 -0
- package/logs/operation_dispatcher-2025-09-12.log +751 -0
- package/logs/operation_dispatcher-2025-09-15.log +359 -0
- package/logs/operation_dispatcher-error-2025-09-12.log +33 -0
- package/logs/operation_dispatcher-error-2025-09-15.log +11 -0
- package/logs/performance_monitor-2025-09-12.log +14889 -0
- package/logs/performance_monitor-2025-09-15.log +6803 -0
- package/logs/performance_monitor-error-2025-09-12.log +0 -0
- package/logs/performance_monitor-error-2025-09-15.log +0 -0
- package/logs/query_engine-2025-09-12.log +5310 -0
- package/logs/query_engine-2025-09-15.log +2639 -0
- package/logs/query_engine-error-2025-09-12.log +0 -0
- package/logs/query_engine-error-2025-09-15.log +0 -0
- package/logs/recovery_manager-2025-09-12.log +462 -0
- package/logs/recovery_manager-2025-09-15.log +210 -0
- package/logs/recovery_manager-error-2025-09-12.log +22 -0
- package/logs/recovery_manager-error-2025-09-15.log +10 -0
- package/logs/replication-2025-09-12.log +1923 -0
- package/logs/replication-2025-09-15.log +917 -0
- package/logs/replication-error-2025-09-12.log +33 -0
- package/logs/replication-error-2025-09-15.log +15 -0
- package/logs/server-2025-09-12.log +2601 -0
- package/logs/server-2025-09-15.log +1191 -0
- package/logs/server-error-2025-09-12.log +0 -0
- package/logs/server-error-2025-09-15.log +0 -0
- package/logs/tcp_protocol-2025-09-12.log +22 -0
- package/logs/tcp_protocol-2025-09-15.log +10 -0
- package/logs/tcp_protocol-error-2025-09-12.log +22 -0
- package/logs/tcp_protocol-error-2025-09-15.log +10 -0
- package/logs/test-2025-09-12.log +0 -0
- package/logs/test-2025-09-15.log +0 -0
- package/logs/test-error-2025-09-12.log +0 -0
- package/logs/test-error-2025-09-15.log +0 -0
- package/logs/update_one-2025-09-12.log +173 -0
- package/logs/update_one-2025-09-15.log +118 -0
- package/logs/update_one-error-2025-09-12.log +0 -0
- package/logs/update_one-error-2025-09-15.log +0 -0
- package/logs/worker-2025-09-12.log +1457 -0
- package/logs/worker-2025-09-15.log +695 -0
- package/logs/worker-error-2025-09-12.log +0 -0
- package/logs/worker-error-2025-09-15.log +0 -0
- package/logs/write_forwarder-2025-09-12.log +1956 -0
- package/logs/write_forwarder-2025-09-15.log +932 -0
- package/logs/write_forwarder-error-2025-09-12.log +66 -0
- package/logs/write_forwarder-error-2025-09-15.log +30 -0
- package/logs/write_queue-2025-09-12.log +612 -0
- package/logs/write_queue-2025-09-15.log +301 -0
- package/logs/write_queue-error-2025-09-12.log +184 -0
- package/logs/write_queue-error-2025-09-15.log +83 -0
- package/package.json +48 -0
- package/prompts/01-core-infrastructure.md +56 -0
- package/prompts/02-secondary-indexing.md +65 -0
- package/prompts/03-write-serialization.md +63 -0
- package/prompts/04-enhanced-authentication.md +75 -0
- package/prompts/05-comprehensive-admin-operations.md +75 -0
- package/prompts/06-backup-and-restore-system.md +106 -0
- package/prompts/07-production-safety-features.md +107 -0
- package/prompts/08-tcp-client-library.md +121 -0
- package/prompts/09-api-method-chaining.md +134 -0
- package/prompts/10-automatic-index-creation.md +223 -0
- package/prompts/11-operation-naming-consistency.md +268 -0
- package/prompts/12-tcp-replication-system.md +333 -0
- package/prompts/13-master-read-write-operations.md +57 -0
- package/prompts/14-index-upsert-operations.md +68 -0
- package/prompts/15-client-api-return-types.md +81 -0
- package/prompts/16-server-setup-ui.md +97 -0
- package/prompts/17-emergency-password-change.md +108 -0
- package/prompts/18-joystick-framework-integration.md +116 -0
- package/prompts/19-api-key-authentication-system.md +137 -0
- package/prompts/20-configurable-server-port.md +105 -0
- package/prompts/21-multi-database-support.md +161 -0
- package/prompts/FULL_TEXT_SEARCH.md +293 -0
- package/prompts/PROMPTS.md +158 -0
- package/prompts/README.md +221 -0
- package/prompts/TYPESCRIPT_GENERATION.md +179 -0
- package/src/client/database.js +166 -0
- package/src/client/index.js +752 -0
- package/src/server/cluster/index.js +53 -0
- package/src/server/cluster/master.js +774 -0
- package/src/server/cluster/worker.js +537 -0
- package/src/server/index.js +540 -0
- package/src/server/lib/api_key_manager.js +473 -0
- package/src/server/lib/auth_manager.js +375 -0
- package/src/server/lib/auto_index_manager.js +681 -0
- package/src/server/lib/backup_manager.js +650 -0
- package/src/server/lib/connection_manager.js +218 -0
- package/src/server/lib/disk_utils.js +118 -0
- package/src/server/lib/http_server.js +1165 -0
- package/src/server/lib/index_manager.js +756 -0
- package/src/server/lib/load_settings.js +143 -0
- package/src/server/lib/logger.js +135 -0
- package/src/server/lib/op_types.js +29 -0
- package/src/server/lib/operation_dispatcher.js +268 -0
- package/src/server/lib/operations/admin.js +808 -0
- package/src/server/lib/operations/bulk_write.js +367 -0
- package/src/server/lib/operations/create_index.js +68 -0
- package/src/server/lib/operations/delete_one.js +114 -0
- package/src/server/lib/operations/drop_index.js +58 -0
- package/src/server/lib/operations/find.js +340 -0
- package/src/server/lib/operations/find_one.js +319 -0
- package/src/server/lib/operations/get_indexes.js +52 -0
- package/src/server/lib/operations/insert_one.js +113 -0
- package/src/server/lib/operations/update_one.js +225 -0
- package/src/server/lib/performance_monitor.js +313 -0
- package/src/server/lib/query_engine.js +243 -0
- package/src/server/lib/recovery_manager.js +388 -0
- package/src/server/lib/replication_manager.js +727 -0
- package/src/server/lib/safe_json_parse.js +21 -0
- package/src/server/lib/send_response.js +47 -0
- package/src/server/lib/tcp_protocol.js +130 -0
- package/src/server/lib/write_forwarder.js +636 -0
- package/src/server/lib/write_queue.js +335 -0
- package/test_data/data.mdb +0 -0
- package/test_data/lock.mdb +0 -0
- package/tests/client/index.test.js +1232 -0
- package/tests/server/cluster/cluster.test.js +248 -0
- package/tests/server/cluster/master_read_write_operations.test.js +577 -0
- package/tests/server/index.test.js +651 -0
- package/tests/server/integration/authentication_integration.test.js +294 -0
- package/tests/server/integration/auto_indexing_integration.test.js +268 -0
- package/tests/server/integration/backup_integration.test.js +513 -0
- package/tests/server/integration/indexing_integration.test.js +126 -0
- package/tests/server/integration/production_safety_integration.test.js +358 -0
- package/tests/server/integration/replication_integration.test.js +227 -0
- package/tests/server/integration/write_serialization_integration.test.js +246 -0
- package/tests/server/lib/api_key_manager.test.js +516 -0
- package/tests/server/lib/auth_manager.test.js +317 -0
- package/tests/server/lib/auto_index_manager.test.js +275 -0
- package/tests/server/lib/backup_manager.test.js +238 -0
- package/tests/server/lib/connection_manager.test.js +221 -0
- package/tests/server/lib/disk_utils.test.js +63 -0
- package/tests/server/lib/http_server.test.js +389 -0
- package/tests/server/lib/index_manager.test.js +301 -0
- package/tests/server/lib/load_settings.test.js +107 -0
- package/tests/server/lib/load_settings_port_config.test.js +243 -0
- package/tests/server/lib/logger.test.js +282 -0
- package/tests/server/lib/operations/admin.test.js +638 -0
- package/tests/server/lib/operations/bulk_write.test.js +128 -0
- package/tests/server/lib/operations/create_index.test.js +138 -0
- package/tests/server/lib/operations/delete_one.test.js +52 -0
- package/tests/server/lib/operations/drop_index.test.js +72 -0
- package/tests/server/lib/operations/find.test.js +93 -0
- package/tests/server/lib/operations/find_one.test.js +91 -0
- package/tests/server/lib/operations/get_indexes.test.js +87 -0
- package/tests/server/lib/operations/insert_one.test.js +42 -0
- package/tests/server/lib/operations/update_one.test.js +89 -0
- package/tests/server/lib/performance_monitor.test.js +185 -0
- package/tests/server/lib/query_engine.test.js +46 -0
- package/tests/server/lib/recovery_manager.test.js +414 -0
- package/tests/server/lib/replication_manager.test.js +202 -0
- package/tests/server/lib/safe_json_parse.test.js +45 -0
- package/tests/server/lib/send_response.test.js +155 -0
- package/tests/server/lib/tcp_protocol.test.js +169 -0
- package/tests/server/lib/write_forwarder.test.js +258 -0
- package/tests/server/lib/write_queue.test.js +255 -0
- package/tsconfig.json +30 -0
- package/types/client/index.d.ts +447 -0
- package/types/server/cluster/index.d.ts +28 -0
- package/types/server/cluster/master.d.ts +115 -0
- package/types/server/cluster/worker.d.ts +1 -0
- package/types/server/lib/auth_manager.d.ts +13 -0
- package/types/server/lib/backup_manager.d.ts +43 -0
- package/types/server/lib/connection_manager.d.ts +15 -0
- package/types/server/lib/disk_utils.d.ts +3 -0
- package/types/server/lib/index_manager.d.ts +24 -0
- package/types/server/lib/load_settings.d.ts +4 -0
- package/types/server/lib/logger.d.ts +44 -0
- package/types/server/lib/op_types.d.ts +6 -0
- package/types/server/lib/performance_monitor.d.ts +68 -0
- package/types/server/lib/query_engine.d.ts +10 -0
- package/types/server/lib/safe_json_parse.d.ts +7 -0
- package/types/server/lib/send_response.d.ts +3 -0
- package/types/server/lib/tcp_protocol.d.ts +12 -0
- package/types/server/lib/write_queue.d.ts +2 -0
package/dist/server/lib/index_manager.js
@@ -0,0 +1 @@
import{get_database as O}from"./query_engine.js";import j from"./logger.js";const{create_context_logger:k}=j("index_manager");let $=null;const z=()=>{try{$=O().openDB("indexes",{create:!0})}catch{throw new Error("Main database not initialized. Call initialize_database first.")}return $},_=()=>{if(!$)throw new Error("Index database not initialized. Call initialize_index_database first.");return $},g=(t,e,r,s)=>{const n=typeof s=="object"?JSON.stringify(s):String(s);return`index:${t}:${e}:${r}:${n}`},R=t=>{const e=t.split(":");return e.length<5||e[0]!=="index"?null:{database:e[1],collection:e[2],field:e[3],value:e.slice(4).join(":")}},I=(t,e,r)=>`meta:${t}:${e}:${r}`,v=(t,e)=>{const r=e.split(".");let s=t;for(const n of r){if(s==null)return;s=s[n]}return s},A=(t,e)=>t.unique===(e.unique||!1)&&t.sparse===(e.sparse||!1),C=(t,e,r,s,n,i)=>{const u=`index:${t}:${e}:${r}:`,o=n.getRange({start:u,end:u+"\xFF"});for(const{key:l}of o)n.remove(l);const c=`${t}:${e}:`,d=i.getRange({start:c,end:c+"\xFF"});if(s.unique){const l=new Map;for(const{key:x,value:p}of d){const y=JSON.parse(p),a=v(y,r);if(a!=null){const w=typeof a=="object"?JSON.stringify(a):String(a),h=l.get(w)||0;if(l.set(w,h+1),h>=1)throw new Error(`Duplicate value for unique index on ${t}.${e}.${r}: ${a}`)}}}const f=i.getRange({start:c,end:c+"\xFF"});for(const{key:l,value:x}of f){const p=JSON.parse(x),y=v(p,r);if(y==null){if(!s.sparse){const h=g(t,e,r,null),q=n.get(h)||[];q.push(p._id),n.put(h,q)}continue}const a=g(t,e,r,y),w=n.get(a)||[];w.push(p._id),n.put(a,w)}},J=async(t,e,r,s={})=>{const n=k();if(!t)throw new Error("Database name is required");if(!e)throw new Error("Collection name is required");if(!r)throw new Error("Field name is required");const i=_(),u=O(),o=I(t,e,r),c=s.upsert||!1;try{let d="created";return await i.transaction(()=>{const f=i.get(o);if(f){if(!c)throw new Error(`Index on ${t}.${e}.${r} already exists`);if(A(f,s)){d="unchanged";return}d="updated";try{C(t,e,r,s,i,u)}catch(p){throw p}}else{const x=`${t}:${e}:`,p=u.getRange({start:x,end:x+"\xFF"});for(const{key:y,value:a}of p){const w=JSON.parse(a),h=v(w,r);if(h==null){if(!s.sparse){const E=g(t,e,r,null),S=i.get(E)||[];S.push(w._id),i.put(E,S)}continue}if(s.unique){const E=g(t,e,r,h);if((i.get(E)||[]).length>0)throw new Error(`Duplicate value for unique index on ${t}.${e}.${r}: ${h}`)}const q=g(t,e,r,h),D=i.get(q)||[];D.push(w._id),i.put(q,D)}}const l={database:t,collection:e,field:r,unique:s.unique||!1,sparse:s.sparse||!1,created_at:f?f.created_at:new Date().toISOString(),updated_at:d==="updated"?new Date().toISOString():void 0};i.put(o,l)}),n.info(`Index ${d} successfully`,{database:t,collection:e,field:r,options:s,operation_type:d}),{acknowledged:!0,operation_type:d}}catch(d){throw n.error("Failed to create/upsert index",{database:t,collection:e,field:r,error:d.message}),d}},N=async(t,e,r)=>{const s=k();if(!t)throw new Error("Database name is required");if(!e)throw new Error("Collection name is required");if(!r)throw new Error("Field name is required");const n=_(),i=I(t,e,r);try{return await n.transaction(()=>{if(!n.get(i))throw new Error(`Index on ${t}.${e}.${r} does not exist`);n.remove(i);const o=`index:${t}:${e}:${r}:`,c=n.getRange({start:o,end:o+"\xFF"});for(const{key:d}of c)n.remove(d)}),s.info("Index dropped successfully",{database:t,collection:e,field:r}),{acknowledged:!0}}catch(u){throw s.error("Failed to drop index",{database:t,collection:e,field:r,error:u.message}),u}},F=(t,e)=>{const r=k();if(!t)throw new Error("Database name is required");if(!e)throw new 
Error("Collection name is required");const s=_(),n=[];try{const i=`meta:${t}:${e}:`,u=s.getRange({start:i,end:i+"\xFF"});for(const{key:o,value:c}of u)n.push(c);return r.info("Retrieved indexes",{database:t,collection:e,count:n.length}),n}catch(i){throw r.error("Failed to get indexes",{database:t,collection:e,error:i.message}),i}},M=async(t,e,r)=>{const s=k(),n=_();try{const i=F(t,e);await n.transaction(()=>{for(const u of i){const o=v(r,u.field);if(o==null){if(!u.sparse){const f=g(t,e,u.field,null),l=n.get(f)||[];l.push(r._id),n.put(f,l)}continue}if(u.unique){const f=g(t,e,u.field,o);if((n.get(f)||[]).length>0)throw new Error(`Duplicate value for unique index on ${t}.${e}.${u.field}: ${o}`)}const c=g(t,e,u.field,o),d=n.get(c)||[];d.push(r._id),n.put(c,d)}})}catch(i){throw s.error("Failed to update indexes on insert",{database:t,collection:e,document_id:r._id,error:i.message}),i}},b=async(t,e,r,s)=>{const n=k(),i=_();try{const u=F(t,e);await i.transaction(()=>{for(const o of u){const c=v(r,o.field),d=v(s,o.field);if(c!==d){if(c!=null){const f=g(t,e,o.field,c),x=(i.get(f)||[]).filter(p=>p!==r._id);x.length===0?i.remove(f):i.put(f,x)}else if(!o.sparse){const f=g(t,e,o.field,null),x=(i.get(f)||[]).filter(p=>p!==r._id);x.length===0?i.remove(f):i.put(f,x)}if(d!=null){if(o.unique){const x=g(t,e,o.field,d);if((i.get(x)||[]).length>0)throw new Error(`Duplicate value for unique index on ${t}.${e}.${o.field}: ${d}`)}const f=g(t,e,o.field,d),l=i.get(f)||[];l.push(s._id),i.put(f,l)}else if(!o.sparse){const f=g(t,e,o.field,null),l=i.get(f)||[];l.push(s._id),i.put(f,l)}}}})}catch(u){throw n.error("Failed to update indexes on update",{database:t,collection:e,document_id:r._id,error:u.message}),u}},B=async(t,e,r)=>{const s=k(),n=_();try{const i=F(t,e);await n.transaction(()=>{for(const u of i){const o=v(r,u.field);let c;if(o==null)if(!u.sparse)c=g(t,e,u.field,null);else continue;else c=g(t,e,u.field,o);const f=(n.get(c)||[]).filter(l=>l!==r._id);f.length===0?n.remove(c):n.put(c,f)}})}catch(i){throw s.error("Failed to update indexes on delete",{database:t,collection:e,document_id:r._id,error:i.message}),i}},G=(t,e,r,s,n)=>{const i=k(),u=_(),o=new Set;try{switch(s){case"$eq":case"eq":{const d=g(t,e,r,n);(u.get(d)||[]).forEach(l=>o.add(l));break}case"$in":{if(!Array.isArray(n))throw new Error("$in operator requires an array value");for(const d of n){const f=g(t,e,r,d);(u.get(f)||[]).forEach(x=>o.add(x))}break}case"$exists":{const d=`index:${t}:${e}:${r}:`,f=u.getRange({start:d,end:d+"\xFF"});for(const{key:l,value:x}of f){const p=R(l),y=p&&p.value!=="null";(n&&y||!n&&!y)&&x.forEach(a=>o.add(a))}break}default:return null}const c=Array.from(o);return i.info("Index query completed",{database:t,collection:e,field:r,operator:s,document_ids_found:c.length}),c.length>0?c:null}catch(c){return i.error("Failed to query index",{database:t,collection:e,field:r,operator:s,error:c.message}),null}},H=(t,e,r)=>{const s=F(t,e);for(const[n,i]of Object.entries(r))if(s.some(o=>o.field===n))if(typeof i=="object"&&i!==null&&!Array.isArray(i)){const o=Object.keys(i),c=["$eq","$in","$exists"];if(o.some(d=>c.includes(d)))return{field:n,operators:o.filter(d=>c.includes(d))}}else return{field:n,operators:["eq"]};return null},K=()=>{if($)try{const t=$.getRange();for(const{key:e}of t)$.remove(e)}catch{}$=null};export{g as build_index_key,H as can_use_index,K as cleanup_index_database,J as create_index,N as drop_index,G as find_documents_by_index,_ as get_index_database,F as get_indexes,z as initialize_index_database,R as parse_index_key,B 
as update_indexes_on_delete,M as update_indexes_on_insert,b as update_indexes_on_update};
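A readable sketch of the index_manager API reconstructed from the minified build above (the unminified source ships in package/src/server/lib/index_manager.js). Argument order is inferred from the code; the database, collection, field, and value below are illustrative, and the main LMDB database is assumed to have been initialized first, since initialize_index_database throws otherwise.

import {
  initialize_index_database,
  create_index,
  get_indexes,
  find_documents_by_index,
  build_index_key,
} from './index_manager.js';

// Opens the "indexes" sub-database; throws if the main database is not initialized.
initialize_index_database();

// Create (or, with upsert, update) a unique index on users.email in the "app" database.
await create_index('app', 'users', 'email', { unique: true, upsert: true });

// Index metadata lives under meta:<db>:<collection>:<field>.
console.log(get_indexes('app', 'users'));

// Index entries live under index:<db>:<collection>:<field>:<value>.
console.log(build_index_key('app', 'users', 'email', 'ada@example.com'));
// => "index:app:users:email:ada@example.com"

// Equality, $in, and $exists lookups resolve to arrays of document _ids
// (or null when the index cannot answer the query).
const ids = find_documents_by_index('app', 'users', 'email', '$eq', 'ada@example.com');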
package/dist/server/lib/load_settings.js
@@ -0,0 +1 @@
let e=null;const s=r=>{if(r.port!==void 0){if(!Number.isInteger(r.port))throw new Error("Port must be an integer");if(r.port<1024)throw new Error("Port must be between 1024 and 65534 (HTTP port will be port + 1)");const t=r.port+1;if(t>65535)throw new Error(`HTTP port (${t}) would exceed maximum port number (65535). Use a lower TCP port.`);if(r.port>65534)throw new Error("Port must be between 1024 and 65534 (HTTP port will be port + 1)")}},o=()=>{try{const r=process.env.JOYSTICK_DB_SETTINGS;if(!r)throw new Error("JOYSTICK_DB_SETTINGS environment variable is not set");const t=JSON.parse(r);return s(t),e=t,e}catch(r){throw r instanceof SyntaxError?new Error(`Invalid JSON in JOYSTICK_DB_SETTINGS environment variable: ${r.message}`):new Error(`Failed to load JoystickDB settings: ${r.message}`)}},n=()=>{if(!e)throw new Error("Settings not loaded. Call load_settings() first.");return e},a=()=>(e=null,o()),p=()=>{e=null},c=()=>process.env.NODE_ENV||"development",l=()=>!!process.env.JOYSTICK_DB_SETTINGS,_=()=>{let r=1983;try{let t;try{t=n()}catch{o(),t=n()}t.port!==void 0&&(r=t.port)}catch{}return{tcp_port:r,http_port:r+1}};export{p as clear_settings_cache,c as get_current_environment,_ as get_port_configuration,n as get_settings,l as has_settings,o as load_settings,a as reload_settings};
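A minimal sketch of how the settings loader above is driven, based on its visible exports; the JOYSTICK_DB_SETTINGS payload and port value are illustrative.

import { load_settings, get_settings, get_port_configuration } from './load_settings.js';

// Settings are read from the JOYSTICK_DB_SETTINGS environment variable as JSON.
process.env.JOYSTICK_DB_SETTINGS = JSON.stringify({ port: 2010 });

// Throws if the variable is missing, is not valid JSON, or the port is outside 1024-65534.
load_settings();

console.log(get_settings().port);      // 2010
console.log(get_port_configuration()); // { tcp_port: 2010, http_port: 2011 } (HTTP port = TCP port + 1)
// With no configured port, get_port_configuration() falls back to { tcp_port: 1983, http_port: 1984 }.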
package/dist/server/lib/logger.js
@@ -0,0 +1 @@
import o from"winston";import n from"winston-daily-rotate-file";import m from"path";import{fileURLToPath as d}from"url";const c=d(import.meta.url),g=m.dirname(c),l=m.join(g,"../../../logs"),u=(a="server")=>{const s=o.createLogger({level:process.env.LOG_LEVEL||"info",format:o.format.combine(o.format.timestamp({format:"YYYY-MM-DDTHH:mm:ss.SSSZ"}),o.format.errors({stack:!0}),o.format.json(),o.format.printf(e=>{const r={timestamp:e.timestamp,level:e.level,component:a,pid:process.pid,message:e.message,...e.context&&{context:e.context},...e.duration_ms&&{duration_ms:e.duration_ms},...e.request_id&&{request_id:e.request_id},...e.worker_id&&{worker_id:e.worker_id},...e.stack&&{stack:e.stack}};return JSON.stringify(r)})),transports:[new o.transports.Console({format:o.format.printf(e=>{const r={level:e.level,message:e.message,...e.context&&{context:e.context},...e.duration_ms&&{duration_ms:e.duration_ms},...e.request_id&&{request_id:e.request_id},...e.worker_id&&{worker_id:e.worker_id},timestamp:e.timestamp};return JSON.stringify(r)})}),new n({filename:m.join(l,`${a}-%DATE%.log`),datePattern:"YYYY-MM-DD",zippedArchive:!0,maxSize:"20m",maxFiles:"14d",format:o.format.json()}),new n({filename:m.join(l,`${a}-error-%DATE%.log`),datePattern:"YYYY-MM-DD",zippedArchive:!0,maxSize:"20m",maxFiles:"30d",level:"error",format:o.format.json()})]});return{logger:s,create_context_logger:(e={})=>({info:(r,t={})=>{s.info(r,{context:{...e,...t}})},warn:(r,t={})=>{s.warn(r,{context:{...e,...t}})},error:(r,t={})=>{t instanceof Error?s.error(r,{context:e,stack:t.stack,error_message:t.message}):s.error(r,{context:{...e,...t}})},debug:(r,t={})=>{s.debug(r,{context:{...e,...t}})},log_operation:(r,t,i={})=>{s.info(`Operation completed: ${r}`,{context:{...e,...i,operation:r},duration_ms:t})},log_request:(r,t,i={})=>{s.info(`Request: ${t}`,{request_id:r,context:{...e,...i,operation:t}})}})}};var f=u;export{f as default};
package/dist/server/lib/op_types.js
@@ -0,0 +1 @@
const e=["authentication","setup","find_one","find","insert_one","update_one","delete_one","bulk_write","create_index","drop_index","get_indexes","admin","ping","reload"];var n=e;export{n as default};
package/dist/server/lib/operation_dispatcher.js
@@ -0,0 +1 @@
import{encode_message as s}from"./tcp_protocol.js";import{get_write_forwarder as g}from"./write_forwarder.js";import{get_replication_manager as h}from"./replication_manager.js";import{check_and_grow_map_size as b}from"./query_engine.js";import{performance_monitor as f}from"./performance_monitor.js";import x from"./logger.js";import k from"./operations/insert_one.js";import v from"./operations/update_one.js";import q from"./operations/delete_one.js";import D from"./operations/bulk_write.js";import A from"./operations/find_one.js";import I from"./operations/find.js";import $ from"./operations/create_index.js";import C from"./operations/drop_index.js";import E from"./operations/get_indexes.js";import L from"./operations/admin.js";const{create_context_logger:U}=x("operation_dispatcher"),Z=o=>!o||typeof o!="string"||o.length>64||["admin","config","local"].includes(o.toLowerCase())?!1:/^[a-zA-Z0-9_-]+$/.test(o),Y=async(o,r,e,u,c=0,a=null,_=null)=>{const l=U(),m=Date.now();if(!u(o)){const t=s({ok:0,error:"Authentication required"});o.write(t),f.log_structured_operation(o.id,r,null,0,"error","Authentication required",c,0);return}const i=e.database||"default";if(!Z(i)){const t=s({ok:0,error:"Invalid database name. Database names must be alphanumeric with underscores/hyphens, max 64 characters, and cannot be reserved names (admin, config, local)."});o.write(t),f.log_structured_operation(o.id,r,e.collection,0,"error","Invalid database name",c,0);return}if(!await g().forward_operation(o,r,e))try{let n;switch(r){case"insert_one":n=await k(i,e.collection,e.document,e.options);break;case"update_one":n=await v(i,e.collection,e.filter,e.update,e.options);break;case"delete_one":n=await q(i,e.collection,e.filter,e.options);break;case"bulk_write":n=await D(i,e.collection,e.operations,e.options);break;case"find_one":n=await A(i,e.collection,e.filter,e.options);break;case"find":n=await I(i,e.collection,e.filter,e.options);break;case"create_index":n=await $(i,e.collection,e.field,e.options);break;case"drop_index":n=await C(i,e.collection,e.field);break;case"get_indexes":n=await E(i,e.collection);break;default:throw new Error(`Unsupported operation: ${r}`)}const t=Date.now()-m;let d;r==="find_one"?d={ok:1,document:n}:r==="find"?d={ok:1,documents:n}:d={ok:1,...n};const p=s(d),w=p.length;o.write(p),f.log_structured_operation(o.id,r,e.collection,t,"success",null,c,w),l.info("Database operation completed",{client_id:o.id,op:r,collection:e.collection,duration_ms:t,status:"success",request_size:c,response_size:w}),r!=="find"&&r!=="find_one"&&r!=="get_indexes"&&(h().queue_replication(r,e.collection,e),setImmediate(()=>b()))}catch(n){const t=Date.now()-m;f.log_structured_operation(o.id,r,e.collection,t,"error",n.message,c,0),l.error("Database operation failed",{client_id:o.id,op:r,collection:e.collection,duration_ms:t,status:"error",error:n.message,request_size:c});const d={ok:0,error:n.message},p=s(d);o.write(p)}},y=async(o,r,e,u=null,c=null)=>{if(!e(o)){const _=s({ok:!1,error:"Authentication required"});o.write(_);return}try{const a=r?.admin_action,l=await L(a,r||{},u,c);if(a){const m={ok:1,...l},i=s(m);o.write(i)}else{const m={ok:!0,...l},i=s(m);o.write(i)}}catch(a){const _={ok:0,error:`Admin operation failed: ${a.message}`},l=s(_);o.write(l)}},ee=o=>{const e=s({ok:1});o.write(e)};export{y as handle_admin_operation,Y as handle_database_operation,ee as handle_ping_operation};
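The dispatcher above validates the requested database name before executing an operation; a standalone restatement of that rule for reference (the regex and reserved names are copied from the minified source, the helper name is not part of the package):

const is_valid_database_name = (name) =>
  typeof name === 'string' &&
  name.length > 0 &&
  name.length <= 64 &&
  !['admin', 'config', 'local'].includes(name.toLowerCase()) &&
  /^[a-zA-Z0-9_-]+$/.test(name);

is_valid_database_name('my_app-db'); // true
is_valid_database_name('admin');     // false (reserved name)
is_valid_database_name('bad name');  // false (whitespace is rejected)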
package/dist/server/lib/operations/admin.js
@@ -0,0 +1 @@
import{get_database as x}from"../query_engine.js";import{get_settings as q}from"../load_settings.js";import{get_write_queue as F}from"../write_queue.js";import{get_auth_stats as C}from"../auth_manager.js";import{get_query_statistics as O,get_auto_index_statistics as R,force_index_evaluation as j,remove_automatic_indexes as W}from"../auto_index_manager.js";import{create_index as A,drop_index as U,get_indexes as J}from"../index_manager.js";import{test_s3_connection as B,create_backup as L,list_backups as T,restore_backup as I,cleanup_old_backups as G}from"../backup_manager.js";import{get_replication_manager as v}from"../replication_manager.js";import{get_write_forwarder as H}from"../write_forwarder.js";import K from"../logger.js";import{performance_monitor as w}from"../performance_monitor.js";const{create_context_logger:k}=K("admin"),Q=()=>{const r=k();try{const e=x();let a;try{a=q()}catch{a={port:1983}}let o={};try{const s=e.getStats?e.getStats():{};o={pageSize:s.pageSize||0,treeDepth:s.treeDepth||0,treeBranchPages:s.treeBranchPages||0,treeLeafPages:s.treeLeafPages||0,entryCount:s.entryCount||0,mapSize:s.mapSize||0,lastPageNumber:s.lastPageNumber||0}}catch{o={error:"Could not retrieve database stats"}}const n={};let _=0;try{for(const{key:s}of e.getRange())if(typeof s=="string"&&s.includes(":")&&!s.startsWith("_")){const l=s.split(":")[0];n[l]=(n[l]||0)+1,_++}}catch(s){r.warn("Could not iterate database range for stats",{error:s.message})}const t=process.memoryUsage(),i={rss:Math.round(t.rss/1024/1024),heapTotal:Math.round(t.heapTotal/1024/1024),heapUsed:Math.round(t.heapUsed/1024/1024),external:Math.round(t.external/1024/1024)},c=o.mapSize>0?Math.round(o.lastPageNumber*o.pageSize/o.mapSize*100):0;return{server:{uptime:Math.floor(process.uptime()),uptime_formatted:E(process.uptime()),memory_usage:i,memory_usage_raw:t,node_version:process.version,platform:process.platform,arch:process.arch,pid:process.pid,cpu_usage:process.cpuUsage()},database:{total_documents:_,total_collections:Object.keys(n).length,collections:n,stats:o,map_size_usage_percent:c,disk_usage:{map_size_mb:Math.round((o.mapSize||0)/1024/1024),used_space_mb:Math.round((o.lastPageNumber||0)*(o.pageSize||0)/1024/1024)}},performance:{ops_per_second:P(),avg_response_time_ms:N()}}}catch(e){throw r.error("Failed to get enhanced stats",{error:e.message}),e}},E=r=>{const e=Math.floor(r/86400),a=Math.floor(r%86400/3600),o=Math.floor(r%3600/60),n=Math.floor(r%60);return e>0?`${e}d ${a}h ${o}m ${n}s`:a>0?`${a}h ${o}m ${n}s`:o>0?`${o}m ${n}s`:`${n}s`};let $=0,D=0,V=Date.now();const P=()=>{const r=(Date.now()-V)/1e3;return r>0?Math.round($/r):0},N=()=>$>0?Math.round(D/$):0,X=r=>{$++,D+=r},Y=(r="default")=>{const e=k();try{const a=x(),o={};let n=0;try{for(const{key:t}of a.getRange())if(typeof t=="string"&&t.includes(":")&&!t.startsWith("_")){const i=t.split(":");if(i.length>=3){const c=i[0],s=i[1];c===r&&(o[s]||(o[s]={name:s,document_count:0,indexes:[],estimated_size_bytes:0}),o[s].document_count++,n++)}}}catch(t){e.warn("Could not iterate database range for collections",{error:t.message});const i=["admin_test","test_collection","queue_test","users","products","orders","sessions","logs","analytics","settings","another_collection","list_test","pagination_test","get_test","query_test","admin_insert_test","admin_update_test","admin_delete_test"];for(const c of i)try{const s=`${r}:${c}:`,l=a.getRange({start:s,end:s+"\xFF"});let u=0;for(const h of 
l)u++,n++;u>0&&(o[c]={name:c,document_count:u,indexes:[],estimated_size_bytes:u*100})}catch{continue}}try{const t=`index:${r}:`,i=a.getRange({start:t,end:t+"\xFF"});for(const{key:c,value:s}of i)if(typeof c=="string"&&c.startsWith(t)){const l=c.substring(t.length),u=l.split(":")[0];o[u]&&(o[u].indexes.includes(l.split(":")[1])||o[u].indexes.push(l.split(":")[1]))}}catch(t){e.warn("Could not iterate index range",{error:t.message})}const _=Object.values(o);return{collections:_,total_collections:_.length,total_documents:n}}catch(a){throw e.error("Failed to list collections",{error:a.message}),a}},Z=(r,e={})=>{const a=k();if(!r)throw new Error("Collection name is required");try{const o=x(),{limit:n=50,skip:_=0,sort_field:t,sort_order:i="asc",database:c="default"}=e,s=[],l=`${c}:${r}:`;let u=0,h=0;for(const{key:p,value:y}of o.getRange({start:l,end:l+"\xFF"}))if(typeof p=="string"&&p.startsWith(l)){if(h<_){h++;continue}if(u>=n)break;try{const d=JSON.parse(y),m=p.substring(l.length);s.push({_id:m,...d}),u++}catch(d){a.warn("Could not parse document",{collection:r,key:p,error:d.message})}}return t&&s.length>0&&s.sort((p,y)=>{const d=p[t],m=y[t];return i==="desc"?m>d?1:m<d?-1:0:d>m?1:d<m?-1:0}),{collection:r,documents:s,count:s.length,skip:_,limit:n,has_more:u===n}}catch(o){throw a.error("Failed to list documents",{collection:r,error:o.message}),o}},ee=(r,e,a="default")=>{const o=k();if(!r||!e)throw new Error("Collection name and document ID are required");try{const n=x(),_=`${a}:${r}:${e}`,t=n.get(_);if(!t)return{found:!1,collection:r,document_id:e};const i=JSON.parse(t);return{found:!0,collection:r,document_id:e,document:{_id:e,...i}}}catch(n){throw o.error("Failed to get document",{collection:r,document_id:e,error:n.message}),n}},te=(r,e={},a={})=>{const o=k();if(!r)throw new Error("Collection name is required");try{const n=x(),{limit:_=100,skip:t=0,database:i="default"}=a,c=[],s=`${i}:${r}:`;let l=0,u=0,h=0;for(const{key:p,value:y}of n.getRange({start:s,end:s+"\xFF"}))if(typeof p=="string"&&p.startsWith(s)){h++;try{const d=JSON.parse(y),z={_id:p.substring(s.length),...d};if(Object.keys(e).every(M=>{const b=e[M],g=z[M];return typeof b=="object"&&b!==null?Object.keys(b).every(S=>{const f=b[S];switch(S){case"$gt":return g>f;case"$gte":return g>=f;case"$lt":return g<f;case"$lte":return g<=f;case"$ne":return g!==f;case"$in":return Array.isArray(f)&&f.includes(g);case"$regex":return new RegExp(f).test(String(g));default:return g===b}}):g===b})){if(u<t){u++;continue}if(l>=_)break;c.push(z),l++}}catch(d){o.warn("Could not parse document during query",{collection:r,key:p,error:d.message})}}return{collection:r,filter:e,documents:c,count:c.length,total_examined:h,skip:t,limit:_,has_more:l===_}}catch(n){throw o.error("Failed to query documents",{collection:r,filter:e,error:n.message}),n}},re=async(r,e,a,o={})=>await(await import("./insert_one.js")).default(r,e,a,o),oe=async(r,e,a,o,n={})=>await(await import("./update_one.js")).default(r,e,a,o,n),se=async(r,e,a,o={})=>await(await import("./delete_one.js")).default(r,e,a,o);var he=async(r,e={},a,o)=>{const n=k(),_=Date.now();try{let 
t;switch(r){case"stats":t={server:{uptime:Math.floor(process.uptime()),uptime_formatted:E(process.uptime()),node_version:process.version,platform:process.platform,arch:process.arch,pid:process.pid},memory:w.get_memory_stats(),database:{...w.get_database_stats(),map_size_mb:Math.round((w.get_database_stats()?.map_size||0)/1024/1024),used_space_mb:Math.round((w.get_database_stats()?.used_space||0)/1024/1024),usage_percent:w.get_database_stats()?.usage_percent||0},performance:{ops_per_second:P(),avg_response_time_ms:N()},system:w.get_system_stats(),connections:a?.get_stats()||{},write_queue:F()?.get_stats()||{},authentication:{authenticated_clients:o?.size||0,...C()},settings:(()=>{try{return{port:q().port||1983}}catch{return{port:1983}}})()};break;case"list_collections":t=Y();break;case"list_documents":t=Z(e.collection,{limit:e.limit,skip:e.skip,sort_field:e.sort_field,sort_order:e.sort_order});break;case"get_document":t=ee(e.collection,e.document_id);break;case"query_documents":t=te(e.collection,e.filter,{limit:e.limit,skip:e.skip});break;case"insert_document":t=await re(e.database||"default",e.collection,e.document,e.options);break;case"update_document":const c=e.document_id?{_id:e.document_id}:e.filter;t=await oe(e.database||"default",e.collection,c,e.update,e.options);break;case"delete_document":const s=e.document_id?{_id:e.document_id}:e.filter;t=await se(e.database||"default",e.collection,s,e.options);break;case"test_s3_connection":t=await B();break;case"backup_now":t=await L();break;case"list_backups":t=await T();break;case"restore_backup":if(!e.backup_filename)throw new Error("backup_filename is required for restore operation");t=await I(e.backup_filename);break;case"cleanup_backups":t=await G();break;case"get_auto_index_stats":t=R();break;case"get_query_stats":t=O(e.collection);break;case"evaluate_auto_indexes":t=await j(e.collection);break;case"remove_auto_indexes":if(!e.collection)throw new Error("collection is required for remove_auto_indexes operation");t=await W(e.collection,e.field_names);break;case"create_index":if(!e.collection||!e.field)throw new Error("collection and field are required for create_index operation");t=await A(e.database||"default",e.collection,e.field,e.options);break;case"drop_index":if(!e.collection||!e.field)throw new Error("collection and field are required for drop_index operation");t=await U(e.database||"default",e.collection,e.field);break;case"get_indexes":if(!e.collection)throw new Error("collection is required for get_indexes operation");t={indexes:J(e.database||"default",e.collection)};break;case"get_replication_status":t=v().get_replication_status();break;case"add_secondary":if(!e.id||!e.ip||!e.port||!e.private_key)throw new Error("id, ip, port, and private_key are required for add_secondary operation");t=await v().add_secondary({id:e.id,ip:e.ip,port:e.port,private_key:e.private_key,enabled:!0});break;case"remove_secondary":if(!e.secondary_id)throw new Error("secondary_id is required for remove_secondary operation");t=v().remove_secondary(e.secondary_id);break;case"sync_secondaries":t=await v().sync_secondaries();break;case"get_secondary_health":t=v().get_secondary_health();break;case"get_forwarder_status":t=H().get_forwarder_status();break;default:t={...Q(),connections:a?.get_stats()||{},write_queue:F()?.get_stats()||{},authentication:{authenticated_clients:o?.size||0,...C()},settings:(()=>{try{return{port:q().port||1983}}catch{return{port:1983}}})()}}const i=Date.now()-_;return X(i),n.info("Admin operation 
completed",{admin_action:r||"default",duration_ms:i,status:"success"}),t}catch(t){const i=Date.now()-_;throw n.error("Admin operation failed",{admin_action:r||"default",duration_ms:i,status:"error",error:t.message}),t}};export{he as default,X as track_operation};
package/dist/server/lib/operations/bulk_write.js
@@ -0,0 +1 @@
import{get_database as m,generate_document_id as g,build_collection_key as y}from"../query_engine.js";import{get_write_queue as b}from"../write_queue.js";import k from"../logger.js";const{create_context_logger:S}=k("bulk_write"),O=(r,o)=>{const e={...r};for(const[s,n]of Object.entries(o))switch(s){case"$set":Object.assign(e,n);break;case"$unset":for(const t of Object.keys(n))delete e[t];break;case"$inc":for(const[t,i]of Object.entries(n))e[t]=(e[t]||0)+i;break;case"$push":for(const[t,i]of Object.entries(n))Array.isArray(e[t])||(e[t]=[]),e[t].push(i);break;case"$pull":for(const[t,i]of Object.entries(n))Array.isArray(e[t])&&(e[t]=e[t].filter(c=>c!==i));break;default:throw new Error(`Unsupported update operator: ${s}`)}return e},h=(r,o)=>{if(!o||Object.keys(o).length===0)return!0;for(const[e,s]of Object.entries(o))if(r[e]!==s)return!1;return!0},E=(r,o,e,s)=>{const n=s.document;if(!n||typeof n!="object")throw new Error("insertOne operation requires a valid document");const t=n._id||g(),i=y(o,e,t);if(r.get(i))throw new Error(`Document with _id ${t} already exists`);const u={...n,_id:t,_created_at:new Date().toISOString(),_updated_at:new Date().toISOString()};return r.put(i,JSON.stringify(u)),{inserted_id:t}},j=(r,o,e,s)=>{const{filter:n,update:t,upsert:i=!1}=s;if(!n||typeof n!="object")throw new Error("updateOne operation requires a valid filter");if(!t||typeof t!="object")throw new Error("updateOne operation requires a valid update");const c=`${o}:${e}:`;let u=0,a=0,_=null;const d=r.getRange({start:c,end:c+"\xFF"});for(const{key:f,value:w}of d){const l=JSON.parse(w);if(h(l,n)){u=1;const p=O(l,t);return p._updated_at=new Date().toISOString(),JSON.stringify(l)!==JSON.stringify(p)&&(r.put(f,JSON.stringify(p)),a=1),{matched_count:u,modified_count:a}}}if(i){const f=g(),w=y(o,e,f),l={...n,_id:f,_created_at:new Date().toISOString(),_updated_at:new Date().toISOString()},p=O(l,t);r.put(w,JSON.stringify(p)),_=f}return{matched_count:u,modified_count:a,upserted_id:_}},q=(r,o,e,s)=>{const{filter:n}=s;if(!n||typeof n!="object")throw new Error("deleteOne operation requires a valid filter");const t=`${o}:${e}:`,i=r.getRange({start:t,end:t+"\xFF"});for(const{key:c,value:u}of i){const a=JSON.parse(u);if(h(a,n))return r.remove(c),{deleted_count:1}}return{deleted_count:0}},$=async(r,o,e,s={})=>{const n=S();if(!r)throw new Error("Database name is required");if(!o)throw new Error("Collection name is required");if(!Array.isArray(e)||e.length===0)throw new Error("Operations must be a non-empty array");const t=m(),i={acknowledged:!0,inserted_count:0,matched_count:0,modified_count:0,deleted_count:0,upserted_count:0,inserted_ids:{},upserted_ids:{}};return await t.transaction(()=>{e.forEach((c,u)=>{const a=Object.keys(c)[0],_=c[a];switch(a){case"insert_one":case"insertOne":{const d=E(t,r,o,_);i.inserted_count++,i.inserted_ids[u]=d.inserted_id;break}case"update_one":case"updateOne":{const d=j(t,r,o,_);i.matched_count+=d.matched_count,i.modified_count+=d.modified_count,d.upserted_id&&(i.upserted_count++,i.upserted_ids[u]=d.upserted_id);break}case"delete_one":case"deleteOne":{const d=q(t,r,o,_);i.deleted_count+=d.deleted_count;break}default:throw new Error(`Unsupported bulk operation: ${a}`)}})}),n.info("Bulk write operation completed",{database:r,collection:o,operations_count:e.length,results:i}),i},v=async(r,o,e,s={})=>{if(!r)throw new Error("Database name is required");if(!o)throw new Error("Collection name is required");if(!Array.isArray(e)||e.length===0)throw new Error("Operations must be a non-empty array");return 
await b().enqueue_write_operation(()=>$(r,o,e,s),{operation:"bulk_write",database:r,collection:o,operations_count:e.length})};var J=v;export{J as default};
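A sketch of a bulk_write call using the operation shapes handled above (both snake_case and camelCase keys are accepted); the database, collection, and field values are illustrative, and the write queue is assumed to be initialized by the server.

import bulk_write from './operations/bulk_write.js';

const result = await bulk_write('app', 'users', [
  { insert_one: { document: { name: 'Ada' } } },
  { update_one: { filter: { name: 'Ada' }, update: { $set: { active: true } }, upsert: false } },
  { delete_one: { filter: { name: 'Grace' } } },
]);
// => { acknowledged: true, inserted_count, matched_count, modified_count,
//      deleted_count, upserted_count, inserted_ids, upserted_ids }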
package/dist/server/lib/operations/create_index.js
@@ -0,0 +1 @@
import{create_index as a}from"../index_manager.js";import n from"../logger.js";const{create_context_logger:u}=n("create_index"),p=async(r,e,t,c={})=>{const s=u();if(!r)throw new Error("Database name is required");if(!e)throw new Error("Collection name is required");if(!t)throw new Error("Field name is required");try{const o=await a(r,e,t,c),i=o.operation_type||"created",d=i==="created"?"created":i==="updated"?"updated":"already exists";return s.info(`Index ${d} successfully`,{database:r,collection:e,field:t,options:c,operation_type:i}),{...o,message:`Index ${d} on ${r}.${e}.${t}`}}catch(o){throw s.error("Failed to create/upsert index",{database:r,collection:e,field:t,error:o.message}),o}};var g=p;export{g as default};
package/dist/server/lib/operations/delete_one.js
@@ -0,0 +1 @@
import{get_database as _}from"../query_engine.js";import{update_indexes_on_delete as p}from"../index_manager.js";import{get_write_queue as w}from"../write_queue.js";import g from"../logger.js";const{create_context_logger:b}=g("delete_one"),m=(t,e)=>{if(!e||Object.keys(e).length===0)return!0;for(const[r,o]of Object.entries(e))if(t[r]!==o)return!1;return!0},y=async(t,e,r,o={})=>{const u=b();if(!t)throw new Error("Database name is required");if(!e)throw new Error("Collection name is required");if(!r||typeof r!="object")throw new Error("Filter must be a valid object");const n=_();let i=0,c=null;return await n.transaction(()=>{const a=`${t}:${e}:`,l=n.getRange({start:a,end:a+"\xFF"});for(const{key:d,value:f}of l)try{const s=JSON.parse(f);if(m(s,r)){n.remove(d),c=s,i=1;break}}catch{continue}}),c&&await p(t,e,c),u.info("Delete operation completed",{database:t,collection:e,deleted_count:i}),{acknowledged:!0,deleted_count:i}},h=async(t,e,r,o={})=>await w().enqueue_write_operation(()=>y(t,e,r,o),{operation:"delete_one",database:t,collection:e,filter_keys:Object.keys(r||{})});var v=h;export{v as default};
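A sketch of the delete_one wrapper above; note that its match helper compares only top-level fields for equality, so operator filters are not supported here. Values are illustrative and the write queue is assumed to be initialized.

import delete_one from './operations/delete_one.js';

const { acknowledged, deleted_count } = await delete_one('app', 'users', { email: 'ada@example.com' });
// Secondary indexes are updated after the matching document is removed.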
package/dist/server/lib/operations/drop_index.js
@@ -0,0 +1 @@
import{drop_index as d}from"../index_manager.js";import s from"../logger.js";const{create_context_logger:p}=s("drop_index"),c=async(r,o,e)=>{const i=p();if(!r)throw new Error("Database name is required");if(!o)throw new Error("Collection name is required");if(!e)throw new Error("Field name is required");try{const t=await d(r,o,e);return i.info("Index dropped successfully",{database:r,collection:o,field:e}),{...t,message:`Index dropped on ${r}.${o}.${e}`}}catch(t){throw i.error("Failed to drop index",{database:r,collection:o,field:e,error:t.message}),t}};var w=c;export{w as default};
package/dist/server/lib/operations/find.js
@@ -0,0 +1 @@
import{get_database as D,build_collection_key as A}from"../query_engine.js";import{can_use_index as J,find_documents_by_index as v}from"../index_manager.js";import{record_query as N,record_index_usage as F}from"../auto_index_manager.js";import S from"../logger.js";const{create_context_logger:R}=S("find"),C=(n,r)=>{const s=r.split(".");let t=n;for(const e of s){if(t==null)return;t=t[e]}return t},P=(n,r)=>{const s=r.split(".");let t=n;for(let e=0;e<s.length;e++){if(t==null||typeof t!="object")return!1;if(e===s.length-1)return t.hasOwnProperty(s[e]);t=t[s[e]]}return!1},m=(n,r)=>{if(!r||Object.keys(r).length===0)return!0;for(const[s,t]of Object.entries(r)){const e=C(n,s);if(typeof t=="object"&&t!==null&&!Array.isArray(t))for(const[i,o]of Object.entries(t))switch(i){case"$eq":if(e!==o)return!1;break;case"$ne":if(e===o)return!1;break;case"$gt":if(e<=o)return!1;break;case"$gte":if(e<o)return!1;break;case"$lt":if(e>=o)return!1;break;case"$lte":if(e>o)return!1;break;case"$in":if(!Array.isArray(o)||!o.includes(e))return!1;break;case"$nin":if(!Array.isArray(o)||o.includes(e))return!1;break;case"$exists":const c=P(n,s);if(o&&!c||!o&&c)return!1;break;case"$regex":if(!new RegExp(o).test(e))return!1;break;default:throw new Error(`Unsupported query operator: ${i}`)}else if(e!==t)return!1}return!0},U=(n,r)=>{if(!r||Object.keys(r).length===0)return n;const s=Object.values(r).some(e=>e===1||e===!0),t={};if(s){t._id=n._id;for(const[e,i]of Object.entries(r))e==="_id"&&(i===0||i===!1)?delete t._id:(i===1||i===!0)&&(t[e]=n[e])}else{Object.assign(t,n);for(const[e,i]of Object.entries(r))(i===0||i===!1)&&delete t[e]}return t},z=(n,r)=>!r||Object.keys(r).length===0?n:n.sort((s,t)=>{for(const[e,i]of Object.entries(r)){const o=s[e],c=t[e];if(o===c)continue;if(o===void 0)return 1;if(c===void 0)return-1;const l=o<c?-1:o>c?1:0;return i===-1?-l:l}return 0}),B=async(n,r,s={},t={})=>{const e=R();if(!n)throw new Error("Database name is required");if(!r)throw new Error("Collection name is required");const i=D(),{projection:o,sort:c,limit:l,skip:O=0}=t,E=Date.now();try{let u=[],_=!1,h=null;const j=J(n,r,s);if(j){const{field:f,operators:b}=j,a=s[f];if(h=f,typeof a=="object"&&a!==null&&!Array.isArray(a)){for(const d of b)if(a[d]!==void 0){const p=v(n,r,f,d,a[d]);if(p){_=!0,F(n,r,f);for(const k of p){const x=A(n,r,k),y=i.get(x);if(y){const q=JSON.parse(y);m(q,s)&&u.push(q)}}break}}}else if(b.includes("eq")){const d=v(n,r,f,"eq",a);if(d){_=!0,F(n,r,f);for(const p of d){const k=A(n,r,p),x=i.get(k);if(x){const y=JSON.parse(x);m(y,s)&&u.push(y)}}}}}if(!_){const f=`${n}:${r}:`,b=i.getRange({start:f,end:f+"\xFF"});for(const{key:a,value:d}of b){const p=JSON.parse(d);m(p,s)&&u.push(p)}}let g=z(u,c);O>0&&(g=g.slice(O)),l&&l>0&&(g=g.slice(0,l));const w=g.map(f=>U(f,o)),$=Date.now()-E;try{N(r,s,$,_,h)}catch(f){e.warn("Failed to record query for auto-indexing",{error:f.message})}return e.info("Find operation completed",{database:n,collection:r,documents_found:w.length,total_matching:u.length,used_index:_,indexed_field:h,execution_time_ms:$}),w}catch(u){throw e.error("Failed to find documents",{database:n,collection:r,error:u.message}),u}};var L=B;export{L as default};
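A sketch of a find call exercising the operators implemented above; the database, collection, and fields are illustrative.

import find from './operations/find.js';

const adults = await find(
  'app',
  'users',
  { age: { $gte: 18 }, status: { $in: ['active', 'trial'] } },
  { projection: { name: 1, age: 1 }, sort: { age: -1 }, limit: 10, skip: 0 }
);
// When can_use_index matches a filtered field, documents are fetched by _id via the index;
// otherwise the app:users: key range is scanned and each document is matched in memory.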
package/dist/server/lib/operations/find_one.js
@@ -0,0 +1 @@
import{get_database as A,build_collection_key as j}from"../query_engine.js";import{can_use_index as v,find_documents_by_index as m}from"../index_manager.js";import{record_query as D,record_index_usage as q}from"../auto_index_manager.js";import E from"../logger.js";const{create_context_logger:F}=E("find_one"),N=(o,r)=>{const n=r.split(".");let t=o;for(const e of n){if(t==null)return;t=t[e]}return t},J=(o,r)=>{const n=r.split(".");let t=o;for(let e=0;e<n.length;e++){if(t==null||typeof t!="object")return!1;if(e===n.length-1)return t.hasOwnProperty(n[e]);t=t[n[e]]}return!1},w=(o,r)=>{if(!r||Object.keys(r).length===0)return!0;for(const[n,t]of Object.entries(r)){const e=N(o,n);if(typeof t=="object"&&t!==null&&!Array.isArray(t))for(const[i,s]of Object.entries(t))switch(i){case"$eq":if(e!==s)return!1;break;case"$ne":if(e===s)return!1;break;case"$gt":if(e<=s)return!1;break;case"$gte":if(e<s)return!1;break;case"$lt":if(e>=s)return!1;break;case"$lte":if(e>s)return!1;break;case"$in":if(!Array.isArray(s)||!s.includes(e))return!1;break;case"$nin":if(!Array.isArray(s)||s.includes(e))return!1;break;case"$exists":const x=J(o,n);if(s&&!x||!s&&x)return!1;break;case"$regex":if(!new RegExp(s).test(e))return!1;break;default:throw new Error(`Unsupported query operator: ${i}`)}else if(e!==t)return!1}return!0},S=(o,r)=>{if(!r||Object.keys(r).length===0)return o;const n=Object.values(r).some(e=>e===1||e===!0),t={};if(n){t._id=o._id;for(const[e,i]of Object.entries(r))e==="_id"&&(i===0||i===!1)?delete t._id:(i===1||i===!0)&&(t[e]=o[e])}else{Object.assign(t,o);for(const[e,i]of Object.entries(r))(i===0||i===!1)&&delete t[e]}return t},R=async(o,r,n={},t={})=>{const e=F();if(!o)throw new Error("Database name is required");if(!r)throw new Error("Collection name is required");const i=A(),{projection:s,sort:x}=t,$=Date.now();try{let c=null,a=!1,_=null;const O=v(o,r,n);if(O){const{field:f,operators:g}=O,d=n[f];if(_=f,typeof d=="object"&&d!==null&&!Array.isArray(d)){for(const u of g)if(d[u]!==void 0){const l=m(o,r,f,u,d[u]);if(l&&l.length>0){a=!0,q(o,r,f);for(const k of l){const y=j(o,r,k),p=i.get(y);if(p)try{const h=JSON.parse(p);if(w(h,n)){c=h;break}}catch{continue}}break}}}else if(g.includes("eq")){const u=m(o,r,f,"eq",d);if(u&&u.length>0){a=!0,q(o,r,f);for(const l of u){const k=j(o,r,l),y=i.get(k);if(y)try{const p=JSON.parse(y);if(w(p,n)){c=p;break}}catch{continue}}}}}if(!a){const f=`${o}:${r}:`,g=i.getRange({start:f,end:f+"\xFF"});for(const{key:d,value:u}of g)try{const l=JSON.parse(u);if(w(l,n)){c=l;break}}catch{continue}}const b=Date.now()-$;try{D(r,n,b,a,_)}catch(f){e.warn("Failed to record query for auto-indexing",{error:f.message})}if(c){const f=S(c,s);return e.info("Document found",{database:o,collection:r,document_id:c._id,used_index:a,indexed_field:_,execution_time_ms:b}),f}return e.info("No document found",{database:o,collection:r,used_index:a,indexed_field:_,execution_time_ms:b}),null}catch(c){throw e.error("Failed to find document",{database:o,collection:r,error:c.message}),c}};var B=R;export{B as default};
@@ -0,0 +1 @@
+
import{get_indexes as i}from"../index_manager.js";import s from"../logger.js";const{create_context_logger:n}=s("get_indexes"),c=async(r,t)=>{const o=n();if(!r)throw new Error("Database name is required");if(!t)throw new Error("Collection name is required");try{const e=i(r,t);return o.info("Retrieved indexes successfully",{database:r,collection:t,count:e.length}),{acknowledged:!0,indexes:e}}catch(e){throw o.error("Failed to get indexes",{database:r,collection:t,error:e.message}),e}};var l=c;export{l as default};
@@ -0,0 +1 @@
+
import{get_database as _,generate_document_id as a,build_collection_key as w}from"../query_engine.js";import{update_indexes_on_insert as f}from"../index_manager.js";import{get_write_queue as l}from"../write_queue.js";import g from"../logger.js";const{create_context_logger:p}=g("insert_one"),m=async(t,r,e,n={})=>{const s=p();if(!t)throw new Error("Database name is required");if(!r)throw new Error("Collection name is required");if(!e||typeof e!="object")throw new Error("Document must be a valid object");const i=_(),o=e._id||a(),u=w(t,r,o),c={...e,_id:o,_created_at:new Date().toISOString(),_updated_at:new Date().toISOString()};let d=null;return await i.transaction(()=>{if(i.get(u))throw new Error(`Document with _id ${o} already exists`);i.put(u,JSON.stringify(c)),d=c}),await f(t,r,d),s.info("Document inserted successfully",{database:t,collection:r,document_id:o}),{acknowledged:!0,inserted_id:o}},y=async(t,r,e,n={})=>{if(!t)throw new Error("Database name is required");if(!r)throw new Error("Collection name is required");if(!e||typeof e!="object")throw new Error("Document must be a valid object");return await l().enqueue_write_operation(()=>m(t,r,e,n),{operation:"insert_one",database:t,collection:r,document_id:e._id||"auto-generated"})};var x=y;export{x as default};
@@ -0,0 +1 @@
+
import{get_database as m,build_collection_key as h,generate_document_id as S}from"../query_engine.js";import{update_indexes_on_update as $,update_indexes_on_insert as v}from"../index_manager.js";import{get_write_queue as q}from"../write_queue.js";import x from"../logger.js";const{create_context_logger:E}=x("update_one"),k=(o,t)=>{const e={...o};for(const[n,s]of Object.entries(t))switch(n){case"$set":Object.assign(e,s);break;case"$unset":for(const r of Object.keys(s))delete e[r];break;case"$inc":for(const[r,i]of Object.entries(s))e[r]=(e[r]||0)+i;break;case"$push":for(const[r,i]of Object.entries(s))Array.isArray(e[r])||(e[r]=[]),e[r].push(i);break;case"$pull":for(const[r,i]of Object.entries(s))Array.isArray(e[r])&&(e[r]=e[r].filter(c=>c!==i));break;default:throw new Error(`Unsupported update operator: ${n}`)}return e},J=(o,t)=>{if(!t||Object.keys(t).length===0)return!0;for(const[e,n]of Object.entries(t))if(o[e]!==n)return!1;return!0},N=async(o,t,e,n,s={})=>{const r=E();if(!o)throw new Error("Database name is required");if(!t)throw new Error("Collection name is required");if(!e||typeof e!="object")throw new Error("Filter must be a valid object");if(!n||typeof n!="object")throw new Error("Update must be a valid object");const i=m();let c=0,f=0,l=null,_=null,w=null,p=null;await i.transaction(()=>{const y=`${o}:${t}:`;let O=!1;const j=i.getRange({start:y,end:y+"\xFF"});for(const{key:a,value:b}of j){let u;try{u=JSON.parse(b)}catch{continue}if(J(u,e)){O=!0,c=1;const d=k(u,n);d._updated_at=new Date().toISOString(),JSON.stringify(u)!==JSON.stringify(d)&&(i.put(a,JSON.stringify(d)),_=u,w=d,f=1);break}}if(!O&&s.upsert){const a=S(),b=h(o,t,a),u={...e,_id:a,_created_at:new Date().toISOString(),_updated_at:new Date().toISOString()};p=k(u,n),i.put(b,JSON.stringify(p)),l=a,c=0,f=0}}),_&&w&&await $(o,t,_,w),p&&await v(o,t,p),r.info("Update operation completed",{database:o,collection:t,matched_count:c,modified_count:f,upserted_id:l});const g={acknowledged:!0,matched_count:c,modified_count:f};return l&&(g.upserted_id=l),g},A=async(o,t,e,n,s={})=>await q().enqueue_write_operation(()=>N(o,t,e,n,s),{operation:"update_one",database:o,collection:t,filter_keys:Object.keys(e||{})});var C=A;export{C as default};
@@ -0,0 +1 @@
+
import u from"./logger.js";import{get_database as h}from"./query_engine.js";const{create_context_logger:d}=u("performance_monitor");class p{constructor(){this.log=d(),this.metrics={operations:new Map,errors:new Map,response_times:[],start_time:Date.now(),total_requests:0,total_errors:0},this.max_response_times=1e3}track_operation(e,t,r,n=null,a={}){this.metrics.total_requests++,this.metrics.operations.has(e)||this.metrics.operations.set(e,{count:0,total_time:0,errors:0});const c=this.metrics.operations.get(e);if(c.count++,c.total_time+=t,r==="error"){this.metrics.total_errors++,c.errors++;const i=n||"unknown_error";this.metrics.errors.has(i)||this.metrics.errors.set(i,0),this.metrics.errors.set(i,this.metrics.errors.get(i)+1)}this.metrics.response_times.push({timestamp:Date.now(),duration_ms:t,op_type:e,status:r}),this.metrics.response_times.length>this.max_response_times&&this.metrics.response_times.shift(),this.log.info("Operation tracked",{op:e,duration_ms:t,status:r,error:n||void 0,...a})}get_performance_stats(){const e=Date.now(),t=e-this.metrics.start_time,r=t/1e3,n=r>0?Math.round(this.metrics.total_requests/r):0,a=this.metrics.response_times.filter(s=>e-s.timestamp<6e4).map(s=>s.duration_ms),c=a.length>0?Math.round(a.reduce((s,o)=>s+o,0)/a.length):0,i=this.metrics.total_requests>0?Math.round(this.metrics.total_errors/this.metrics.total_requests*100*100)/100:0,m={};for(const[s,o]of this.metrics.operations)m[s]={count:o.count,avg_duration_ms:o.count>0?Math.round(o.total_time/o.count):0,error_count:o.errors,error_rate:o.count>0?Math.round(o.errors/o.count*100*100)/100:0};const _=Array.from(this.metrics.errors.entries()).sort((s,o)=>o[1]-s[1]).slice(0,10).map(([s,o])=>({error:s,count:o}));return{uptime_ms:t,uptime_seconds:Math.round(r),total_requests:this.metrics.total_requests,total_errors:this.metrics.total_errors,ops_per_second:n,avg_response_time_ms:c,error_rate_percent:i,operations:m,top_errors:_,recent_response_times:a.length}}get_memory_stats(){const e=process.memoryUsage();return{rss_mb:Math.round(e.rss/1024/1024),heap_total_mb:Math.round(e.heapTotal/1024/1024),heap_used_mb:Math.round(e.heapUsed/1024/1024),heap_used_percent:Math.round(e.heapUsed/e.heapTotal*100),external_mb:Math.round(e.external/1024/1024),array_buffers_mb:Math.round((e.arrayBuffers||0)/1024/1024)}}get_database_stats(){try{const e=h(),t=e.getStats?e.getStats():{},r=Math.round((t.mapSize||0)/1024/1024),n=Math.round((t.lastPageNumber||0)*(t.pageSize||0)/1024/1024),a=r>0?Math.round(n/r*100):0;return{map_size_mb:r,used_space_mb:n,usage_percent:a,page_size:t.pageSize||0,tree_depth:t.treeDepth||0,entry_count:t.entryCount||0,branch_pages:t.treeBranchPages||0,leaf_pages:t.treeLeafPages||0}}catch(e){return this.log.warn("Could not get database stats",{error:e.message}),{error:"Database stats unavailable",map_size_mb:0,used_space_mb:0,usage_percent:0}}}get_system_stats(){const e=process.cpuUsage();return{node_version:process.version,platform:process.platform,arch:process.arch,pid:process.pid,cpu_user_ms:Math.round(e.user/1e3),cpu_system_ms:Math.round(e.system/1e3)}}log_structured_operation(e,t,r,n,a,c=null,i=0,m=0){const _={ts:new Date().toISOString(),client_id:e,op:t,collection:r||void 0,duration_ms:n,status:a,error:c||void 0,request_size:i||void 0,response_size:m||void 0};Object.keys(_).forEach(s=>{_[s]===void 0&&delete _[s]}),this.log.info("Structured operation log",_),this.track_operation(t,n,a,c,{client_id:e,collection:r,request_size:i,response_size:m})}reset_metrics(){this.metrics={operations:new Map,errors:new 
Map,response_times:[],start_time:Date.now(),total_requests:0,total_errors:0},this.log.info("Performance metrics reset")}}const l=new p;export{p as PerformanceMonitor,l as performance_monitor};
@@ -0,0 +1 @@
+
import*as l from"lmdb";import{rmSync as d,existsSync as u}from"fs";import m from"./logger.js";import{calculate_map_size as p,get_disk_size as g,should_grow_map_size as f}from"./disk_utils.js";import{initialize_index_database as z,cleanup_index_database as b}from"./index_manager.js";import{initialize_auto_index_database as h,cleanup_auto_index_database as w}from"./auto_index_manager.js";const{create_context_logger:_}=m("query_engine");let a=null,r=null,o=null;const y=(t="./data")=>{const e=_();return a||(o=t,p(o).then(i=>{r=i,a&&a.resize&&(a.resize(r),e.info("Database map_size updated",{path:o,map_size:r,map_size_gb:Math.round(r/(1024*1024*1024)*100)/100}))}).catch(i=>{e.warn("Failed to calculate map_size, using default",{database_path:o,error:i.message})}),r=1024*1024*1024*10,a=l.open({path:o,compression:!0,useVersions:!1,encoding:"msgpack",mapSize:r}),e.info("Database initialized",{path:o,map_size:r,map_size_gb:Math.round(r/(1024*1024*1024)*100)/100}),z(),h()),a},k=async()=>{if(!a||!o)return;const t=_();try{const e=a.getStats?a.getStats():{},s=e.ms_psize*e.ms_leaf_pages||0;if(s===0)return;const i=await g(o),n=f(r,s,i);n&&(t.info("Growing map_size",{current_map_size:r,new_map_size:n,used_size:s,usage_percentage:Math.round(s/r*100)}),a.resize(n),r=n,t.info("Map size grown successfully",{new_map_size:n,new_map_size_gb:Math.round(n/(1024*1024*1024)*100)/100}))}catch(e){t.error("Failed to check/grow map_size",{error:e.message})}},x=()=>{if(!a)throw new Error("Database not initialized. Call initialize_database first.");return a},M=()=>`${Date.now()}-${Math.random().toString(36).substr(2,9)}`,S=(t,e,s)=>`${t}:${e}:${s}`,D=t=>{const e=t.split(":");return{database:e[0],collection:e[1],document_id:e.slice(2).join(":")}},$=async(t=!1)=>{const e=_(),s=o;if(a){try{await new Promise(i=>setTimeout(i,100)),w(),b(),await a.close(),e.info("Database closed successfully")}catch(i){e.warn("Error closing database",{error:i.message})}a=null,r=null,o=null}if(t&&s&&c(s))try{u(s)&&(d(s,{recursive:!0,force:!0}),e.info("Test database directory removed",{path:s}))}catch(i){e.warn("Failed to remove test database directory",{path:s,error:i.message})}},c=t=>t&&(t.includes("test_data")||t.startsWith("./test_")||t.startsWith("test_"));export{S as build_collection_key,k as check_and_grow_map_size,$ as cleanup_database,M as generate_document_id,x as get_database,y as initialize_database,c as is_test_database_path,D as parse_collection_key};
@@ -0,0 +1 @@
+
import{readFileSync as g,writeFileSync as _,existsSync as d,unlinkSync as k}from"fs";import y from"crypto";import v from"bcrypt";import w from"./logger.js";const{create_context_logger:h}=w("recovery_manager"),o=h(),s="./recovery_token.json";import{load_settings as S,has_settings as x}from"./load_settings.js";const E=10,O=12,T=3;let e={token:null,expires_at:null,failed_attempts:0,locked_until:null};const D=()=>y.randomUUID(),I=()=>{const t=D(),r=Date.now()+E*60*1e3;e={token:t,expires_at:r,failed_attempts:0,locked_until:null};try{_(s,JSON.stringify(e,null,2),{mode:384}),o.info("Recovery token generated",{expires_at:new Date(r).toISOString()})}catch(a){throw o.error("Failed to save recovery token",{error:a.message}),new Error(`Failed to save recovery token: ${a.message}`)}return{token:t,expires_at:r,url:`http://localhost:1984/recovery?token=${t}`}},u=()=>{try{if(!d(s)){e={token:null,expires_at:null,failed_attempts:0,locked_until:null};return}const t=g(s,"utf8"),r=JSON.parse(t);e={token:r.token||null,expires_at:r.expires_at||null,failed_attempts:r.failed_attempts||0,locked_until:r.locked_until||null},o.info("Recovery state loaded")}catch(t){o.warn("Failed to load recovery state, using defaults",{error:t.message}),e={token:null,expires_at:null,failed_attempts:0,locked_until:null}}},f=()=>{try{_(s,JSON.stringify(e,null,2),{mode:384})}catch(t){o.error("Failed to save recovery state",{error:t.message})}},i=()=>{e={token:null,expires_at:null,failed_attempts:0,locked_until:null};try{d(s)&&k(s)}catch(t){o.warn("Failed to cleanup recovery token file",{error:t.message})}},F=t=>{u();const r=Date.now();return e.locked_until&&r<e.locked_until?{valid:!1,reason:"locked"}:!e.token||!e.expires_at?{valid:!1,reason:"no_token"}:r>e.expires_at?(i(),{valid:!1,reason:"expired"}):t!==e.token?{valid:!1,reason:"invalid"}:{valid:!0}},N=t=>{e.failed_attempts+=1,o.warn("Failed recovery attempt",{ip:t,attempt_count:e.failed_attempts}),e.failed_attempts>=T&&(e.locked_until=Date.now()+18e5,o.warn("Recovery locked due to too many failed attempts",{ip:t,locked_until:new Date(e.locked_until).toISOString()})),f()},R=t=>!t||typeof t!="string"?{valid:!1,message:"Password is required"}:t.length<12?{valid:!1,message:"Password must be at least 12 characters long"}:{valid:!0},J=async(t,r,a=null)=>{const c=R(t);if(!c.valid)throw new Error(c.message);try{if(!x())throw new Error("JOYSTICK_DB_SETTINGS environment variable not found");const n=S();if(!n.authentication)throw new Error("Authentication not configured");const m=await v.hash(t,O),l=new Date().toISOString();if(n.authentication.password_hash=m,n.authentication.last_updated=l,n.authentication.failed_attempts={},n.authentication.rate_limits={},process.env.JOYSTICK_DB_SETTINGS=JSON.stringify(n),a&&typeof a=="function")try{a(),o.info("All existing connections terminated due to password change")}catch(p){o.warn("Failed to terminate some connections",{error:p.message})}return i(),o.info("Emergency password change completed",{client_ip:r,timestamp:l}),{success:!0,timestamp:l,message:"Password changed successfully"}}catch(n){throw o.error("Emergency password change failed",{client_ip:r,error:n.message}),n}},A=()=>{const t=Date.now();return{token_active:!!(e.token&&e.expires_at&&t<e.expires_at),expires_at:e.expires_at,failed_attempts:e.failed_attempts,locked_until:e.locked_until,is_locked:!!(e.locked_until&&t<e.locked_until)}},P=()=>{u();const 
t=Date.now();e.expires_at&&t>e.expires_at&&i(),e.locked_until&&t>e.locked_until&&(e.locked_until=null,e.failed_attempts=0,f())},Y=()=>{i()};export{J as change_password,I as create_recovery_token,A as get_recovery_status,P as initialize_recovery_manager,F as is_token_valid,N as record_failed_recovery_attempt,Y as reset_recovery_state};
@@ -0,0 +1 @@
+
import p from"net";import u from"crypto";import{get_settings as g}from"./load_settings.js";import{encode_message as d,create_message_parser as m}from"./tcp_protocol.js";import y from"./logger.js";const{create_context_logger:f}=y("replication");class w{constructor(){this.secondary_connections=new Map,this.replication_queue=[],this.processing_replication=!1,this.sequence_number=0,this.enabled=!1,this.mode="async",this.timeout_ms=5e3,this.retry_attempts=3,this.batch_size=100,this.log=f(),this.stats={total_operations_replicated:0,successful_replications:0,failed_replications:0,connected_secondaries:0,avg_replication_latency_ms:0,total_replication_time_ms:0}}initialize(){try{const e=g().replication;if(!e||!e.enabled){this.log.info("Replication disabled in settings");return}this.enabled=e.enabled,this.mode=e.mode||"async",this.timeout_ms=e.timeout_ms||5e3,this.retry_attempts=e.retry_attempts||3,this.batch_size=e.batch_size||100;const n=e.secondaries||[];this.log.info("Initializing replication manager",{enabled:this.enabled,mode:this.mode,secondary_count:n.length,timeout_ms:this.timeout_ms,batch_size:this.batch_size});for(const s of n)s.enabled&&this.connect_to_secondary(s);this.start_replication_processing()}catch(t){this.log.warn("Could not initialize replication - settings not loaded",{error:t.message})}}async connect_to_secondary(t){const{id:e,ip:n,port:s,private_key:i}=t;this.log.info("Connecting to secondary node",{id:e,ip:n,port:s});try{const o=new p.Socket,l=m();o.connect(s,n,()=>{this.log.info("Connected to secondary node",{id:e,ip:n,port:s}),this.authenticate_with_secondary(o,e,i)}),o.on("data",a=>{try{const c=l.parse_messages(a);for(const _ of c)this.handle_secondary_response(e,_)}catch(c){this.log.error("Failed to parse secondary response",{secondary_id:e,error:c.message})}}),o.on("error",a=>{this.log.error("Secondary connection error",{secondary_id:e,error:a.message}),this.secondary_connections.delete(e),this.stats.connected_secondaries=this.secondary_connections.size,setTimeout(()=>{this.connect_to_secondary(t)},5e3)}),o.on("close",()=>{this.log.warn("Secondary connection closed",{secondary_id:e}),this.secondary_connections.delete(e),this.stats.connected_secondaries=this.secondary_connections.size,setTimeout(()=>{this.connect_to_secondary(t)},5e3)}),this.secondary_connections.set(e,{socket:o,id:e,ip:n,port:s,authenticated:!1,last_ping:Date.now(),pending_operations:new Map}),this.stats.connected_secondaries=this.secondary_connections.size}catch(o){this.log.error("Failed to connect to secondary",{secondary_id:e,ip:n,port:s,error:o.message})}}authenticate_with_secondary(t,e,n){try{const s=Date.now(),i=`${e}:${s}`,o=u.createHmac("sha256",Buffer.from(n,"base64")).update(i).digest("base64"),a=d({type:"replication_auth",node_id:e,timestamp:s,signature:o});t.write(a),this.log.debug("Sent authentication to secondary",{secondary_id:e})}catch(s){this.log.error("Failed to authenticate with secondary",{secondary_id:e,error:s.message})}}handle_secondary_response(t,e){const n=this.secondary_connections.get(t);if(n)try{const s=typeof e=="string"?JSON.parse(e):e;switch(s.type){case"auth_success":n.authenticated=!0,this.log.info("Secondary authentication successful",{secondary_id:t});break;case"auth_failed":this.log.error("Secondary authentication failed",{secondary_id:t}),n.socket.end();break;case"replication_ack":this.handle_replication_acknowledgment(t,s);break;case"ping_response":n.last_ping=Date.now();break;default:this.log.debug("Unknown message from 
secondary",{secondary_id:t,type:s.type})}}catch(s){this.log.error("Failed to handle secondary response",{secondary_id:t,error:s.message})}}handle_replication_acknowledgment(t,e){const n=this.secondary_connections.get(t);if(!n)return;const{sequence_number:s,status:i,error:o}=e,l=n.pending_operations.get(s);if(l){const a=Date.now()-l.sent_at;this.stats.total_replication_time_ms+=a,i==="success"?(this.stats.successful_replications++,this.log.debug("Replication acknowledged",{secondary_id:t,sequence_number:s,latency_ms:a})):(this.stats.failed_replications++,this.log.error("Replication failed on secondary",{secondary_id:t,sequence_number:s,error:o,latency_ms:a})),n.pending_operations.delete(s);const c=this.stats.successful_replications+this.stats.failed_replications;this.stats.avg_replication_latency_ms=c>0?Math.round(this.stats.total_replication_time_ms/c):0}}queue_replication(t,e,n,s=null){if(!this.enabled||this.secondary_connections.size===0)return;const i={operation:t,collection:e,data:n,transaction_id:s,timestamp:Date.now(),sequence_number:++this.sequence_number};this.replication_queue.push(i),this.stats.total_operations_replicated++,this.log.debug("Queued operation for replication",{operation:t,collection:e,sequence_number:i.sequence_number,queue_length:this.replication_queue.length}),this.processing_replication||setImmediate(()=>this.process_replication_queue())}start_replication_processing(){this.processing_replication||(this.processing_replication=!0,this.process_replication_queue())}async process_replication_queue(){for(;this.replication_queue.length>0&&this.enabled;){const t=this.replication_queue.splice(0,this.batch_size);if(t.length===0)break;await this.replicate_batch(t),this.replication_queue.length>0&&await new Promise(e=>setTimeout(e,10))}this.replication_queue.length>0&&setImmediate(()=>this.process_replication_queue())}async replicate_batch(t){const e={type:"replication",timestamp:Date.now(),sequence_number:t[0].sequence_number,operations:t},n=d(e),s=Array.from(this.secondary_connections.values()).filter(i=>i.authenticated);if(s.length===0){this.log.warn("No authenticated secondaries available for replication");return}this.log.debug("Replicating batch to secondaries",{batch_size:t.length,secondary_count:s.length,sequence_number:t[0].sequence_number});for(const i of s)try{i.socket.write(n);for(const o of t)i.pending_operations.set(o.sequence_number,{operation:o,sent_at:Date.now()})}catch(o){this.log.error("Failed to send replication to secondary",{secondary_id:i.id,error:o.message})}}async add_secondary(t){const{id:e}=t;if(this.secondary_connections.has(e))throw new Error(`Secondary node ${e} already exists`);return this.log.info("Adding new secondary node",{id:e}),await this.connect_to_secondary(t),{success:!0,message:`Secondary node ${e} added successfully`,secondary_id:e}}remove_secondary(t){const e=this.secondary_connections.get(t);if(!e)throw new Error(`Secondary node ${t} not found`);return this.log.info("Removing secondary node",{secondary_id:t}),e.socket.end(),this.secondary_connections.delete(t),this.stats.connected_secondaries=this.secondary_connections.size,{success:!0,message:`Secondary node ${t} removed successfully`,secondary_id:t}}async sync_secondaries(){if(!this.enabled)throw new Error("Replication is not enabled");this.log.info("Forcing secondary synchronization"),await this.process_replication_queue();const t={type:"ping",timestamp:Date.now()},e=d(t),n=[];for(const[s,i]of 
this.secondary_connections)try{i.authenticated?(i.socket.write(e),n.push({secondary_id:s,status:"ping_sent"})):n.push({secondary_id:s,status:"not_authenticated"})}catch(o){n.push({secondary_id:s,status:"error",error:o.message})}return{success:!0,message:"Secondary synchronization initiated",results:n}}get_replication_status(){const t=[];for(const[e,n]of this.secondary_connections)t.push({id:e,ip:n.ip,port:n.port,authenticated:n.authenticated,last_ping:n.last_ping,pending_operations:n.pending_operations.size,connected:!n.socket.destroyed});return{enabled:this.enabled,mode:this.mode,connected_secondaries:this.stats.connected_secondaries,queue_length:this.replication_queue.length,processing:this.processing_replication,stats:this.stats,secondaries:t}}get_secondary_health(){const t=[],e=Date.now();for(const[n,s]of this.secondary_connections){const i=e-s.last_ping,o=s.authenticated&&!s.socket.destroyed&&i<3e4;t.push({id:n,ip:s.ip,port:s.port,healthy:o,authenticated:s.authenticated,connected:!s.socket.destroyed,last_ping_age_ms:i,pending_operations:s.pending_operations.size})}return{total_secondaries:t.length,healthy_secondaries:t.filter(n=>n.healthy).length,secondaries:t}}async shutdown(){this.log.info("Shutting down replication manager"),this.enabled=!1,this.processing_replication=!1;for(const[t,e]of this.secondary_connections)try{e.socket.end()}catch(n){this.log.warn("Error closing secondary connection",{secondary_id:t,error:n.message})}this.secondary_connections.clear(),this.replication_queue=[],this.stats.connected_secondaries=0,this.log.info("Replication manager shutdown complete")}}let r=null;const b=()=>(r||(r=new w),r),v=()=>{b().initialize()},R=async()=>{r&&(await r.shutdown(),r=null)};export{b as get_replication_manager,v as initialize_replication_manager,R as shutdown_replication_manager};
@@ -0,0 +1 @@
+
const e=r=>{try{return JSON.parse(r)}catch{return null}};var a=e;export{a as default};
@@ -0,0 +1 @@
+
import{encode_message as r}from"./tcp_protocol.js";const t=(e,s={})=>{const o=r({ok:!0,data:s});e.write(o)},p=(e,s={})=>{const o=r({ok:!1,error:s});e.write(o)},d=(e,s="")=>{t(e,{message:s})};export{p as send_error,d as send_message,t as send_success};
@@ -0,0 +1 @@
+
import{encode as d,decode as m}from"msgpackr";import u from"./logger.js";const{create_context_logger:l}=u("tcp_protocol"),f=e=>{const s=l();try{const t=d(e,{useFloat32:!1,int64AsType:"number",mapsAsObjects:!0}),o=Buffer.allocUnsafe(4);o.writeUInt32BE(t.length,0);const c=Buffer.concat([o,t]);return s.debug("Message encoded",{payload_size:t.length,total_size:c.length}),c}catch(t){throw s.error("Failed to encode message",{error:t.message}),t}},i=()=>{let e=Buffer.alloc(0),s=null;const t=l();return{parse_messages:g=>{e=Buffer.concat([e,g]);const n=[];for(;e.length>0;){if(s===null){if(e.length<4)break;s=e.readUInt32BE(0),e=e.slice(4),t.debug("Length prefix read",{expected_length:s})}if(e.length<s)break;const r=e.slice(0,s);e=e.slice(s),s=null;try{const a=m(r,{useFloat32:!1,int64AsType:"number",mapsAsObjects:!0});n.push(a),t.debug("Message decoded",{message_size:r.length})}catch(a){throw t.error("Failed to decode message",{message_size:r.length,error:a.message,hex_data:r.toString("hex")}),new Error(`Invalid message format: ${a.message}`)}}return n},reset:()=>{e=Buffer.alloc(0),s=null,t.debug("Parser reset")}}};export{i as create_message_parser,f as encode_message};
@@ -0,0 +1 @@
+
import h from"net";import m from"crypto";import{get_settings as p}from"./load_settings.js";import{encode_message as a,create_message_parser as l}from"./tcp_protocol.js";import f from"./logger.js";const{create_context_logger:w}=f("write_forwarder");class g{constructor(){this.primary_connection=null,this.is_secondary_mode=!1,this.primary_config=null,this.pending_forwards=new Map,this.forward_timeout_ms=1e4,this.reconnect_delay_ms=5e3,this.log=w(),this.stats={total_forwards:0,successful_forwards:0,failed_forwards:0,avg_forward_latency_ms:0,total_forward_time_ms:0,connection_attempts:0,last_connection_attempt:null}}initialize(){try{const r=p();if(r.mode!=="secondary"||!r.primary){this.log.info("Node not configured as secondary - write forwarding disabled");return}this.is_secondary_mode=!0,this.primary_config=r.primary,this.forward_timeout_ms=r.replication?.timeout_ms||1e4,this.log.info("Initializing write forwarder for secondary mode",{primary_ip:this.primary_config.ip,primary_port:this.primary_config.port,timeout_ms:this.forward_timeout_ms}),this.connect_to_primary()}catch(r){this.log.warn("Could not initialize write forwarder - settings not loaded",{error:r.message})}}async connect_to_primary(){if(!this.is_secondary_mode||!this.primary_config)return;const{ip:r,port:t,public_key:o}=this.primary_config;this.log.info("Connecting to primary node",{ip:r,port:t}),this.stats.connection_attempts++,this.stats.last_connection_attempt=Date.now();try{const e=new h.Socket,n=l();e.connect(t,r,()=>{this.log.info("Connected to primary node",{ip:r,port:t}),this.authenticate_with_primary(e,o)}),e.on("data",i=>{try{const s=n.parse_messages(i);for(const d of s)this.handle_primary_response(d)}catch(s){this.log.error("Failed to parse primary response",{error:s.message})}}),e.on("error",i=>{this.log.error("Primary connection error",{error:i.message}),this.primary_connection=null,this.fail_pending_forwards("Primary connection error"),setTimeout(()=>{this.connect_to_primary()},this.reconnect_delay_ms)}),e.on("close",()=>{this.log.warn("Primary connection closed"),this.primary_connection=null,this.fail_pending_forwards("Primary connection closed"),setTimeout(()=>{this.connect_to_primary()},this.reconnect_delay_ms)}),this.primary_connection={socket:e,authenticated:!1,last_ping:Date.now()}}catch(e){this.log.error("Failed to connect to primary",{ip:r,port:t,error:e.message}),setTimeout(()=>{this.connect_to_primary()},this.reconnect_delay_ms)}}authenticate_with_primary(r,t){try{const o=Date.now(),e=`secondary-${process.pid}`,n=`${e}:${o}`,s={op:"authentication",data:{password:m.createHmac("sha256",Buffer.from(t,"base64")).update(n).digest("base64"),node_type:"secondary",node_id:e}},d=a(s);r.write(d),this.log.debug("Sent authentication to primary")}catch(o){this.log.error("Failed to authenticate with primary",{error:o.message})}}handle_primary_response(r){if(this.primary_connection)try{const t=typeof r=="string"?JSON.parse(r):r;if(t.ok===1&&t.message==="Authentication successful"){this.primary_connection.authenticated=!0,this.log.info("Primary authentication successful");return}if(t.forward_id){this.handle_forward_response(t);return}if(t.ok===1&&!t.forward_id){this.primary_connection.last_ping=Date.now();return}this.log.debug("Unhandled primary response",{type:typeof t,keys:Object.keys(t)})}catch(t){this.log.error("Failed to handle primary response",{error:t.message})}}handle_forward_response(r){const{forward_id:t}=r,o=this.pending_forwards.get(t);if(!o){this.log.warn("Received response for unknown 
forward",{forward_id:t});return}const e=Date.now()-o.sent_at;this.stats.total_forward_time_ms+=e,clearTimeout(o.timeout),this.pending_forwards.delete(t);const n={...r};delete n.forward_id,r.ok===1||r.ok===!0?(this.stats.successful_forwards++,this.log.debug("Forward operation successful",{forward_id:t,latency_ms:e,operation:o.operation})):(this.stats.failed_forwards++,this.log.error("Forward operation failed",{forward_id:t,latency_ms:e,operation:o.operation,error:r.error}));const i=this.stats.successful_forwards+this.stats.failed_forwards;this.stats.avg_forward_latency_ms=i>0?Math.round(this.stats.total_forward_time_ms/i):0;try{const s=a(n);o.client_socket.write(s)}catch(s){this.log.error("Failed to send response to client",{forward_id:t,error:s.message})}}should_forward_operation(r){return this.is_secondary_mode?["insert_one","update_one","delete_one","bulk_write","create_index","drop_index"].includes(r):!1}async forward_operation(r,t,o){if(!this.is_secondary_mode||!this.should_forward_operation(t))return!1;if(!this.primary_connection||!this.primary_connection.authenticated){this.log.error("Cannot forward operation - not connected to primary",{operation:t,connected:!!this.primary_connection,authenticated:this.primary_connection?.authenticated||!1});const s=a({ok:0,error:"Secondary node not connected to primary"});return r.write(s),!0}const e=this.generate_forward_id();this.log.debug("Forwarding operation to primary",{forward_id:e,operation:t,client_id:r.id});const n={op:t,data:o,forward_id:e,forwarded_by:`secondary-${process.pid}`,original_client_id:r.id,timestamp:Date.now()};try{const i=a(n);this.primary_connection.socket.write(i);const s=setTimeout(()=>{this.handle_forward_timeout(e)},this.forward_timeout_ms);return this.pending_forwards.set(e,{client_socket:r,operation:t,data:o,sent_at:Date.now(),timeout:s}),this.stats.total_forwards++,!0}catch(i){this.log.error("Failed to forward operation",{forward_id:e,operation:t,error:i.message});const s={ok:0,error:`Failed to forward operation: ${i.message}`},d=a(s);return r.write(d),!0}}handle_forward_timeout(r){const t=this.pending_forwards.get(r);if(!t)return;this.log.error("Forward operation timed out",{forward_id:r,operation:t.operation,timeout_ms:this.forward_timeout_ms}),this.pending_forwards.delete(r),this.stats.failed_forwards++;const o=Date.now()-t.sent_at;this.stats.total_forward_time_ms+=o;const e=this.stats.successful_forwards+this.stats.failed_forwards;this.stats.avg_forward_latency_ms=e>0?Math.round(this.stats.total_forward_time_ms/e):0;try{const i=a({ok:0,error:"Operation forwarding timed out"});t.client_socket.write(i)}catch(n){this.log.error("Failed to send timeout error to client",{forward_id:r,error:n.message})}}fail_pending_forwards(r){for(const[t,o]of this.pending_forwards){clearTimeout(o.timeout),this.log.error("Failing pending forward operation",{forward_id:t,operation:o.operation,reason:r});try{const e={ok:0,error:`Forward operation failed: ${r}`},n=a(e);o.client_socket.write(n)}catch(e){this.log.error("Failed to send error to 
client",{forward_id:t,error:e.message})}}this.stats.failed_forwards+=this.pending_forwards.size,this.pending_forwards.clear()}generate_forward_id(){return`fwd_${Date.now()}_${Math.random().toString(36).substr(2,9)}`}get_forwarder_status(){return{enabled:this.is_secondary_mode,connected_to_primary:!!this.primary_connection&&this.primary_connection.authenticated,primary_config:this.primary_config?{ip:this.primary_config.ip,port:this.primary_config.port}:null,pending_forwards:this.pending_forwards.size,stats:this.stats}}async shutdown(){if(this.log.info("Shutting down write forwarder"),this.fail_pending_forwards("Server shutting down"),this.primary_connection){try{this.primary_connection.socket.end()}catch(r){this.log.warn("Error closing primary connection",{error:r.message})}this.primary_connection=null}this.is_secondary_mode=!1,this.log.info("Write forwarder shutdown complete")}}let c=null;const u=()=>(c||(c=new g),c),$=()=>{u().initialize()},z=async()=>{c&&(await c.shutdown(),c=null)};export{u as get_write_forwarder,$ as initialize_write_forwarder,z as shutdown_write_forwarder};
@@ -0,0 +1 @@
+
import _ from"./logger.js";const{create_context_logger:u}=_("write_queue");class h{constructor(){this.queue=[],this.processing=!1,this.shutting_down=!1,this.stats={total_operations:0,completed_operations:0,failed_operations:0,current_queue_depth:0,max_queue_depth:0,total_wait_time_ms:0,total_processing_time_ms:0},this.log=u()}async enqueue_write_operation(t,s={}){if(this.shutting_down)throw new Error("Server shutting down");return new Promise((o,i)=>{if(this.shutting_down){i(new Error("Server shutting down"));return}const e={operation_fn:t,context:s,resolve:o,reject:i,enqueued_at:Date.now(),id:this.generate_operation_id()};this.queue.push(e),this.stats.total_operations++,this.stats.current_queue_depth=this.queue.length,this.stats.current_queue_depth>this.stats.max_queue_depth&&(this.stats.max_queue_depth=this.stats.current_queue_depth),this.log.debug("Write operation enqueued",{operation_id:e.id,queue_depth:this.stats.current_queue_depth,context:s}),this.process_queue()})}async process_queue(){if(!(this.processing||this.queue.length===0||this.shutting_down)){for(this.processing=!0;this.queue.length>0&&!this.shutting_down;){const t=this.queue.shift();this.stats.current_queue_depth=this.queue.length;const s=Date.now()-t.enqueued_at;this.stats.total_wait_time_ms+=s;const o=Date.now();try{this.log.debug("Processing write operation",{operation_id:t.id,wait_time_ms:s,context:t.context});const i=await this.execute_with_retry(t.operation_fn,t.context),e=Date.now()-o;this.stats.total_processing_time_ms+=e,this.stats.completed_operations++,this.log.debug("Write operation completed",{operation_id:t.id,wait_time_ms:s,processing_time_ms:e,context:t.context}),t.resolve(i)}catch(i){const e=Date.now()-o;this.stats.total_processing_time_ms+=e,this.stats.failed_operations++,this.log.error("Write operation failed",{operation_id:t.id,wait_time_ms:s,processing_time_ms:e,error:i.message,context:t.context}),t.reject(i)}}this.processing=!1}}async execute_with_retry(t,s,o=3){let i=null;for(let e=1;e<=o;e++)try{return await t()}catch(n){if(i=n,this.is_retryable_error(n)&&e<o){const a=this.calculate_backoff_delay(e);this.log.warn("Write operation failed, retrying",{attempt:e,max_retries:o,delay_ms:a,error:n.message,context:s}),await this.sleep(a);continue}break}throw i}is_retryable_error(t){return["MDB_MAP_FULL","MDB_TXN_FULL","MDB_READERS_FULL","EAGAIN","EBUSY"].some(o=>t.message.includes(o)||t.code===o)}calculate_backoff_delay(t){const i=100*Math.pow(2,t-1),e=Math.random()*.1*i;return Math.min(i+e,5e3)}sleep(t){return new Promise(s=>setTimeout(s,t))}generate_operation_id(){return`${Date.now()}-${Math.random().toString(36).substr(2,9)}`}get_stats(){const t=this.stats.completed_operations>0?Math.round(this.stats.total_wait_time_ms/this.stats.completed_operations):0,s=this.stats.completed_operations>0?Math.round(this.stats.total_processing_time_ms/this.stats.completed_operations):0;return{...this.stats,avg_wait_time_ms:t,avg_processing_time_ms:s,success_rate:this.stats.total_operations>0?Math.round(this.stats.completed_operations/this.stats.total_operations*100):100}}clear_stats(){this.stats={total_operations:0,completed_operations:0,failed_operations:0,current_queue_depth:this.queue.length,max_queue_depth:0,total_wait_time_ms:0,total_processing_time_ms:0}}async shutdown(){for(this.log.info("Shutting down write queue",{pending_operations:this.queue.length,currently_processing:this.processing}),this.shutting_down=!0;this.processing;)await this.sleep(50);this.queue.forEach(t=>{t.reject(new Error("Server shutting 
down"))}),this.queue=[],this.processing=!1}}let r=null;const p=()=>(r||(r=new h),r),d=async()=>{r&&(await r.shutdown(),r=null)};export{p as get_write_queue,d as shutdown_write_queue};
@@ -0,0 +1,20 @@
+{
+  "keep": {
+    "days": true,
+    "amount": 14
+  },
+  "auditLog": "/Users/rglover/projects/cheatcode/joystick/db/logs/.013e15b54597d05db4b4b53ecc37b10c92a72927-audit.json",
+  "files": [
+    {
+      "date": 1757692378581,
+      "name": "/Users/rglover/projects/cheatcode/joystick/db/logs/drop_index-2025-09-12.log",
+      "hash": "bee5a28cd89d7839aea90b9dc9583f370c08e6c3eb143a1c7e614ac480d2106c"
+    },
+    {
+      "date": 1757954354633,
+      "name": "/Users/rglover/projects/cheatcode/joystick/db/logs/drop_index-2025-09-15.log",
+      "hash": "307059f96ec17fd44dc5f42f4111fca1a5985a41d554f99437cb89c033fab7c1"
+    }
+  ],
+  "hashType": "sha256"
+}
@@ -0,0 +1,20 @@
+{
+  "keep": {
+    "days": true,
+    "amount": 14
+  },
+  "auditLog": "/Users/rglover/projects/cheatcode/joystick/db/logs/.02de550a67ea0f5961faa2dfd458a4d06f59ebd1-audit.json",
+  "files": [
+    {
+      "date": 1757692378583,
+      "name": "/Users/rglover/projects/cheatcode/joystick/db/logs/replication-2025-09-12.log",
+      "hash": "a8f2b588bd7950be800c45b00ee886b4d6b5796f1ca7086f682f526aba4f32f4"
+    },
+    {
+      "date": 1757954354640,
+      "name": "/Users/rglover/projects/cheatcode/joystick/db/logs/replication-2025-09-15.log",
+      "hash": "7f8d556c3128798f76d076a1d159314e56f2db439fd74a49098e7c86734d71cf"
+    }
+  ],
+  "hashType": "sha256"
+}
@@ -0,0 +1,20 @@
+{
+  "keep": {
+    "days": true,
+    "amount": 14
+  },
+  "auditLog": "/Users/rglover/projects/cheatcode/joystick/db/logs/.03494ba24eb3c72214b4068a77d54b8993bee651-audit.json",
+  "files": [
+    {
+      "date": 1757692378582,
+      "name": "/Users/rglover/projects/cheatcode/joystick/db/logs/api_key_manager-2025-09-12.log",
+      "hash": "d0f59a1f55e9588539c8b7e26a28079b01d363c08d1f9c382e610044b56fd17e"
+    },
+    {
+      "date": 1757954354636,
+      "name": "/Users/rglover/projects/cheatcode/joystick/db/logs/api_key_manager-2025-09-15.log",
+      "hash": "54836c1d856a85faa4a66ca4de74b27139c131a2699afa79206910b327c75f7d"
+    }
+  ],
+  "hashType": "sha256"
+}
@@ -0,0 +1,20 @@
+{
+  "keep": {
+    "days": true,
+    "amount": 30
+  },
+  "auditLog": "/Users/rglover/projects/cheatcode/joystick/db/logs/.06309ec60b339be1259a7993dd09c732f8907fbc-audit.json",
+  "files": [
+    {
+      "date": 1757692444600,
+      "name": "/Users/rglover/projects/cheatcode/joystick/db/logs/master-error-2025-09-12.log",
+      "hash": "b296ba3bb58f413ad19644ec155d2a541281933d2a0be94e1f41c1aa468c8cf5"
+    },
+    {
+      "date": 1757954358731,
+      "name": "/Users/rglover/projects/cheatcode/joystick/db/logs/master-error-2025-09-15.log",
+      "hash": "eb4c5bbd8767de5c7b99cf4b3626edd08a93388cc7215004ee22541db2bd202e"
+    }
+  ],
+  "hashType": "sha256"
+}
@@ -0,0 +1,20 @@
+{
+  "keep": {
+    "days": true,
+    "amount": 30
+  },
+  "auditLog": "/Users/rglover/projects/cheatcode/joystick/db/logs/.0663a04dcfa17285661e5e1b8cfa51f41523b210-audit.json",
+  "files": [
+    {
+      "date": 1757692378483,
+      "name": "/Users/rglover/projects/cheatcode/joystick/db/logs/tcp_protocol-error-2025-09-12.log",
+      "hash": "3df8475f700deb3b0fcf1534983058b62784b8e44f25e46b0e8536d0b72cab45"
+    },
+    {
+      "date": 1757954354483,
+      "name": "/Users/rglover/projects/cheatcode/joystick/db/logs/tcp_protocol-error-2025-09-15.log",
+      "hash": "19ec7d60c3c51ab1b1b2ea4abfe44c50bb6cc521eee46903ce9761f32f04e239"
+    }
+  ],
+  "hashType": "sha256"
+}
@@ -0,0 +1,20 @@
+{
+  "keep": {
+    "days": true,
+    "amount": 14
+  },
+  "auditLog": "/Users/rglover/projects/cheatcode/joystick/db/logs/.0f06e6c4c9b824622729e13927587479e5060391-audit.json",
+  "files": [
+    {
+      "date": 1757692378574,
+      "name": "/Users/rglover/projects/cheatcode/joystick/db/logs/query_engine-2025-09-12.log",
+      "hash": "982940c0232d329672c3cbaa0a5d714d86e94463c140b7989bab1abb96c9f133"
+    },
+    {
+      "date": 1757954354613,
+      "name": "/Users/rglover/projects/cheatcode/joystick/db/logs/query_engine-2025-09-15.log",
+      "hash": "5099e004f6223a6a422a2190ed0a544fb75006f72c4d0110e8176a4a1817d91c"
+    }
+  ],
+  "hashType": "sha256"
+}
@@ -0,0 +1,20 @@
+{
+  "keep": {
+    "days": true,
+    "amount": 14
+  },
+  "auditLog": "/Users/rglover/projects/cheatcode/joystick/db/logs/.16ccf58682ecb22b3e3ec63f0da1b7fe9be56528-audit.json",
+  "files": [
+    {
+      "date": 1757692378584,
+      "name": "/Users/rglover/projects/cheatcode/joystick/db/logs/operation_dispatcher-2025-09-12.log",
+      "hash": "6e769fe55232f30af33896f20afb03d34e8c8367d828fd82134c6471d36d0d2f"
+    },
+    {
+      "date": 1757954354644,
+      "name": "/Users/rglover/projects/cheatcode/joystick/db/logs/operation_dispatcher-2025-09-15.log",
+      "hash": "b679936e296b62691cf8fca37d4bf78827d2e7b739965d6555c77b2a16ac0f36"
+    }
+  ],
+  "hashType": "sha256"
+}
@@ -0,0 +1,20 @@
+{
+  "keep": {
+    "days": true,
+    "amount": 30
+  },
+  "auditLog": "/Users/rglover/projects/cheatcode/joystick/db/logs/.1fa1a5d02f496474b1ab473524c65c984146a9ad-audit.json",
+  "files": [
+    {
+      "date": 1757692597197,
+      "name": "/Users/rglover/projects/cheatcode/joystick/db/logs/test-error-2025-09-12.log",
+      "hash": "9ab53cdeaecc308f4f36b4dcd5d3734e495829f0472cf94c1a4a4449e64e0971"
+    },
+    {
+      "date": 1757954579438,
+      "name": "/Users/rglover/projects/cheatcode/joystick/db/logs/test-error-2025-09-15.log",
+      "hash": "2462c265be1b05801908f268cef31bd1d30c7fcaf96a4d6183b5e28f96453263"
+    }
+  ],
+  "hashType": "sha256"
+}
@@ -0,0 +1,20 @@
+{
+  "keep": {
+    "days": true,
+    "amount": 14
+  },
+  "auditLog": "/Users/rglover/projects/cheatcode/joystick/db/logs/.2223c0ae3bea6f0d62c62b1d319cc8634856abb7-audit.json",
+  "files": [
+    {
+      "date": 1757692378588,
+      "name": "/Users/rglover/projects/cheatcode/joystick/db/logs/server-2025-09-12.log",
+      "hash": "2fe4dd9e32763bac84ce4562cef43076387dde94dfa0c348f94515da9b5c21db"
+    },
+    {
+      "date": 1757954422423,
+      "name": "/Users/rglover/projects/cheatcode/joystick/db/logs/server-2025-09-15.log",
+      "hash": "75cfc33ddcf9f14330d538f8a4e68c90f37ab682573c254afc423c7efd673931"
+    }
+  ],
+  "hashType": "sha256"
+}
@@ -0,0 +1,20 @@
+{
+  "keep": {
+    "days": true,
+    "amount": 14
+  },
+  "auditLog": "/Users/rglover/projects/cheatcode/joystick/db/logs/.23dc79ffda3e083665e6f5993f59397adcbf4a46-audit.json",
+  "files": [
+    {
+      "date": 1757692378584,
+      "name": "/Users/rglover/projects/cheatcode/joystick/db/logs/admin-2025-09-12.log",
+      "hash": "1c2719c6d515e113823579602be87e0489acec0c9ea696973e9edaf08af6104d"
+    },
+    {
+      "date": 1757954354643,
+      "name": "/Users/rglover/projects/cheatcode/joystick/db/logs/admin-2025-09-15.log",
+      "hash": "4ccae0713f27ad8a4c4d69a011aa44d742429c4eaacb054af5b3eedb4f6691fd"
+    }
+  ],
+  "hashType": "sha256"
+}
@@ -0,0 +1,20 @@
+{
+  "keep": {
+    "days": true,
+    "amount": 14
+  },
+  "auditLog": "/Users/rglover/projects/cheatcode/joystick/db/logs/.28104f49b03906b189eefd1cd462cb46c3c0af22-audit.json",
+  "files": [
+    {
+      "date": 1757692378581,
+      "name": "/Users/rglover/projects/cheatcode/joystick/db/logs/create_index-2025-09-12.log",
+      "hash": "c79451e383ef5544a8cff5bbad34ce512c173091bcda6a99b4fd79592d29b711"
+    },
+    {
+      "date": 1757954354632,
+      "name": "/Users/rglover/projects/cheatcode/joystick/db/logs/create_index-2025-09-15.log",
+      "hash": "9ace5ae5362801cd4bef2e1aa70844bc8ad067d6353ff127d416a9551d69cab3"
+    }
+  ],
+  "hashType": "sha256"
+}
@@ -0,0 +1,20 @@
+{
+  "keep": {
+    "days": true,
+    "amount": 30
+  },
+  "auditLog": "/Users/rglover/projects/cheatcode/joystick/db/logs/.29cdbf13808abe6a0ce70ee2f2efdd680ce3fd8e-audit.json",
+  "files": [
+    {
+      "date": 1757692378579,
+      "name": "/Users/rglover/projects/cheatcode/joystick/db/logs/update_one-error-2025-09-12.log",
+      "hash": "0a9adb63d5368bcded01309323494f6e2c7b64f4eb7d539e27e70293bd3c09da"
+    },
+    {
+      "date": 1757954354627,
+      "name": "/Users/rglover/projects/cheatcode/joystick/db/logs/update_one-error-2025-09-15.log",
+      "hash": "868cf478047be56af4793ca0107c200bc32cf5552c7d9696699d3ddd7009915a"
+    }
+  ],
+  "hashType": "sha256"
+}
@@ -0,0 +1,20 @@
+{
+  "keep": {
+    "days": true,
+    "amount": 30
+  },
+  "auditLog": "/Users/rglover/projects/cheatcode/joystick/db/logs/.2a9889afd071f77f41f5170d08703a0afca866b7-audit.json",
+  "files": [
+    {
+      "date": 1757692378581,
+      "name": "/Users/rglover/projects/cheatcode/joystick/db/logs/drop_index-error-2025-09-12.log",
+      "hash": "b6403c4f8b1384f8dcb710640564e8c02049474ab9d6da3810fa1700a9927e12"
+    },
+    {
+      "date": 1757954354634,
+      "name": "/Users/rglover/projects/cheatcode/joystick/db/logs/drop_index-error-2025-09-15.log",
+      "hash": "1a60945741f7338358450595f5c08f7b00e1aa0f5e4cb8190b3ee29aa3cc955c"
+    }
+  ],
+  "hashType": "sha256"
+}
@@ -0,0 +1,20 @@
+{
+  "keep": {
+    "days": true,
+    "amount": 14
+  },
+  "auditLog": "/Users/rglover/projects/cheatcode/joystick/db/logs/.2acec3d1940a2bbed487528b703ee5948959a599-audit.json",
+  "files": [
+    {
+      "date": 1757692378577,
+      "name": "/Users/rglover/projects/cheatcode/joystick/db/logs/auth_manager-2025-09-12.log",
+      "hash": "a67ae030065459fc0077dee0303145a093e07dc63f9143a480a1350efe2dd77c"
+    },
+    {
+      "date": 1757954354623,
+      "name": "/Users/rglover/projects/cheatcode/joystick/db/logs/auth_manager-2025-09-15.log",
+      "hash": "f618f438dfae224aaad9eecc60bf73b28a898d9fa3b796234600890a0466fd39"
+    }
+  ],
+  "hashType": "sha256"
+}
@@ -0,0 +1,20 @@
+{
+  "keep": {
+    "days": true,
+    "amount": 14
+  },
+  "auditLog": "/Users/rglover/projects/cheatcode/joystick/db/logs/.2fb60ff326338c02bfedbcd0e936444e4a216750-audit.json",
+  "files": [
+    {
+      "date": 1757692378574,
+      "name": "/Users/rglover/projects/cheatcode/joystick/db/logs/backup_manager-2025-09-12.log",
+      "hash": "502f382e62ec0140f7b418df8eb3ba11859a874439ff28616eaabebab6eb766f"
+    },
+    {
+      "date": 1757954354615,
+      "name": "/Users/rglover/projects/cheatcode/joystick/db/logs/backup_manager-2025-09-15.log",
+      "hash": "53dc83dcefbbe87498a239008b3ecdf9e8472a07c2b967bae784a3d36eece370"
+    }
+  ],
+  "hashType": "sha256"
+}
@@ -0,0 +1,20 @@
+{
+  "keep": {
+    "days": true,
+    "amount": 30
+  },
+  "auditLog": "/Users/rglover/projects/cheatcode/joystick/db/logs/.318fc7a19530d76a345f030f7cad00dda15300e7-audit.json",
+  "files": [
+    {
+      "date": 1757692378582,
+      "name": "/Users/rglover/projects/cheatcode/joystick/db/logs/recovery_manager-error-2025-09-12.log",
+      "hash": "e3a5c132c74ca97f8ca4f53c26bc0d21e4728c6c86b50ca36883f2a32b7371ef"
+    },
+    {
+      "date": 1757954354636,
+      "name": "/Users/rglover/projects/cheatcode/joystick/db/logs/recovery_manager-error-2025-09-15.log",
+      "hash": "f5addea600b32ba52a67a871d590f64c4d45c3d23dd3cc5dc108281cd85017fe"
+    }
+  ],
+  "hashType": "sha256"
+}