flow-frame-core 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +64 -0
- package/dist/Dockerfile +86 -0
- package/dist/GPU_DEPLOYMENT_README.md +324 -0
- package/dist/OPS_AGENT_README.md +174 -0
- package/dist/README-H100-VM.md +192 -0
- package/dist/README-worker-pools.md +231 -0
- package/dist/README.md +8 -0
- package/dist/WEB-ELEMENT-REQUESTS-README.md +302 -0
- package/dist/append.d.ts +3 -0
- package/dist/append.d.ts.map +1 -0
- package/dist/append.js +42 -0
- package/dist/append.js.map +1 -0
- package/dist/audioRoutes.d.ts +2 -0
- package/dist/audioRoutes.d.ts.map +1 -0
- package/dist/audioRoutes.js +97 -0
- package/dist/audioRoutes.js.map +1 -0
- package/dist/augment-parallel.d.ts +6 -0
- package/dist/augment-parallel.d.ts.map +1 -0
- package/dist/augment-parallel.js +128 -0
- package/dist/augment-parallel.js.map +1 -0
- package/dist/augment-worker.d.ts +2 -0
- package/dist/augment-worker.d.ts.map +1 -0
- package/dist/augment-worker.js +100 -0
- package/dist/augment-worker.js.map +1 -0
- package/dist/browerRoutes.d.ts +2 -0
- package/dist/browerRoutes.d.ts.map +1 -0
- package/dist/browerRoutes.js +323 -0
- package/dist/browerRoutes.js.map +1 -0
- package/dist/browser-utils/utils.d.ts +6 -0
- package/dist/browser-utils/utils.d.ts.map +1 -0
- package/dist/browser-utils/utils.js +133 -0
- package/dist/browser-utils/utils.js.map +1 -0
- package/dist/capture_training_data_endpoints.d.ts +158 -0
- package/dist/capture_training_data_endpoints.d.ts.map +1 -0
- package/dist/capture_training_data_endpoints.js +1812 -0
- package/dist/capture_training_data_endpoints.js.map +1 -0
- package/dist/config.json +28 -0
- package/dist/configEndpoints.d.ts +2 -0
- package/dist/configEndpoints.d.ts.map +1 -0
- package/dist/configEndpoints.js +459 -0
- package/dist/configEndpoints.js.map +1 -0
- package/dist/constants.d.ts +109 -0
- package/dist/constants.d.ts.map +1 -0
- package/dist/constants.js +110 -0
- package/dist/constants.js.map +1 -0
- package/dist/docs/workflow_nodes.md +257 -0
- package/dist/download.d.ts +11 -0
- package/dist/download.d.ts.map +1 -0
- package/dist/download.js +31 -0
- package/dist/download.js.map +1 -0
- package/dist/download.py +61 -0
- package/dist/ecosystem.config.json +63 -0
- package/dist/email-body-extractor.d.ts +20 -0
- package/dist/email-body-extractor.d.ts.map +1 -0
- package/dist/email-body-extractor.js +103 -0
- package/dist/email-body-extractor.js.map +1 -0
- package/dist/express_util.d.ts +2 -0
- package/dist/express_util.d.ts.map +1 -0
- package/dist/express_util.js +30 -0
- package/dist/express_util.js.map +1 -0
- package/dist/extension/background.d.ts +2 -0
- package/dist/extension/background.d.ts.map +1 -0
- package/dist/extension/background.js +268 -0
- package/dist/extension/background.js.map +1 -0
- package/dist/extension/manifest.json +19 -0
- package/dist/extensionUtils.d.ts +2 -0
- package/dist/extensionUtils.d.ts.map +1 -0
- package/dist/extensionUtils.js +48 -0
- package/dist/extensionUtils.js.map +1 -0
- package/dist/filter-gmail-poller/README.md +320 -0
- package/dist/filter-gmail-poller/demo.d.ts +2 -0
- package/dist/filter-gmail-poller/demo.d.ts.map +1 -0
- package/dist/filter-gmail-poller/demo.js +79 -0
- package/dist/filter-gmail-poller/demo.js.map +1 -0
- package/dist/filter-gmail-poller/example-existing-app.d.ts +2 -0
- package/dist/filter-gmail-poller/example-existing-app.d.ts.map +1 -0
- package/dist/filter-gmail-poller/example-existing-app.js +72 -0
- package/dist/filter-gmail-poller/example-existing-app.js.map +1 -0
- package/dist/filter-gmail-poller/filter-gmail-poller.d.ts +160 -0
- package/dist/filter-gmail-poller/filter-gmail-poller.d.ts.map +1 -0
- package/dist/filter-gmail-poller/filter-gmail-poller.js +1048 -0
- package/dist/filter-gmail-poller/filter-gmail-poller.js.map +1 -0
- package/dist/filter-gmail-poller/index.d.ts +3 -0
- package/dist/filter-gmail-poller/index.d.ts.map +1 -0
- package/dist/filter-gmail-poller/index.js +18 -0
- package/dist/filter-gmail-poller/index.js.map +1 -0
- package/dist/filter-gmail-poller/manual-test.d.ts +2 -0
- package/dist/filter-gmail-poller/manual-test.d.ts.map +1 -0
- package/dist/filter-gmail-poller/manual-test.js +70 -0
- package/dist/filter-gmail-poller/manual-test.js.map +1 -0
- package/dist/filter-gmail-poller/poller-prompts.d.ts +12 -0
- package/dist/filter-gmail-poller/poller-prompts.d.ts.map +1 -0
- package/dist/filter-gmail-poller/poller-prompts.js +330 -0
- package/dist/filter-gmail-poller/poller-prompts.js.map +1 -0
- package/dist/filter-gmail-poller/test.js +69 -0
- package/dist/flowframe-auto-firebase-adminsdk.json +13 -0
- package/dist/gmail-poller/README-microsoft-email-poller.md +203 -0
- package/dist/gmail-poller/README.md +129 -0
- package/dist/gmail-poller/example.d.ts +5 -0
- package/dist/gmail-poller/example.d.ts.map +1 -0
- package/dist/gmail-poller/example.js +83 -0
- package/dist/gmail-poller/example.js.map +1 -0
- package/dist/gmail-poller/gmail-poller.d.ts +82 -0
- package/dist/gmail-poller/gmail-poller.d.ts.map +1 -0
- package/dist/gmail-poller/gmail-poller.js +455 -0
- package/dist/gmail-poller/gmail-poller.js.map +1 -0
- package/dist/gmail-poller/manual-test.d.ts +2 -0
- package/dist/gmail-poller/manual-test.d.ts.map +1 -0
- package/dist/gmail-poller/manual-test.js +37 -0
- package/dist/gmail-poller/manual-test.js.map +1 -0
- package/dist/gmail-poller/microsoft-email-example.d.ts +8 -0
- package/dist/gmail-poller/microsoft-email-example.d.ts.map +1 -0
- package/dist/gmail-poller/microsoft-email-example.js +58 -0
- package/dist/gmail-poller/microsoft-email-example.js.map +1 -0
- package/dist/gmail-poller/microsoft-email-poller.d.ts +73 -0
- package/dist/gmail-poller/microsoft-email-poller.d.ts.map +1 -0
- package/dist/gmail-poller/microsoft-email-poller.js +346 -0
- package/dist/gmail-poller/microsoft-email-poller.js.map +1 -0
- package/dist/gmail-poller/setup-auth.d.ts +3 -0
- package/dist/gmail-poller/setup-auth.d.ts.map +1 -0
- package/dist/gmail-poller/setup-auth.js +36 -0
- package/dist/gmail-poller/setup-auth.js.map +1 -0
- package/dist/gmail-poller/test.js +36 -0
- package/dist/index.d.ts +10 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +28 -0
- package/dist/index.js.map +1 -0
- package/dist/inference/augment_levels.d.ts +2 -0
- package/dist/inference/augment_levels.d.ts.map +1 -0
- package/dist/inference/augment_levels.js +1 -0
- package/dist/inference/augment_levels.js.map +1 -0
- package/dist/inference/capture-overlay.d.ts +13 -0
- package/dist/inference/capture-overlay.d.ts.map +1 -0
- package/dist/inference/capture-overlay.js +355 -0
- package/dist/inference/capture-overlay.js.map +1 -0
- package/dist/inference/capturescreenshot.d.ts +12 -0
- package/dist/inference/capturescreenshot.d.ts.map +1 -0
- package/dist/inference/capturescreenshot.js +157 -0
- package/dist/inference/capturescreenshot.js.map +1 -0
- package/dist/jsonHandler.d.ts +37 -0
- package/dist/jsonHandler.d.ts.map +1 -0
- package/dist/jsonHandler.js +191 -0
- package/dist/jsonHandler.js.map +1 -0
- package/dist/localStorage.json +11 -0
- package/dist/media_data_endpoints.d.ts +2 -0
- package/dist/media_data_endpoints.d.ts.map +1 -0
- package/dist/media_data_endpoints.js +102 -0
- package/dist/media_data_endpoints.js.map +1 -0
- package/dist/operations/blender-ops.d.ts +4 -0
- package/dist/operations/blender-ops.d.ts.map +1 -0
- package/dist/operations/blender-ops.js +55 -0
- package/dist/operations/blender-ops.js.map +1 -0
- package/dist/operations.d.ts +34 -0
- package/dist/operations.d.ts.map +1 -0
- package/dist/operations.js +1514 -0
- package/dist/operations.js.map +1 -0
- package/dist/pdfRoutes.d.ts +2 -0
- package/dist/pdfRoutes.d.ts.map +1 -0
- package/dist/pdfRoutes.js +56 -0
- package/dist/pdfRoutes.js.map +1 -0
- package/dist/peers.d.ts +9 -0
- package/dist/peers.d.ts.map +1 -0
- package/dist/peers.js +70 -0
- package/dist/peers.js.map +1 -0
- package/dist/playparser.d.ts +2 -0
- package/dist/playparser.d.ts.map +1 -0
- package/dist/playparser.js +281 -0
- package/dist/playparser.js.map +1 -0
- package/dist/process.d.ts +4 -0
- package/dist/process.d.ts.map +1 -0
- package/dist/process.js +375 -0
- package/dist/process.js.map +1 -0
- package/dist/promptRoutes.d.ts +7 -0
- package/dist/promptRoutes.d.ts.map +1 -0
- package/dist/promptRoutes.js +68 -0
- package/dist/promptRoutes.js.map +1 -0
- package/dist/queueManager.d.ts +23 -0
- package/dist/queueManager.d.ts.map +1 -0
- package/dist/queueManager.js +96 -0
- package/dist/queueManager.js.map +1 -0
- package/dist/run-flow.d.ts +8 -0
- package/dist/run-flow.d.ts.map +1 -0
- package/dist/run-flow.js +220 -0
- package/dist/run-flow.js.map +1 -0
- package/dist/scraper.d.ts +2 -0
- package/dist/scraper.d.ts.map +1 -0
- package/dist/scraper.js +75 -0
- package/dist/scraper.js.map +1 -0
- package/dist/scraper_endpoints.d.ts +2 -0
- package/dist/scraper_endpoints.d.ts.map +1 -0
- package/dist/scraper_endpoints.js +40 -0
- package/dist/scraper_endpoints.js.map +1 -0
- package/dist/server.d.ts +2 -0
- package/dist/server.d.ts.map +1 -0
- package/dist/server.js +528 -0
- package/dist/server.js.map +1 -0
- package/dist/services/ModelContext.d.ts +7 -0
- package/dist/services/ModelContext.d.ts.map +1 -0
- package/dist/services/ModelContext.js +7 -0
- package/dist/services/ModelContext.js.map +1 -0
- package/dist/services/agenticUiPlanner.d.ts +27 -0
- package/dist/services/agenticUiPlanner.d.ts.map +1 -0
- package/dist/services/agenticUiPlanner.js +161 -0
- package/dist/services/agenticUiPlanner.js.map +1 -0
- package/dist/services/apiKeyService.d.ts +3 -0
- package/dist/services/apiKeyService.d.ts.map +1 -0
- package/dist/services/apiKeyService.js +7 -0
- package/dist/services/apiKeyService.js.map +1 -0
- package/dist/services/audioService.d.ts +10 -0
- package/dist/services/audioService.d.ts.map +1 -0
- package/dist/services/audioService.js +140 -0
- package/dist/services/audioService.js.map +1 -0
- package/dist/services/autoPromptOptimizer.d.ts +44 -0
- package/dist/services/autoPromptOptimizer.d.ts.map +1 -0
- package/dist/services/autoPromptOptimizer.js +344 -0
- package/dist/services/autoPromptOptimizer.js.map +1 -0
- package/dist/services/autoPromptOptimizer.manual-test.d.ts +2 -0
- package/dist/services/autoPromptOptimizer.manual-test.d.ts.map +1 -0
- package/dist/services/autoPromptOptimizer.manual-test.js +27 -0
- package/dist/services/autoPromptOptimizer.manual-test.js.map +1 -0
- package/dist/services/chainExecutor.d.ts +26 -0
- package/dist/services/chainExecutor.d.ts.map +1 -0
- package/dist/services/chainExecutor.js +399 -0
- package/dist/services/chainExecutor.js.map +1 -0
- package/dist/services/classifyImageQuestion.d.ts +55 -0
- package/dist/services/classifyImageQuestion.d.ts.map +1 -0
- package/dist/services/classifyImageQuestion.js +428 -0
- package/dist/services/classifyImageQuestion.js.map +1 -0
- package/dist/services/configuration/executor.d.ts +3 -0
- package/dist/services/configuration/executor.d.ts.map +1 -0
- package/dist/services/configuration/executor.js +795 -0
- package/dist/services/configuration/executor.js.map +1 -0
- package/dist/services/error.d.ts +13 -0
- package/dist/services/error.d.ts.map +1 -0
- package/dist/services/error.js +34 -0
- package/dist/services/error.js.map +1 -0
- package/dist/services/executor.d.ts +11 -0
- package/dist/services/executor.d.ts.map +1 -0
- package/dist/services/executor.js +1587 -0
- package/dist/services/executor.js.map +1 -0
- package/dist/services/extractPdf.d.ts +26 -0
- package/dist/services/extractPdf.d.ts.map +1 -0
- package/dist/services/extractPdf.js +256 -0
- package/dist/services/extractPdf.js.map +1 -0
- package/dist/services/generateJsTransformFromPrompt.d.ts +11 -0
- package/dist/services/generateJsTransformFromPrompt.d.ts.map +1 -0
- package/dist/services/generateJsTransformFromPrompt.js +328 -0
- package/dist/services/generateJsTransformFromPrompt.js.map +1 -0
- package/dist/services/localizeFirebaseMedia.d.ts +20 -0
- package/dist/services/localizeFirebaseMedia.d.ts.map +1 -0
- package/dist/services/localizeFirebaseMedia.js +135 -0
- package/dist/services/localizeFirebaseMedia.js.map +1 -0
- package/dist/services/polyfill_canvas.d.ts +2 -0
- package/dist/services/polyfill_canvas.d.ts.map +1 -0
- package/dist/services/polyfill_canvas.js +19 -0
- package/dist/services/polyfill_canvas.js.map +1 -0
- package/dist/services/promptRoutes.d.ts +7 -0
- package/dist/services/promptRoutes.d.ts.map +1 -0
- package/dist/services/promptRoutes.js +70 -0
- package/dist/services/promptRoutes.js.map +1 -0
- package/dist/services/runPrompt.d.ts +29 -0
- package/dist/services/runPrompt.d.ts.map +1 -0
- package/dist/services/runPrompt.js +232 -0
- package/dist/services/runPrompt.js.map +1 -0
- package/dist/services/schemaInference.d.ts +2 -0
- package/dist/services/schemaInference.d.ts.map +1 -0
- package/dist/services/schemaInference.js +17 -0
- package/dist/services/schemaInference.js.map +1 -0
- package/dist/services/self-learning/api.d.ts +2 -0
- package/dist/services/self-learning/api.d.ts.map +1 -0
- package/dist/services/self-learning/api.js +84 -0
- package/dist/services/self-learning/api.js.map +1 -0
- package/dist/services/self-learning/autolearn.d.ts +23 -0
- package/dist/services/self-learning/autolearn.d.ts.map +1 -0
- package/dist/services/self-learning/autolearn.js +308 -0
- package/dist/services/self-learning/autolearn.js.map +1 -0
- package/dist/services/self-learning/discover.d.ts +11 -0
- package/dist/services/self-learning/discover.d.ts.map +1 -0
- package/dist/services/self-learning/discover.js +446 -0
- package/dist/services/self-learning/discover.js.map +1 -0
- package/dist/services/self-learning/image.d.ts +10 -0
- package/dist/services/self-learning/image.d.ts.map +1 -0
- package/dist/services/self-learning/image.js +38 -0
- package/dist/services/self-learning/image.js.map +1 -0
- package/dist/services/self-learning/injest.d.ts +25 -0
- package/dist/services/self-learning/injest.d.ts.map +1 -0
- package/dist/services/self-learning/injest.js +110 -0
- package/dist/services/self-learning/injest.js.map +1 -0
- package/dist/services/self-learning/learn.d.ts +2 -0
- package/dist/services/self-learning/learn.d.ts.map +1 -0
- package/dist/services/self-learning/learn.js +145 -0
- package/dist/services/self-learning/learn.js.map +1 -0
- package/dist/services/self-learning/matcher.d.ts +2 -0
- package/dist/services/self-learning/matcher.d.ts.map +1 -0
- package/dist/services/self-learning/matcher.js +38 -0
- package/dist/services/self-learning/matcher.js.map +1 -0
- package/dist/services/self-learning/openai.d.ts +8 -0
- package/dist/services/self-learning/openai.d.ts.map +1 -0
- package/dist/services/self-learning/openai.js +97 -0
- package/dist/services/self-learning/openai.js.map +1 -0
- package/dist/services/self-learning/phash.d.ts +5 -0
- package/dist/services/self-learning/phash.d.ts.map +1 -0
- package/dist/services/self-learning/phash.js +68 -0
- package/dist/services/self-learning/phash.js.map +1 -0
- package/dist/services/self-learning/recognize.d.ts +17 -0
- package/dist/services/self-learning/recognize.d.ts.map +1 -0
- package/dist/services/self-learning/recognize.js +116 -0
- package/dist/services/self-learning/recognize.js.map +1 -0
- package/dist/services/self-learning/record_transition.d.ts +8 -0
- package/dist/services/self-learning/record_transition.d.ts.map +1 -0
- package/dist/services/self-learning/record_transition.js +20 -0
- package/dist/services/self-learning/record_transition.js.map +1 -0
- package/dist/services/self-learning/registry.d.ts +4 -0
- package/dist/services/self-learning/registry.d.ts.map +1 -0
- package/dist/services/self-learning/registry.js +19 -0
- package/dist/services/self-learning/registry.js.map +1 -0
- package/dist/services/self-learning/schema.d.ts +114 -0
- package/dist/services/self-learning/schema.d.ts.map +1 -0
- package/dist/services/self-learning/schema.js +70 -0
- package/dist/services/self-learning/schema.js.map +1 -0
- package/dist/services/self-learning/schemaStrictify.d.ts +2 -0
- package/dist/services/self-learning/schemaStrictify.d.ts.map +1 -0
- package/dist/services/self-learning/schemaStrictify.js +34 -0
- package/dist/services/self-learning/schemaStrictify.js.map +1 -0
- package/dist/services/self-learning/transition_graph.d.ts +6 -0
- package/dist/services/self-learning/transition_graph.d.ts.map +1 -0
- package/dist/services/self-learning/transition_graph.js +83 -0
- package/dist/services/self-learning/transition_graph.js.map +1 -0
- package/dist/services/self-learning/transition_log.d.ts +3 -0
- package/dist/services/self-learning/transition_log.d.ts.map +1 -0
- package/dist/services/self-learning/transition_log.js +42 -0
- package/dist/services/self-learning/transition_log.js.map +1 -0
- package/dist/services/self-learning/util.d.ts +3 -0
- package/dist/services/self-learning/util.d.ts.map +1 -0
- package/dist/services/self-learning/util.js +11 -0
- package/dist/services/self-learning/util.js.map +1 -0
- package/dist/services/stepByStepAiPlanner.d.ts +39 -0
- package/dist/services/stepByStepAiPlanner.d.ts.map +1 -0
- package/dist/services/stepByStepAiPlanner.js +379 -0
- package/dist/services/stepByStepAiPlanner.js.map +1 -0
- package/dist/services/test-genjs.js +39 -0
- package/dist/services/test-genjs.manual-test.d.ts +2 -0
- package/dist/services/test-genjs.manual-test.d.ts.map +1 -0
- package/dist/services/test-genjs.manual-test.js +40 -0
- package/dist/services/test-genjs.manual-test.js.map +1 -0
- package/dist/services/uiMapPathFinder.d.ts +13 -0
- package/dist/services/uiMapPathFinder.d.ts.map +1 -0
- package/dist/services/uiMapPathFinder.js +79 -0
- package/dist/services/uiMapPathFinder.js.map +1 -0
- package/dist/services/uiMapService.d.ts +26 -0
- package/dist/services/uiMapService.d.ts.map +1 -0
- package/dist/services/uiMapService.js +275 -0
- package/dist/services/uiMapService.js.map +1 -0
- package/dist/services/uiPlanner.d.ts +54 -0
- package/dist/services/uiPlanner.d.ts.map +1 -0
- package/dist/services/uiPlanner.js +558 -0
- package/dist/services/uiPlanner.js.map +1 -0
- package/dist/services/utilityFunctions.d.ts +80 -0
- package/dist/services/utilityFunctions.d.ts.map +1 -0
- package/dist/services/utilityFunctions.js +352 -0
- package/dist/services/utilityFunctions.js.map +1 -0
- package/dist/services/variableGenerator.d.ts +39 -0
- package/dist/services/variableGenerator.d.ts.map +1 -0
- package/dist/services/variableGenerator.js +157 -0
- package/dist/services/variableGenerator.js.map +1 -0
- package/dist/services/workflow/build-workflow.d.ts +49 -0
- package/dist/services/workflow/build-workflow.d.ts.map +1 -0
- package/dist/services/workflow/build-workflow.js +119 -0
- package/dist/services/workflow/build-workflow.js.map +1 -0
- package/dist/standardRoutes.d.ts +2 -0
- package/dist/standardRoutes.d.ts.map +1 -0
- package/dist/standardRoutes.js +1495 -0
- package/dist/standardRoutes.js.map +1 -0
- package/dist/stepWorkflowRoutes.d.ts +2 -0
- package/dist/stepWorkflowRoutes.d.ts.map +1 -0
- package/dist/stepWorkflowRoutes.js +1007 -0
- package/dist/stepWorkflowRoutes.js.map +1 -0
- package/dist/storage.d.ts +19 -0
- package/dist/storage.d.ts.map +1 -0
- package/dist/storage.docker.json +61 -0
- package/dist/storage.js +131 -0
- package/dist/storage.js.map +1 -0
- package/dist/storage.json +78 -0
- package/dist/storage_cache/boxes.json +48 -0
- package/dist/storage_cache/suno_state.json +3 -0
- package/dist/suno_download.d.ts +11 -0
- package/dist/suno_download.d.ts.map +1 -0
- package/dist/suno_download.js +33 -0
- package/dist/suno_download.js.map +1 -0
- package/dist/suno_download.py +119 -0
- package/dist/test-web-element-requests.d.ts +6 -0
- package/dist/test-web-element-requests.d.ts.map +1 -0
- package/dist/test-web-element-requests.js +114 -0
- package/dist/test-web-element-requests.js.map +1 -0
- package/dist/test_pdf_render.d.ts +2 -0
- package/dist/test_pdf_render.d.ts.map +1 -0
- package/dist/test_pdf_render.js +50 -0
- package/dist/test_pdf_render.js.map +1 -0
- package/dist/training_data_viewer_endpoints.d.ts +2 -0
- package/dist/training_data_viewer_endpoints.d.ts.map +1 -0
- package/dist/training_data_viewer_endpoints.js +141 -0
- package/dist/training_data_viewer_endpoints.js.map +1 -0
- package/dist/utils.d.ts +353 -0
- package/dist/utils.d.ts.map +1 -0
- package/dist/utils.js +1517 -0
- package/dist/utils.js.map +1 -0
- package/dist/vm-h100.env.template +55 -0
- package/dist/web-element-requests.d.ts +102 -0
- package/dist/web-element-requests.d.ts.map +1 -0
- package/dist/web-element-requests.js +278 -0
- package/dist/web-element-requests.js.map +1 -0
- package/dist/workflowRoutes.d.ts +2 -0
- package/dist/workflowRoutes.d.ts.map +1 -0
- package/dist/workflowRoutes.js +441 -0
- package/dist/workflowRoutes.js.map +1 -0
- package/package.json +109 -0
package/dist/capture_training_data_endpoints.js
@@ -0,0 +1,1812 @@
+import fs from 'fs';
+import path from 'path';
+import { console_log_it, downloadFile } from './utils.js';
+import ffmpeg from 'fluent-ffmpeg';
+import sharp from 'sharp';
+import admin from 'firebase-admin';
+import serviceAccount from './flowframe-auto-firebase-adminsdk.json' with { type: "json" };
+admin.initializeApp({
+    credential: admin.credential.cert(serviceAccount),
+    databaseURL: "https://flowframe-auto-default-rtdb.firebaseio.com",
+    storageBucket: "flowframe-auto.firebasestorage.app"
+});
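
The `with { type: "json" }` import attribute used above is only understood by newer Node.js runtimes (roughly v18.20+/v20.10+; earlier releases spelled it `assert { type: "json" }`). On older Node, a manual read is an equivalent; the snippet below is a minimal sketch of that fallback, not part of the package:

```js
// Hypothetical fallback for older Node runtimes (not part of this package):
// load the service-account JSON without import attributes.
import fs from 'fs';
const serviceAccount = JSON.parse(
    fs.readFileSync(new URL('./flowframe-auto-firebase-adminsdk.json', import.meta.url), 'utf8')
);
```
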
+/**
+ * Recursively list all files under a root directory.
+ * @param {string} rootDir - The starting directory.
+ * @param {{ followSymlinks?: boolean }} [opts]
+ * @returns {Promise<string[]>} Absolute file paths.
+ */
+export async function listFiles(rootDir, opts = {}) {
+    const { followSymlinks = false } = opts;
+    const start = path.resolve(rootDir);
+    const out = [];
+    const { readdir, stat } = fs.promises;
+    async function walk(dir) {
+        let entries;
+        try {
+            entries = await readdir(dir, { withFileTypes: true });
+        }
+        catch {
+            return; // unreadable dir — skip
+        }
+        await Promise.all(entries.map(async (ent) => {
+            const full = path.join(dir, ent.name);
+            if (ent.isDirectory()) {
+                await walk(full);
+            }
+            else if (ent.isFile()) {
+                out.push(full);
+            }
+            else if (followSymlinks && ent.isSymbolicLink()) {
+                try {
+                    const s = await stat(full);
+                    if (s.isDirectory())
+                        await walk(full);
+                    else if (s.isFile())
+                        out.push(full);
+                }
+                catch {
+                    // broken/unreadable symlink — skip
+                }
+            }
+        }));
+    }
+    await walk(start);
+    return out;
+}
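
`listFiles` is the only exported helper in the portion shown. A minimal usage sketch (the directory path is illustrative):

```js
// List everything under a training root, following symlinked runs as well.
import { listFiles } from './capture_training_data_endpoints.js';

const files = await listFiles('/data/training', { followSymlinks: true });
console.log(`${files.length} files found`);
```
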
+/**
+ * Cleans up training directories by keeping only level_#/runs/exp folders and deleting other paths
+ * @param {string} rootPath - Root directory to scan for level folders
+ * @param {Object} options - Cleanup options
+ * @param {boolean} options.dryRun - If true, only logs what would be deleted without actually deleting (default: false)
+ * @param {Function} options.onProgress - Progress callback function
+ * @param {Array<string>} options.preservePaths - Additional paths to preserve (relative to level folder)
+ * @returns {Promise<Object>} Cleanup result with statistics
+ */
+async function cleanupTrainingDirectories(rootPath, options = {}) {
+    const {
+        dryRun = false,
+        onProgress = null,
+        preservePaths = ['runs/exp'] // Always preserve runs/exp
+    } = options;
+    console_log_it(`Starting cleanup of training directories in: ${rootPath}`, 'cleanup_training_directories');
+    if (!fs.existsSync(rootPath)) {
+        throw new Error(`Root path does not exist: ${rootPath}`);
+    }
+    const stats = {
+        levelFoldersFound: 0,
+        pathsPreserved: 0,
+        pathsDeleted: 0,
+        spaceFreed: 0,
+        errors: []
+    };
+    try {
+        // Find all level_# folders
+        const files = await listFiles(rootPath);
+        for (let i = 0; i < files.length; i++) {
+            const file = files[i];
+            if (file.includes('runs/exp') || file.includes('annotations.json'))
+                continue;
+            // Process each file as needed
+            // delete the files.
+            if (!dryRun) {
+                fs.unlinkSync(file);
+            }
+            stats.pathsDeleted++;
+            if (onProgress) {
+                onProgress({
+                    type: 'start',
+                    levelFoldersFound: stats.levelFoldersFound,
+                    dryRun
+                });
+            }
+        }
+        if (onProgress) {
+            onProgress({
+                type: 'complete',
+                result: {
+                    success: true,
+                    dryRun,
+                    rootPath,
+                    stats: {
+                        ...stats,
+                        spaceFreedMB: (stats.spaceFreed / 1024 / 1024).toFixed(2),
+                        spaceFreedGB: (stats.spaceFreed / 1024 / 1024 / 1024).toFixed(2)
+                    },
+                    message: `Cleanup ${dryRun ? 'simulation' : 'completed'}. Preserved: ${stats.pathsPreserved}, Deleted: ${stats.pathsDeleted}, Freed: ${(stats.spaceFreed / 1024 / 1024).toFixed(2)} MB`
+                }
+            });
+        }
+        return true;
+    }
+    catch (error) {
+        console_log_it(`Cleanup failed: ${error.message}`, 'cleanup_training_directories');
+        throw error;
+    }
+}
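
As compiled, this function deletes every file under `rootPath` except those whose path contains `runs/exp` or `annotations.json`; the `preservePaths` option is accepted but the recursive preserve logic lives in `cleanupDirectoryRecursive` below. Statistics arrive through the `onProgress` callback's `complete` event, and the function itself resolves to `true`. A usage sketch, assuming same-module scope since the helper is not exported in the code shown:

```js
// Dry-run first: nothing is unlinked, but the 'complete' event still
// reports what would have been deleted.
await cleanupTrainingDirectories('/data/levels', {
    dryRun: true,
    onProgress: (event) => {
        if (event.type === 'complete') console.log(event.result.message);
    },
});
```
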
+/**
+ * Recursively processes subdirectories to preserve only specified paths
+ */
+async function cleanupDirectoryRecursive(dirPath, preservePaths, currentRelativePath, stats, dryRun) {
+    try {
+        const entries = fs.readdirSync(dirPath, { withFileTypes: true });
+        for (const entry of entries) {
+            const entryPath = path.join(dirPath, entry.name);
+            const newRelativePath = path.join(currentRelativePath, entry.name).replace(/\\/g, '/');
+            // Check if this path should be preserved
+            const shouldPreserve = preservePaths.some(preservePath => {
+                return preservePath.startsWith(newRelativePath) || newRelativePath === preservePath;
+            });
+            if (shouldPreserve) {
+                stats.pathsPreserved++;
+                // If this is a directory that's part of a preserve path, continue recursively
+                if (entry.isDirectory()) {
+                    const matchingPreservePaths = preservePaths.filter(p => p.startsWith(newRelativePath + '/'));
+                    if (matchingPreservePaths.length > 0) {
+                        await cleanupDirectoryRecursive(entryPath, matchingPreservePaths, newRelativePath, stats, dryRun);
+                    }
+                }
+            }
+            else {
+                // Calculate size and delete
+                let size = 0;
+                try {
+                    if (entry.isDirectory()) {
+                        size = await getDirectorySize(entryPath);
+                    }
+                    else {
+                        const stat = fs.statSync(entryPath);
+                        size = stat.size;
+                    }
+                }
+                catch (sizeError) {
+                    console_log_it(`Warning: Could not calculate size of ${entryPath}`, 'cleanup_training_directories');
+                }
+                if (!dryRun) {
+                    try {
+                        if (entry.isDirectory()) {
+                            fs.rmSync(entryPath, { recursive: true, force: true });
+                        }
+                        else {
+                            fs.unlinkSync(entryPath);
+                        }
+                    }
+                    catch (deleteError) {
+                        stats.errors.push({
+                            path: entryPath,
+                            error: deleteError.message
+                        });
+                        continue;
+                    }
+                }
+                stats.pathsDeleted++;
+                stats.spaceFreed += size;
+            }
+        }
+    }
+    catch (error) {
+        stats.errors.push({
+            path: dirPath,
+            error: error.message
+        });
+    }
+}
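
The preserve predicate keeps an entry when its relative path equals a preserve path or is a prefix of one, so the walk can descend toward the protected leaf. Note that the plain `startsWith` test also matches partial segment names (a sibling folder named `run` would match `runs/exp`), which may or may not be intended. A worked trace with the default `['runs/exp']`:

```js
// How the preserve check behaves with preservePaths = ['runs/exp'].
const preservePaths = ['runs/exp'];
const kept = (p) => preservePaths.some(pp => pp.startsWith(p) || p === pp);
console.log(kept('runs'));     // true  - ancestor of the preserve path, recursed into
console.log(kept('runs/exp')); // true  - the preserve path itself, kept with its contents
console.log(kept('weights'));  // false - deleted (or only counted in dry-run mode)
```
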
+/**
+ * Calculates the total size of a directory recursively
+ */
+async function getDirectorySize(dirPath) {
+    let totalSize = 0;
+    try {
+        const entries = fs.readdirSync(dirPath, { withFileTypes: true });
+        for (const entry of entries) {
+            const entryPath = path.join(dirPath, entry.name);
+            try {
+                if (entry.isDirectory()) {
+                    totalSize += await getDirectorySize(entryPath);
+                }
+                else {
+                    const stat = fs.statSync(entryPath);
+                    totalSize += stat.size;
+                }
+            }
+            catch (error) {
+                // Skip files that can't be accessed
+                continue;
+            }
+        }
+    }
+    catch (error) {
+        // Return 0 if directory can't be read
+        return 0;
+    }
+    return totalSize;
+}
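
A small usage sketch (the helper is module-internal, so same-module scope is assumed; the path is illustrative):

```js
// Report a level folder's footprint in MB before deciding to clean it up.
const bytes = await getDirectorySize('/data/levels/level_1');
console.log(`${(bytes / 1024 / 1024).toFixed(2)} MB`);
```
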
+// Database connection management
+let databaseConnectionActive = true;
+let reconnectPromise = null;
+/**
+ * Disconnects from Firebase Realtime Database to save resources during long operations
+ * @returns {Promise<void>}
+ */
+async function disconnectFromDatabase() {
+    if (!databaseConnectionActive) {
+        console.log('Database already disconnected', 'database_management');
+        return;
+    }
+    try {
+        console.log('Disconnecting from Firebase Realtime Database...', 'database_management');
+        await admin.database().goOffline();
+        databaseConnectionActive = false;
+        console.log('Successfully disconnected from Firebase Realtime Database', 'database_management');
+    }
+    catch (error) {
+        console.error('Error disconnecting from database:', error.message, 'database_management');
+        throw error;
+    }
+}
+/**
+ * Reconnects to Firebase Realtime Database after long operations
+ * @returns {Promise<void>}
+ */
+async function reconnectToDatabase() {
+    if (databaseConnectionActive) {
+        console.log('Database already connected', 'database_management');
+        return;
+    }
+    // Prevent multiple reconnection attempts
+    if (reconnectPromise) {
+        console.log('Reconnection already in progress, waiting...', 'database_management');
+        return await reconnectPromise;
+    }
+    reconnectPromise = (async () => {
+        try {
+            console.log('Reconnecting to Firebase Realtime Database...', 'database_management');
+            await admin.database().goOnline();
+            databaseConnectionActive = true;
+            console.log('Successfully reconnected to Firebase Realtime Database', 'database_management');
+        }
+        catch (error) {
+            console.error('Error reconnecting to database:', error.message, 'database_management');
+            throw error;
+        }
+        finally {
+            reconnectPromise = null;
+        }
+    })();
+    return await reconnectPromise;
+}
+/**
+ * Executes a function with database disconnected, then reconnects
+ * @param {Function} asyncOperation - The async operation to execute while disconnected
+ * @param {Object} options - Options for the operation
+ * @param {boolean} options.autoReconnect - Whether to automatically reconnect after operation (default: true)
+ * @returns {Promise<any>} Result of the operation
+ */
+async function executeWithDatabaseDisconnected(asyncOperation, options = {}) {
+    const { autoReconnect = true } = options;
+    const wasConnected = databaseConnectionActive;
+    try {
+        if (wasConnected) {
+            await disconnectFromDatabase();
+        }
+        console.log('Executing operation with database disconnected...', 'database_management');
+        const result = await asyncOperation();
+        console.log('Operation completed successfully', 'database_management');
+        return result;
+    }
+    catch (error) {
+        console.error('Operation failed while database disconnected:', error.message, 'database_management');
+        throw error;
+    }
+    finally {
+        if (wasConnected && autoReconnect) {
+            try {
+                await reconnectToDatabase();
+            }
+            catch (reconnectError) {
+                console.error('Failed to reconnect to database after operation:', reconnectError.message, 'database_management');
+                // Don't throw here as the main operation might have succeeded
+            }
+        }
+    }
+}
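
A sketch of the intended pattern, assuming same-module scope; `heavyLocalProcessing` is a hypothetical stand-in for any long-running task that needs no Realtime Database access:

```js
// Hypothetical long-running local task (e.g. ffmpeg or training work).
async function heavyLocalProcessing() {
    await new Promise((resolve) => setTimeout(resolve, 60_000)); // simulate a minute of work
    return 'done';
}

const result = await executeWithDatabaseDisconnected(heavyLocalProcessing, {
    autoReconnect: true, // reconnect in the finally block even if the task throws
});
```
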
+/**
+ * Safe database operation that handles connection state
+ * @param {Function} dbOperation - Database operation function
+ * @param {Object} options - Options for the operation
+ * @param {number} options.maxRetries - Maximum number of retry attempts (default: 3)
+ * @param {number} options.retryDelay - Delay between retries in ms (default: 1000)
+ * @returns {Promise<any>} Result of the database operation
+ */
+async function safeDatabaseOperation(dbOperation, options = {}) {
+    const { maxRetries = 3, retryDelay = 1000 } = options;
+    for (let attempt = 1; attempt <= maxRetries; attempt++) {
+        try {
+            // Ensure we're connected before attempting the operation
+            if (!databaseConnectionActive) {
+                await reconnectToDatabase();
+            }
+            return await dbOperation();
+        }
+        catch (error) {
+            console.error(`Database operation attempt ${attempt} failed:`, error.message, 'database_management');
+            if (attempt === maxRetries) {
+                throw error;
+            }
+            // If it's a connection error, try to reconnect
+            if (error.message.includes('offline') || error.message.includes('connection')) {
+                console.log('Connection error detected, attempting to reconnect...', 'database_management');
+                databaseConnectionActive = false;
+                try {
+                    await reconnectToDatabase();
+                }
+                catch (reconnectError) {
+                    console.error('Reconnection failed:', reconnectError.message, 'database_management');
+                }
+            }
+            // Wait before retrying
+            await new Promise(resolve => setTimeout(resolve, retryDelay * attempt));
+        }
+    }
+}
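
Note the retry delay grows linearly (`retryDelay * attempt`, i.e. 1 s then 2 s with the defaults), and only error messages containing "offline" or "connection" trigger an explicit reconnect. A usage sketch (same-module scope assumed; the database path is illustrative):

```js
// Retry a Realtime Database write through the connection-aware wrapper.
await safeDatabaseOperation(
    () => admin.database().ref('jobs/job-123/status').set('running'),
    { maxRetries: 3, retryDelay: 1000 }
);
```
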
+// Progress throttling system to prevent Firestore contention
+const progressThrottlers = new Map();
+/**
+ * Creates a throttled progress updater to prevent database contention
+ * @param {string} subJobId - The subjob ID
+ * @param {Function} updateSubJobFn - The update function to use
+ * @param {number} throttleMs - Minimum milliseconds between updates (default: 2000)
+ * @returns {Function} Throttled update function
+ */
+function createThrottledProgressUpdater(subJobId, updateSubJobFn, throttleMs = 2000) {
+    if (progressThrottlers.has(subJobId)) {
+        return progressThrottlers.get(subJobId);
+    }
+    let lastUpdate = 0;
+    let lastPercent = -1;
+    let pendingUpdate = null;
+    let isUpdating = false;
+    const throttledUpdater = async (updateData) => {
+        const now = Date.now();
+        const currentPercent = Math.floor(updateData.percent || 0);
+        // Always update if it's a significant change (5% or more) or enough time has passed
+        const significantChange = Math.abs(currentPercent - lastPercent) >= 5;
+        const enoughTimePassed = (now - lastUpdate) >= throttleMs;
+        if (significantChange || enoughTimePassed || updateData.status || updateData.message) {
+            // Cancel any pending update since we're doing one now
+            if (pendingUpdate) {
+                clearTimeout(pendingUpdate);
+                pendingUpdate = null;
+            }
+            if (!isUpdating) {
+                isUpdating = true;
+                try {
+                    await safeDatabaseOperation(async () => {
+                        await updateSubJobFn(subJobId, updateData);
+                    });
+                    lastUpdate = now;
+                    lastPercent = currentPercent;
+                }
+                catch (error) {
+                    console.error(`Failed to update progress for subjob ${subJobId}:`, error.message);
+                }
+                finally {
+                    isUpdating = false;
+                }
+            }
+        }
+        else {
+            // Schedule a delayed update for the latest data
+            if (pendingUpdate) {
+                clearTimeout(pendingUpdate);
+            }
+            pendingUpdate = setTimeout(async () => {
+                if (!isUpdating) {
+                    isUpdating = true;
+                    try {
+                        await safeDatabaseOperation(async () => {
+                            await updateSubJobFn(subJobId, updateData);
+                        });
+                        lastUpdate = Date.now();
+                        lastPercent = Math.floor(updateData.percent || 0);
+                    }
+                    catch (error) {
+                        console.error(`Failed to update delayed progress for subjob ${subJobId}:`, error.message);
+                    }
+                    finally {
+                        isUpdating = false;
+                    }
+                }
+                pendingUpdate = null;
+            }, throttleMs);
+        }
+    };
+    // Cleanup function
+    throttledUpdater.cleanup = () => {
+        if (pendingUpdate) {
+            clearTimeout(pendingUpdate);
+            pendingUpdate = null;
+        }
+        progressThrottlers.delete(subJobId);
+    };
+    progressThrottlers.set(subJobId, throttledUpdater);
+    return throttledUpdater;
+}
+/**
+ * Cleans up throttled progress updater for a subjob
+ * @param {string} subJobId - The subjob ID
+ */
+function cleanupProgressThrottler(subJobId) {
+    const throttler = progressThrottlers.get(subJobId);
+    if (throttler && throttler.cleanup) {
+        throttler.cleanup();
+    }
+}
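
Together these give a per-subjob throttle: writes go through only on a change of 5% or more, after the throttle window elapses, or when a status/message is attached, and intermediate values are coalesced into a single trailing update. A sketch (same-module scope assumed; `updateSubJob` is a hypothetical persistence function):

```js
// Hypothetical persistence function for subjob progress.
const updateSubJob = (id, data) => admin.database().ref(`subjobs/${id}`).update(data);

const report = createThrottledProgressUpdater('subjob-42', updateSubJob, 2000);
for (let percent = 0; percent <= 100; percent++) {
    await report({ percent }); // most calls are coalesced; only ~every 5% hits the database
}
cleanupProgressThrottler('subjob-42'); // clear any pending trailing update when done
```
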
+// Get Firebase Storage bucket
+const bucketContext = {};
+function getBucket() {
+    if (!bucketContext.bucket) {
+        bucketContext.bucket = admin.storage().bucket();
+    }
+    return bucketContext.bucket;
+}
+/**
+ * Tests if the Firebase Storage bucket exists and is accessible
+ * @param {string} bucketName - Optional bucket name to test (uses default if not provided)
+ * @returns {Promise<Object>} Test result with bucket information
+ */
+async function testStorageBucketExists(bucketName = null) {
+    try {
+        console.log('Testing storage bucket existence and accessibility', 'test_storage_bucket');
+        let bucket;
+        let testBucketName;
+        if (bucketName) {
+            // Test specific bucket
+            bucket = admin.storage().bucket(bucketName);
+            testBucketName = bucketName;
+        }
+        else {
+            // Test default bucket
+            bucket = getBucket();
+            testBucketName = bucket.name;
+        }
+        console.log(`Testing bucket: ${testBucketName}`, 'test_storage_bucket');
+        // Test 1: Check if bucket exists
+        const [exists] = await bucket.exists();
+        if (!exists) {
+            console.log(`Bucket does not exist: ${testBucketName}`, 'test_storage_bucket');
+            return {
+                exists: false,
+                accessible: false,
+                bucketName: testBucketName,
+                error: 'Bucket does not exist',
+                tests: {
+                    existence: false,
+                    metadata: false,
+                    listFiles: false,
+                    writeAccess: false
+                }
+            };
+        }
+        console.log(`Bucket exists: ${testBucketName}`, 'test_storage_bucket');
+        // Test 2: Get bucket metadata
+        let metadata = null;
+        let metadataAccessible = false;
+        try {
+            [metadata] = await bucket.getMetadata();
+            metadataAccessible = true;
+            console.log(`Bucket metadata accessible. Location: ${metadata.location}, Storage class: ${metadata.storageClass}`, 'test_storage_bucket');
+        }
+        catch (metadataError) {
+            console.log(`Cannot access bucket metadata: ${metadataError.message}`, 'test_storage_bucket');
+        }
+        // Test 3: Test list files permission (with minimal query)
+        let canListFiles = false;
+        let fileCount = 0;
+        try {
+            const [files] = await bucket.getFiles({ maxResults: 1 });
+            canListFiles = true;
+            // Get actual count with a separate query
+            try {
+                const [allFiles] = await bucket.getFiles({ maxResults: 100 });
+                fileCount = allFiles.length;
+            }
+            catch (countError) {
+                fileCount = 'unknown';
+            }
+            console.log(`Can list files in bucket. Sample files found: ${fileCount}`, 'test_storage_bucket');
+        }
+        catch (listError) {
+            console.log(`Cannot list files in bucket: ${listError.message}`, 'test_storage_bucket');
+        }
+        // Test 4: Test write access by creating a test file
+        let canWrite = false;
+        let writeError = null;
+        const testFileName = `test_access_${Date.now()}.txt`;
+        const testFilePath = `test/${testFileName}`;
+        try {
+            const testFile = bucket.file(testFilePath);
+            await testFile.save('test content for bucket access verification', {
+                metadata: {
+                    contentType: 'text/plain',
+                    metadata: {
+                        purpose: 'bucket_access_test',
+                        createdAt: new Date().toISOString()
+                    }
+                }
+            });
+            // Verify the file was created
+            const [createdExists] = await testFile.exists();
+            if (createdExists) {
+                canWrite = true;
+                console.log(`Write access confirmed. Test file created: ${testFilePath}`, 'test_storage_bucket');
+                // Clean up test file
+                try {
+                    await testFile.delete();
+                    console.log(`Test file cleaned up: ${testFilePath}`, 'test_storage_bucket');
+                }
+                catch (deleteError) {
+                    console.log(`Warning: Could not delete test file ${testFilePath}: ${deleteError.message}`, 'test_storage_bucket');
+                }
+            }
+            else {
+                writeError = 'Test file was not created successfully';
+            }
+        }
+        catch (error) {
+            writeError = error.message;
+            console.log(`Cannot write to bucket: ${error.message}`, 'test_storage_bucket');
+        }
+        // Test 5: Check bucket configuration
+        let bucketConfig = {};
+        try {
+            if (metadata) {
+                bucketConfig = {
+                    location: metadata.location,
+                    storageClass: metadata.storageClass,
+                    timeCreated: metadata.timeCreated,
+                    updated: metadata.updated,
+                    versioning: metadata.versioning?.enabled || false,
+                    cors: metadata.cors ? metadata.cors.length : 0,
+                    lifecycle: metadata.lifecycle ? metadata.lifecycle.rule?.length || 0 : 0
+                };
+            }
+        }
+        catch (configError) {
+            console.log(`Could not retrieve bucket configuration: ${configError.message}`, 'test_storage_bucket');
+        }
+        const isFullyAccessible = exists && metadataAccessible && canListFiles && canWrite;
+        const result = {
+            exists: true,
+            accessible: isFullyAccessible,
+            bucketName: testBucketName,
+            fileCount,
+            configuration: bucketConfig,
+            tests: {
+                existence: exists,
+                metadata: metadataAccessible,
+                listFiles: canListFiles,
+                writeAccess: canWrite
+            },
+            permissions: {
+                read: canListFiles,
+                write: canWrite,
+                admin: metadataAccessible
+            },
+            errors: {
+                write: writeError
+            }
+        };
+        console.log(`Bucket test completed. Fully accessible: ${isFullyAccessible}`, 'test_storage_bucket');
+        console.log(`Test result: ${JSON.stringify(result, null, 2)}`, 'test_storage_bucket');
+        return result;
+    }
+    catch (error) {
+        console.log(`Storage bucket test failed: ${error.message}`, 'test_storage_bucket');
+        return {
+            exists: false,
+            accessible: false,
+            bucketName: bucketName || 'default',
+            error: error.message,
+            tests: {
+                existence: false,
+                metadata: false,
+                listFiles: false,
+                writeAccess: false
+            },
+            permissions: {
+                read: false,
+                write: false,
+                admin: false
+            }
+        };
+    }
+}
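
The probe runs four checks in sequence (existence, metadata, listing, and a write/delete round-trip under `test/`), so it works as a startup health check. A sketch, same-module scope assumed:

```js
// Refuse to start workers without full storage access.
const check = await testStorageBucketExists();
if (!check.accessible) {
    console.error(`Bucket ${check.bucketName} is not fully accessible:`, check.tests);
    process.exit(1);
}
```
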
+/**
+ * Retry downloadFile with exponential backoff and file validation
+ * @param {string} url - URL to download from
+ * @param {string} filePath - Local path to save the file
+ * @param {Object} options - Retry options
+ * @param {number} options.maxAttempts - Maximum number of retry attempts (default: 15)
+ * @param {number} options.initialDelay - Initial delay in milliseconds (default: 10000)
+ * @param {number} options.maxDelay - Maximum delay in milliseconds (default: 300000)
+ * @param {Function} options.onProgress - Progress callback function
+ * @param {boolean} options.validateFile - Whether to validate the downloaded file (default: true)
+ * @returns {Promise<void>} - Resolves when download succeeds
+ */
+async function downloadFileWithRetry(url, filePath, options = {}) {
+    const {
+        maxAttempts = 15,
+        initialDelay = 10000, // 10 seconds
+        maxDelay = 300000, // 5 minutes
+        onProgress = null,
+        validateFile = true
+    } = options;
+    let lastError;
+    for (let attempt = 1; attempt <= maxAttempts; attempt++) {
+        try {
+            if (onProgress) {
+                onProgress(`Download attempt ${attempt}/${maxAttempts}`);
+            }
+            console_log_it(`Download attempt ${attempt}/${maxAttempts} for ${url}`, 'download_file_retry');
+            await downloadFile(url, filePath);
+            // Validate the downloaded file if requested
+            if (validateFile) {
+                try {
+                    console_log_it(`Validating downloaded file: ${filePath}`, 'download_file_retry');
+                    // Check if file exists and has size > 0
+                    const stats = fs.statSync(filePath);
+                    if (stats.size === 0) {
+                        throw new Error('Downloaded file is empty');
+                    }
+                    // Determine file type based on extension and content
+                    const ext = path.extname(filePath).toLowerCase();
+                    const imageExtensions = ['.jpg', '.jpeg', '.png', '.gif', '.webp', '.bmp', '.tiff', '.tif'];
+                    const videoExtensions = ['.mp4', '.avi', '.mov', '.wmv', '.flv', '.webm', '.mkv'];
+                    if (imageExtensions.includes(ext)) {
+                        // Validate image files with Sharp
+                        const metadata = await sharp(filePath).metadata();
+                        console_log_it(`Image validation successful: ${metadata.width}x${metadata.height} ${metadata.format}`, 'download_file_retry');
+                    }
+                    else if (videoExtensions.includes(ext)) {
+                        // For video files, just check if they're readable by ffmpeg (basic validation)
+                        console_log_it(`Video file detected: ${ext}, basic validation passed`, 'download_file_retry');
+                        // Could add ffprobe validation here if needed
+                    }
+                    else {
+                        // For other file types, just verify file exists and has content
+                        console_log_it(`Non-media file detected: ${ext}, size: ${stats.size} bytes`, 'download_file_retry');
+                    }
+                }
+                catch (validationError) {
+                    console_log_it(`File validation failed: ${validationError.message}`, 'download_file_retry');
+                    // Delete the corrupted file
+                    try {
+                        fs.unlinkSync(filePath);
+                        console_log_it(`Deleted corrupted file: ${filePath}`, 'download_file_retry');
+                    }
+                    catch (deleteError) {
+                        console_log_it(`Failed to delete corrupted file: ${deleteError.message}`, 'download_file_retry');
+                    }
+                    throw new Error(`Downloaded file validation failed: ${validationError.message}`);
+                }
+            }
+            console_log_it(`Download and validation successful on attempt ${attempt}`, 'download_file_retry');
+            if (onProgress) {
+                onProgress(`Download completed successfully`);
+            }
+            return; // Success - exit the function
+        }
+        catch (error) {
+            lastError = error;
+            console_log_it(`Download attempt ${attempt} failed: ${error.message}`, 'download_file_retry');
+            if (attempt === maxAttempts) {
+                // Last attempt failed
+                console_log_it(`All ${maxAttempts} download attempts failed`, 'download_file_retry');
+                if (onProgress) {
+                    onProgress(`Download failed after ${maxAttempts} attempts`);
+                }
+                throw new Error(`Download failed after ${maxAttempts} attempts. Last error: ${error.message}`);
+            }
+            // Calculate delay for next attempt (exponential backoff)
+            const delayMs = Math.min(initialDelay * Math.pow(2, attempt - 1), maxDelay);
+            console_log_it(`Waiting ${delayMs}ms before retry attempt ${attempt + 1}`, 'download_file_retry');
+            if (onProgress) {
+                onProgress(`Retrying in ${Math.round(delayMs / 1000)} seconds...`);
+            }
+            await new Promise(resolve => setTimeout(resolve, delayMs));
+        }
+    }
+}
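
With the defaults, the wait doubles from 10 s after each failure (10, 20, 40, 80, 160 s), hitting the 300 s cap from the sixth failure onward, so a full 15-attempt run can spend well over 40 minutes waiting. A usage sketch (URL and path are illustrative; same-module scope assumed):

```js
await downloadFileWithRetry('https://example.com/frame.png', '/tmp/frame.png', {
    maxAttempts: 5, // give up after ~2.5 minutes of backoff instead of the full 15 attempts
    onProgress: (msg) => console.log(msg),
});
```
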
+/**
+ * Uploads a file to Firebase Storage at a specific location
+ * @param {string} localFilePath - Path to the local file to upload
+ * @param {string} bucketPath - Destination path in the bucket (e.g., 'uploads/images/myfile.jpg')
+ * @param {Object} options - Upload options
+ * @param {string} options.contentType - MIME type of the file (auto-detected if not provided)
+ * @param {Object} options.metadata - Additional metadata for the file
+ * @param {boolean} options.public - Whether to make the file publicly accessible (default: false)
+ * @param {Function} options.onProgress - Progress callback function
+ * @returns {Promise<Object>} Upload result with download URL and metadata
+ */
+async function uploadFileToBucket(localFilePath, bucketPath, options = {}) {
+    try {
+        const bucket = getBucket();
+        console.log(`Starting upload: ${localFilePath} -> ${bucketPath}`, 'upload_file_to_bucket');
+        // Normalize bucket path - replace backslashes with forward slashes for GCS compatibility
+        const normalizedBucketPath = bucketPath.replace(/\\/g, '/');
+        console.log(`Normalized bucket path: ${normalizedBucketPath}`, 'upload_file_to_bucket');
+        // Check if local file exists
+        if (!fs.existsSync(localFilePath)) {
+            throw new Error(`Local file does not exist: ${localFilePath}`);
+        }
+        // Get file stats
+        const stats = fs.statSync(localFilePath);
+        const fileSizeMB = (stats.size / 1024 / 1024).toFixed(2);
+        console.log(`File size: ${fileSizeMB} MB`, 'upload_file_to_bucket');
+        // Auto-detect content type if not provided
+        let contentType = options.contentType;
+        if (!contentType) {
+            const ext = path.extname(localFilePath).toLowerCase();
+            const mimeTypes = {
+                '.jpg': 'image/jpeg',
+                '.jpeg': 'image/jpeg',
+                '.png': 'image/png',
+                '.gif': 'image/gif',
+                '.webp': 'image/webp',
+                '.mp4': 'video/mp4',
+                '.mov': 'video/quicktime',
+                '.avi': 'video/x-msvideo',
+                '.json': 'application/json',
+                '.txt': 'text/plain',
+                '.zip': 'application/zip',
+                '.pdf': 'application/pdf'
+            };
+            contentType = mimeTypes[ext] || 'application/octet-stream';
+        }
+        // Prepare upload options
+        const uploadOptions = {
+            destination: normalizedBucketPath,
+            metadata: {
+                contentType,
+                metadata: {
+                    uploadedAt: new Date().toISOString(),
+                    originalName: path.basename(localFilePath),
+                    size: stats.size.toString(),
+                    ...options.metadata
+                }
+            },
+            resumable: stats.size > 5 * 1024 * 1024, // Use resumable upload for files > 5MB
+        };
+        // Set public access if requested
+        if (options.public) {
+            uploadOptions.metadata.metadata.publicAccess = 'true';
+        }
+        // Create file reference
+        const file = bucket.file(normalizedBucketPath);
+        // Upload the file
+        const [uploadedFile] = await bucket.upload(localFilePath, uploadOptions);
+        console.log(`Upload completed: ${normalizedBucketPath}`, 'upload_file_to_bucket');
+        // Make public if requested
+        if (options.public) {
+            await uploadedFile.makePublic();
+            console.log(`File made public: ${normalizedBucketPath}`, 'upload_file_to_bucket');
+        }
+        // Get download URL
+        let downloadURL;
+        if (options.public) {
+            downloadURL = `https://storage.googleapis.com/${bucket.name}/${normalizedBucketPath}`;
+        }
+        else {
+            // Generate signed URL for private files (valid for 1 hour by default)
+            const [url] = await uploadedFile.getSignedUrl({
+                action: 'read',
+                expires: Date.now() + 60 * 60 * 1000, // 1 hour
+            });
+            downloadURL = url;
+        }
+        // Get file metadata
+        const [metadata] = await uploadedFile.getMetadata();
+        const result = {
+            success: true,
+            bucketPath: normalizedBucketPath,
+            downloadURL,
+            contentType,
+            size: stats.size,
+            sizeMB: fileSizeMB,
+            metadata: metadata.metadata,
+            timeCreated: metadata.timeCreated,
+            updated: metadata.updated
+        };
+        console.log(`Upload result: ${JSON.stringify(result, null, 2)}`, 'upload_file_to_bucket');
+        return result;
+    }
+    catch (error) {
+        console.log(`Upload failed: ${error.message}`, 'upload_file_to_bucket');
+        throw new Error(`Failed to upload file to bucket: ${error.message}`);
+    }
+}
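
Public uploads get a stable `storage.googleapis.com` URL; private ones get a signed URL that expires after an hour, so persist the returned `bucketPath` rather than the URL for private files. A sketch (same-module scope assumed; paths and job ID are illustrative):

```js
const { downloadURL, bucketPath: storedPath } = await uploadFileToBucket(
    '/tmp/out.mp4',
    'renders/out.mp4',
    {
        public: true,
        metadata: { jobId: 'job-123' }, // merged into the object's custom metadata
    }
);
console.log(downloadURL); // https://storage.googleapis.com/<bucket>/renders/out.mp4
```
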
/**
 * Downloads a file from Firebase Storage bucket to local filesystem
 * @param {string} bucketPath - Path to the file in the bucket (e.g., 'uploads/images/myfile.jpg')
 * @param {string} localFilePath - Local path where the file should be saved
 * @param {Object} options - Download options
 * @param {Function} options.onProgress - Progress callback function
 * @param {boolean} options.createDirs - Whether to create directories if they don't exist (default: true)
 * @param {boolean} options.overwrite - Whether to overwrite existing file (default: true)
 * @param {boolean} options.validateFile - Whether to validate the downloaded file (default: true)
 * @returns {Promise<Object>} Download result with file metadata
 */
async function downloadFileFromBucket(bucketPath, localFilePath, options = {}) {
    try {
        const { onProgress = null, createDirs = true, overwrite = true, validateFile = true } = options;
        const bucket = getBucket();
        console_log_it(`Starting download: ${bucketPath} -> ${localFilePath}`, 'download_file_from_bucket');
        // Normalize bucket path - replace backslashes with forward slashes for GCS compatibility
        const normalizedBucketPath = bucketPath.replace(/\\/g, '/');
        console_log_it(`Normalized bucket path: ${normalizedBucketPath}`, 'download_file_from_bucket');
        // Create file reference
        const file = bucket.file(normalizedBucketPath);
        // Check if file exists in bucket
        const [exists] = await file.exists();
        if (!exists) {
            throw new Error(`File does not exist in bucket: ${normalizedBucketPath}`);
        }
        // Check if local file already exists
        if (fs.existsSync(localFilePath) && !overwrite) {
            throw new Error(`Local file already exists and overwrite is disabled: ${localFilePath}`);
        }
        // Create local directories if needed
        if (createDirs) {
            const localDir = path.dirname(localFilePath);
            if (!fs.existsSync(localDir)) {
                fs.mkdirSync(localDir, { recursive: true });
                console_log_it(`Created directory: ${localDir}`, 'download_file_from_bucket');
            }
        }
        // Get file metadata
        const [metadata] = await file.getMetadata();
        const fileSizeMB = (parseInt(metadata.size) / 1024 / 1024).toFixed(2);
        console_log_it(`File size: ${fileSizeMB} MB`, 'download_file_from_bucket');
        if (onProgress) {
            await onProgress({
                type: 'start',
                bucketPath,
                localFilePath,
                size: parseInt(metadata.size),
                sizeMB: fileSizeMB
            });
        }
        // Download the file
        await new Promise((resolve, reject) => {
            const writeStream = fs.createWriteStream(localFilePath);
            let downloadedBytes = 0;
            const totalBytes = parseInt(metadata.size);
            const readStream = file.createReadStream();
            readStream.on('data', async (chunk) => {
                downloadedBytes += chunk.length;
                if (onProgress) {
                    const percentage = (downloadedBytes / totalBytes) * 100;
                    await onProgress({
                        type: 'progress',
                        downloadedBytes,
                        totalBytes,
                        percentage: percentage.toFixed(1)
                    });
                }
            });
            readStream.on('error', (error) => {
                writeStream.destroy();
                if (fs.existsSync(localFilePath)) {
                    fs.unlinkSync(localFilePath);
                }
                reject(error);
            });
            writeStream.on('error', (error) => {
                readStream.destroy();
                if (fs.existsSync(localFilePath)) {
                    fs.unlinkSync(localFilePath);
                }
                reject(error);
            });
            writeStream.on('finish', () => {
                resolve();
            });
            readStream.pipe(writeStream);
        });
        console_log_it(`Download completed: ${localFilePath}`, 'download_file_from_bucket');
        // Validate the downloaded file if requested
        if (validateFile) {
            try {
                const stats = fs.statSync(localFilePath);
                if (stats.size === 0) {
                    throw new Error('Downloaded file is empty');
                }
                if (stats.size !== parseInt(metadata.size)) {
                    throw new Error(`File size mismatch. Expected: ${metadata.size}, Actual: ${stats.size}`);
                }
                // Additional validation based on file type
                const ext = path.extname(localFilePath).toLowerCase();
                const imageExtensions = ['.jpg', '.jpeg', '.png', '.gif', '.webp', '.bmp', '.tiff', '.tif'];
                if (imageExtensions.includes(ext)) {
                    const imageMetadata = await sharp(localFilePath).metadata();
                    console_log_it(`Image validation successful: ${imageMetadata.width}x${imageMetadata.height} ${imageMetadata.format}`, 'download_file_from_bucket');
                }
            }
            catch (validationError) {
                console_log_it(`File validation failed: ${validationError.message}`, 'download_file_from_bucket');
                // Delete the corrupted file
                try {
                    fs.unlinkSync(localFilePath);
                    console_log_it(`Deleted corrupted file: ${localFilePath}`, 'download_file_from_bucket');
                }
                catch (deleteError) {
                    console_log_it(`Failed to delete corrupted file: ${deleteError.message}`, 'download_file_from_bucket');
                }
                throw new Error(`Downloaded file validation failed: ${validationError.message}`);
            }
        }
        if (onProgress) {
            await onProgress({
                type: 'complete',
                bucketPath: normalizedBucketPath,
                localFilePath,
                size: parseInt(metadata.size),
                sizeMB: fileSizeMB
            });
        }
        const result = {
            success: true,
            bucketPath: normalizedBucketPath,
            localFilePath,
            size: parseInt(metadata.size),
            sizeMB: fileSizeMB,
            contentType: metadata.contentType,
            timeCreated: metadata.timeCreated,
            updated: metadata.updated,
            metadata: metadata.metadata
        };
        console_log_it(`Download result: ${JSON.stringify(result, null, 2)}`, 'download_file_from_bucket');
        return result;
    }
    catch (error) {
        console_log_it(`Download failed: ${error.message}`, 'download_file_from_bucket');
        throw new Error(`Failed to download file from bucket: ${error.message}`);
    }
}
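// Illustrative usage sketch (added for this edit, not part of the original module):
// downloading a single file with a progress callback. Paths are hypothetical examples.
async function exampleDownloadUsage() {
    const result = await downloadFileFromBucket('uploads/images/photo.jpg', './downloads/photo.jpg', {
        overwrite: true,
        onProgress: async (event) => {
            if (event.type === 'progress') {
                console.log(`Downloaded ${event.percentage}%`);
            }
        }
    });
    console.log(`Saved ${result.sizeMB} MB to ${result.localFilePath}`);
}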
/**
 * Downloads multiple files from Firebase Storage
 * @param {Array} fileList - Array of {bucketPath, localPath, options} objects
 * @param {Function} onProgress - Progress callback function
 * @returns {Promise<Object>} Download results for all files
 */
export async function downloadMultipleFilesFromBucket(fileList, onProgress) {
    const results = {
        success: [],
        failed: [],
        totalFiles: fileList.length,
        totalProcessed: 0
    };
    for (let i = 0; i < fileList.length; i++) {
        const { bucketPath, localPath, options = {} } = fileList[i];
        try {
            if (onProgress) {
                onProgress({
                    type: 'file_start',
                    current: i + 1,
                    total: fileList.length,
                    bucketPath,
                    localPath
                });
            }
            const result = await downloadFileFromBucket(bucketPath, localPath, {
                ...options,
                onProgress: (progressData) => {
                    if (onProgress) {
                        onProgress({
                            type: 'file_progress',
                            current: i + 1,
                            total: fileList.length,
                            bucketPath,
                            localPath,
                            progress: progressData
                        });
                    }
                }
            });
            results.success.push({ bucketPath, localPath, ...result });
            results.totalProcessed++;
            if (onProgress) {
                onProgress({
                    type: 'file_complete',
                    current: i + 1,
                    total: fileList.length,
                    bucketPath,
                    localPath,
                    result
                });
            }
        }
        catch (error) {
            console_log_it(`Failed to download ${bucketPath}: ${error.message}`, 'download_multiple_files');
            results.failed.push({ bucketPath, localPath, error: error.message });
            if (onProgress) {
                onProgress({
                    type: 'file_error',
                    current: i + 1,
                    total: fileList.length,
                    bucketPath,
                    localPath,
                    error: error.message
                });
            }
        }
    }
    console_log_it(`Batch download complete. Success: ${results.success.length}, Failed: ${results.failed.length}`, 'download_multiple_files');
    return results;
}
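// Illustrative usage sketch (added for this edit, not part of the original module):
// the fileList shape expected by downloadMultipleFilesFromBucket. Paths are
// hypothetical examples.
async function exampleBatchDownloadUsage() {
    const results = await downloadMultipleFilesFromBucket([
        { bucketPath: 'uploads/images/a.png', localPath: './tmp/a.png' },
        { bucketPath: 'uploads/images/b.png', localPath: './tmp/b.png', options: { validateFile: false } }
    ], (event) => console.log(event.type, `${event.current}/${event.total}`));
    console.log(`OK: ${results.success.length}, failed: ${results.failed.length}`);
}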
/**
 * Downloads a file from Firebase Storage with retry logic and exponential backoff
 * @param {string} bucketPath - Path to the file in the bucket
 * @param {string} localFilePath - Local path where the file should be saved
 * @param {Object} options - Download and retry options
 * @param {number} options.maxAttempts - Maximum number of retry attempts (default: 15)
 * @param {number} options.initialDelay - Initial delay in milliseconds (default: 1000)
 * @param {number} options.maxDelay - Maximum delay in milliseconds (default: 30000)
 * @param {Function} options.onProgress - Progress callback function
 * @param {boolean} options.createDirs - Whether to create directories if they don't exist (default: true)
 * @param {boolean} options.overwrite - Whether to overwrite existing file (default: true)
 * @param {boolean} options.validateFile - Whether to validate the downloaded file (default: true)
 * @returns {Promise<Object>} Download result with file metadata
 */
export async function downloadFileFromBucketWithRetry(bucketPath, localFilePath, options = {}) {
    const { maxAttempts = 15, initialDelay = 1000, maxDelay = 30000, onProgress = null, ...downloadOptions } = options;
    let lastError;
    for (let attempt = 1; attempt <= maxAttempts; attempt++) {
        try {
            if (onProgress) {
                await onProgress({
                    type: 'retry_attempt',
                    attempt,
                    maxAttempts,
                    bucketPath,
                    localFilePath
                });
            }
            console_log_it(`Download attempt ${attempt}/${maxAttempts} for ${bucketPath}`, 'download_file_bucket_retry');
            const result = await downloadFileFromBucket(bucketPath, localFilePath, {
                ...downloadOptions,
                onProgress: async (progressData) => {
                    if (onProgress) {
                        await onProgress({
                            type: 'download_progress',
                            attempt,
                            maxAttempts,
                            ...progressData
                        });
                    }
                }
            });
            console_log_it(`Download successful on attempt ${attempt}`, 'download_file_bucket_retry');
            if (onProgress) {
                await onProgress({
                    type: 'retry_success',
                    attempt,
                    maxAttempts,
                    result
                });
            }
            return result;
        }
        catch (error) {
            lastError = error;
            console_log_it(`Download attempt ${attempt} failed: ${error.message}`, 'download_file_bucket_retry');
            if (attempt === maxAttempts) {
                console_log_it(`All ${maxAttempts} download attempts failed`, 'download_file_bucket_retry');
                if (onProgress) {
                    await onProgress({
                        type: 'retry_failed',
                        attempt,
                        maxAttempts,
                        error: error.message
                    });
                }
                throw new Error(`Download failed after ${maxAttempts} attempts. Last error: ${error.message}`);
            }
            // Calculate delay for next attempt (exponential backoff)
            const delayMs = Math.min(initialDelay * Math.pow(2, attempt - 1), maxDelay);
            console_log_it(`Waiting ${delayMs}ms before retry attempt ${attempt + 1}`, 'download_file_bucket_retry');
            if (onProgress) {
                await onProgress({
                    type: 'retry_delay',
                    attempt,
                    maxAttempts,
                    delayMs,
                    error: error.message
                });
            }
            await new Promise(resolve => setTimeout(resolve, delayMs));
        }
    }
}
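// Illustrative usage sketch (added for this edit, not part of the original module).
// With the defaults above (initialDelay 1000 ms, maxDelay 30000 ms), the waits
// between attempts are 1s, 2s, 4s, 8s, 16s, then capped at 30s for every later
// retry. The bucket path below is a hypothetical example.
async function exampleRetryDownloadUsage() {
    const result = await downloadFileFromBucketWithRetry('models/checkpoint.zip', './tmp/checkpoint.zip', {
        maxAttempts: 5,
        onProgress: async (event) => {
            if (event.type === 'retry_delay') {
                console.log(`Attempt ${event.attempt} failed, retrying in ${event.delayMs} ms`);
            }
        }
    });
    return result;
}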
/**
 * Uploads multiple files to Firebase Storage
 * @param {Array} fileList - Array of {localPath, bucketPath, options} objects
 * @param {Function} onProgress - Progress callback function
 * @returns {Promise<Object>} Upload results for all files
 */
export async function uploadMultipleFilesToBucket(fileList, onProgress) {
    const results = {
        success: [],
        failed: [],
        totalFiles: fileList.length,
        totalProcessed: 0
    };
    for (let i = 0; i < fileList.length; i++) {
        const { localPath, bucketPath, options = {} } = fileList[i];
        try {
            if (onProgress) {
                onProgress({
                    type: 'file_start',
                    current: i + 1,
                    total: fileList.length,
                    localPath,
                    bucketPath
                });
            }
            const result = await uploadFileToBucket(localPath, bucketPath, options);
            results.success.push({ localPath, bucketPath, ...result });
            results.totalProcessed++;
            if (onProgress) {
                onProgress({
                    type: 'file_complete',
                    current: i + 1,
                    total: fileList.length,
                    localPath,
                    bucketPath,
                    result
                });
            }
        }
        catch (error) {
            console_log_it(`Failed to upload ${localPath}: ${error.message}`, 'upload_multiple_files');
            results.failed.push({ localPath, bucketPath, error: error.message });
            if (onProgress) {
                onProgress({
                    type: 'file_error',
                    current: i + 1,
                    total: fileList.length,
                    localPath,
                    bucketPath,
                    error: error.message
                });
            }
        }
    }
    console_log_it(`Batch upload complete. Success: ${results.success.length}, Failed: ${results.failed.length}`, 'upload_multiple_files');
    return results;
}
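// Illustrative usage sketch (added for this edit, not part of the original module):
// batch upload takes the mirror-image fileList shape, {localPath, bucketPath, options}.
// Paths are hypothetical examples.
async function exampleBatchUploadUsage() {
    const results = await uploadMultipleFilesToBucket([
        { localPath: './tmp/a.png', bucketPath: 'uploads/images/a.png' },
        { localPath: './tmp/b.png', bucketPath: 'uploads/images/b.png', options: { public: true } }
    ]);
    return results;
}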
/**
 * Lists files in a Firebase Storage bucket directory
 * @param {string} prefix - Directory prefix to search (e.g., 'uploads/images/')
 * @param {Object} options - List options
 * @param {number} options.maxResults - Maximum number of files to return
 * @param {string} options.delimiter - Delimiter for directory-like listing
 * @param {string} options.pageToken - Token for pagination
 * @returns {Promise<Object>} List of files and metadata
 */
export async function listBucketFiles(prefix = '', options = {}) {
    try {
        const bucket = getBucket();
        console_log_it(`Listing files in bucket with prefix: '${prefix}'`, 'list_bucket_files');
        const listOptions = {
            prefix,
            ...options
        };
        const [files, , metadata] = await bucket.getFiles(listOptions);
        const fileList = files.map(file => ({
            name: file.name,
            bucket: file.bucket.name,
            size: parseInt(file.metadata.size || 0),
            sizeMB: (parseInt(file.metadata.size || 0) / 1024 / 1024).toFixed(2),
            contentType: file.metadata.contentType,
            timeCreated: file.metadata.timeCreated,
            updated: file.metadata.updated,
            md5Hash: file.metadata.md5Hash,
            etag: file.metadata.etag
        }));
        const result = {
            files: fileList,
            totalFiles: fileList.length,
            prefix,
            nextPageToken: metadata.nextPageToken || null,
            totalSize: fileList.reduce((sum, file) => sum + file.size, 0),
            totalSizeMB: (fileList.reduce((sum, file) => sum + file.size, 0) / 1024 / 1024).toFixed(2)
        };
        console_log_it(`Found ${result.totalFiles} files, total size: ${result.totalSizeMB} MB`, 'list_bucket_files');
        return result;
    }
    catch (error) {
        console_log_it(`Failed to list bucket files: ${error.message}`, 'list_bucket_files');
        throw new Error(`Failed to list bucket files: ${error.message}`);
    }
}
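// Illustrative usage sketch (added for this edit, not part of the original module):
// paging through a prefix via maxResults and the returned nextPageToken. The prefix
// is a hypothetical example. Note the @google-cloud/storage client only reports a
// page token when autoPaginate is disabled, so it is passed through in the options.
async function exampleListAllUsage() {
    let pageToken = null;
    const allFiles = [];
    do {
        const page = await listBucketFiles('uploads/images/', {
            maxResults: 100,
            autoPaginate: false,
            ...(pageToken ? { pageToken } : {})
        });
        allFiles.push(...page.files);
        pageToken = page.nextPageToken;
    } while (pageToken);
    return allFiles;
}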
/**
 * Checks if a file exists in the Firebase Storage bucket
 * @param {string} bucketPath - Path to the file in the bucket
 * @returns {Promise<Object>} Existence check result with metadata
 */
export async function checkBucketFileExists(bucketPath) {
    try {
        const bucket = getBucket();
        // Normalize bucket path - replace backslashes with forward slashes for GCS compatibility
        const normalizedBucketPath = bucketPath.replace(/\\/g, '/');
        const file = bucket.file(normalizedBucketPath);
        const [exists] = await file.exists();
        if (exists) {
            const [metadata] = await file.getMetadata();
            return {
                exists: true,
                bucketPath: normalizedBucketPath,
                size: parseInt(metadata.size),
                sizeMB: (parseInt(metadata.size) / 1024 / 1024).toFixed(2),
                contentType: metadata.contentType,
                timeCreated: metadata.timeCreated,
                updated: metadata.updated,
                md5Hash: metadata.md5Hash,
                etag: metadata.etag
            };
        }
        else {
            return {
                exists: false,
                bucketPath: normalizedBucketPath
            };
        }
    }
    catch (error) {
        console_log_it(`Failed to check file existence: ${error.message}`, 'check_bucket_file_exists');
        return {
            exists: false,
            bucketPath,
            error: error.message
        };
    }
}
/**
 * Deletes a file from Firebase Storage bucket
 * @param {string} bucketPath - Path to the file in the bucket
 * @returns {Promise<Object>} Deletion result
 */
export async function deleteBucketFile(bucketPath) {
    try {
        const bucket = getBucket();
        // Normalize bucket path - replace backslashes with forward slashes for GCS compatibility
        const normalizedBucketPath = bucketPath.replace(/\\/g, '/');
        const file = bucket.file(normalizedBucketPath);
        // Check if file exists first
        const [exists] = await file.exists();
        if (!exists) {
            return {
                success: false,
                bucketPath: normalizedBucketPath,
                error: 'File does not exist'
            };
        }
        // Get metadata before deletion
        const [metadata] = await file.getMetadata();
        // Delete the file
        await file.delete();
        console_log_it(`File deleted from bucket: ${normalizedBucketPath}`, 'delete_bucket_file');
        return {
            success: true,
            bucketPath: normalizedBucketPath,
            size: parseInt(metadata.size),
            sizeMB: (parseInt(metadata.size) / 1024 / 1024).toFixed(2),
            deletedAt: new Date().toISOString()
        };
    }
    catch (error) {
        console_log_it(`Failed to delete file from bucket: ${error.message}`, 'delete_bucket_file');
        throw new Error(`Failed to delete file from bucket: ${error.message}`);
    }
}
/**
 * Gets a signed URL for accessing a private file in Firebase Storage
 * @param {string} bucketPath - Path to the file in the bucket
 * @param {Object} options - URL options
 * @param {string} options.action - Action type ('read', 'write', 'delete', 'resumable')
 * @param {number} options.expires - Expiration time in milliseconds from now (default: 1 hour)
 * @param {string} options.contentType - Content type for write operations
 * @returns {Promise<Object>} Signed URL result
 */
export async function getBucketFileSignedUrl(bucketPath, options = {}) {
    try {
        const {
            action = 'read',
            expires = 60 * 60 * 1000, // 1 hour
            contentType = null
        } = options;
        const bucket = getBucket();
        // Normalize bucket path - replace backslashes with forward slashes for GCS compatibility
        const normalizedBucketPath = bucketPath.replace(/\\/g, '/');
        const file = bucket.file(normalizedBucketPath);
        // Check if file exists for read operations
        if (action === 'read') {
            const [exists] = await file.exists();
            if (!exists) {
                throw new Error('File does not exist');
            }
        }
        const signOptions = {
            action,
            expires: Date.now() + expires
        };
        if (contentType && (action === 'write' || action === 'resumable')) {
            signOptions.contentType = contentType;
        }
        const [url] = await file.getSignedUrl(signOptions);
        console_log_it(`Generated signed URL for ${action} operation: ${normalizedBucketPath}`, 'get_bucket_file_signed_url');
        return {
            success: true,
            bucketPath: normalizedBucketPath,
            url,
            action,
            expires: new Date(Date.now() + expires).toISOString(),
            expiresIn: expires
        };
    }
    catch (error) {
        console_log_it(`Failed to generate signed URL: ${error.message}`, 'get_bucket_file_signed_url');
        throw new Error(`Failed to generate signed URL: ${error.message}`);
    }
}
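// Illustrative usage sketch (added for this edit, not part of the original module):
// generating a 15-minute signed upload URL a client can PUT to. The bucket path and
// content type are hypothetical examples.
async function exampleSignedUploadUrlUsage() {
    const { url } = await getBucketFileSignedUrl('uploads/images/new.png', {
        action: 'write',
        contentType: 'image/png',
        expires: 15 * 60 * 1000
    });
    return url; // the client must send the same Content-Type header when uploading
}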
export function getJobZipPath(job, fileName) {
    return path.join(getJobZipFolderPath(job), fileName);
}
export function getJobZipFolderPath(job) {
    return `training/sets/${job.ownerUid}/${job.workflowId}`;
}
export function getModelBucketPath(jobId, workflowData, stepId, ownerUid) {
    return `models/users/${ownerUid}/${jobId}/${workflowData.id}/${stepId}.zip`;
}
export function getZipResourcesName(jobId, workflowData, stepId) {
    return `job_${jobId}_workflow_${workflowData.id}_step_${stepId}.zip`;
}
export function getZipModelFileName(jobId, workflowData, stepId) {
    return `job_${jobId}_workflow_${workflowData.id}_step_${stepId}_model.zip`;
}
export function modelFileZipDirName(jobId, stepId) {
    return `${jobId}-${stepId}`;
}
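// Worked example (added for this edit, not part of the original module): with
// hypothetical inputs jobId = 'job1', workflowData = { id: 'wf1' }, stepId = 's1',
// ownerUid = 'u1', the helpers above produce:
//   getModelBucketPath('job1', { id: 'wf1' }, 's1', 'u1') -> 'models/users/u1/job1/wf1/s1.zip'
//   getZipResourcesName('job1', { id: 'wf1' }, 's1')      -> 'job_job1_workflow_wf1_step_s1.zip'
//   getZipModelFileName('job1', { id: 'wf1' }, 's1')      -> 'job_job1_workflow_wf1_step_s1_model.zip'
//   modelFileZipDirName('job1', 's1')                     -> 'job1-s1'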
async function extractVideoFrame(videoPath, timestampSeconds, outputPath) {
    return new Promise((resolve, reject) => {
        ffmpeg(videoPath)
            .screenshots({
                timestamps: [timestampSeconds],
                filename: path.basename(outputPath),
                folder: path.dirname(outputPath),
                // size: '?x?' // Keep original resolution, or specify like '1920x1080'
            })
            .on('end', () => {
                console_log_it(`Frame extracted successfully: ${outputPath}`, 'extract_video_frame');
                resolve(outputPath);
            })
            .on('error', (err) => {
                console_log_it(`Error extracting frame: ${err.message}`, 'extract_video_frame');
                reject(err);
            });
    });
}
// Alternative function with more control over output format:
async function extractVideoFrameAdvanced(videoPath, timestampSeconds, outputPath, options = {}) {
    const {
        width = null,
        height = null,
        format = 'png', // png, jpg, jpeg, webp
        quality = 90 // for jpg/jpeg format
    } = options;
    return new Promise((resolve, reject) => {
        let ffmpegCommand = ffmpeg(videoPath)
            .seekInput(timestampSeconds)
            .frames(1)
            .format(format)
            .output(outputPath);
        // Set size if specified
        if (width && height) {
            ffmpegCommand = ffmpegCommand.size(`${width}x${height}`);
        }
        else if (width || height) {
            ffmpegCommand = ffmpegCommand.size(`${width || '?'}x${height || '?'}`);
        }
        // Set quality for JPEG
        if (format === 'jpg' || format === 'jpeg') {
            ffmpegCommand = ffmpegCommand.outputOptions([`-q:v ${Math.round((100 - quality) / 10)}`]);
        }
        ffmpegCommand
            .on('end', () => {
                console_log_it(`Frame extracted successfully: ${outputPath}`, 'extract_video_frame_advanced');
                resolve(outputPath);
            })
            .on('error', (err) => {
                console_log_it(`Error extracting frame: ${err.message}`, 'extract_video_frame_advanced');
                reject(err);
            })
            .run();
    });
}
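// Illustrative usage sketch (added for this edit, not part of the original module):
// grabbing the frame at 5 seconds, first at source resolution as PNG, then as a
// 1280x720 JPEG. Paths are hypothetical examples.
async function exampleFrameExtractionUsage() {
    await extractVideoFrame('./tmp/clip.mp4', 5, './tmp/frame.png');
    await extractVideoFrameAdvanced('./tmp/clip.mp4', 5, './tmp/frame.jpg', {
        width: 1280,
        height: 720,
        format: 'jpg',
        quality: 85
    });
}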
/**
 * Resize an image to fit within maximum dimensions while maintaining aspect ratio
 * No padding/blank parts will be added - image will be smaller than or equal to max dimensions
 * @param {string} inputPath - Path to the input image
 * @param {string} outputPath - Path where the resized image will be saved
 * @param {Object} dimensions - Maximum dimensions {width, height}
 * @param {Object} options - Additional options for resizing
 * @returns {Promise<string>} - Path to the resized image
 */
async function resizeImage(inputPath, outputPath, dimensions, options = {}) {
    const {
        quality = 90, // JPEG quality (if output is JPEG)
        format = null, // Auto-detect from output path extension
        withoutEnlargement = true // Don't enlarge if source is smaller than target
    } = options;
    try {
        console_log_it(`Resizing image from ${inputPath} to ${outputPath}`, 'resize_image');
        console_log_it(`Maximum dimensions: ${dimensions.width}x${dimensions.height}`, 'resize_image');
        // Create sharp instance
        let sharpInstance = sharp(inputPath);
        // Get original image metadata
        const metadata = await sharpInstance.metadata();
        console_log_it(`Original dimensions: ${metadata.width}x${metadata.height}`, 'resize_image');
        // Calculate aspect ratios
        const originalAspectRatio = metadata.width / metadata.height;
        const maxAspectRatio = dimensions.width / dimensions.height;
        console_log_it(`Original aspect ratio: ${originalAspectRatio.toFixed(3)}`, 'resize_image');
        console_log_it(`Max container aspect ratio: ${maxAspectRatio.toFixed(3)}`, 'resize_image');
        // Calculate the actual dimensions to fit within the maximum bounds
        let newWidth, newHeight;
        if (originalAspectRatio > maxAspectRatio) {
            // Image is wider relative to container - constrain by width
            newWidth = Math.min(dimensions.width, metadata.width);
            newHeight = Math.round(newWidth / originalAspectRatio);
        }
        else {
            // Image is taller relative to container - constrain by height
            newHeight = Math.min(dimensions.height, metadata.height);
            newWidth = Math.round(newHeight * originalAspectRatio);
        }
        // If withoutEnlargement is true, don't make image larger than original
        if (withoutEnlargement) {
            newWidth = Math.min(newWidth, metadata.width);
            newHeight = Math.min(newHeight, metadata.height);
        }
        console_log_it(`Calculated new dimensions: ${newWidth}x${newHeight}`, 'resize_image');
        // Resize to exact calculated dimensions (no padding)
        sharpInstance = sharpInstance.resize({
            width: newWidth,
            height: newHeight,
            fit: 'fill', // Use 'fill' since we calculated exact dimensions
            withoutEnlargement: false // We already handled enlargement above
        });
        // Determine output format
        let outputFormat = format;
        if (!outputFormat) {
            const ext = path.extname(outputPath).toLowerCase();
            switch (ext) {
                case '.jpg':
                case '.jpeg':
                    outputFormat = 'jpeg';
                    break;
                case '.png':
                    outputFormat = 'png';
                    break;
                case '.webp':
                    outputFormat = 'webp';
                    break;
                case '.tiff':
                case '.tif':
                    outputFormat = 'tiff';
                    break;
                default:
                    outputFormat = 'png'; // Default fallback
            }
        }
        // Apply format-specific options
        switch (outputFormat) {
            case 'jpeg':
                sharpInstance = sharpInstance.jpeg({ quality: quality });
                break;
            case 'png':
                sharpInstance = sharpInstance.png({ compressionLevel: 6 });
                break;
            case 'webp':
                sharpInstance = sharpInstance.webp({ quality: quality });
                break;
            case 'tiff':
                sharpInstance = sharpInstance.tiff({ quality: quality });
                break;
        }
        // Save the resized image
        await sharpInstance.toFile(outputPath);
        // Get final image metadata to confirm dimensions
        const finalMetadata = await sharp(outputPath).metadata();
        console_log_it(`Final dimensions: ${finalMetadata.width}x${finalMetadata.height}`, 'resize_image');
        console_log_it(`Image resized successfully: ${outputPath}`, 'resize_image');
        return outputPath;
    }
    catch (error) {
        console_log_it(`Error resizing image: ${error.message}`, 'resize_image');
        throw new Error(`Failed to resize image: ${error.message}`);
    }
}
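// Illustrative usage sketch (added for this edit, not part of the original module):
// constraining an image to fit inside 1024x1024 without padding. Following the
// math above, a 4000x3000 source (ratio 1.333 > 1.0) is constrained by width and
// comes out 1024x768, preserving its 4:3 aspect ratio. Paths are hypothetical.
async function exampleResizeUsage() {
    await resizeImage('./tmp/input.jpg', './tmp/output.jpg', { width: 1024, height: 1024 }, {
        quality: 90
    });
}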
/**
 * Get image dimensions using Sharp
 * @param {string} imagePath - Path to the image file
 * @returns {Promise<{width: number, height: number}>} - Image dimensions
 */
async function getImageSizeWithSharp(imagePath) {
    try {
        const metadata = await sharp(imagePath).metadata();
        if (!metadata.width || !metadata.height) {
            throw new Error(`Could not determine image dimensions for: ${imagePath}`);
        }
        console_log_it(`Image dimensions for ${imagePath}: ${metadata.width}x${metadata.height}`, 'get_image_size_sharp');
        return {
            width: metadata.width,
            height: metadata.height,
            format: metadata.format,
            channels: metadata.channels,
            density: metadata.density
        };
    }
    catch (error) {
        console_log_it(`Error getting image size for ${imagePath}: ${error.message}`, 'get_image_size_sharp');
        throw new Error(`Failed to get image dimensions: ${error.message}`);
    }
}
/**
 * Resize all images in a folder to fit within maximum dimensions (replaces originals)
 * @param {string} folder - Path to folder containing images to resize
 * @param {Object} dimensions - Maximum dimensions {width, height}
 * @param {Object} options - Additional options for resizing
 * @returns {Promise<Object>} - Results summary with success/failure counts
 */
async function resizeImagesInFolder(folder, dimensions, options = {}) {
    const {
        quality = 99,
        format = null,
        withoutEnlargement = false, // Don't enlarge if source is smaller than target
        supportedFormats = ['.jpg', '.jpeg', '.png', '.webp', '.tiff', '.tif', '.bmp'],
        onProgress = null, // Callback function for progress updates
        onError = null, // Callback function for individual file errors
        backupOriginals = false, // Create backup copies before resizing
        backupSuffix = '_original',
        force_resize = true
    } = options;
    try {
        console_log_it(`Starting in-place resize for folder: ${folder}`, 'resize_images_folder_path');
        console_log_it(`Target dimensions: ${dimensions.width}x${dimensions.height}`, 'resize_images_folder_dim');
        // Ensure folder exists
        if (!fs.existsSync(folder)) {
            throw new Error(`Folder does not exist: ${folder}`);
        }
        // Get all files in the folder
        const allFiles = fs.readdirSync(folder);
        // Filter for image files
        const imageFiles = allFiles.filter(file => {
            const ext = path.extname(file).toLowerCase();
            return supportedFormats.includes(ext);
        });
        console_log_it(`Found ${imageFiles.length} image files to process`, 'resize_images_folder_count');
        if (imageFiles.length === 0) {
            return {
                success: true,
                message: 'No image files found to resize',
                processed: 0,
                skipped: 0,
                failed: 0,
                results: []
            };
        }
        const results = {
            success: true,
            processed: 0,
            skipped: 0,
            failed: 0,
            results: []
        };
        // Process each image file
        for (let i = 0; i < imageFiles.length; i++) {
            const fileName = imageFiles[i];
            const filePath = path.join(folder, fileName);
            try {
                console_log_it(`Processing ${fileName} (${i + 1}/${imageFiles.length})`, 'resize_images_folder');
                // Get original image dimensions
                const originalSize = await getImageSizeWithSharp(filePath);
                console_log_it(`Original size: ${originalSize.width}x${originalSize.height}`, 'resize_images_folder');
                // Check if image needs resizing
                if (!force_resize && (originalSize.width <= dimensions.width && originalSize.height <= dimensions.height) && withoutEnlargement) {
                    console_log_it(`Skipping ${fileName} - already within target dimensions`, 'resize_images_folder');
                    results.skipped++;
                    results.results.push({
                        fileName,
                        status: 'skipped',
                        reason: 'Already within target dimensions',
                        filePath,
                        originalSize: `${originalSize.width}x${originalSize.height}`
                    });
                    // Call progress callback
                    if (onProgress) {
                        await onProgress({
                            current: i + 1,
                            total: imageFiles.length,
                            percentage: Math.round(((i + 1) / imageFiles.length) * 100),
                            fileName,
                            status: 'skipped',
                            progress: Math.round(((i + 1) / imageFiles.length) * 100)
                        });
                    }
                    continue;
                }
                // Create backup if requested
                if (backupOriginals) {
                    const nameWithoutExt = path.parse(fileName).name;
                    const ext = path.parse(fileName).ext;
                    const backupFileName = `${nameWithoutExt}${backupSuffix}${ext}`;
                    const backupPath = path.join(folder, backupFileName);
                    console_log_it(`Creating backup: ${backupFileName}`, 'resize_images_folder');
                    fs.copyFileSync(filePath, backupPath);
                }
                // Create temporary file for resizing
                const tempPath = path.join(folder, `temp_resize_${fileName}`);
                // Resize the image to temporary file
                await resizeImage(filePath, tempPath, dimensions, {
                    quality,
                    format,
                    withoutEnlargement
                });
                // Get resized image dimensions
                const finalSize = await getImageSizeWithSharp(tempPath);
                console_log_it(`Resized to: ${finalSize.width}x${finalSize.height}`, 'resize_images_folder');
                // Replace original with resized image
                fs.renameSync(tempPath, filePath);
                results.processed++;
                results.results.push({
                    fileName,
                    status: 'success',
                    filePath,
                    originalSize: `${originalSize.width}x${originalSize.height}`,
                    finalSize: `${finalSize.width}x${finalSize.height}`,
                    compressionRatio: ((originalSize.width * originalSize.height) / (finalSize.width * finalSize.height)).toFixed(2),
                    backupCreated: backupOriginals
                });
                // Call progress callback
                if (onProgress) {
                    await onProgress({
                        current: i + 1,
                        total: imageFiles.length,
                        percentage: Math.round(((i + 1) / imageFiles.length) * 100),
                        fileName,
                        status: 'success',
                        progress: Math.round(((i + 1) / imageFiles.length) * 100),
                        originalSize,
                        finalSize
                    });
                }
            }
            catch (error) {
                console_log_it(`Error processing ${fileName}: ${error.message}`, 'resize_images_folder');
                // Clean up temporary file if it exists
                const tempPath = path.join(folder, `temp_resize_${fileName}`);
                if (fs.existsSync(tempPath)) {
                    try {
                        fs.unlinkSync(tempPath);
                    }
                    catch (cleanupError) {
                        console_log_it(`Warning: Could not clean up temp file ${tempPath}`, 'resize_images_folder');
                    }
                }
                results.failed++;
                results.results.push({
                    fileName,
                    status: 'failed',
                    reason: error.message,
                    filePath
                });
                // Call error callback
                if (onError) {
                    onError({
                        fileName,
                        error: error.message,
                        filePath
                    });
                }
                // Call progress callback for failed items
                if (onProgress) {
                    await onProgress({
                        current: i + 1,
                        total: imageFiles.length,
                        percentage: Math.round(((i + 1) / imageFiles.length) * 100),
                        fileName,
                        status: 'failed',
                        progress: Math.round(((i + 1) / imageFiles.length) * 100),
                        error: error.message
                    });
                }
            }
        }
        // Final summary
        console_log_it(`In-place resize complete. Processed: ${results.processed}, Skipped: ${results.skipped}, Failed: ${results.failed}`, 'resize_images_folder');
        return results;
    }
    catch (error) {
        console_log_it(`Error in batch resize: ${error.message}`, 'resize_images_folder');
        throw new Error(`Failed to resize images in folder: ${error.message}`);
    }
}
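// Illustrative usage sketch (added for this edit, not part of the original module):
// resizing a folder in place while keeping backups of the originals. The folder
// path is a hypothetical example.
async function exampleFolderResizeUsage() {
    const summary = await resizeImagesInFolder('./tmp/images', { width: 1024, height: 1024 }, {
        backupOriginals: true, // keeps '<name>_original.<ext>' copies next to each file
        onProgress: async (p) => console.log(`${p.percentage}% (${p.fileName}: ${p.status})`)
    });
    console.log(summary.processed, summary.skipped, summary.failed);
}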
/**
 * Recursively resize images in a folder and all subfolders
 * @param {string} rootFolder - Path to root folder
 * @param {Object} dimensions - Maximum dimensions {width, height}
 * @param {Object} options - Additional options
 * @returns {Promise<Object>} - Combined results from all folders
 */
async function resizeImagesRecursive(rootFolder, dimensions, options = {}) {
    const {
        onProgress = null,
        folderFilter = null // Function to filter which folders to process
    } = options;
    try {
        console_log_it(`Starting recursive resize from: ${rootFolder}`, 'resize_images_recursive_start');
        const allResults = {
            success: true,
            totalProcessed: 0,
            totalSkipped: 0,
            totalFailed: 0,
            folderResults: []
        };
        // Collect every folder that contains at least one entry (the root folder included),
        // deduplicated since the recursive readdir reports one entry per contained file
        const entries = await fs.promises.readdir(rootFolder, { withFileTypes: true, recursive: true });
        const folders = [...new Set(entries.map(t => t.path))];
        console_log_it(`Found ${folders.length} folders to process`, 'resize_images_recursive_length');
        // Process each folder
        for (let i = 0; i < folders.length; i++) {
            const folderPath = folders[i];
            const folderName = path.relative(rootFolder, folderPath) || path.basename(rootFolder);
            console_log_it(`Processing folder ${i + 1}/${folders.length}: ${folderName}`, 'resize_images_recursive_p');
            if (onProgress) {
                await onProgress({
                    type: 'folder_start',
                    folder: folderName,
                    current: i + 1,
                    total: folders.length,
                    percentage: Math.round(((i + 1) / folders.length) * 100),
                    message: `Starting folder: ${folderName}`
                });
            }
            try {
                const folderResults = await resizeImagesInFolder(folderPath, dimensions, {
                    ...options,
                    onProgress: async (fileProgress) => {
                        if (onProgress) {
                            await onProgress({
                                type: 'file_progress',
                                folder: folderName,
                                ...fileProgress
                            });
                        }
                    }
                });
                allResults.totalProcessed += folderResults.processed;
                allResults.totalSkipped += folderResults.skipped;
                allResults.totalFailed += folderResults.failed;
                allResults.folderResults.push({
                    folder: folderName,
                    path: folderPath,
                    ...folderResults
                });
                if (onProgress) {
                    onProgress({
                        type: 'folder_complete',
                        folder: folderName,
                        ...folderResults
                    });
                }
            }
            catch (error) {
                console_log_it(`Error processing folder ${folderName}: ${error.message}`, 'resize_images_recursive');
                allResults.folderResults.push({
                    folder: folderName,
                    path: folderPath,
                    success: false,
                    error: error.message
                });
            }
        }
        console_log_it(`Recursive resize complete. Total processed: ${allResults.totalProcessed}, skipped: ${allResults.totalSkipped}, failed: ${allResults.totalFailed}`, 'resize_images_recursive');
        return allResults;
    }
    catch (error) {
        console_log_it(`Error in recursive resize: ${error.message}`, 'resize_images_recursive');
        throw new Error(`Failed to resize images recursively: ${error.message}`);
    }
}
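// Illustrative usage sketch (added for this edit, not part of the original module):
// walking a tree and resizing every folder's images. The root path is a
// hypothetical example.
async function exampleRecursiveResizeUsage() {
    const totals = await resizeImagesRecursive('./tmp/dataset', { width: 1024, height: 1024 }, {
        onProgress: async (event) => {
            if (event.type === 'folder_start') {
                console.log(event.message);
            }
        }
    });
    console.log(`Processed ${totals.totalProcessed} images across ${totals.folderResults.length} folders`);
}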
//# sourceMappingURL=capture_training_data_endpoints.js.map
|