@ductape/sdk 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +58 -0
- package/dist/actions/actions.repo.d.ts +0 -0
- package/dist/actions/actions.repo.js +13 -0
- package/dist/actions/actions.repo.js.map +1 -0
- package/dist/actions/actions.service.d.ts +0 -0
- package/dist/actions/actions.service.js +24 -0
- package/dist/actions/actions.service.js.map +1 -0
- package/dist/actions/utils/actions.util.read.d.ts +0 -0
- package/dist/actions/utils/actions.util.read.js +427 -0
- package/dist/actions/utils/actions.util.read.js.map +1 -0
- package/dist/api/services/appApi.service.d.ts +30 -0
- package/dist/api/services/appApi.service.js +157 -0
- package/dist/api/services/appApi.service.js.map +1 -0
- package/dist/api/services/integrationsApi.service.d.ts +18 -0
- package/dist/api/services/integrationsApi.service.js +80 -0
- package/dist/api/services/integrationsApi.service.js.map +1 -0
- package/dist/api/services/logsApi.service.d.ts +11 -0
- package/dist/api/services/logsApi.service.js +50 -0
- package/dist/api/services/logsApi.service.js.map +1 -0
- package/dist/api/services/processorApi.service.d.ts +20 -0
- package/dist/api/services/processorApi.service.js +108 -0
- package/dist/api/services/processorApi.service.js.map +1 -0
- package/dist/api/services/productsApi.service.d.ts +19 -0
- package/dist/api/services/productsApi.service.js +92 -0
- package/dist/api/services/productsApi.service.js.map +1 -0
- package/dist/api/services/userApi.service.d.ts +9 -0
- package/dist/api/services/userApi.service.js +20 -0
- package/dist/api/services/userApi.service.js.map +1 -0
- package/dist/api/services/webhooksApi.service.d.ts +11 -0
- package/dist/api/services/webhooksApi.service.js +51 -0
- package/dist/api/services/webhooksApi.service.js.map +1 -0
- package/dist/api/services/workspaceApi.service.d.ts +9 -0
- package/dist/api/services/workspaceApi.service.js +24 -0
- package/dist/api/services/workspaceApi.service.js.map +1 -0
- package/dist/api/urls.d.ts +43 -0
- package/dist/api/urls.js +83 -0
- package/dist/api/urls.js.map +1 -0
- package/dist/api/utils/auth.utils.d.ts +12 -0
- package/dist/api/utils/auth.utils.js +24 -0
- package/dist/api/utils/auth.utils.js.map +1 -0
- package/dist/api/utils/strings.utils.d.ts +5 -0
- package/dist/api/utils/strings.utils.js +50 -0
- package/dist/api/utils/strings.utils.js.map +1 -0
- package/dist/appBuilder/services/app.service.d.ts +111 -0
- package/dist/appBuilder/services/app.service.js +737 -0
- package/dist/appBuilder/services/app.service.js.map +1 -0
- package/dist/appBuilder/services/appBuilder.service.d.ts +111 -0
- package/dist/appBuilder/services/appBuilder.service.js +662 -0
- package/dist/appBuilder/services/appBuilder.service.js.map +1 -0
- package/dist/appBuilder/utils/objects.utils.d.ts +3 -0
- package/dist/appBuilder/utils/objects.utils.js +9 -0
- package/dist/appBuilder/utils/objects.utils.js.map +1 -0
- package/dist/appBuilder/utils/string.utils.d.ts +2 -0
- package/dist/appBuilder/utils/string.utils.js +57 -0
- package/dist/appBuilder/utils/string.utils.js.map +1 -0
- package/dist/appBuilder/validators/index.d.ts +19 -0
- package/dist/appBuilder/validators/index.js +40 -0
- package/dist/appBuilder/validators/index.js.map +1 -0
- package/dist/appBuilder/validators/joi-validators/create.app.validator.d.ts +3 -0
- package/dist/appBuilder/validators/joi-validators/create.app.validator.js +10 -0
- package/dist/appBuilder/validators/joi-validators/create.app.validator.js.map +1 -0
- package/dist/appBuilder/validators/joi-validators/create.appAction.validator.d.ts +4 -0
- package/dist/appBuilder/validators/joi-validators/create.appAction.validator.js +20 -0
- package/dist/appBuilder/validators/joi-validators/create.appAction.validator.js.map +1 -0
- package/dist/appBuilder/validators/joi-validators/create.appActionResponse.validator.d.ts +7 -0
- package/dist/appBuilder/validators/joi-validators/create.appActionResponse.validator.js +44 -0
- package/dist/appBuilder/validators/joi-validators/create.appActionResponse.validator.js.map +1 -0
- package/dist/appBuilder/validators/joi-validators/create.appAuth.validator.d.ts +3 -0
- package/dist/appBuilder/validators/joi-validators/create.appAuth.validator.js +31 -0
- package/dist/appBuilder/validators/joi-validators/create.appAuth.validator.js.map +1 -0
- package/dist/appBuilder/validators/joi-validators/create.appBody.validators.d.ts +4 -0
- package/dist/appBuilder/validators/joi-validators/create.appBody.validators.js +11 -0
- package/dist/appBuilder/validators/joi-validators/create.appBody.validators.js.map +1 -0
- package/dist/appBuilder/validators/joi-validators/create.appConstants.validator.d.ts +4 -0
- package/dist/appBuilder/validators/joi-validators/create.appConstants.validator.js +12 -0
- package/dist/appBuilder/validators/joi-validators/create.appConstants.validator.js.map +1 -0
- package/dist/appBuilder/validators/joi-validators/create.appEnv.validator.d.ts +4 -0
- package/dist/appBuilder/validators/joi-validators/create.appEnv.validator.js +17 -0
- package/dist/appBuilder/validators/joi-validators/create.appEnv.validator.js.map +1 -0
- package/dist/appBuilder/validators/joi-validators/create.appEvent.validator.d.ts +5 -0
- package/dist/appBuilder/validators/joi-validators/create.appEvent.validator.js +30 -0
- package/dist/appBuilder/validators/joi-validators/create.appEvent.validator.js.map +1 -0
- package/dist/appBuilder/validators/joi-validators/create.appVariable.validator.d.ts +4 -0
- package/dist/appBuilder/validators/joi-validators/create.appVariable.validator.js +14 -0
- package/dist/appBuilder/validators/joi-validators/create.appVariable.validator.js.map +1 -0
- package/dist/appBuilder/validators/joi-validators/sample.validator.d.ts +5 -0
- package/dist/appBuilder/validators/joi-validators/sample.validator.js +26 -0
- package/dist/appBuilder/validators/joi-validators/sample.validator.js.map +1 -0
- package/dist/appBuilder/validators/joi-validators/update.app.validator.d.ts +4 -0
- package/dist/appBuilder/validators/joi-validators/update.app.validator.js +34 -0
- package/dist/appBuilder/validators/joi-validators/update.app.validator.js.map +1 -0
- package/dist/appBuilder/validators/joi-validators/update.appAction.validator.d.ts +4 -0
- package/dist/appBuilder/validators/joi-validators/update.appAction.validator.js +23 -0
- package/dist/appBuilder/validators/joi-validators/update.appAction.validator.js.map +1 -0
- package/dist/appBuilder/validators/joi-validators/update.appActionResponse.validator.d.ts +3 -0
- package/dist/appBuilder/validators/joi-validators/update.appActionResponse.validator.js +21 -0
- package/dist/appBuilder/validators/joi-validators/update.appActionResponse.validator.js.map +1 -0
- package/dist/appBuilder/validators/joi-validators/update.appAuth.validator.d.ts +4 -0
- package/dist/appBuilder/validators/joi-validators/update.appAuth.validator.js +19 -0
- package/dist/appBuilder/validators/joi-validators/update.appAuth.validator.js.map +1 -0
- package/dist/appBuilder/validators/joi-validators/update.appConstants.validator.d.ts +4 -0
- package/dist/appBuilder/validators/joi-validators/update.appConstants.validator.js +12 -0
- package/dist/appBuilder/validators/joi-validators/update.appConstants.validator.js.map +1 -0
- package/dist/appBuilder/validators/joi-validators/update.appEnv.validator.d.ts +4 -0
- package/dist/appBuilder/validators/joi-validators/update.appEnv.validator.js +17 -0
- package/dist/appBuilder/validators/joi-validators/update.appEnv.validator.js.map +1 -0
- package/dist/appBuilder/validators/joi-validators/update.appEvent.validator.d.ts +4 -0
- package/dist/appBuilder/validators/joi-validators/update.appEvent.validator.js +16 -0
- package/dist/appBuilder/validators/joi-validators/update.appEvent.validator.js.map +1 -0
- package/dist/appBuilder/validators/joi-validators/update.appVariables.validator.d.ts +4 -0
- package/dist/appBuilder/validators/joi-validators/update.appVariables.validator.js +14 -0
- package/dist/appBuilder/validators/joi-validators/update.appVariables.validator.js.map +1 -0
- package/dist/appBuilder/validators/joi-validators/update.validation.entityData.validator.d.ts +3 -0
- package/dist/appBuilder/validators/joi-validators/update.validation.entityData.validator.js +27 -0
- package/dist/appBuilder/validators/joi-validators/update.validation.entityData.validator.js.map +1 -0
- package/dist/apps/services/app.service.d.ts +116 -0
- package/dist/apps/services/app.service.js +995 -0
- package/dist/apps/services/app.service.js.map +1 -0
- package/dist/apps/utils/objects.utils.d.ts +4 -0
- package/dist/apps/utils/objects.utils.js +57 -0
- package/dist/apps/utils/objects.utils.js.map +1 -0
- package/dist/apps/utils/string.utils.d.ts +2 -0
- package/dist/apps/utils/string.utils.js +57 -0
- package/dist/apps/utils/string.utils.js.map +1 -0
- package/dist/apps/validators/index.d.ts +17 -0
- package/dist/apps/validators/index.js +39 -0
- package/dist/apps/validators/index.js.map +1 -0
- package/dist/apps/validators/joi-validators/create.app.validator.d.ts +3 -0
- package/dist/apps/validators/joi-validators/create.app.validator.js +43 -0
- package/dist/apps/validators/joi-validators/create.app.validator.js.map +1 -0
- package/dist/apps/validators/joi-validators/create.appAction.validator.d.ts +4 -0
- package/dist/apps/validators/joi-validators/create.appAction.validator.js +52 -0
- package/dist/apps/validators/joi-validators/create.appAction.validator.js.map +1 -0
- package/dist/apps/validators/joi-validators/create.appActionResponse.validator.d.ts +7 -0
- package/dist/apps/validators/joi-validators/create.appActionResponse.validator.js +79 -0
- package/dist/apps/validators/joi-validators/create.appActionResponse.validator.js.map +1 -0
- package/dist/apps/validators/joi-validators/create.appAuth.validator.d.ts +3 -0
- package/dist/apps/validators/joi-validators/create.appAuth.validator.js +64 -0
- package/dist/apps/validators/joi-validators/create.appAuth.validator.js.map +1 -0
- package/dist/apps/validators/joi-validators/create.appBody.validators.d.ts +4 -0
- package/dist/apps/validators/joi-validators/create.appBody.validators.js +44 -0
- package/dist/apps/validators/joi-validators/create.appBody.validators.js.map +1 -0
- package/dist/apps/validators/joi-validators/create.appConstants.validator.d.ts +4 -0
- package/dist/apps/validators/joi-validators/create.appConstants.validator.js +45 -0
- package/dist/apps/validators/joi-validators/create.appConstants.validator.js.map +1 -0
- package/dist/apps/validators/joi-validators/create.appEnv.validator.d.ts +4 -0
- package/dist/apps/validators/joi-validators/create.appEnv.validator.js +49 -0
- package/dist/apps/validators/joi-validators/create.appEnv.validator.js.map +1 -0
- package/dist/apps/validators/joi-validators/create.appEvent.validator.d.ts +5 -0
- package/dist/apps/validators/joi-validators/create.appEvent.validator.js +30 -0
- package/dist/apps/validators/joi-validators/create.appEvent.validator.js.map +1 -0
- package/dist/apps/validators/joi-validators/create.appVariable.validator.d.ts +4 -0
- package/dist/apps/validators/joi-validators/create.appVariable.validator.js +47 -0
- package/dist/apps/validators/joi-validators/create.appVariable.validator.js.map +1 -0
- package/dist/apps/validators/joi-validators/create.appWebhook.validator.d.ts +4 -0
- package/dist/apps/validators/joi-validators/create.appWebhook.validator.js +60 -0
- package/dist/apps/validators/joi-validators/create.appWebhook.validator.js.map +1 -0
- package/dist/apps/validators/joi-validators/create.appWebhookEvent.validator.d.ts +3 -0
- package/dist/apps/validators/joi-validators/create.appWebhookEvent.validator.js +47 -0
- package/dist/apps/validators/joi-validators/create.appWebhookEvent.validator.js.map +1 -0
- package/dist/apps/validators/joi-validators/sample.validator.d.ts +6 -0
- package/dist/apps/validators/joi-validators/sample.validator.js +65 -0
- package/dist/apps/validators/joi-validators/sample.validator.js.map +1 -0
- package/dist/apps/validators/joi-validators/update.app.validator.d.ts +4 -0
- package/dist/apps/validators/joi-validators/update.app.validator.js +64 -0
- package/dist/apps/validators/joi-validators/update.app.validator.js.map +1 -0
- package/dist/apps/validators/joi-validators/update.appAction.validator.d.ts +4 -0
- package/dist/apps/validators/joi-validators/update.appAction.validator.js +55 -0
- package/dist/apps/validators/joi-validators/update.appAction.validator.js.map +1 -0
- package/dist/apps/validators/joi-validators/update.appActionResponse.validator.d.ts +3 -0
- package/dist/apps/validators/joi-validators/update.appActionResponse.validator.js +24 -0
- package/dist/apps/validators/joi-validators/update.appActionResponse.validator.js.map +1 -0
- package/dist/apps/validators/joi-validators/update.appAuth.validator.d.ts +4 -0
- package/dist/apps/validators/joi-validators/update.appAuth.validator.js +52 -0
- package/dist/apps/validators/joi-validators/update.appAuth.validator.js.map +1 -0
- package/dist/apps/validators/joi-validators/update.appConstants.validator.d.ts +4 -0
- package/dist/apps/validators/joi-validators/update.appConstants.validator.js +45 -0
- package/dist/apps/validators/joi-validators/update.appConstants.validator.js.map +1 -0
- package/dist/apps/validators/joi-validators/update.appEnv.validator.d.ts +4 -0
- package/dist/apps/validators/joi-validators/update.appEnv.validator.js +49 -0
- package/dist/apps/validators/joi-validators/update.appEnv.validator.js.map +1 -0
- package/dist/apps/validators/joi-validators/update.appEvent.validator.d.ts +4 -0
- package/dist/apps/validators/joi-validators/update.appEvent.validator.js +16 -0
- package/dist/apps/validators/joi-validators/update.appEvent.validator.js.map +1 -0
- package/dist/apps/validators/joi-validators/update.appVariables.validator.d.ts +4 -0
- package/dist/apps/validators/joi-validators/update.appVariables.validator.js +47 -0
- package/dist/apps/validators/joi-validators/update.appVariables.validator.js.map +1 -0
- package/dist/apps/validators/joi-validators/update.appWebhook.validator.d.ts +4 -0
- package/dist/apps/validators/joi-validators/update.appWebhook.validator.js +59 -0
- package/dist/apps/validators/joi-validators/update.appWebhook.validator.js.map +1 -0
- package/dist/apps/validators/joi-validators/update.appWebhookEvent.validator.d.ts +3 -0
- package/dist/apps/validators/joi-validators/update.appWebhookEvent.validator.js +46 -0
- package/dist/apps/validators/joi-validators/update.appWebhookEvent.validator.js.map +1 -0
- package/dist/apps/validators/joi-validators/update.validation.entityData.validator.d.ts +3 -0
- package/dist/apps/validators/joi-validators/update.validation.entityData.validator.js +60 -0
- package/dist/apps/validators/joi-validators/update.validation.entityData.validator.js.map +1 -0
- package/dist/clients/apps.client.d.ts +4 -0
- package/dist/clients/apps.client.js +28 -0
- package/dist/clients/apps.client.js.map +1 -0
- package/dist/clients/email.client.d.ts +3 -0
- package/dist/clients/email.client.js +32 -0
- package/dist/clients/email.client.js.map +1 -0
- package/dist/clients/expo.client.d.ts +3 -0
- package/dist/clients/expo.client.js +28 -0
- package/dist/clients/expo.client.js.map +1 -0
- package/dist/clients/function.client.d.ts +3 -0
- package/dist/clients/function.client.js +27 -0
- package/dist/clients/function.client.js.map +1 -0
- package/dist/clients/http.client.d.ts +3 -0
- package/dist/clients/http.client.js +27 -0
- package/dist/clients/http.client.js.map +1 -0
- package/dist/clients/integrations.client.d.ts +2 -0
- package/dist/clients/integrations.client.js +26 -0
- package/dist/clients/integrations.client.js.map +1 -0
- package/dist/clients/logs.client.d.ts +3 -0
- package/dist/clients/logs.client.js +33 -0
- package/dist/clients/logs.client.js.map +1 -0
- package/dist/clients/products.client.d.ts +3 -0
- package/dist/clients/products.client.js +29 -0
- package/dist/clients/products.client.js.map +1 -0
- package/dist/clients/users.client.d.ts +3 -0
- package/dist/clients/users.client.js +33 -0
- package/dist/clients/users.client.js.map +1 -0
- package/dist/clients/webhooks.client.d.ts +2 -0
- package/dist/clients/webhooks.client.js +33 -0
- package/dist/clients/webhooks.client.js.map +1 -0
- package/dist/clients/workspace.client.d.ts +3 -0
- package/dist/clients/workspace.client.js +33 -0
- package/dist/clients/workspace.client.js.map +1 -0
- package/dist/imports/imports.repo.d.ts +0 -0
- package/dist/imports/imports.repo.js +1 -0
- package/dist/imports/imports.repo.js.map +1 -0
- package/dist/imports/imports.service.d.ts +24 -0
- package/dist/imports/imports.service.js +111 -0
- package/dist/imports/imports.service.js.map +1 -0
- package/dist/imports/imports.types.d.ts +104 -0
- package/dist/imports/imports.types.js +33 -0
- package/dist/imports/imports.types.js.map +1 -0
- package/dist/imports/openAPI3.0.types.d.ts +52 -0
- package/dist/imports/openAPI3.0.types.js +3 -0
- package/dist/imports/openAPI3.0.types.js.map +1 -0
- package/dist/imports/repos/openApi.repo.d.ts +13 -0
- package/dist/imports/repos/openApi.repo.js +33 -0
- package/dist/imports/repos/openApi.repo.js.map +1 -0
- package/dist/imports/repos/postmanV21.repo.d.ts +14 -0
- package/dist/imports/repos/postmanV21.repo.js +214 -0
- package/dist/imports/repos/postmanV21.repo.js.map +1 -0
- package/dist/imports/utils/imports.utils.d.ts +80 -0
- package/dist/imports/utils/imports.utils.js +114 -0
- package/dist/imports/utils/imports.utils.js.map +1 -0
- package/dist/imports/validators/index.d.ts +0 -0
- package/dist/imports/validators/index.js +1 -0
- package/dist/imports/validators/index.js.map +1 -0
- package/dist/imports/validators/joi-validators/postmanV21.validator.d.ts +5 -0
- package/dist/imports/validators/joi-validators/postmanV21.validator.js +96 -0
- package/dist/imports/validators/joi-validators/postmanV21.validator.js.map +1 -0
- package/dist/index.d.ts +325 -0
- package/dist/index.js +923 -0
- package/dist/index.js.map +1 -0
- package/dist/inputs/inputs.repo.d.ts +7 -0
- package/dist/inputs/inputs.repo.js +16 -0
- package/dist/inputs/inputs.repo.js.map +1 -0
- package/dist/inputs/inputs.service.d.ts +61 -0
- package/dist/inputs/inputs.service.js +468 -0
- package/dist/inputs/inputs.service.js.map +1 -0
- package/dist/inputs/utils/inputs.utils.create.d.ts +11 -0
- package/dist/inputs/utils/inputs.utils.create.js +273 -0
- package/dist/inputs/utils/inputs.utils.create.js.map +1 -0
- package/dist/inputs/validators/inputs.validator.parse.d.ts +4 -0
- package/dist/inputs/validators/inputs.validator.parse.js +52 -0
- package/dist/inputs/validators/inputs.validator.parse.js.map +1 -0
- package/dist/integrationsBuilder/services/integration.service.d.ts +138 -0
- package/dist/integrationsBuilder/services/integration.service.js +1148 -0
- package/dist/integrationsBuilder/services/integration.service.js.map +1 -0
- package/dist/integrationsBuilder/services/integrationBuilder.service.d.ts +130 -0
- package/dist/integrationsBuilder/services/integrationBuilder.service.js +1017 -0
- package/dist/integrationsBuilder/services/integrationBuilder.service.js.map +1 -0
- package/dist/integrationsBuilder/utils/objects.utils.d.ts +2 -0
- package/dist/integrationsBuilder/utils/objects.utils.js +48 -0
- package/dist/integrationsBuilder/utils/objects.utils.js.map +1 -0
- package/dist/integrationsBuilder/utils/string.utils.d.ts +1 -0
- package/dist/integrationsBuilder/utils/string.utils.js +9 -0
- package/dist/integrationsBuilder/utils/string.utils.js.map +1 -0
- package/dist/integrationsBuilder/validators/index.d.ts +18 -0
- package/dist/integrationsBuilder/validators/index.js +38 -0
- package/dist/integrationsBuilder/validators/index.js.map +1 -0
- package/dist/integrationsBuilder/validators/joi-validators/create.integration.validator.d.ts +3 -0
- package/dist/integrationsBuilder/validators/joi-validators/create.integration.validator.js +10 -0
- package/dist/integrationsBuilder/validators/joi-validators/create.integration.validator.js.map +1 -0
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationApp.validator.d.ts +4 -0
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationApp.validator.js +26 -0
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationApp.validator.js.map +1 -0
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationCache.validator.d.ts +3 -0
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationCache.validator.js +8 -0
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationCache.validator.js.map +1 -0
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationDatabase.validator.d.ts +3 -0
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationDatabase.validator.js +8 -0
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationDatabase.validator.js.map +1 -0
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationEnv.validator.d.ts +3 -0
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationEnv.validator.js +10 -0
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationEnv.validator.js.map +1 -0
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationFeature.validator.d.ts +3 -0
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationFeature.validator.js +60 -0
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationFeature.validator.js.map +1 -0
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationFunction.validator.d.ts +3 -0
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationFunction.validator.js +8 -0
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationFunction.validator.js.map +1 -0
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationJob.validator.d.ts +3 -0
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationJob.validator.js +8 -0
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationJob.validator.js.map +1 -0
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationNotification.validator.d.ts +3 -0
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationNotification.validator.js +8 -0
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationNotification.validator.js.map +1 -0
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationApp.validator.d.ts +3 -0
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationApp.validator.js +9 -0
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationApp.validator.js.map +1 -0
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationCache.validator.d.ts +3 -0
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationCache.validator.js +8 -0
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationCache.validator.js.map +1 -0
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationDatabase.validator.d.ts +3 -0
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationDatabase.validator.js +8 -0
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationDatabase.validator.js.map +1 -0
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationEnv.validator.d.ts +3 -0
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationEnv.validator.js +8 -0
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationEnv.validator.js.map +1 -0
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationFeature.validator.d.ts +3 -0
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationFeature.validator.js +8 -0
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationFeature.validator.js.map +1 -0
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationFunction.validator copy.d.ts +3 -0
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationFunction.validator copy.js +8 -0
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationFunction.validator copy.js.map +1 -0
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationJob.validator.d.ts +3 -0
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationJob.validator.js +8 -0
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationJob.validator.js.map +1 -0
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationNotification.validator.d.ts +3 -0
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationNotification.validator.js +8 -0
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationNotification.validator.js.map +1 -0
- package/dist/logs/logs.repo.d.ts +6 -0
- package/dist/logs/logs.repo.js +12 -0
- package/dist/logs/logs.repo.js.map +1 -0
- package/dist/logs/logs.service.d.ts +29 -0
- package/dist/logs/logs.service.js +70 -0
- package/dist/logs/logs.service.js.map +1 -0
- package/dist/logs/logs.types.d.ts +44 -0
- package/dist/logs/logs.types.js +31 -0
- package/dist/logs/logs.types.js.map +1 -0
- package/dist/logs/utils/logs.utils.create.d.ts +0 -0
- package/dist/logs/utils/logs.utils.create.js +1 -0
- package/dist/logs/utils/logs.utils.create.js.map +1 -0
- package/dist/logs/utils/logs.utils.errors.d.ts +2 -0
- package/dist/logs/utils/logs.utils.errors.js +8 -0
- package/dist/logs/utils/logs.utils.errors.js.map +1 -0
- package/dist/postman.d.ts +1 -0
- package/dist/postman.js +21674 -0
- package/dist/postman.js.map +1 -0
- package/dist/processor/repos/mongo.repo.d.ts +39 -0
- package/dist/processor/repos/mongo.repo.js +260 -0
- package/dist/processor/repos/mongo.repo.js.map +1 -0
- package/dist/processor/repos/postgres.repo.d.ts +31 -0
- package/dist/processor/repos/postgres.repo.js +185 -0
- package/dist/processor/repos/postgres.repo.js.map +1 -0
- package/dist/processor/repos/sms.repo.d.ts +39 -0
- package/dist/processor/repos/sms.repo.js +111 -0
- package/dist/processor/repos/sms.repo.js.map +1 -0
- package/dist/processor/services/fallback.service.d.ts +6 -0
- package/dist/processor/services/fallback.service.js +59 -0
- package/dist/processor/services/fallback.service.js.map +1 -0
- package/dist/processor/services/messagebrokers/aws-sqs.service.d.ts +15 -0
- package/dist/processor/services/messagebrokers/aws-sqs.service.js +77 -0
- package/dist/processor/services/messagebrokers/aws-sqs.service.js.map +1 -0
- package/dist/processor/services/messagebrokers/google-pubsub.service.d.ts +16 -0
- package/dist/processor/services/messagebrokers/google-pubsub.service.js +34 -0
- package/dist/processor/services/messagebrokers/google-pubsub.service.js.map +1 -0
- package/dist/processor/services/messagebrokers/index.d.ts +3 -0
- package/dist/processor/services/messagebrokers/index.js +26 -0
- package/dist/processor/services/messagebrokers/index.js.map +1 -0
- package/dist/processor/services/messagebrokers/kafka.service.d.ts +14 -0
- package/dist/processor/services/messagebrokers/kafka.service.js +45 -0
- package/dist/processor/services/messagebrokers/kafka.service.js.map +1 -0
- package/dist/processor/services/messagebrokers/messagebrokers.type.d.ts +6 -0
- package/dist/processor/services/messagebrokers/messagebrokers.type.js +3 -0
- package/dist/processor/services/messagebrokers/messagebrokers.type.js.map +1 -0
- package/dist/processor/services/messagebrokers/rabbitmq.service.d.ts +14 -0
- package/dist/processor/services/messagebrokers/rabbitmq.service.js +67 -0
- package/dist/processor/services/messagebrokers/rabbitmq.service.js.map +1 -0
- package/dist/processor/services/messagebrokers/redis.service.d.ts +16 -0
- package/dist/processor/services/messagebrokers/redis.service.js +34 -0
- package/dist/processor/services/messagebrokers/redis.service.js.map +1 -0
- package/dist/processor/services/processor.service.d.ts +189 -0
- package/dist/processor/services/processor.service.js +3078 -0
- package/dist/processor/services/processor.service.js.map +1 -0
- package/dist/processor/services/quota.service.d.ts +22 -0
- package/dist/processor/services/quota.service.js +54 -0
- package/dist/processor/services/quota.service.js.map +1 -0
- package/dist/processor/utils/mongo.util.d.ts +0 -0
- package/dist/processor/utils/mongo.util.js +152 -0
- package/dist/processor/utils/mongo.util.js.map +1 -0
- package/dist/processor/utils/postgres.util.d.ts +14 -0
- package/dist/processor/utils/postgres.util.js +83 -0
- package/dist/processor/utils/postgres.util.js.map +1 -0
- package/dist/processor/utils/processor.utils.d.ts +323 -0
- package/dist/processor/utils/processor.utils.js +906 -0
- package/dist/processor/utils/processor.utils.js.map +1 -0
- package/dist/processor/utils/storage.util.d.ts +8 -0
- package/dist/processor/utils/storage.util.js +83 -0
- package/dist/processor/utils/storage.util.js.map +1 -0
- package/dist/products/services/products.service.d.ts +225 -0
- package/dist/products/services/products.service.js +2762 -0
- package/dist/products/services/products.service.js.map +1 -0
- package/dist/products/utils/crypt.utils.d.ts +1 -0
- package/dist/products/utils/crypt.utils.js +17 -0
- package/dist/products/utils/crypt.utils.js.map +1 -0
- package/dist/products/utils/functions.utils.d.ts +12 -0
- package/dist/products/utils/functions.utils.js +283 -0
- package/dist/products/utils/functions.utils.js.map +1 -0
- package/dist/products/utils/objects.utils.d.ts +13 -0
- package/dist/products/utils/objects.utils.js +89 -0
- package/dist/products/utils/objects.utils.js.map +1 -0
- package/dist/products/utils/string.utils.d.ts +12 -0
- package/dist/products/utils/string.utils.js +155 -0
- package/dist/products/utils/string.utils.js.map +1 -0
- package/dist/products/validators/index.d.ts +25 -0
- package/dist/products/validators/index.js +56 -0
- package/dist/products/validators/index.js.map +1 -0
- package/dist/products/validators/joi-validators/create.html.validator.d.ts +2 -0
- package/dist/products/validators/joi-validators/create.html.validator.js +55 -0
- package/dist/products/validators/joi-validators/create.html.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/create.product.validator.d.ts +3 -0
- package/dist/products/validators/joi-validators/create.product.validator.js +10 -0
- package/dist/products/validators/joi-validators/create.product.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/create.productApp.validator.d.ts +4 -0
- package/dist/products/validators/joi-validators/create.productApp.validator.js +61 -0
- package/dist/products/validators/joi-validators/create.productApp.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/create.productCache.validator.d.ts +3 -0
- package/dist/products/validators/joi-validators/create.productCache.validator.js +46 -0
- package/dist/products/validators/joi-validators/create.productCache.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/create.productDatabase.validator.d.ts +3 -0
- package/dist/products/validators/joi-validators/create.productDatabase.validator.js +72 -0
- package/dist/products/validators/joi-validators/create.productDatabase.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/create.productDatabaseAction.validator.d.ts +7 -0
- package/dist/products/validators/joi-validators/create.productDatabaseAction.validator.js +187 -0
- package/dist/products/validators/joi-validators/create.productDatabaseAction.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/create.productDatabaseMigration.validator.d.ts +4 -0
- package/dist/products/validators/joi-validators/create.productDatabaseMigration.validator.js +63 -0
- package/dist/products/validators/joi-validators/create.productDatabaseMigration.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/create.productEnv.validator.d.ts +3 -0
- package/dist/products/validators/joi-validators/create.productEnv.validator.js +44 -0
- package/dist/products/validators/joi-validators/create.productEnv.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/create.productFallback.validator.d.ts +3 -0
- package/dist/products/validators/joi-validators/create.productFallback.validator.js +58 -0
- package/dist/products/validators/joi-validators/create.productFallback.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/create.productFeature.validator.d.ts +12 -0
- package/dist/products/validators/joi-validators/create.productFeature.validator.js +124 -0
- package/dist/products/validators/joi-validators/create.productFeature.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/create.productFunction.validator.d.ts +3 -0
- package/dist/products/validators/joi-validators/create.productFunction.validator.js +57 -0
- package/dist/products/validators/joi-validators/create.productFunction.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/create.productJob.validator.d.ts +12 -0
- package/dist/products/validators/joi-validators/create.productJob.validator.js +60 -0
- package/dist/products/validators/joi-validators/create.productJob.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/create.productMessageBrokerTopic.validator.d.ts +3 -0
- package/dist/products/validators/joi-validators/create.productMessageBrokerTopic.validator.js +51 -0
- package/dist/products/validators/joi-validators/create.productMessageBrokerTopic.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/create.productMessageBrokers.validator.d.ts +3 -0
- package/dist/products/validators/joi-validators/create.productMessageBrokers.validator.js +137 -0
- package/dist/products/validators/joi-validators/create.productMessageBrokers.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/create.productNotification.validator.d.ts +4 -0
- package/dist/products/validators/joi-validators/create.productNotification.validator.js +142 -0
- package/dist/products/validators/joi-validators/create.productNotification.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/create.productNotificationMessage.validator.d.ts +3 -0
- package/dist/products/validators/joi-validators/create.productNotificationMessage.validator.js +57 -0
- package/dist/products/validators/joi-validators/create.productNotificationMessage.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/create.productNotifications.validator.d.ts +0 -0
- package/dist/products/validators/joi-validators/create.productNotifications.validator.js +1 -0
- package/dist/products/validators/joi-validators/create.productNotifications.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/create.productQuota.validator.d.ts +3 -0
- package/dist/products/validators/joi-validators/create.productQuota.validator.js +59 -0
- package/dist/products/validators/joi-validators/create.productQuota.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/create.productStorage.validator.d.ts +3 -0
- package/dist/products/validators/joi-validators/create.productStorage.validator.js +99 -0
- package/dist/products/validators/joi-validators/create.productStorage.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/create.products.validator.d.ts +3 -0
- package/dist/products/validators/joi-validators/create.products.validator.js +43 -0
- package/dist/products/validators/joi-validators/create.products.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/create.requestAction.validator.d.ts +2 -0
- package/dist/products/validators/joi-validators/create.requestAction.validator.js +45 -0
- package/dist/products/validators/joi-validators/create.requestAction.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/register.productWebhooks.validator.d.ts +3 -0
- package/dist/products/validators/joi-validators/register.productWebhooks.validator.js +48 -0
- package/dist/products/validators/joi-validators/register.productWebhooks.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/update.dataValue.validator.d.ts +3 -0
- package/dist/products/validators/joi-validators/update.dataValue.validator.js +101 -0
- package/dist/products/validators/joi-validators/update.dataValue.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/update.productApp.validator.d.ts +3 -0
- package/dist/products/validators/joi-validators/update.productApp.validator.js +42 -0
- package/dist/products/validators/joi-validators/update.productApp.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/update.productCache.validator.d.ts +3 -0
- package/dist/products/validators/joi-validators/update.productCache.validator.js +45 -0
- package/dist/products/validators/joi-validators/update.productCache.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/update.productDatabase.validator.d.ts +3 -0
- package/dist/products/validators/joi-validators/update.productDatabase.validator.js +71 -0
- package/dist/products/validators/joi-validators/update.productDatabase.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/update.productDatabaseAction.validator.d.ts +3 -0
- package/dist/products/validators/joi-validators/update.productDatabaseAction.validator.js +79 -0
- package/dist/products/validators/joi-validators/update.productDatabaseAction.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/update.productDatabaseMigration.validator.d.ts +4 -0
- package/dist/products/validators/joi-validators/update.productDatabaseMigration.validator.js +64 -0
- package/dist/products/validators/joi-validators/update.productDatabaseMigration.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/update.productEnv.validator.d.ts +3 -0
- package/dist/products/validators/joi-validators/update.productEnv.validator.js +43 -0
- package/dist/products/validators/joi-validators/update.productEnv.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/update.productFallback.validator.d.ts +3 -0
- package/dist/products/validators/joi-validators/update.productFallback.validator.js +55 -0
- package/dist/products/validators/joi-validators/update.productFallback.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/update.productFeature.validator.d.ts +3 -0
- package/dist/products/validators/joi-validators/update.productFeature.validator.js +51 -0
- package/dist/products/validators/joi-validators/update.productFeature.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/update.productFunction.validator.d.ts +3 -0
- package/dist/products/validators/joi-validators/update.productFunction.validator.js +41 -0
- package/dist/products/validators/joi-validators/update.productFunction.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/update.productJob.validator.d.ts +3 -0
- package/dist/products/validators/joi-validators/update.productJob.validator.js +50 -0
- package/dist/products/validators/joi-validators/update.productJob.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/update.productMessageBrokerTopic.validator.d.ts +3 -0
- package/dist/products/validators/joi-validators/update.productMessageBrokerTopic.validator.js +52 -0
- package/dist/products/validators/joi-validators/update.productMessageBrokerTopic.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/update.productNotification.validator.d.ts +3 -0
- package/dist/products/validators/joi-validators/update.productNotification.validator.js +47 -0
- package/dist/products/validators/joi-validators/update.productNotification.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/update.productNotificationMessage.validator.d.ts +3 -0
- package/dist/products/validators/joi-validators/update.productNotificationMessage.validator.js +57 -0
- package/dist/products/validators/joi-validators/update.productNotificationMessage.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/update.productQuota.validator.d.ts +3 -0
- package/dist/products/validators/joi-validators/update.productQuota.validator.js +59 -0
- package/dist/products/validators/joi-validators/update.productQuota.validator.js.map +1 -0
- package/dist/test/test.appBuilder.d.ts +1 -0
- package/dist/test/test.appBuilder.js +16 -0
- package/dist/test/test.appBuilder.js.map +1 -0
- package/dist/test/test.import.d.ts +1 -0
- package/dist/test/test.import.js +1460 -0
- package/dist/test/test.import.js.map +1 -0
- package/dist/test/test.imports.d.ts +1 -0
- package/dist/test/test.imports.js +62 -0
- package/dist/test/test.imports.js.map +1 -0
- package/dist/test/test.logs.d.ts +1 -0
- package/dist/test/test.logs.js +19 -0
- package/dist/test/test.logs.js.map +1 -0
- package/dist/test/test.processor.d.ts +1 -0
- package/dist/test/test.processor.js +123 -0
- package/dist/test/test.processor.js.map +1 -0
- package/dist/test/test.productBuilder.d.ts +1 -0
- package/dist/test/test.productBuilder.js +661 -0
- package/dist/test/test.productBuilder.js.map +1 -0
- package/dist/test.appBuilder.d.ts +1 -0
- package/dist/test.appBuilder.js +14 -0
- package/dist/test.appBuilder.js.map +1 -0
- package/dist/test.import.d.ts +0 -0
- package/dist/test.import.js +24 -0
- package/dist/test.import.js.map +1 -0
- package/dist/test.imports.d.ts +1 -0
- package/dist/test.imports.js +28 -0
- package/dist/test.imports.js.map +1 -0
- package/dist/test.integrationBuilder.d.ts +1 -0
- package/dist/test.integrationBuilder.js +276 -0
- package/dist/test.integrationBuilder.js.map +1 -0
- package/dist/test.processor.d.ts +1 -0
- package/dist/test.processor.js +23 -0
- package/dist/test.processor.js.map +1 -0
- package/dist/test.utils.d.ts +1 -0
- package/dist/test.utils.js +25 -0
- package/dist/test.utils.js.map +1 -0
- package/dist/types/actions.types.d.ts +0 -0
- package/dist/types/actions.types.js +1 -0
- package/dist/types/actions.types.js.map +1 -0
- package/dist/types/appBuilder.types.d.ts +293 -0
- package/dist/types/appBuilder.types.js +3 -0
- package/dist/types/appBuilder.types.js.map +1 -0
- package/dist/types/database.types.d.ts +9 -0
- package/dist/types/database.types.js +3 -0
- package/dist/types/database.types.js.map +1 -0
- package/dist/types/enums.d.ts +215 -0
- package/dist/types/enums.js +244 -0
- package/dist/types/enums.js.map +1 -0
- package/dist/types/index.d.ts +12 -0
- package/dist/types/index.js +29 -0
- package/dist/types/index.js.map +1 -0
- package/dist/types/index.types.d.ts +26 -0
- package/dist/types/index.types.js +10 -0
- package/dist/types/index.types.js.map +1 -0
- package/dist/types/inputs.types.d.ts +111 -0
- package/dist/types/inputs.types.js +43 -0
- package/dist/types/inputs.types.js.map +1 -0
- package/dist/types/integrationsBuilder.types.d.ts +276 -0
- package/dist/types/integrationsBuilder.types.js +51 -0
- package/dist/types/integrationsBuilder.types.js.map +1 -0
- package/dist/types/logs.types.d.ts +19 -0
- package/dist/types/logs.types.js +8 -0
- package/dist/types/logs.types.js.map +1 -0
- package/dist/types/processor.types.d.ts +270 -0
- package/dist/types/processor.types.js +11 -0
- package/dist/types/processor.types.js.map +1 -0
- package/dist/types/productsBuilder.types.d.ts +560 -0
- package/dist/types/productsBuilder.types.js +89 -0
- package/dist/types/productsBuilder.types.js.map +1 -0
- package/dist/types/requests.types.d.ts +6 -0
- package/dist/types/requests.types.js +3 -0
- package/dist/types/requests.types.js.map +1 -0
- package/dist/types/workspaces.types.d.ts +28 -0
- package/dist/types/workspaces.types.js +4 -0
- package/dist/types/workspaces.types.js.map +1 -0
- package/dist/utils/index.d.ts +4 -0
- package/dist/utils/index.js +79 -0
- package/dist/utils/index.js.map +1 -0
- package/package.json +63 -0
|
@@ -0,0 +1,3078 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
+
var ownKeys = function(o) {
|
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
+
var ar = [];
|
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
+
return ar;
|
|
24
|
+
};
|
|
25
|
+
return ownKeys(o);
|
|
26
|
+
};
|
|
27
|
+
return function (mod) {
|
|
28
|
+
if (mod && mod.__esModule) return mod;
|
|
29
|
+
var result = {};
|
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
+
__setModuleDefault(result, mod);
|
|
32
|
+
return result;
|
|
33
|
+
};
|
|
34
|
+
})();
|
|
35
|
+
// tsc-emitted rest helper: returns a shallow copy of `s` without the keys
// listed in `e` (supports object-rest destructuring, e.g. `{ a, ...rest }`).
// Enumerable symbol keys are copied too, since `for...in` skips them.
var __rest = (this && this.__rest) || function (s, e) {
    var t = {};
    for (var p in s) {
        if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) {
            t[p] = s[p];
        }
    }
    if (s != null && typeof Object.getOwnPropertySymbols === "function") {
        var syms = Object.getOwnPropertySymbols(s);
        for (var i = 0; i < syms.length; i++) {
            var sym = syms[i];
            if (e.indexOf(sym) < 0 && Object.prototype.propertyIsEnumerable.call(s, sym)) {
                t[sym] = s[sym];
            }
        }
    }
    return t;
};
|
|
46
|
+
// tsc-emitted default-import helper: emulates `import x from 'mod'` for
// CommonJS modules by wrapping non-ES-module exports in `{ default: mod }`.
var __importDefault = (this && this.__importDefault) || function (mod) {
    // Real ES module namespace: already has a `default` binding.
    if (mod && mod.__esModule) {
        return mod;
    }
    // CommonJS export: wrap so `.default` resolves to the module itself.
    return { "default": mod };
};
|
|
49
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
50
|
+
const products_service_1 = __importDefault(require("../../products/services/products.service"));
|
|
51
|
+
const types_1 = require("../../types");
|
|
52
|
+
const logs_service_1 = __importDefault(require("../../logs/logs.service"));
|
|
53
|
+
const inputs_service_1 = __importDefault(require("../../inputs/inputs.service"));
|
|
54
|
+
const processor_utils_1 = require("../utils/processor.utils");
|
|
55
|
+
const http_client_1 = __importDefault(require("../../clients/http.client"));
|
|
56
|
+
const processorApi_service_1 = require("../../api/services/processorApi.service");
|
|
57
|
+
const expo_client_1 = __importDefault(require("../../clients/expo.client"));
|
|
58
|
+
const handlebars_1 = require("handlebars");
|
|
59
|
+
const string_utils_1 = require("../../products/utils/string.utils");
|
|
60
|
+
const create_productFeature_validator_1 = require("../../products/validators/joi-validators/create.productFeature.validator");
|
|
61
|
+
const validators_1 = require("../../products/validators");
|
|
62
|
+
const uuid_1 = require("uuid");
|
|
63
|
+
const urls_1 = require("../../api/urls");
|
|
64
|
+
//import { createBrokerService } from './messagebrokers';
|
|
65
|
+
const date_fns_1 = require("date-fns");
|
|
66
|
+
const mongo_repo_1 = require("../repos/mongo.repo");
|
|
67
|
+
const postgres_repo_1 = require("../repos/postgres.repo");
|
|
68
|
+
const storage_util_1 = require("../utils/storage.util");
|
|
69
|
+
const sms_repo_1 = require("../repos/sms.repo");
|
|
70
|
+
/**
 * Lazily loads the message-broker service factory.
 *
 * Returns null in browser-like environments (where `window` is defined);
 * on the server it dynamically requires './messagebrokers' so browser
 * bundles never pull in the Node-only broker dependencies.
 *
 * @returns {Promise<Function|null>} createBrokerService, or null in browsers.
 */
async function loadBrokerService() {
    const isBrowser = typeof window !== 'undefined';
    if (isBrowser) {
        return null;
    }
    const brokerModule = await Promise.resolve().then(() => __importStar(require('./messagebrokers')));
    return brokerModule.createBrokerService;
}
|
|
77
|
+
class ProcessorService {
|
|
78
|
+
constructor({ workspace_id, public_key, user_id, token, env_type, redis_client }) {
|
|
79
|
+
this.workspace_id = workspace_id;
|
|
80
|
+
this.public_key = public_key;
|
|
81
|
+
this.user_id = user_id;
|
|
82
|
+
this.token = token;
|
|
83
|
+
this.productBuilderService = new products_service_1.default({
|
|
84
|
+
workspace_id,
|
|
85
|
+
public_key,
|
|
86
|
+
user_id,
|
|
87
|
+
token,
|
|
88
|
+
env_type,
|
|
89
|
+
});
|
|
90
|
+
this.inputService = new inputs_service_1.default();
|
|
91
|
+
this.processingOutput = {
|
|
92
|
+
success: [],
|
|
93
|
+
failure: [],
|
|
94
|
+
waiting: [],
|
|
95
|
+
skipped: [],
|
|
96
|
+
};
|
|
97
|
+
this.apps = [];
|
|
98
|
+
this.processorApiService = new processorApi_service_1.ProcessorApiService(env_type);
|
|
99
|
+
this.environment = env_type;
|
|
100
|
+
if (redis_client) {
|
|
101
|
+
this.redisClient = redis_client;
|
|
102
|
+
}
|
|
103
|
+
}
|
|
104
|
+
/**
 * Registers a product webhook against a third-party app for every product
 * environment supplied in `data.envs`, then persists all registrations via
 * the webhook API in one call.
 *
 * Flow per env: validate the auth mapping, resolve app/product env slugs,
 * mint a UUID-based receiver URL, and — when the app's webhook definition
 * carries a registration sample — build the registration payload and POST it
 * to the app's registration endpoint (marking the registration `active`).
 *
 * @param data - { product, access_tag, webhook_tag, envs } where each env
 *               carries { slug, auth, url, method }.
 * @throws Error when webhooks/tags/envs cannot be resolved, or when the URL
 *               embedded in the sample payload disagrees with `env.url`.
 */
async registerWebhook(data) {
    const { product: product_tag, access_tag, webhook_tag, envs } = data;
    // Load the product into the builder so the fetch* calls below operate
    // on the product identified by `product_tag`.
    await this.productBuilderService.initializeProductByTag(product_tag);
    const product = this.productBuilderService.fetchProduct();
    const { version, envs: appEnvs } = this.productBuilderService.fetchApp(access_tag);
    const app = await this.productBuilderService.fetchThirdPartyAppByAccessTag(access_tag);
    // NOTE(review): if no version matches, .find returns undefined and this
    // destructuring throws a TypeError rather than a descriptive error.
    const { webhooks } = app.versions.find((data) => data.tag === version);
    if (!webhooks) {
        throw new Error(`Webhooks not found for app ${access_tag}`);
    }
    await validators_1.RegisterWebhookEnvSchema.validateAsync(envs);
    const webhook = webhooks.find((data) => data.tag === webhook_tag);
    if (!webhook) {
        throw new Error(`Webhook tag ${webhook_tag} not found`);
    }
    const productEnvs = this.productBuilderService.fetchEnvs();
    // Every product env must appear in the caller-supplied `envs` list.
    // NOTE(review): .map is used purely for its side effect (validation);
    // forEach would express the intent better.
    productEnvs.map((env) => {
        const exists = envs.findIndex((dbEnv) => dbEnv.slug === env.slug);
        if (exists === -1) {
            throw new Error(`Product env ${env.slug} is not defined, please provide connection details`);
        }
    });
    // Build one registration record per env, in parallel.
    const dataArray = await Promise.all(envs.map(async (env) => {
        await this.validateActionDataMappingInput(env.auth, types_1.WebhookEventTypes.WEBHOOK_REGISTER);
        const productEnvData = productEnvs.find((data) => data.slug === env.slug);
        if (!productEnvData) {
            throw new Error(`Error finding product env ${env.slug}`);
        }
        // Map the product env slug to the corresponding app env slug.
        const envData = productEnvData.envs.find((productEnvMap) => productEnvMap.product_env_slug === env.slug);
        if (!envData) {
            throw new Error(`Error finding env data for ${env.slug}`);
        }
        const { app_env_slug: appEnv, product_env_slug: productEnv } = envData;
        const envWebhookData = webhook.envs.find((webhookEnv) => webhookEnv.slug === appEnv);
        if (!envWebhookData) {
            throw new Error(`Error finding webhook registration sample ${webhook_tag} for env ${env.slug}`);
        }
        // Mint the receiver endpoint this webhook will deliver to.
        const uuid = (0, uuid_1.v4)();
        const url = `${urls_1.WEBHOOK_BASE_URL}/webhooks/v1/process/${uuid}`;
        let active = false;
        // When the app ships a registration sample, perform the actual
        // registration request against the third-party app now.
        if (envWebhookData.sample) {
            // Group sample fields by request section.
            const samples = {
                query: envWebhookData.sample_data.filter((data) => data.parent_key === 'query'),
                params: envWebhookData.sample_data.filter((data) => data.parent_key === 'params'),
                body: envWebhookData.sample_data.filter((data) => data.parent_key === 'body'),
                headers: envWebhookData.sample_data.filter((data) => data.parent_key === 'headers'),
            };
            let payload = (await this.constructJSONDataPayloads(env.auth, {}, samples, {
                app: access_tag,
                event: '',
                input: env.auth,
                type: types_1.FeatureEventTypes.ACTION,
                retries: 0,
                allow_fail: false,
            }));
            // Substitute our receiver URL into the payload; `replacedUrl` is
            // the URL that was found (and replaced) in the sample, if any.
            const { updatedObj, replacedUrl } = (0, processor_utils_1.updateUrlsInObject)(payload, url);
            // NOTE(review): `replacedUrl` is tested twice in this condition;
            // the trailing `&& replacedUrl` is redundant.
            if (replacedUrl && replacedUrl !== env.url && replacedUrl) {
                throw new Error(`Ductape expects the url ${replacedUrl} in request body to match inputted url ${env.url}`);
            }
            // NOTE(review): presumably fetchEnv throws when missing (see other
            // call sites passing additional_logs); this falsy check may be
            // defensive/dead — confirm fetchEnv's contract.
            const exists = this.fetchEnv(env.slug, {});
            if (!exists) {
                throw new Error(`Env ${env.slug} does not exist`);
            }
            payload = updatedObj;
            // Sample contained no URL to replace: inject ours explicitly.
            if (!replacedUrl) {
                payload.body.url = url;
            }
            // Fire the registration call against the app's registration URL.
            const parsedUrl = new URL(envWebhookData.registration_url);
            await this.sendActionRequest(parsedUrl.origin, parsedUrl.pathname, payload, envWebhookData.method, env.slug);
            active = true;
        }
        // Record persisted for this env (active only if registration ran).
        return {
            appEnv,
            productEnv,
            uuid,
            url: env.url,
            method: env.method,
            access_tag,
            webhook_tag,
            version,
            sender_workspace_id: app.workspace_id,
            receiver_workspace_id: product.workspace_id,
            app_tag: app.tag,
            product_tag,
            active,
        };
    }));
    await this.webhookApi.registerWebhooks(dataArray, this.getUserAccess());
}
|
|
193
|
+
async generateWebhookLink(data) {
|
|
194
|
+
const { product: product_tag, access_tag, webhook_tag, env: product_env, url, method } = data;
|
|
195
|
+
await this.productBuilderService.initializeProductByTag(product_tag);
|
|
196
|
+
const product = this.productBuilderService.fetchProduct();
|
|
197
|
+
const { version, envs: appEnvs } = this.productBuilderService.fetchApp(access_tag);
|
|
198
|
+
const app = await this.productBuilderService.fetchThirdPartyAppByAccessTag(access_tag);
|
|
199
|
+
const { webhooks } = app.versions.find((data) => data.tag === version);
|
|
200
|
+
if (!webhooks) {
|
|
201
|
+
throw new Error(`Webhooks not found for app ${access_tag}`);
|
|
202
|
+
}
|
|
203
|
+
const appEnv = appEnvs.find((env) => env.product_env_slug === product_env);
|
|
204
|
+
if (!appEnv) {
|
|
205
|
+
throw new Error(`Cannot find product env ${product_env}`);
|
|
206
|
+
}
|
|
207
|
+
const uuid = (0, uuid_1.v4)();
|
|
208
|
+
const payload = {
|
|
209
|
+
appEnv: appEnv.app_env_slug,
|
|
210
|
+
productEnv: product_env,
|
|
211
|
+
uuid,
|
|
212
|
+
url,
|
|
213
|
+
method,
|
|
214
|
+
access_tag,
|
|
215
|
+
webhook_tag,
|
|
216
|
+
version,
|
|
217
|
+
sender_workspace_id: app.workspace_id,
|
|
218
|
+
receiver_workspace_id: product.workspace_id,
|
|
219
|
+
app_tag: app.tag,
|
|
220
|
+
product_tag,
|
|
221
|
+
active: false,
|
|
222
|
+
};
|
|
223
|
+
return await this.webhookApi.generateLink(payload, this.getUserAccess());
|
|
224
|
+
//return res.link;
|
|
225
|
+
}
|
|
226
|
+
async processQuota(data) {
|
|
227
|
+
this.start = Date.now();
|
|
228
|
+
this.input = data;
|
|
229
|
+
const { product: product_tag, env, input, tag: quota_tag } = data;
|
|
230
|
+
try {
|
|
231
|
+
this.productTag = product_tag;
|
|
232
|
+
const additional_logs = {
|
|
233
|
+
name: 'Process Quota',
|
|
234
|
+
type: types_1.LogEventTypes.QUOTA,
|
|
235
|
+
parent_tag: quota_tag,
|
|
236
|
+
};
|
|
237
|
+
await this.intializeProduct(additional_logs);
|
|
238
|
+
const process_id = this.process_id || (0, processor_utils_1.generateObjectId)();
|
|
239
|
+
this.process_id = process_id;
|
|
240
|
+
this.baseLogs = {
|
|
241
|
+
product_tag: this.productTag,
|
|
242
|
+
product_id: this.productId,
|
|
243
|
+
workspace_id: this.workspace_id,
|
|
244
|
+
env,
|
|
245
|
+
type: types_1.LogEventTypes.QUOTA,
|
|
246
|
+
process_id,
|
|
247
|
+
data: input,
|
|
248
|
+
};
|
|
249
|
+
this.quota = this.fetchQuota(quota_tag, additional_logs);
|
|
250
|
+
this.logService.setFeatureId(this.quota._id);
|
|
251
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Process quota initiated', data: Object.assign(Object.assign({}, data), { input: (0, processor_utils_1.anonymizeObject)(data.input) }), status: types_1.LogEventStatus.PROCESSING }));
|
|
252
|
+
this.processEnv = this.fetchEnv(env, additional_logs);
|
|
253
|
+
if (!this.processEnv.active) {
|
|
254
|
+
throw new Error(`Environment ${data.env} is not active`);
|
|
255
|
+
}
|
|
256
|
+
const { input: quotaInput, options } = this.quota;
|
|
257
|
+
// validate feature input and log failure
|
|
258
|
+
this.validateJSONFeatureInput(input, quotaInput, additional_logs);
|
|
259
|
+
// split processes
|
|
260
|
+
//this.sequenceLevels = this.splitSequenceIntoLevels(sequence, additional_logs);
|
|
261
|
+
return await this.runQuotaOptions(options, additional_logs);
|
|
262
|
+
}
|
|
263
|
+
catch (e) {
|
|
264
|
+
this.end = Date.now();
|
|
265
|
+
this.logService.publish();
|
|
266
|
+
throw e;
|
|
267
|
+
}
|
|
268
|
+
}
|
|
269
|
+
// Intentional no-op stub: quota option execution is not implemented yet;
// processQuota currently resolves with undefined via this method.
async runQuotaOptions(options, additional_logs) { }
|
|
270
|
+
// Intentional no-op stub: fallback option execution is not implemented yet;
// processFallback currently resolves with undefined via this method.
async runFallbackOptions(options, additional_logs) { }
|
|
271
|
+
async processFallback(data) {
|
|
272
|
+
this.start = Date.now();
|
|
273
|
+
this.input = data;
|
|
274
|
+
const { product: product_tag, env, input, tag: fallback_tag } = data;
|
|
275
|
+
try {
|
|
276
|
+
this.productTag = product_tag;
|
|
277
|
+
const additional_logs = {
|
|
278
|
+
name: 'Process Fallback',
|
|
279
|
+
type: types_1.LogEventTypes.FALLBACK,
|
|
280
|
+
parent_tag: fallback_tag,
|
|
281
|
+
};
|
|
282
|
+
await this.intializeProduct(additional_logs);
|
|
283
|
+
const process_id = this.process_id || (0, processor_utils_1.generateObjectId)();
|
|
284
|
+
this.process_id = process_id;
|
|
285
|
+
this.baseLogs = {
|
|
286
|
+
product_tag: this.productTag,
|
|
287
|
+
product_id: this.productId,
|
|
288
|
+
workspace_id: this.workspace_id,
|
|
289
|
+
env,
|
|
290
|
+
type: types_1.LogEventTypes.QUOTA,
|
|
291
|
+
process_id,
|
|
292
|
+
data: input,
|
|
293
|
+
};
|
|
294
|
+
this.fallback = this.fetchFallback(fallback_tag, additional_logs);
|
|
295
|
+
this.logService.setFeatureId(this.quota._id);
|
|
296
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Process fallback initiated', data: Object.assign(Object.assign({}, data), { input: (0, processor_utils_1.anonymizeObject)(data.input) }), status: types_1.LogEventStatus.PROCESSING }));
|
|
297
|
+
this.processEnv = this.fetchEnv(env, additional_logs);
|
|
298
|
+
if (!this.processEnv.active) {
|
|
299
|
+
throw new Error(`Environment ${data.env} is not active`);
|
|
300
|
+
}
|
|
301
|
+
const { input: fallbackInput, options } = this.fallback;
|
|
302
|
+
// validate feature input and log failure
|
|
303
|
+
this.validateJSONFeatureInput(input, fallbackInput, additional_logs);
|
|
304
|
+
// split processes
|
|
305
|
+
//this.sequenceLevels = this.splitSequenceIntoLevels(sequence, additional_logs);
|
|
306
|
+
return await this.runFallbackOptions(options, additional_logs);
|
|
307
|
+
}
|
|
308
|
+
catch (e) {
|
|
309
|
+
this.end = Date.now();
|
|
310
|
+
this.logService.publish();
|
|
311
|
+
throw e;
|
|
312
|
+
}
|
|
313
|
+
}
|
|
314
|
+
async processFeature(data) {
|
|
315
|
+
this.start = Date.now();
|
|
316
|
+
this.input = data;
|
|
317
|
+
const { product: product_tag, env, input, tag: feature_tag } = data;
|
|
318
|
+
let additional_logs;
|
|
319
|
+
let passedValidation;
|
|
320
|
+
try {
|
|
321
|
+
this.productTag = product_tag;
|
|
322
|
+
additional_logs = {
|
|
323
|
+
name: 'Process feature',
|
|
324
|
+
type: types_1.LogEventTypes.FEATURE,
|
|
325
|
+
parent_tag: feature_tag,
|
|
326
|
+
};
|
|
327
|
+
await this.intializeProduct(additional_logs);
|
|
328
|
+
this.component = types_1.LogEventTypes.FEATURE;
|
|
329
|
+
const process_id = this.process_id || (0, processor_utils_1.generateObjectId)();
|
|
330
|
+
this.process_id = process_id;
|
|
331
|
+
this.baseLogs = {
|
|
332
|
+
product_tag: this.productTag,
|
|
333
|
+
product_id: this.productId,
|
|
334
|
+
workspace_id: this.workspace_id,
|
|
335
|
+
env,
|
|
336
|
+
type: types_1.LogEventTypes.FEATURE,
|
|
337
|
+
process_id,
|
|
338
|
+
data: input,
|
|
339
|
+
};
|
|
340
|
+
this.feature = this.fetchFeature(feature_tag, additional_logs);
|
|
341
|
+
this.logService.setFeatureId(this.feature._id);
|
|
342
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Process feature initiated', data: Object.assign(Object.assign({}, data), { input: (0, processor_utils_1.anonymizeObject)(data.input) }), status: types_1.LogEventStatus.PROCESSING }));
|
|
343
|
+
this.processEnv = this.fetchEnv(env, additional_logs);
|
|
344
|
+
if (!this.processEnv.active) {
|
|
345
|
+
throw new Error(`Environment ${data.env} is not active`);
|
|
346
|
+
}
|
|
347
|
+
const { input: featureInput, sequence, output } = this.feature;
|
|
348
|
+
// validate feature input and log failure
|
|
349
|
+
this.validateJSONFeatureInput(input, featureInput, additional_logs);
|
|
350
|
+
// split processes
|
|
351
|
+
this.sequenceLevels = this.splitSequenceIntoLevels(sequence, additional_logs);
|
|
352
|
+
await this.processSequenceLevels(additional_logs);
|
|
353
|
+
return { process_id };
|
|
354
|
+
//return this.generateOutput(output as unknown as Record<string, IFeatureOutput>);
|
|
355
|
+
}
|
|
356
|
+
catch (e) {
|
|
357
|
+
this.end = Date.now();
|
|
358
|
+
if (this.logService) {
|
|
359
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Process feature - failed', data: Object.assign(Object.assign({}, data), { input: (0, processor_utils_1.anonymizeObject)(data.input) }), status: types_1.LogEventStatus.PROCESSING }));
|
|
360
|
+
this.logService.publish();
|
|
361
|
+
if (passedValidation) {
|
|
362
|
+
return { process_id: this.process_id };
|
|
363
|
+
}
|
|
364
|
+
else {
|
|
365
|
+
throw e;
|
|
366
|
+
}
|
|
367
|
+
}
|
|
368
|
+
else {
|
|
369
|
+
throw e;
|
|
370
|
+
}
|
|
371
|
+
}
|
|
372
|
+
}
|
|
373
|
+
async intializeProduct(additional_logs) {
|
|
374
|
+
try {
|
|
375
|
+
if (this.productTag) {
|
|
376
|
+
await this.productBuilderService.initializeProductByTag(this.productTag); // validate product_exists
|
|
377
|
+
}
|
|
378
|
+
else {
|
|
379
|
+
await this.productBuilderService.initializeProduct(this.productId);
|
|
380
|
+
}
|
|
381
|
+
this.logService = new logs_service_1.default({
|
|
382
|
+
product_id: this.productId,
|
|
383
|
+
workspace_id: this.workspace_id,
|
|
384
|
+
public_key: this.public_key,
|
|
385
|
+
user_id: this.user_id,
|
|
386
|
+
token: this.token,
|
|
387
|
+
env_type: this.environment,
|
|
388
|
+
});
|
|
389
|
+
const { _id: product_id, workspace_id } = this.productBuilderService.fetchProduct();
|
|
390
|
+
this.productId = product_id;
|
|
391
|
+
if (workspace_id !== this.workspace_id) {
|
|
392
|
+
throw new Error('Access Denied');
|
|
393
|
+
}
|
|
394
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Product initialize - success', data: { product_id: this.productId }, status: types_1.LogEventStatus.SUCCESS }));
|
|
395
|
+
}
|
|
396
|
+
catch (e) {
|
|
397
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Product initialize - failed', data: e, status: types_1.LogEventStatus.FAIL }));
|
|
398
|
+
throw e;
|
|
399
|
+
}
|
|
400
|
+
}
|
|
401
|
+
fetchFeature(tag, additional_logs) {
|
|
402
|
+
try {
|
|
403
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch feature - initiated', data: { tag }, status: types_1.LogEventStatus.PROCESSING }));
|
|
404
|
+
const feature = this.productBuilderService.fetchFeature(tag); // validate feature exists
|
|
405
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch feature - success', data: { tag, feature }, status: types_1.LogEventStatus.SUCCESS }));
|
|
406
|
+
return feature;
|
|
407
|
+
}
|
|
408
|
+
catch (e) {
|
|
409
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch feature - failed', data: e, status: types_1.LogEventStatus.FAIL }));
|
|
410
|
+
throw e;
|
|
411
|
+
}
|
|
412
|
+
}
|
|
413
|
+
fetchQuota(tag, additional_logs) {
|
|
414
|
+
try {
|
|
415
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch quota - initiated', data: { tag }, status: types_1.LogEventStatus.PROCESSING }));
|
|
416
|
+
const quota = this.productBuilderService.fetchQuota(tag); // validate feature exists
|
|
417
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch quota - success', data: { tag, quota }, status: types_1.LogEventStatus.SUCCESS }));
|
|
418
|
+
return quota;
|
|
419
|
+
}
|
|
420
|
+
catch (e) {
|
|
421
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch quota - failed', data: { tag, e }, status: types_1.LogEventStatus.FAIL }));
|
|
422
|
+
throw e;
|
|
423
|
+
}
|
|
424
|
+
}
|
|
425
|
+
fetchFallback(tag, additional_logs) {
|
|
426
|
+
try {
|
|
427
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch fallback - initiated', data: { tag }, status: types_1.LogEventStatus.PROCESSING }));
|
|
428
|
+
const fallback = this.productBuilderService.fetchFallback(tag); // validate feature exists
|
|
429
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch fallback - success', data: { tag, fallback }, status: types_1.LogEventStatus.SUCCESS }));
|
|
430
|
+
return fallback;
|
|
431
|
+
}
|
|
432
|
+
catch (e) {
|
|
433
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch fallback - failed', data: e, status: types_1.LogEventStatus.FAIL }));
|
|
434
|
+
throw e;
|
|
435
|
+
}
|
|
436
|
+
}
|
|
437
|
+
fetchEnv(env, additional_logs) {
|
|
438
|
+
try {
|
|
439
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch environment - initiated', data: { slug: env }, status: types_1.LogEventStatus.PROCESSING }));
|
|
440
|
+
const product_env = this.productBuilderService.fetchEnv(env); // validate env exists
|
|
441
|
+
if (!product_env) {
|
|
442
|
+
throw new Error(`Env ${env} not found`);
|
|
443
|
+
}
|
|
444
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch environment - success', data: { slug: env, env: product_env }, status: types_1.LogEventStatus.SUCCESS }));
|
|
445
|
+
return product_env;
|
|
446
|
+
}
|
|
447
|
+
catch (e) {
|
|
448
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch environment - failed', data: { slug: env, e }, status: types_1.LogEventStatus.FAIL }));
|
|
449
|
+
throw e;
|
|
450
|
+
}
|
|
451
|
+
}
|
|
452
|
+
validateJSONFeatureInput(input, feature_input, additional_logs) {
|
|
453
|
+
try {
|
|
454
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Input validation - initiated', data: { input: (0, processor_utils_1.anonymizeObject)(input), feature_input }, status: types_1.LogEventStatus.PROCESSING }));
|
|
455
|
+
(0, processor_utils_1.validateFeatureJSONInput)(input, feature_input);
|
|
456
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Input validation - success', data: {}, status: types_1.LogEventStatus.SUCCESS }));
|
|
457
|
+
}
|
|
458
|
+
catch (e) {
|
|
459
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Input validation - failed', data: { e }, status: types_1.LogEventStatus.FAIL }));
|
|
460
|
+
throw e;
|
|
461
|
+
}
|
|
462
|
+
}
|
|
463
|
+
splitSequenceIntoLevels(data, additional_logs) {
|
|
464
|
+
try {
|
|
465
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Split sequence - initiated', data: {}, status: types_1.LogEventStatus.PROCESSING }));
|
|
466
|
+
const levels = {};
|
|
467
|
+
const tagMap = new Map(data.map((seq) => [seq.tag, seq]));
|
|
468
|
+
const assignedLevels = new Map();
|
|
469
|
+
let currentLevel = 1;
|
|
470
|
+
let remainingSequences = [...data];
|
|
471
|
+
while (remainingSequences.length > 0) {
|
|
472
|
+
const currentLevelSequences = [];
|
|
473
|
+
remainingSequences = remainingSequences.filter((seq) => {
|
|
474
|
+
var _a;
|
|
475
|
+
const parentLevels = ((_a = seq.parents) === null || _a === void 0 ? void 0 : _a.map((parent) => { var _a; return (_a = assignedLevels.get(parent)) !== null && _a !== void 0 ? _a : -1; })) || [];
|
|
476
|
+
const isCurrentLevel = parentLevels.length === 0 || Math.max(...parentLevels) === currentLevel - 1;
|
|
477
|
+
if (isCurrentLevel) {
|
|
478
|
+
currentLevelSequences.push(seq);
|
|
479
|
+
assignedLevels.set(seq.tag, currentLevel);
|
|
480
|
+
return false; // Remove from remainingSequences
|
|
481
|
+
}
|
|
482
|
+
return true;
|
|
483
|
+
});
|
|
484
|
+
if (currentLevelSequences.length > 0) {
|
|
485
|
+
levels[currentLevel] = currentLevelSequences;
|
|
486
|
+
currentLevel++;
|
|
487
|
+
}
|
|
488
|
+
else {
|
|
489
|
+
break; // Prevent infinite loop if there's a cycle
|
|
490
|
+
}
|
|
491
|
+
}
|
|
492
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Split sequence - success', data: { levels }, status: types_1.LogEventStatus.SUCCESS }));
|
|
493
|
+
return levels;
|
|
494
|
+
}
|
|
495
|
+
catch (e) {
|
|
496
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Split sequence - failed', data: { e }, status: types_1.LogEventStatus.FAIL }));
|
|
497
|
+
throw e;
|
|
498
|
+
}
|
|
499
|
+
}
|
|
500
|
+
async processSequenceLevels(additional_logs) {
|
|
501
|
+
try {
|
|
502
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Process levels - initiated', data: { levels: this.sequenceLevels }, status: types_1.LogEventStatus.PROCESSING }));
|
|
503
|
+
const levelEvents = {};
|
|
504
|
+
Object.entries(this.sequenceLevels).forEach(([level, sequences]) => {
|
|
505
|
+
levelEvents[parseInt(level)] = this.fetchLevelEvents(sequences);
|
|
506
|
+
});
|
|
507
|
+
let previousLevelComplete = true;
|
|
508
|
+
for (const level of Object.keys(levelEvents)
|
|
509
|
+
.map(Number)
|
|
510
|
+
.sort((a, b) => a - b)) {
|
|
511
|
+
if (previousLevelComplete) {
|
|
512
|
+
previousLevelComplete = await this.processLevelEvents(levelEvents[level], additional_logs);
|
|
513
|
+
}
|
|
514
|
+
else {
|
|
515
|
+
break;
|
|
516
|
+
}
|
|
517
|
+
}
|
|
518
|
+
if (previousLevelComplete) {
|
|
519
|
+
this.logService.publish();
|
|
520
|
+
this.end = Date.now();
|
|
521
|
+
this.writeResult(types_1.LogEventStatus.SUCCESS);
|
|
522
|
+
}
|
|
523
|
+
}
|
|
524
|
+
catch (e) {
|
|
525
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Process levels - failed', data: { e }, status: types_1.LogEventStatus.FAIL }));
|
|
526
|
+
throw e;
|
|
527
|
+
}
|
|
528
|
+
}
|
|
529
|
+
async processLevelEvents(events, additional_logs) {
|
|
530
|
+
const promises = events.map((event) => {
|
|
531
|
+
const dependants = this.fetchActionRequestDependents(event.input, additional_logs);
|
|
532
|
+
if (this.checkDependentsSuccess(dependants)) {
|
|
533
|
+
// TODO: comparison to see if all depending events are in success || dependants is empty
|
|
534
|
+
return this.processEvent(event);
|
|
535
|
+
}
|
|
536
|
+
else {
|
|
537
|
+
this.addToWaitingOutput(event, dependants);
|
|
538
|
+
}
|
|
539
|
+
});
|
|
540
|
+
return Promise.all(promises);
|
|
541
|
+
}
|
|
542
|
+
async processFailedEvents(additional_logs) {
|
|
543
|
+
const { failure } = this.processingOutput;
|
|
544
|
+
const promises = failure.map((failed) => {
|
|
545
|
+
if (failed.retries_left > 0 && new Date().getTime() > failed.retry_at) {
|
|
546
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Reprocess failed events - initiated', data: Object.assign({}, failed), status: types_1.LogEventStatus.PROCESSING }));
|
|
547
|
+
return this.processEvent(failed.event); // process events should also take care of this.processingOutput
|
|
548
|
+
}
|
|
549
|
+
if (failed.retries_left === 0 && !failed.allow_fail) {
|
|
550
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Reprocess failed events - failed', data: Object.assign(Object.assign({}, failed), { reason: 'Ran out of Retries' }), status: types_1.LogEventStatus.FAIL }));
|
|
551
|
+
throw new Error(`Event ${failed.event.event} failed in sequence ${failed.event.sequence_tag}, ran out of retries and the feature cannot run without it succeeding`);
|
|
552
|
+
}
|
|
553
|
+
});
|
|
554
|
+
Promise.all(promises);
|
|
555
|
+
}
|
|
556
|
+
async processWaitingEvents(additional_logs) {
|
|
557
|
+
const { waiting } = this.processingOutput;
|
|
558
|
+
const promises = waiting.map((waiting) => {
|
|
559
|
+
const { dependants } = waiting;
|
|
560
|
+
if (this.checkDependentsSuccess(dependants)) {
|
|
561
|
+
// TODO: comparison to see if all depending events are in success || dependants is empty
|
|
562
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Reprocess waiting events - initiated', data: Object.assign({}, waiting), status: types_1.LogEventStatus.PROCESSING }));
|
|
563
|
+
return this.processEvent(waiting.event);
|
|
564
|
+
}
|
|
565
|
+
else {
|
|
566
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Reprocess waiting events - waiting', data: Object.assign({}, waiting), status: types_1.LogEventStatus.WAITING }));
|
|
567
|
+
}
|
|
568
|
+
});
|
|
569
|
+
return Promise.all(promises);
|
|
570
|
+
}
|
|
571
|
+
checkDependentsSuccess(dependants) {
|
|
572
|
+
let pass = true;
|
|
573
|
+
for (let i = 0; i < dependants.length; i++) {
|
|
574
|
+
if (!this.processingOutput.success.find((item) => item.event.sequence_tag === dependants[i].sequence_tag && item.event.event === dependants[i].event_tag)) {
|
|
575
|
+
pass = false;
|
|
576
|
+
}
|
|
577
|
+
}
|
|
578
|
+
return pass;
|
|
579
|
+
}
|
|
580
|
+
fetchActionRequestDependents(input, additional_logs) {
|
|
581
|
+
try {
|
|
582
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch request dependencies - initialized', data: input, status: types_1.LogEventStatus.PROCESSING }));
|
|
583
|
+
const dependents = [];
|
|
584
|
+
if (input.query) {
|
|
585
|
+
dependents.push(...this.fetchDependents(input.query, additional_logs));
|
|
586
|
+
}
|
|
587
|
+
if (input.body) {
|
|
588
|
+
dependents.push(...this.fetchDependents(input.body, additional_logs));
|
|
589
|
+
}
|
|
590
|
+
if (input.headers) {
|
|
591
|
+
dependents.push(...this.fetchDependents(input.headers, additional_logs));
|
|
592
|
+
}
|
|
593
|
+
if (input.params) {
|
|
594
|
+
dependents.push(...this.fetchDependents(input.params, additional_logs));
|
|
595
|
+
}
|
|
596
|
+
if (input.data) {
|
|
597
|
+
dependents.push(...this.fetchDependents(input.data, additional_logs));
|
|
598
|
+
}
|
|
599
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch request dependencies - success', data: { input: (0, processor_utils_1.anonymizeObject)(input), dependents }, status: types_1.LogEventStatus.SUCCESS }));
|
|
600
|
+
return dependents;
|
|
601
|
+
}
|
|
602
|
+
catch (e) {
|
|
603
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch request dependents - failed', data: { e }, status: types_1.LogEventStatus.FAIL }));
|
|
604
|
+
}
|
|
605
|
+
}
|
|
606
|
+
fetchDependents(obj, additional_logs) {
|
|
607
|
+
try {
|
|
608
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch dependents - initiated', data: obj, status: types_1.LogEventStatus.PROCESSING }));
|
|
609
|
+
const dependants = [];
|
|
610
|
+
for (const key in obj) {
|
|
611
|
+
const value = obj[key];
|
|
612
|
+
if (typeof value === 'object') {
|
|
613
|
+
if ('function' in value && 'values' in value) {
|
|
614
|
+
const { function: func, values } = value;
|
|
615
|
+
for (let i = 0; i < values.length; i++) {
|
|
616
|
+
if (values[i].startsWith('$Sequence')) {
|
|
617
|
+
const stages = this.productBuilderService.extractStages(values[i]);
|
|
618
|
+
dependants.push({
|
|
619
|
+
sequence_tag: stages[0],
|
|
620
|
+
event_tag: stages[1],
|
|
621
|
+
});
|
|
622
|
+
}
|
|
623
|
+
}
|
|
624
|
+
}
|
|
625
|
+
else {
|
|
626
|
+
dependants.push(...this.fetchDependents(value, additional_logs));
|
|
627
|
+
}
|
|
628
|
+
}
|
|
629
|
+
else if (typeof value === 'string') {
|
|
630
|
+
if (value.startsWith('$Sequence')) {
|
|
631
|
+
const stages = this.productBuilderService.extractStages(value);
|
|
632
|
+
dependants.push({ sequence_tag: stages[0], event_tag: stages[1] });
|
|
633
|
+
}
|
|
634
|
+
}
|
|
635
|
+
}
|
|
636
|
+
return dependants;
|
|
637
|
+
}
|
|
638
|
+
catch (e) {
|
|
639
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch dependents - failed', data: { e }, status: types_1.LogEventStatus.FAIL }));
|
|
640
|
+
throw e;
|
|
641
|
+
}
|
|
642
|
+
}
|
|
643
|
+
async constructJSONDataPayloads(object, additional_logs, samples, event, loopIndex = 0) {
|
|
644
|
+
try {
|
|
645
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Construct JSON payloads - initiated', data: { object, samples }, status: types_1.LogEventStatus.PROCESSING }));
|
|
646
|
+
const payload = {};
|
|
647
|
+
const { body: actionBody, query: actionQuery, headers: actionHeaders, params: actionParams } = object;
|
|
648
|
+
if (actionBody) {
|
|
649
|
+
const body = await this.generatePayload(actionBody, event, additional_logs, samples.body, {}, loopIndex);
|
|
650
|
+
const validationPayload = (await this.inputService.parseJson({
|
|
651
|
+
data: body,
|
|
652
|
+
expected: types_1.ExpectedValues.PARSEINPUT,
|
|
653
|
+
}));
|
|
654
|
+
await this.inputService.validateInput(validationPayload, samples.body);
|
|
655
|
+
Object.assign(payload, {
|
|
656
|
+
body,
|
|
657
|
+
});
|
|
658
|
+
}
|
|
659
|
+
if (actionQuery) {
|
|
660
|
+
const query = await this.generatePayload(actionQuery, event, additional_logs, samples.query, {}, loopIndex);
|
|
661
|
+
const validationPayload = (await this.inputService.parseJson({
|
|
662
|
+
data: query,
|
|
663
|
+
expected: types_1.ExpectedValues.PARSEINPUT,
|
|
664
|
+
}));
|
|
665
|
+
await this.inputService.validateInput(validationPayload, samples.query);
|
|
666
|
+
Object.assign(payload, {
|
|
667
|
+
query,
|
|
668
|
+
});
|
|
669
|
+
}
|
|
670
|
+
if (actionHeaders) {
|
|
671
|
+
const headers = await this.generatePayload(actionHeaders, event, additional_logs, samples.headers, {}, loopIndex);
|
|
672
|
+
const validationPayload = (await this.inputService.parseJson({
|
|
673
|
+
data: headers,
|
|
674
|
+
expected: types_1.ExpectedValues.PARSEINPUT,
|
|
675
|
+
}));
|
|
676
|
+
await this.inputService.validateInput(validationPayload, samples.headers);
|
|
677
|
+
Object.assign(payload, {
|
|
678
|
+
headers,
|
|
679
|
+
});
|
|
680
|
+
}
|
|
681
|
+
if (actionParams) {
|
|
682
|
+
const params = await this.generatePayload(actionParams, event, additional_logs, samples.params, {}, loopIndex);
|
|
683
|
+
const validationPayload = (await this.inputService.parseJson({
|
|
684
|
+
data: params,
|
|
685
|
+
expected: types_1.ExpectedValues.PARSEINPUT,
|
|
686
|
+
}));
|
|
687
|
+
await this.inputService.validateInput(validationPayload, samples.params);
|
|
688
|
+
Object.assign(payload, {
|
|
689
|
+
params,
|
|
690
|
+
});
|
|
691
|
+
}
|
|
692
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Construct JSON payloads - success', data: { payload: (0, processor_utils_1.anonymizeObject)(payload) }, status: types_1.LogEventStatus.SUCCESS }));
|
|
693
|
+
return payload;
|
|
694
|
+
}
|
|
695
|
+
catch (e) {
|
|
696
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Construct JSON payloads - failed', data: { e }, status: types_1.LogEventStatus.FAIL }));
|
|
697
|
+
throw e;
|
|
698
|
+
}
|
|
699
|
+
}
|
|
700
|
+
/**
 * Recursively resolves a payload template into concrete values: nested objects
 * recurse, string values are resolved through generateStringValues (which
 * handles $-prefixed operators), and all other primitives pass through as-is.
 *
 * NOTE(review): the recursive call below passes only (value, event,
 * additional_logs, sample) — `index` and `loopIndex` are dropped, so nested
 * levels lose loop context; confirm whether that is intentional (there is a
 * TODO about `sample` at the same spot).
 *
 * NOTE(review): `typeof value === 'object'` is also true for null; a null
 * template value would throw on `'function' in value` and be caught below.
 *
 * @param {object} obj - template object to resolve
 * @param {object} event - event context; `event.app || event.event` names the app
 * @param {object} additional_logs - extra fields merged into every log entry
 * @param {Array} [sample=[]] - sample schema used for $Default resolution
 * @param {object} [index={}] - tracks nesting (parent_key/level) across recursion
 * @param {*} [loopIndex=null] - current loop iteration for $Sequence indexing
 * @returns {Promise<object>} resolved payload
 * @throws rethrows any resolution error after logging the failure
 */
async generatePayload(obj, event, additional_logs, sample = [], index = {}, loopIndex = null) {
    try {
        this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Generate payload - initiated', data: { obj, event, sample }, status: types_1.LogEventStatus.PROCESSING }));
        const payload = {};
        const keys = Object.keys(obj);
        for (let i = 0; i < keys.length; i++) {
            const key = keys[i];
            const value = obj[key];
            if (typeof value === 'object') {
                // check if function or object
                if ('function' in value && 'values' in value) {
                    // Function descriptors are currently a no-op: the handler below is
                    // commented out, so such keys are silently omitted from the payload.
                    // Object.assign(payload, { [key]: await this.generateFunctionResult(value.function, value.values, sample) });
                }
                else {
                    // Plain nested object: bump the nesting level and recurse.
                    const new_level = index.level ? index.level + 1 : 1;
                    index = { parent_key: key, level: new_level };
                    // TODO: how to pass sample into this
                    Object.assign(payload, {
                        [key]: await this.generatePayload(value, event, additional_logs, sample),
                    });
                }
            }
            else if (typeof value === 'string') {
                // Strings may be $-operators; resolve them in the app's context.
                const app = event ? event.app || event.event : null;
                Object.assign(payload, {
                    [key]: await this.generateStringValues(value, app, additional_logs, sample, index, key, loopIndex),
                });
            }
            else {
                // allow hardcoded values that are not numbers
                Object.assign(payload, {
                    [key]: value,
                });
            }
        }
        this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Generate payload - success', data: { payload: (0, processor_utils_1.anonymizeObject)(payload) }, status: types_1.LogEventStatus.SUCCESS }));
        return payload;
    }
    catch (e) {
        this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Generating payload - failed', data: { e }, status: types_1.LogEventStatus.FAIL }));
        throw e;
    }
}
|
|
743
|
+
/**
 * Resolves a single template string to its concrete value by dispatching on
 * its $-prefix: $Auth, $Sequence, $Input, $Default, $Variable, $Constant,
 * $Size/$Length, $Now, $Date; strings without a '$' prefix pass through
 * unchanged, and any other $-form is handed to generateOperatorValues.
 *
 * NOTE(review): the $Size/$Length branch falls through with no return when
 * neither regex matches, resolving to undefined — confirm that is intended.
 *
 * @param {string} value - the template string to resolve
 * @param {*} app - app context used by $Auth resolution
 * @param {object} additional_logs - extra fields merged into log entries downstream
 * @param {Array} [sample=[]] - sample schema for $Default resolution
 * @param {object} [index={}] - nesting context (parent_key/level)
 * @param {string} [key=''] - the payload key being resolved
 * @param {*} [loopIndex=null] - loop iteration for $Sequence indexing
 * @returns {Promise<*>} the resolved value
 */
async generateStringValues(value, app, additional_logs, sample = [], index = {}, key = '', loopIndex = null) {
    value = (0, processor_utils_1.removeWrappingQuotes)(value);
    // Stage segments (e.g. sequence/event path) extracted up front; harmless
    // for non-staged values.
    const stages = this.productBuilderService.extractStages(value);
    const locatorFor$Index = (0, string_utils_1.validateAndLocateTag)(value);
    if (value.startsWith('$Auth{') && value.endsWith('}')) {
        // should only be allowed in apps
        return await this.generateAuthValue(stages, app, sample, additional_logs);
    }
    else if (value.startsWith('$Sequence{') && value.endsWith('}')) {
        return await this.generateSequenceValue(stages, locatorFor$Index, loopIndex); // pass
    }
    else if (value.startsWith('$Input{') && value.endsWith('}')) {
        return await this.generateInputValue(this.input.input, stages);
    }
    else if (value === '$Default') {
        return await this.generateDefaultValue(sample, Object.assign(Object.assign({}, index), { key }));
    }
    else if (value.startsWith('$Variable{') && value.endsWith('}')) {
        return await this.generateVariableValue(stages);
    }
    else if (value.startsWith('$Constant{') && value.endsWith('}')) {
        return await this.generateConstantValue(stages);
    }
    else if (value.startsWith('$Size{') || value.startsWith('$Length{')) {
        // $Length -> string length of the resolved inner value;
        // $Size -> number of own keys of the resolved inner value.
        const { matchLength, matchSize } = (0, string_utils_1.checkLengthAndSizeMatches)(value);
        let content;
        if (matchLength) {
            content = matchLength[1];
            return (await this.generateStringValues(content, app, additional_logs, sample, index, key)).length;
        }
        else if (matchSize) {
            // Added safeguard for $Size match
            content = matchSize[1];
            return Object.keys(await this.generateStringValues(content, app, additional_logs, sample, index, key)).length;
        }
    }
    else if (value === '$Now') {
        return Date.now();
    }
    else if (value === '$Date') {
        return new Date().toISOString();
    }
    else if (!value.startsWith('$')) {
        // allow hardcoded values
        return value;
    }
    else {
        // should be a ductape operator
        return await this.generateOperatorValues(value, app, additional_logs, sample, index, key, loopIndex);
    }
}
|
|
794
|
+
async generateOperatorValues(value, app, additional_logs, sample = [], index = {}, key = '', loopIndex = null) {
|
|
795
|
+
if (value.startsWith('$Add(') && value.endsWith(')')) {
|
|
796
|
+
return await this.sumValues(value, app, additional_logs, sample, index, key, loopIndex);
|
|
797
|
+
}
|
|
798
|
+
else if (value.startsWith('$Substract(') && value.endsWith(')')) {
|
|
799
|
+
return await this.sumValues(value, app, additional_logs, sample, index, key, loopIndex);
|
|
800
|
+
}
|
|
801
|
+
else if (value.startsWith('$Pick(') && value.endsWith(')')) {
|
|
802
|
+
return await this.pickValue(value, app, additional_logs, sample, index, key, loopIndex);
|
|
803
|
+
}
|
|
804
|
+
else if (value.startsWith('$Concat(') && value.endsWith(')')) {
|
|
805
|
+
return await this.concatValues(value, app, additional_logs, sample, index, key, loopIndex);
|
|
806
|
+
}
|
|
807
|
+
else if (value.startsWith('$Substring(') && value.endsWith(')')) {
|
|
808
|
+
return await this.substringValues(value, app, additional_logs, sample, index, key, loopIndex);
|
|
809
|
+
}
|
|
810
|
+
else if (value.startsWith('$Trim(') && value.endsWith(')')) {
|
|
811
|
+
return await this.trimValues(value, app, additional_logs, sample, index, key, loopIndex);
|
|
812
|
+
}
|
|
813
|
+
else if (value.startsWith('$Join(') && value.endsWith(')')) {
|
|
814
|
+
return await this.joinArrays(value, app, additional_logs, sample, index, key, loopIndex);
|
|
815
|
+
}
|
|
816
|
+
else if (value.startsWith('$Split(') && value.endsWith(')')) {
|
|
817
|
+
return await this.splitValues(value, app, additional_logs, sample, index, key, loopIndex);
|
|
818
|
+
}
|
|
819
|
+
else if (value.startsWith('$Uppercase(') && value.endsWith(')')) {
|
|
820
|
+
return await this.uppercaseValue(value, app, additional_logs, sample, index, key, loopIndex);
|
|
821
|
+
}
|
|
822
|
+
else if (value.startsWith('$Lowercase(') && value.endsWith(')')) {
|
|
823
|
+
return await this.lowercaseValue(value, app, additional_logs, sample, index, key, loopIndex);
|
|
824
|
+
}
|
|
825
|
+
else if (value.startsWith('$Dateformat(') && value.endsWith(')')) {
|
|
826
|
+
return await this.dateFormatValue(value, app, additional_logs, sample, index, key, loopIndex);
|
|
827
|
+
}
|
|
828
|
+
else if (value.startsWith('$Replace(') && value.endsWith(')')) {
|
|
829
|
+
return await this.replaceValue(value, app, additional_logs, sample, index, key, loopIndex);
|
|
830
|
+
}
|
|
831
|
+
else if (value.startsWith('$Filter(') && value.endsWith(')')) {
|
|
832
|
+
return await this.filterValue(value, app, additional_logs, sample, index, key, loopIndex);
|
|
833
|
+
}
|
|
834
|
+
else if (value.startsWith('$Find(') && value.endsWith(')')) {
|
|
835
|
+
return await this.findValue(value, app, additional_logs, sample, index, key, loopIndex);
|
|
836
|
+
}
|
|
837
|
+
else {
|
|
838
|
+
throw new Error(`Invalid value input ${value}`);
|
|
839
|
+
}
|
|
840
|
+
}
|
|
841
|
+
async sumValues(value, app, additional_logs, sample = [], index = {}, key = '', loopIndex = null) {
|
|
842
|
+
const match = value.match(types_1.sumRegex);
|
|
843
|
+
if (match && match[1]) {
|
|
844
|
+
const placeHolders = match[1].split(',').map((data) => data.trim());
|
|
845
|
+
const values = await Promise.all(placeHolders.map(async (holder) => {
|
|
846
|
+
return await this.generateStringValues(holder, app, additional_logs, sample, index, key, loopIndex);
|
|
847
|
+
}));
|
|
848
|
+
const numericValues = values.map((val, i) => {
|
|
849
|
+
const num = Number(val);
|
|
850
|
+
if (isNaN(num)) {
|
|
851
|
+
throw new Error(`Value at index ${i} ('${placeHolders[i]}' => '${val}') is not a valid number`);
|
|
852
|
+
}
|
|
853
|
+
return num;
|
|
854
|
+
});
|
|
855
|
+
return numericValues.reduce((acc, num) => acc + num, 0);
|
|
856
|
+
}
|
|
857
|
+
else {
|
|
858
|
+
return 0;
|
|
859
|
+
}
|
|
860
|
+
}
|
|
861
|
+
/**
 * Implements the $Substract(...) operator: resolves each comma-separated
 * placeholder to a number and subtracts the rest from the first
 * (a - b - c - ...). Returns 0 when the expression does not match.
 *
 * NOTE(review): this matches against `types_1.substringRegex` — the same regex
 * used by $Substring — rather than a subtraction-specific pattern; confirm
 * whether a dedicated regex was intended.
 *
 * @param {string} value - the full $Substract(...) expression
 * @param {*} app - app context forwarded to placeholder resolution
 * @param {object} additional_logs @param {Array} [sample=[]] @param {object} [index={}]
 * @param {string} [key=''] @param {*} [loopIndex=null] - forwarded downstream
 * @returns {Promise<number>} the running difference of the resolved operands
 * @throws {Error} when any resolved operand is not a valid number
 */
async subtractValues(value, app, additional_logs, sample = [], index = {}, key = '', loopIndex = null) {
    const match = value.match(types_1.substringRegex);
    if (match && match[1]) {
        const placeHolders = match[1].split(',').map((data) => data.trim());
        const values = await Promise.all(placeHolders.map(async (holder) => {
            return await this.generateStringValues(holder, app, additional_logs, sample, index, key, loopIndex);
        }));
        const numericValues = values.map((val, i) => {
            const num = Number(val);
            if (isNaN(num)) {
                throw new Error(`Value at index ${i} ('${placeHolders[i]}' => '${val}') is not a valid number`);
            }
            return num;
        });
        // Subtract the numbers in sequence (first value minus the rest)
        const result = numericValues.reduce((acc, num) => acc - num);
        return result;
    }
    else {
        return 0;
    }
}
|
|
883
|
+
async concatValues(value, app, additional_logs, sample = [], index = {}, key = '', loopIndex = null) {
|
|
884
|
+
const match = value.match(types_1.concatRegex);
|
|
885
|
+
if (match) {
|
|
886
|
+
const placeholdersStr = match[1];
|
|
887
|
+
const separator = match[2];
|
|
888
|
+
const placeHolders = placeholdersStr.split(',').map((data) => data.trim());
|
|
889
|
+
const values = await Promise.all(placeHolders.map(async (holder) => {
|
|
890
|
+
return await this.generateStringValues(holder, app, additional_logs, sample, index, key, loopIndex);
|
|
891
|
+
}));
|
|
892
|
+
return values.join(separator);
|
|
893
|
+
}
|
|
894
|
+
else {
|
|
895
|
+
console.log('No match found');
|
|
896
|
+
}
|
|
897
|
+
}
|
|
898
|
+
async uppercaseValue(value, app, additional_logs, sample = [], index = {}, key = '', loopIndex = null) {
|
|
899
|
+
const match = value.match(types_1.uppercaseRegex);
|
|
900
|
+
if (match) {
|
|
901
|
+
const resolvedValue = await this.generateStringValues(match[1].trim(), app, additional_logs, sample, index, key, loopIndex);
|
|
902
|
+
if (typeof resolvedValue !== 'string') {
|
|
903
|
+
throw new Error(`Resolved value ('${resolvedValue}') is not a string.`);
|
|
904
|
+
}
|
|
905
|
+
return resolvedValue.toUpperCase();
|
|
906
|
+
}
|
|
907
|
+
else {
|
|
908
|
+
//console.log('No match found');
|
|
909
|
+
}
|
|
910
|
+
}
|
|
911
|
+
async lowercaseValue(value, app, additional_logs, sample = [], index = {}, key = '', loopIndex = null) {
|
|
912
|
+
const match = value.match(types_1.lowercaseRegex);
|
|
913
|
+
if (match) {
|
|
914
|
+
const resolvedValue = await this.generateStringValues(match[1].trim(), app, additional_logs, sample, index, key, loopIndex);
|
|
915
|
+
if (typeof resolvedValue !== 'string') {
|
|
916
|
+
throw new Error(`Resolved value ('${resolvedValue}') is not a string.`);
|
|
917
|
+
}
|
|
918
|
+
return resolvedValue.toLowerCase();
|
|
919
|
+
}
|
|
920
|
+
else {
|
|
921
|
+
//console.log('No match found');
|
|
922
|
+
}
|
|
923
|
+
}
|
|
924
|
+
async dateFormatValue(value, app, additional_logs, sample = [], index = {}, key = '', loopIndex = null) {
|
|
925
|
+
const match = value.match(types_1.dateFormatRegex);
|
|
926
|
+
if (match) {
|
|
927
|
+
const expression = match[1].trim();
|
|
928
|
+
const formatStr = match[3].trim();
|
|
929
|
+
const resolvedDate = await this.generateStringValues(expression, app, additional_logs, sample, index, key, loopIndex);
|
|
930
|
+
const parsedDate = typeof resolvedDate === 'string' ? (0, date_fns_1.parseISO)(resolvedDate) : new Date(resolvedDate);
|
|
931
|
+
if (isNaN(parsedDate.getTime())) {
|
|
932
|
+
throw new Error(`Resolved date ('${resolvedDate}') is invalid.`);
|
|
933
|
+
}
|
|
934
|
+
return (0, date_fns_1.format)(parsedDate, formatStr);
|
|
935
|
+
}
|
|
936
|
+
else {
|
|
937
|
+
//console.log('No match found');
|
|
938
|
+
}
|
|
939
|
+
}
|
|
940
|
+
async replaceValue(value, app, additional_logs, sample = [], index = {}, key = '', loopIndex = null) {
|
|
941
|
+
const match = value.match(types_1.replaceRegex);
|
|
942
|
+
if (match) {
|
|
943
|
+
const [_, strPlaceholder, , searchStr, , replaceStr] = match;
|
|
944
|
+
const resolvedValue = await this.generateStringValues(strPlaceholder.trim(), app, additional_logs, sample, index, key, loopIndex);
|
|
945
|
+
if (typeof resolvedValue !== 'string') {
|
|
946
|
+
throw new Error(`Resolved value ('${resolvedValue}') is not a string.`);
|
|
947
|
+
}
|
|
948
|
+
return resolvedValue.split(searchStr).join(replaceStr);
|
|
949
|
+
}
|
|
950
|
+
else {
|
|
951
|
+
console.log('No match found');
|
|
952
|
+
}
|
|
953
|
+
}
|
|
954
|
+
async substringValues(value, app, additional_logs, sample = [], index = {}, key = '', loopIndex = null) {
|
|
955
|
+
const match = value.match(types_1.substringRegex);
|
|
956
|
+
if (match) {
|
|
957
|
+
const [_, stringPlaceholder, startStr, endStr] = match;
|
|
958
|
+
const start = Number(startStr);
|
|
959
|
+
const end = Number(endStr);
|
|
960
|
+
if (isNaN(start) || isNaN(end)) {
|
|
961
|
+
throw new Error(`Start and end indexes must be valid numbers (received start: '${startStr}', end: '${endStr}').`);
|
|
962
|
+
}
|
|
963
|
+
const resolvedString = await this.generateStringValues(stringPlaceholder.trim(), app, additional_logs, sample, index, key, loopIndex);
|
|
964
|
+
if (typeof resolvedString !== 'string') {
|
|
965
|
+
throw new Error(`Resolved value ('${resolvedString}') is not a string.`);
|
|
966
|
+
}
|
|
967
|
+
return resolvedString.substring(start, end);
|
|
968
|
+
}
|
|
969
|
+
else {
|
|
970
|
+
console.log('No match found');
|
|
971
|
+
return '';
|
|
972
|
+
}
|
|
973
|
+
}
|
|
974
|
+
async trimValues(value, app, additional_logs, sample = [], index = {}, key = '', loopIndex = null) {
|
|
975
|
+
const match = value.match(types_1.trimRegex);
|
|
976
|
+
if (match) {
|
|
977
|
+
const stringPlaceholder = match[1].trim();
|
|
978
|
+
const resolvedString = await this.generateStringValues(stringPlaceholder, app, additional_logs, sample, index, key, loopIndex);
|
|
979
|
+
if (typeof resolvedString !== 'string') {
|
|
980
|
+
throw new Error(`Resolved value ('${resolvedString}') is not a string.`);
|
|
981
|
+
}
|
|
982
|
+
return resolvedString.trim();
|
|
983
|
+
}
|
|
984
|
+
else {
|
|
985
|
+
console.log('No match found');
|
|
986
|
+
return '';
|
|
987
|
+
}
|
|
988
|
+
}
|
|
989
|
+
async pickValue(value, app, additional_logs, sample = [], index = {}, key = '', loopIndex = null) {
|
|
990
|
+
const match = value.match(types_1.pickRegex);
|
|
991
|
+
if (match) {
|
|
992
|
+
const valuePlaceholder = match[1].trim();
|
|
993
|
+
const indexToPick = Number(await this.generateStringValues(match[2], app, additional_logs, sample, index, key, loopIndex));
|
|
994
|
+
if (isNaN(indexToPick)) {
|
|
995
|
+
throw new Error(`Index '${match[2]}' is not a valid number.`);
|
|
996
|
+
}
|
|
997
|
+
const resolvedValue = await this.generateStringValues(valuePlaceholder, app, additional_logs, sample, index, key, loopIndex);
|
|
998
|
+
if (typeof resolvedValue === 'string' || Array.isArray(resolvedValue)) {
|
|
999
|
+
if (indexToPick < 0 || indexToPick >= resolvedValue.length) {
|
|
1000
|
+
throw new Error(`Index ${indexToPick} is out of bounds for the provided value.`);
|
|
1001
|
+
}
|
|
1002
|
+
return resolvedValue[indexToPick];
|
|
1003
|
+
}
|
|
1004
|
+
else {
|
|
1005
|
+
throw new Error(`Resolved value ('${resolvedValue}') is not a string or array.`);
|
|
1006
|
+
}
|
|
1007
|
+
}
|
|
1008
|
+
else {
|
|
1009
|
+
console.log('No match found');
|
|
1010
|
+
}
|
|
1011
|
+
}
|
|
1012
|
+
async filterValue(value, app, additional_logs, sample = [], index = {}, key = '', loopIndex = null) {
|
|
1013
|
+
const match = value.match(types_1.filterRegex);
|
|
1014
|
+
if (match) {
|
|
1015
|
+
const arrayPlaceholder = match[1].trim();
|
|
1016
|
+
const operator = match[2].trim();
|
|
1017
|
+
const valueToCompare = match[3].trim();
|
|
1018
|
+
if (!types_1.ValidOperators.includes(operator)) {
|
|
1019
|
+
throw new Error(`Invalid operator: ${operator}`);
|
|
1020
|
+
}
|
|
1021
|
+
const resolvedArray = await this.generateStringValues(arrayPlaceholder, app, additional_logs, sample, index, key, loopIndex);
|
|
1022
|
+
const resolvedValue = await this.generateStringValues(valueToCompare, app, additional_logs, sample, index, key, loopIndex);
|
|
1023
|
+
if (!Array.isArray(resolvedArray)) {
|
|
1024
|
+
throw new Error(`Resolved value for array ('${resolvedArray}') is not an array.`);
|
|
1025
|
+
}
|
|
1026
|
+
return resolvedArray.filter((item) => (0, processor_utils_1.compareValues)(item, operator, resolvedValue));
|
|
1027
|
+
}
|
|
1028
|
+
console.log('No $Filter match found');
|
|
1029
|
+
return [];
|
|
1030
|
+
}
|
|
1031
|
+
async findValue(value, app, additional_logs, sample = [], index = {}, key = '', loopIndex = null) {
|
|
1032
|
+
const match = value.match(types_1.findRegex);
|
|
1033
|
+
if (match) {
|
|
1034
|
+
const arrayPlaceholder = match[1].trim();
|
|
1035
|
+
const operator = match[2].trim();
|
|
1036
|
+
const valueToCompare = match[3].trim();
|
|
1037
|
+
if (!types_1.ValidOperators.includes(operator)) {
|
|
1038
|
+
throw new Error(`Invalid operator: ${operator}`);
|
|
1039
|
+
}
|
|
1040
|
+
const resolvedArray = await this.generateStringValues(arrayPlaceholder, app, additional_logs, sample, index, key, loopIndex);
|
|
1041
|
+
const resolvedValue = await this.generateStringValues(valueToCompare, app, additional_logs, sample, index, key, loopIndex);
|
|
1042
|
+
if (!Array.isArray(resolvedArray)) {
|
|
1043
|
+
throw new Error(`Resolved value for array ('${resolvedArray}') is not an array.`);
|
|
1044
|
+
}
|
|
1045
|
+
return resolvedArray.find((item) => (0, processor_utils_1.compareValues)(item, operator, resolvedValue));
|
|
1046
|
+
}
|
|
1047
|
+
console.log('No $Find match found');
|
|
1048
|
+
return null;
|
|
1049
|
+
}
|
|
1050
|
+
async splitValues(value, app, additional_logs, sample = [], index = {}, key = '', loopIndex = null) {
|
|
1051
|
+
const match = value.match(types_1.splitRegex);
|
|
1052
|
+
if (match) {
|
|
1053
|
+
const stringPlaceholder = match[1].trim();
|
|
1054
|
+
const separator = await this.generateStringValues(match[2], app, additional_logs, sample, index, key, loopIndex);
|
|
1055
|
+
const resolvedString = await this.generateStringValues(stringPlaceholder, app, additional_logs, sample, index, key, loopIndex);
|
|
1056
|
+
if (typeof resolvedString !== 'string') {
|
|
1057
|
+
throw new Error(`Resolved value ('${resolvedString}') is not a string.`);
|
|
1058
|
+
}
|
|
1059
|
+
return resolvedString.split(separator);
|
|
1060
|
+
}
|
|
1061
|
+
else {
|
|
1062
|
+
console.log('No match found');
|
|
1063
|
+
return [];
|
|
1064
|
+
}
|
|
1065
|
+
}
|
|
1066
|
+
async joinArrays(value, app, additional_logs, sample = [], index = {}, key = '', loopIndex = null) {
|
|
1067
|
+
const match = value.match(types_1.joinRegex);
|
|
1068
|
+
if (match) {
|
|
1069
|
+
const arraysPlaceholder = match[1].trim();
|
|
1070
|
+
const resolvedArrays = await this.generateStringValues(arraysPlaceholder, app, additional_logs, sample, index, key, loopIndex);
|
|
1071
|
+
if (!Array.isArray(resolvedArrays)) {
|
|
1072
|
+
throw new Error(`Resolved value ('${JSON.stringify(resolvedArrays)}') is not an array.`);
|
|
1073
|
+
}
|
|
1074
|
+
const mergedArray = resolvedArrays.reduce((acc, current) => {
|
|
1075
|
+
if (!Array.isArray(current)) {
|
|
1076
|
+
throw new Error(`Element '${JSON.stringify(current)}' is not an array.`);
|
|
1077
|
+
}
|
|
1078
|
+
return acc.concat(current);
|
|
1079
|
+
}, []);
|
|
1080
|
+
return mergedArray;
|
|
1081
|
+
}
|
|
1082
|
+
else {
|
|
1083
|
+
console.log('No match found');
|
|
1084
|
+
}
|
|
1085
|
+
}
|
|
1086
|
+
async generateInputValue(input, stages) {
|
|
1087
|
+
try {
|
|
1088
|
+
if (stages.length === 1) {
|
|
1089
|
+
return input[stages[0]];
|
|
1090
|
+
}
|
|
1091
|
+
if (typeof input[stages[0]] !== 'object') {
|
|
1092
|
+
throw new Error(`Nested input ${input[stages[0]]} should be an object or array`);
|
|
1093
|
+
}
|
|
1094
|
+
stages.shift();
|
|
1095
|
+
return this.generateInputValue(input[stages[0]], stages);
|
|
1096
|
+
}
|
|
1097
|
+
catch (e) {
|
|
1098
|
+
throw e;
|
|
1099
|
+
}
|
|
1100
|
+
}
|
|
1101
|
+
/**
 * Resolves a `$Sequence` reference against already-processed output.
 * stages[0] is the sequence tag and stages[1] the event name; the remaining
 * segments are a path into that event's output.
 *
 * @param stages       [sequence_tag, event, ...outputPath]
 * @param indexLocator position (in the original stages) of an array index
 *                     segment, or null
 * @param indexValue   the concrete index to use at that position, or null
 * @returns the resolved output value; undefined when no matching successful
 *          event (or its output) exists
 */
async generateSequenceValue(stages, indexLocator = null, indexValue = null) {
    try {
        // find out put in success array using event and success tags
        const result = this.processingOutput.success.find((item) => item.event.event === stages[1] && item.event.sequence_tag === stages[0]);
        const cleanedStages = stages.slice(2); // remove event and sequence tags
        let locator = indexLocator;
        if (indexLocator) {
            // NOTE(review): truthiness check means an indexLocator of 0 is NOT
            // shifted by -2 — confirm locator 0 can never occur here.
            locator = indexLocator - 2; // adjust to handle cleaned stages
        }
        if (result && result.output) {
            return this.fetchOutputValueAfterStrippingLocators(cleanedStages, result.output, locator, indexValue);
        }
    }
    catch (e) {
        throw e;
    }
}
|
|
1118
|
+
/**
 * Recursively walks `output` along `stages`. At the stage whose position
 * equals `indexLocator`, the lookup key is `indexValue` (an array index)
 * instead of the stage name.
 *
 * Consumes `stages` in place via shift() during recursion.
 *
 * @returns the resolved value; undefined when the walk dead-ends (e.g. a
 *          non-object intermediate value with stages remaining, or a
 *          locator stage with a falsy indexValue)
 */
fetchOutputValueAfterStrippingLocators(stages, output, indexLocator = null, indexValue = null, stageIndex = 0) {
    try {
        let next;
        if (stages.length === 0) {
            return output;
        }
        //const arrayIndex = extractNumberFromArrayString(stages[0]);
        if (indexLocator !== stageIndex) {
            // is not an array
            next = output[stages[0]];
        }
        else if (indexValue) {
            // NOTE(review): a falsy indexValue (including index 0) leaves `next`
            // undefined here — confirm index 0 cannot occur.
            next = output[indexValue];
        }
        if (!next) {
            // throw error for when next value is not found
        }
        if (stages.length === 1) {
            return next;
        }
        if (typeof next === 'object' && stages.length) {
            stages.shift();
            return this.fetchOutputValueAfterStrippingLocators(stages, next, indexLocator, indexValue, stageIndex + 1);
        }
        // falls through (returns undefined) when `next` is not an object but
        // more stages remain
    }
    catch (e) {
        throw e;
    }
}
|
|
1147
|
+
async generateDefaultValue(sample, index) {
|
|
1148
|
+
try {
|
|
1149
|
+
const value = sample.find((item) => item.key === index.key && item.level === index.level && item.parent_key === index.parent_key);
|
|
1150
|
+
if (!value || !value.defaultValue) {
|
|
1151
|
+
throw new Error(`Cannot find default value of ${index}`);
|
|
1152
|
+
}
|
|
1153
|
+
return value.defaultValue;
|
|
1154
|
+
}
|
|
1155
|
+
catch (e) {
|
|
1156
|
+
throw e;
|
|
1157
|
+
}
|
|
1158
|
+
}
|
|
1159
|
+
async generateVariableValue(stages) {
|
|
1160
|
+
try {
|
|
1161
|
+
const app = this.productBuilderService.fetchApp(stages[0]);
|
|
1162
|
+
const env = app.envs.find((items) => items.product_env_slug === this.processEnv.slug);
|
|
1163
|
+
if (!env) {
|
|
1164
|
+
throw new Error(`App ${stages[0]} variables needs to have a definition for env: ${this.processEnv.slug}`);
|
|
1165
|
+
}
|
|
1166
|
+
const { variables } = env;
|
|
1167
|
+
const found = variables.find((item) => item.key === stages[1]);
|
|
1168
|
+
if (!found) {
|
|
1169
|
+
throw new Error(`Variable ${stages[1]} not found declared for app ${stages[0]} in ${this.processEnv.slug}`);
|
|
1170
|
+
}
|
|
1171
|
+
return found.value;
|
|
1172
|
+
}
|
|
1173
|
+
catch (e) {
|
|
1174
|
+
throw e;
|
|
1175
|
+
}
|
|
1176
|
+
}
|
|
1177
|
+
async generateConstantValue(stages) {
|
|
1178
|
+
try {
|
|
1179
|
+
const app = await this.fetchThirdPartyApp(stages[0]);
|
|
1180
|
+
const found = app.constants.find((data, index) => data.key === stages[1]);
|
|
1181
|
+
if (!found) {
|
|
1182
|
+
throw new Error(`Constant ${stages[1]} not found declared for app ${stages[0]} in`);
|
|
1183
|
+
}
|
|
1184
|
+
return found.value;
|
|
1185
|
+
}
|
|
1186
|
+
catch (e) {
|
|
1187
|
+
throw e;
|
|
1188
|
+
}
|
|
1189
|
+
}
|
|
1190
|
+
decorateValue(value, sample) {
|
|
1191
|
+
if (sample.decorator) {
|
|
1192
|
+
if (sample.decoratorPosition &&
|
|
1193
|
+
sample.decoratorPosition == types_1.DecoratorPostions.APPEND &&
|
|
1194
|
+
!value.endsWith(sample.decorator)) {
|
|
1195
|
+
return `${value} ${sample.decorator}`;
|
|
1196
|
+
}
|
|
1197
|
+
if (sample.decoratorPosition &&
|
|
1198
|
+
sample.decoratorPosition == types_1.DecoratorPostions.PREPEND &&
|
|
1199
|
+
!value.startsWith(sample.decorator)) {
|
|
1200
|
+
return `${sample.decorator} ${value}`;
|
|
1201
|
+
}
|
|
1202
|
+
}
|
|
1203
|
+
return value;
|
|
1204
|
+
}
|
|
1205
|
+
/**
 * Resolves a `$Auth` reference: fetches (and decrypts) the app's auth data,
 * walks the remaining stages into it, and applies any decorator from the
 * first sample entry.
 *
 * NOTE: mutates the caller's `stages` array (shift).
 *
 * @throws when auth data cannot be fetched for the app
 */
async generateAuthValue(stages, app, sample, additional_logs) {
    try {
        let auth_data = await this.fetchAuthData(app, additional_logs); //TODO: should use stages[0]
        // take the app tag in index 0..
        if (!auth_data) {
            throw new Error(`Cannot fetch auth data of app ${app}`);
        }
        // drop the app-tag segment; the rest is a path into the auth payload
        stages.shift();
        const auth = this.fetchOutputValueAfterStrippingLocators(stages, auth_data);
        return this.decorateValue(auth, sample[0]);
    }
    catch (e) {
        throw e;
    }
}
|
|
1220
|
+
/**
 * Fetches the decrypted auth payload for an app in the current process env.
 * Missing or expired auth values are (re)acquired via getAndStoreAuth before
 * decryption with the product's private key. All phases are logged.
 *
 * @param app_tag         product app tag
 * @param additional_logs extra log fields merged into every log entry
 * @returns the parsed (JSON) auth payload
 * @throws when the app/env/auth definition is missing, or decryption fails
 */
async fetchAuthData(app_tag, additional_logs) {
    try {
        this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetching auth data - initiated', data: { app_tag, env: this.processEnv.slug }, status: types_1.LogEventStatus.PROCESSING }));
        const app = this.productBuilderService.fetchApp(app_tag);
        if (!app) {
            throw new Error(`App ${app_tag} not found in $Auth value`);
        }
        const env = app.envs.find((item) => item.product_env_slug === this.processEnv.slug);
        if (!env) {
            throw new Error(`App ${app_tag} in auth needs to have a definition for env: ${this.processEnv.slug}`);
        }
        if (!env.auth) {
            throw new Error(`App ${app_tag} in auth needs to have a definition for auth in env: ${this.processEnv.slug}`);
        }
        let values = env.auth.values;
        if (!values) {
            // no auth values
            values = await this.getAndStoreAuth(env, app_tag);
        }
        // NOTE(review): when values AND expiry are both missing, getAndStoreAuth
        // may run twice back-to-back (once above, once here) — confirm whether
        // the first call is expected to populate env.auth.expiry.
        if (!env.auth.expiry || (env.auth.expiry && Date.now() > new Date(env.auth.expiry).getTime())) {
            // refresh
            values = await this.getAndStoreAuth(env, app_tag);
        }
        const decrypted = (0, processor_utils_1.decrypt)(values, this.productBuilderService.fetchProduct().private_key);
        this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch auth data - success', data: { auth: (0, processor_utils_1.anonymizeValue)(decrypted) }, status: types_1.LogEventStatus.SUCCESS }));
        return JSON.parse(decrypted);
    }
    catch (e) {
        this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetching auth data - failed', data: { e }, status: types_1.LogEventStatus.FAIL }));
        throw e;
    }
}
|
|
1252
|
+
/*async runFallback(event: IFallbackEvent) {
|
|
1253
|
+
|
|
1254
|
+
}
|
|
1255
|
+
|
|
1256
|
+
async runQuota(event: IQuotaEvent) {
|
|
1257
|
+
|
|
1258
|
+
}*/
|
|
1259
|
+
/**
 * Dispatches a single feature event to its type-specific runner (action,
 * DB action, storage, notification, broker publish, job).
 *
 * If this event already succeeded in the current processing output (same
 * sequence_tag + event), its cached output is returned without re-running.
 *
 * @param event feature event with at least { type, event, sequence_tag }
 * @returns the runner's result; undefined for unhandled types (e.g. FEATURE,
 *          which is not yet implemented)
 */
async processEvent(event) {
    try {
        // de-dupe: reuse the output of an already-successful identical event
        const success = this.processingOutput.success.find((data) => data.event.sequence_tag === event.sequence_tag && data.event.event === event.event);
        if (success) {
            return success.output;
        }
        /*if(event.type === FeatureEventTypes.QUOTA) {
            this.logService.add({
                ...this.baseLogs,
                child_tag: event.event,
                data: {event},
                message: "Processing Quota",
                status: LogEventStatus.PROCESSING
            })
            return this.runQuota(event as unknown as IQuotaEvent);
        }

        if(event.type === FeatureEventTypes.FALLBACK) {
            this.logService.add({
                ...this.baseLogs,
                child_tag: event.event,
                data: {event},
                message: "Processing Fallback",
                status: LogEventStatus.PROCESSING
            })
            return this.runFallback(event as unknown as IFallbackEvent);
        }*/
        if (event.type === types_1.FeatureEventTypes.ACTION) {
            const additional_logs = {
                parent_tag: (0, string_utils_1.extractOriginAndTag)(event.app),
                child_tag: event.event,
                type: types_1.LogEventTypes.ACTION,
                name: 'Process feature action',
            };
            return this.runAction(event, additional_logs);
        }
        if (event.type === types_1.FeatureEventTypes.DB_ACTION) {
            // DB events encode "parent:child" in the event name
            const [parent_tag, child_tag] = event.event.split(':');
            const additional_logs = {
                parent_tag,
                child_tag,
                type: types_1.LogEventTypes.DB_ACTION,
                name: 'Process feature database action',
            };
            return this.runDBAction(event, additional_logs);
        }
        if (event.type === types_1.FeatureEventTypes.STORAGE) {
            // keep a blob-free clone of the event for logging/serialization
            this.clone = (0, processor_utils_1.structuredClone)(event);
            (0, processor_utils_1.cleanBlob)(this.clone);
            const additional_logs = {
                parent_tag: event.event,
                type: types_1.LogEventTypes.STORAGE,
                name: 'Process feature storage',
            };
            return this.runStorage(event, additional_logs);
        }
        if (event.type === types_1.FeatureEventTypes.FEATURE) {
            // nested features are not handled yet; falls through (undefined)
            // this.processFeature({})
            // this.processFeature TODO: do some processing to get this to reuse this.processFeature from abov
        }
        if (event.type === types_1.FeatureEventTypes.NOTIFICATION) {
            const [parent_tag, child_tag] = event.event.split(':');
            const additional_logs = {
                parent_tag,
                child_tag,
                type: types_1.LogEventTypes.NOTIFICATIONS,
                name: 'Process feature notification',
            };
            return this.runNotification(event, additional_logs);
        }
        if (event.type === types_1.FeatureEventTypes.PUBLISH) {
            const [parent_tag, child_tag] = event.event.split(':');
            const additional_logs = {
                parent_tag,
                child_tag,
                type: types_1.LogEventTypes.MESSAGEBROKER,
                name: 'Process feature broker event',
            };
            try {
                console.log("GRENADYE ALASOOOO!");
                return this.runBrokerPublish(event, additional_logs);
            }
            catch (e) {
                // FIXME(review): this catch swallows the error (debug log only) and
                // returns undefined — broker publish failures are silently lost.
                // Note it also cannot catch async rejections since the promise is
                // returned, not awaited.
                console.log("GRENADYE ALASOOOO!!!!");
            }
        }
        if (event.type === types_1.FeatureEventTypes.JOB) {
            const additional_logs = {
                type: types_1.LogEventTypes.JOB,
                parent_tag: event.event,
                name: 'Process feature job',
            };
            return this.runJob(event, additional_logs);
        }
    }
    catch (e) {
        throw e;
    }
}
|
|
1358
|
+
// Placeholder: JOB events are dispatched here from processEvent, but the
// handler has not been implemented yet.
runJob(event, additional_logs) {
    throw new Error('Method not implemented.');
}
|
|
1361
|
+
// Intentionally empty hook — presumably a combined entry point for
// re-processing failed and waiting events (see processFailedEvents /
// processWaitingEvents used by resumeProcess). TODO confirm and implement.
async processFailedAndWaiting() { }
|
|
1362
|
+
/**
 * Fetches a stored process result and shapes it into the public
 * { process_id, status, data/errors } form.
 *
 * For FEATURE processes: on SUCCESS the feature's output template is
 * re-rendered via generatePayload against the stored result; on FAIL the
 * failure reasons are collected. (Other feature statuses fall through,
 * returning undefined.) For non-feature processes the first success output
 * or failure reason is returned directly.
 *
 * @param process_id id of a previously executed process
 * @throws when the id does not resolve to a stored result
 */
async generateOutput(process_id) {
    var _a, _b, _c, _d;
    const result = (await this.processorApiService.fetchResult(process_id, this.getUserAccess()));
    if (!result) {
        throw new Error(`Invalid process id ${process_id}`);
    }
    if (result.component === types_1.LogEventTypes.FEATURE) {
        const additional_logs = {
            parent_tag: result.input.tag,
            type: types_1.LogEventTypes.FEATURE,
            name: 'Fetching Process Result',
        };
        if (result.status === types_1.LogEventStatus.SUCCESS) {
            // rehydrate enough processor state to re-render the feature output
            this.productTag = result.input.product;
            await this.intializeProduct(additional_logs);
            this.processingOutput = result.result;
            this.process_id = process_id;
            this.input = result.input;
            this.feature = await this.fetchFeature(result.input.tag, additional_logs);
            const { input: featureInput, sequence, output } = this.feature;
            const data = await this.generatePayload(output, null, additional_logs, []);
            return { process_id, status: result.status, data };
        }
        else if (result.status === types_1.LogEventStatus.FAIL) {
            const errors = result.result.failure.map((data) => data.reason);
            return { process_id, status: result.status, errors };
        }
    }
    else {
        // non-feature component: surface the first success output or failure
        // reason, else just the status (the _a.._d locals are compiled
        // optional-chaining temporaries)
        if ((_a = result.result.success[0]) === null || _a === void 0 ? void 0 : _a.output) {
            return { process_id, status: result.status, data: (_b = result.result.success[0]) === null || _b === void 0 ? void 0 : _b.output };
        }
        else if ((_c = result.result.failure[0]) === null || _c === void 0 ? void 0 : _c.reason) {
            return { process_id, status: result.status, errors: [(_d = result.result.failure[0]) === null || _d === void 0 ? void 0 : _d.reason] };
        }
        else {
            return { process_id, status: result.status };
        }
    }
}
|
|
1402
|
+
/**
 * Resumes a previously stored process: rehydrates processor state from the
 * stored result, retries failed and waiting events, and — for FEATURE
 * processes — re-validates the input and re-runs the sequence levels.
 * Non-feature processes just have their final status written.
 *
 * @param process_id id of the process to resume
 * @returns { process_id }
 * @throws when the id is invalid or the target environment is inactive
 */
async resumeProcess(process_id) {
    const result = (await this.processorApiService.fetchResult(process_id, this.getUserAccess()));
    if (!result) {
        throw new Error(`Invalid process id ${process_id}`);
    }
    const additional_logs = {
        parent_tag: result.input.tag,
        type: result.component,
        name: 'Resume Process',
    };
    // rehydrate processor state from the stored result
    this.productTag = result.input.product;
    await this.intializeProduct(additional_logs);
    this.processingOutput = result.result;
    this.process_id = process_id;
    await this.processFailedEvents(additional_logs);
    await this.processWaitingEvents(additional_logs);
    this.input = result.input;
    this.start = Date.now();
    if (result.component === types_1.LogEventTypes.FEATURE) {
        this.feature = await this.fetchFeature(result.input.tag, additional_logs);
        const { input: featureInput, sequence, output } = this.feature;
        this.processEnv = this.fetchEnv(result.env, additional_logs);
        if (!this.processEnv.active) {
            throw new Error(`Environment ${result.env} is not active`);
        }
        // validate feature input and log failure
        this.validateJSONFeatureInput(result.input.input, featureInput, additional_logs);
        // split processes
        this.sequenceLevels = this.splitSequenceIntoLevels(sequence, additional_logs);
        await this.processSequenceLevels(additional_logs);
    }
    else {
        // non-feature process: record the final status after the retries above
        this.end = Date.now();
        let status = types_1.LogEventStatus.SUCCESS;
        if (this.processingOutput.failure.length > 0) {
            status = types_1.LogEventStatus.FAIL;
        }
        this.writeResult(status);
    }
    return { process_id };
}
|
|
1443
|
+
/**
 * Replays a stored process from scratch. FEATURE processes re-validate the
 * input and re-run all sequence levels; other components re-run the single
 * stored event (taken from failure, success, skipped or waiting — in that
 * order) and write a SUCCESS result.
 *
 * @param process_id id of the process to replay
 * @returns { process_id }
 * @throws when the id is invalid or the target environment is inactive
 */
async replayProcess(process_id) {
    var _a, _b, _c;
    const result = (await this.processorApiService.fetchResult(process_id, this.getUserAccess()));
    if (!result) {
        throw new Error(`Invalid process id ${process_id}`);
    }
    // rehydrate processor state from the stored result
    this.productTag = result.input.product;
    this.process_id = process_id;
    this.input = result.input;
    this.start = Date.now();
    this.component = result.component;
    const additional_logs = {
        parent_tag: result.input.tag,
        type: result.component,
        name: 'Replay Process',
    };
    await this.intializeProduct(additional_logs);
    //await this.processFailedEvents(additional_logs);
    if (result.component === types_1.LogEventTypes.FEATURE) {
        //await this.processWaitingEvents(additional_logs);
        this.feature = await this.fetchFeature(result.input.tag, additional_logs);
        const { input: featureInput, sequence, output } = this.feature;
        this.processEnv = this.fetchEnv(result.env, additional_logs);
        if (!this.processEnv.active) {
            throw new Error(`Environment ${result.env} is not active`);
        }
        // validate feature input and log failure
        this.validateJSONFeatureInput(result.input.input, featureInput, additional_logs);
        // split processes
        this.sequenceLevels = this.splitSequenceIntoLevels(sequence, additional_logs);
        await this.processSequenceLevels(additional_logs);
    }
    else {
        // pick the stored event to replay (the _a.._c locals are compiled
        // optional-chaining temporaries)
        // NOTE(review): the final fallback dereferences waiting[0].event
        // unguarded and will throw if every bucket is empty — confirm a stored
        // result always has at least one event.
        const event = ((_a = result.result.failure[0]) === null || _a === void 0 ? void 0 : _a.event) ||
            ((_b = result.result.success[0]) === null || _b === void 0 ? void 0 : _b.event) ||
            ((_c = result.result.skipped[0]) === null || _c === void 0 ? void 0 : _c.event) ||
            result.result.waiting[0].event;
        this.processEnv = this.fetchEnv(result.env, additional_logs);
        this.start = Date.now();
        await this.processEvent(event);
        this.end = Date.now();
        this.writeResult(types_1.LogEventStatus.SUCCESS);
        this.logService.publish();
    }
    return { process_id };
}
|
|
1489
|
+
fetchLevelEvents(level) {
|
|
1490
|
+
const events = [];
|
|
1491
|
+
for (let i = 0; i < level.length; i++) {
|
|
1492
|
+
events.push(...this.appendSequenceDataToLevelEvents(level[i]));
|
|
1493
|
+
}
|
|
1494
|
+
return events;
|
|
1495
|
+
}
|
|
1496
|
+
appendSequenceDataToLevelEvents(sequence) {
|
|
1497
|
+
const { events, level, tag } = sequence;
|
|
1498
|
+
for (let i = 0; i < events.length; i++) {
|
|
1499
|
+
events[i].sequence_level = level;
|
|
1500
|
+
events[i].sequence_tag = tag;
|
|
1501
|
+
events[i].env = this.processEnv;
|
|
1502
|
+
}
|
|
1503
|
+
return events;
|
|
1504
|
+
}
|
|
1505
|
+
/**
 * Acquires auth values for an app environment. For CREDENTIALS-type auth it
 * executes the app's configured auth action against the env's base URL,
 * encrypts the response with the product private key, stores the encrypted
 * values and a new expiry on the matching product app env, and persists via
 * updateApp. For other auth types the stored values are returned as-is.
 *
 * @param appEnv     product-side app environment (carries auth config)
 * @param access_tag the app's access tag
 * @returns the (encrypted) auth values
 * @throws when the referenced auth definition does not exist
 */
async getAndStoreAuth(appEnv, access_tag) {
    try {
        // const payload = JSON.parse(decrypt(env.auth.data, this.productBuilderService.fetchProduct().private_key));
        const payload = appEnv.auth.data;
        let app = await this.fetchThirdPartyApp(access_tag);
        const auth = app.auths.find((item) => item.tag === appEnv.auth.auth_tag);
        if (!auth) {
            // throw an error
            throw new Error(`Cannot find auth ${appEnv.auth.auth_tag} on environment ${appEnv.product_env_slug}`);
        }
        if (auth.setup_type === types_1.AuthTypes.CREDENTIALS) {
            const { action_tag, expiry, period } = auth;
            const action = app.actions.find((item) => item.tag === action_tag);
            const { envs: appEnvs } = app;
            const env = appEnvs.find((item) => item.slug === appEnv.app_env_slug); // fetch the actuall app Environment variable
            const { method, base_url, resource: url } = action;
            // an env-specific base URL overrides the action's default
            let request_base_url = base_url;
            if (env.base_url) {
                request_base_url = env.base_url;
            }
            const results = await this.sendActionRequest(request_base_url, url, payload, method, appEnv.app_env_slug);
            const values = (0, processor_utils_1.encrypt)(JSON.stringify(results), this.productBuilderService.fetchProduct().private_key);
            const productApp = this.productBuilderService.fetchApp(access_tag);
            for (let i = 0; i < productApp.envs.length; i++) {
                if (productApp.envs[i].app_env_slug === env.slug) {
                    productApp.envs[i].auth.values = values; // write new values
                    productApp.envs[i].auth.expiry = (0, processor_utils_1.calculateExpiry)(expiry, period); // write new expiry
                }
            }
            await this.productBuilderService.updateApp(access_tag, productApp); // stores auth values
            return values;
        }
        else {
            return appEnv.auth.values;
        }
    }
    catch (e) {
        throw e;
    }
}
|
|
1545
|
+
/**
 * Resolves a third-party app by access tag, with an in-memory cache
 * (this.apps). On a miss the app is fetched from the product builder
 * service and cached. Returns the app version's definition merged with
 * workspace_id, _id and version.
 *
 * @param access_tag the app's access tag
 * @returns { ...appVersion, workspace_id, _id, version }
 */
async fetchThirdPartyApp(access_tag) {
    try {
        let details = this.apps.find((item) => item.access_tag === access_tag);
        let version;
        let workspace_id;
        let _id;
        if (!details) {
            // cache miss: fetch and split off `version` (the __rest helper is the
            // compiled object-rest spread)
            const _a = await this.productBuilderService.fetchThirdPartyAppByAccessTag(access_tag), { version: appVersion } = _a, app = __rest(_a, ["version"]);
            version = appVersion;
            workspace_id = app.workspace_id;
            _id = app._id;
            details = { access_tag, app, version, workspace_id: app.workspace_id, _id: app._id };
            this.apps.push(details);
        }
        else {
            // cache hit: reuse the stored identifiers
            version = details.version;
            workspace_id = details.workspace_id;
            _id = details._id;
        }
        const appVersion = details.app.versions.find((data) => data.tag === version);
        return Object.assign(Object.assign({}, appVersion), { workspace_id, _id, version });
    }
    catch (e) {
        throw e;
    }
}
|
|
1571
|
+
/**
 * Evaluates an event's CHECK condition ("initiator operator value"). Both
 * sides may be `$`-placeholders resolved via generateStringValues. When the
 * comparison fails, the event is pushed onto the skipped queue and false is
 * returned; when it passes, true is returned. Every phase is logged.
 *
 * @param event event carrying condition.check
 * @returns true when the condition passes, false when the event was skipped
 */
async processConditionalCheck(event, additional_logs = {}) {
    const parts = (0, string_utils_1.extractConditionalParts)(event.condition.check);
    this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Check conditional - initiated', data: { condition: event.condition, parts }, status: types_1.LogEventStatus.PROCESSING, action: event.event }));
    const [, initiator, operator, value] = parts;
    const initiatorValue = await this.generateStringValues(initiator, event.app, additional_logs);
    let valueValue;
    if (!(0, string_utils_1.isNumeric)(value)) {
        if (value.startsWith('$')) {
            valueValue = await this.generateStringValues(value, event.app, additional_logs);
        }
        // NOTE(review): a non-numeric literal that does not start with '$'
        // leaves valueValue undefined — confirm that is intended.
    }
    else {
        valueValue = value;
    }
    this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Validate check conditional - initiated', data: { condition: { initiatorValue, operator, valueValue } }, status: types_1.LogEventStatus.PROCESSING, action: event.event }));
    const pass = (0, processor_utils_1.compareValues)(initiatorValue, operator, valueValue);
    this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Validate check condition - success', data: { condition: { initiatorValue, operator, valueValue }, pass }, status: types_1.LogEventStatus.SUCCESS, action: event.event }));
    if (!pass) {
        this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Validate check condition - skipped', data: { condition: { initiatorValue, operator, valueValue }, pass }, status: types_1.LogEventStatus.SUCCESS, action: event.event }));
        // add to skipped queue
        this.processingOutput.skipped.push({
            allow_fail: false,
            retry_at: 0,
            error_code: 0,
            reason: `check condition "${initiatorValue} ${operator} ${valueValue} failed, event skipped`,
            payload: {
                query: {},
                params: {},
                body: {},
                headers: {},
            },
            event: event,
        });
        return false;
    }
    return true;
}
|
|
1608
|
+
async extractLoopIndexes(event, additional_logs = {}) {
|
|
1609
|
+
const parts = (0, string_utils_1.extractConditionalParts)(event.condition.check);
|
|
1610
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Loop conditional - initiated', data: { condition: event.condition, parts }, status: types_1.LogEventStatus.PROCESSING, action: event.event }));
|
|
1611
|
+
const [, initiator, operator, value] = parts;
|
|
1612
|
+
let valueValue;
|
|
1613
|
+
if (!(0, string_utils_1.isNumeric)(value)) {
|
|
1614
|
+
if (value.startsWith('$')) {
|
|
1615
|
+
valueValue = await this.generateStringValues(value, event.app, additional_logs);
|
|
1616
|
+
}
|
|
1617
|
+
}
|
|
1618
|
+
else {
|
|
1619
|
+
valueValue = value;
|
|
1620
|
+
}
|
|
1621
|
+
const init = event.condition.init || 0;
|
|
1622
|
+
const iter = event.condition.iter || 1;
|
|
1623
|
+
// generate indexes
|
|
1624
|
+
return (0, processor_utils_1.generateIndexes)(operator, iter, init, valueValue);
|
|
1625
|
+
}
|
|
1626
|
+
async runAction(event, additional_logs, returnValue = false) {
|
|
1627
|
+
try {
|
|
1628
|
+
const { event: action_tag, app: access_tag, condition, cache: cache_tag } = event;
|
|
1629
|
+
let indexes = [];
|
|
1630
|
+
if (condition &&
|
|
1631
|
+
condition.type === types_1.Conditions.CHECK &&
|
|
1632
|
+
(await this.processConditionalCheck(event, additional_logs))) {
|
|
1633
|
+
// if it fails, it would add to skipped queue
|
|
1634
|
+
return;
|
|
1635
|
+
}
|
|
1636
|
+
if (condition && condition.type === types_1.Conditions.LOOP) {
|
|
1637
|
+
indexes = await this.extractLoopIndexes(event, additional_logs);
|
|
1638
|
+
}
|
|
1639
|
+
let app = await this.fetchThirdPartyApp(access_tag);
|
|
1640
|
+
const { actions, envs: appEnvs, retries, workspace_id: recipient_workspace_id, active } = app;
|
|
1641
|
+
const productApp = this.productBuilderService.fetchApp(access_tag);
|
|
1642
|
+
const { envs: productEnvs, version } = productApp;
|
|
1643
|
+
const { app_env_slug } = productEnvs.find((item) => item.product_env_slug === this.processEnv.slug);
|
|
1644
|
+
additional_logs.app_env = app_env_slug;
|
|
1645
|
+
const env = appEnvs.find((item) => item.slug === app_env_slug); // fetch the actuall app Environment variable
|
|
1646
|
+
if (!active) {
|
|
1647
|
+
throw new Error(`App ${event.app} version ${app.version} is not active`);
|
|
1648
|
+
}
|
|
1649
|
+
if (!env.active) {
|
|
1650
|
+
throw new Error(`Action environment ${app_env_slug} is not active`);
|
|
1651
|
+
}
|
|
1652
|
+
const action = actions.find((item) => item.tag === action_tag);
|
|
1653
|
+
if (!action) {
|
|
1654
|
+
throw new Error(`Action ${action_tag} not found in ${access_tag}`);
|
|
1655
|
+
}
|
|
1656
|
+
const { query, headers, body, params, request_type, method, base_url, resource } = action;
|
|
1657
|
+
let request_base_url = base_url;
|
|
1658
|
+
if (env.base_url) {
|
|
1659
|
+
request_base_url = env.base_url;
|
|
1660
|
+
}
|
|
1661
|
+
const samples = {
|
|
1662
|
+
query: (query === null || query === void 0 ? void 0 : query.data) || [],
|
|
1663
|
+
headers: (headers === null || headers === void 0 ? void 0 : headers.data) || [],
|
|
1664
|
+
body: (body === null || body === void 0 ? void 0 : body.data) || [],
|
|
1665
|
+
params: (params === null || params === void 0 ? void 0 : params.data) || [],
|
|
1666
|
+
};
|
|
1667
|
+
let payloads;
|
|
1668
|
+
let result;
|
|
1669
|
+
const product = this.productBuilderService.fetchProduct();
|
|
1670
|
+
if (cache_tag && this.redisClient) {
|
|
1671
|
+
const productCache = this.productBuilderService.fetchCache(cache_tag);
|
|
1672
|
+
if (!productCache) {
|
|
1673
|
+
throw new Error('Invalid cache tag ');
|
|
1674
|
+
}
|
|
1675
|
+
const inputString = JSON.stringify(event.input);
|
|
1676
|
+
const check = await this.fetchFromCache({
|
|
1677
|
+
cache_tag,
|
|
1678
|
+
input: inputString,
|
|
1679
|
+
privateKey: product.private_key,
|
|
1680
|
+
expiry: productCache.expiry,
|
|
1681
|
+
});
|
|
1682
|
+
if (check) {
|
|
1683
|
+
result = JSON.parse(check);
|
|
1684
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Run action - return from cache', data: { result }, status: types_1.LogEventStatus.PROCESSING }));
|
|
1685
|
+
if (returnValue) {
|
|
1686
|
+
return result;
|
|
1687
|
+
}
|
|
1688
|
+
}
|
|
1689
|
+
}
|
|
1690
|
+
if (request_type === types_1.DataFormats.JSON || !request_type) {
|
|
1691
|
+
if (indexes.length == 0) {
|
|
1692
|
+
payloads = await this.constructJSONDataPayloads(event.input, additional_logs, samples, event);
|
|
1693
|
+
additional_logs.recipient_workspace_id = recipient_workspace_id;
|
|
1694
|
+
result = await this.processRequest({ request_base_url, resource, method, env, payloads, app_id: app._id }, event, retries, additional_logs, returnValue);
|
|
1695
|
+
}
|
|
1696
|
+
else {
|
|
1697
|
+
const promises = indexes.map(async (index) => {
|
|
1698
|
+
payloads = await this.constructJSONDataPayloads(event.input, additional_logs, samples, event, index);
|
|
1699
|
+
additional_logs.recipient_workspace_id = recipient_workspace_id;
|
|
1700
|
+
await this.processRequest({ request_base_url, resource, method, env, payloads, app_id: app._id }, event, retries, additional_logs, returnValue);
|
|
1701
|
+
});
|
|
1702
|
+
result = await Promise.all(promises);
|
|
1703
|
+
}
|
|
1704
|
+
}
|
|
1705
|
+
if (cache_tag && this.redisClient && result) {
|
|
1706
|
+
const productCache = this.productBuilderService.fetchCache(cache_tag);
|
|
1707
|
+
if (!productCache) {
|
|
1708
|
+
throw new Error('Invalid cache tag ');
|
|
1709
|
+
}
|
|
1710
|
+
const inputString = JSON.stringify(event.input);
|
|
1711
|
+
await this.addToCache({
|
|
1712
|
+
input: inputString,
|
|
1713
|
+
privateKey: product.private_key,
|
|
1714
|
+
data: JSON.stringify(result),
|
|
1715
|
+
cache_tag,
|
|
1716
|
+
timestamp: Date.now(),
|
|
1717
|
+
component_tag: action_tag,
|
|
1718
|
+
component_type: types_1.ProductComponents.ACTION,
|
|
1719
|
+
product_tag: this.productTag,
|
|
1720
|
+
});
|
|
1721
|
+
}
|
|
1722
|
+
if (result && returnValue) {
|
|
1723
|
+
return result;
|
|
1724
|
+
}
|
|
1725
|
+
}
|
|
1726
|
+
catch (e) {
|
|
1727
|
+
console.log(e);
|
|
1728
|
+
//throw e;
|
|
1729
|
+
}
|
|
1730
|
+
}
|
|
1731
|
+
async processRequest(payload, event, retries, additional_logs, returnValue = false) {
|
|
1732
|
+
const { request_base_url, resource, payloads, method, env, app_id } = payload;
|
|
1733
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Process http request - initiated', data: { request: (0, processor_utils_1.anonymizeObject)(payload) }, status: types_1.LogEventStatus.PROCESSING, app_id, action: event.event }));
|
|
1734
|
+
const start = Date.now();
|
|
1735
|
+
try {
|
|
1736
|
+
const results = await this.sendActionRequest(request_base_url, resource, payloads, method, env.slug);
|
|
1737
|
+
const end = Date.now();
|
|
1738
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Process http request - success', successful_execution: true, data: { response: (0, processor_utils_1.anonymizeObject)(results) }, status: types_1.LogEventStatus.SUCCESS, app_id, action: event.event, start,
|
|
1739
|
+
end }));
|
|
1740
|
+
await this.addToSuccessOutput(event, results, additional_logs);
|
|
1741
|
+
if (returnValue) {
|
|
1742
|
+
return { process_id: this.process_id, status: true, data: results };
|
|
1743
|
+
}
|
|
1744
|
+
else {
|
|
1745
|
+
return true;
|
|
1746
|
+
}
|
|
1747
|
+
}
|
|
1748
|
+
catch (e) {
|
|
1749
|
+
const end = Date.now();
|
|
1750
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Process http request - failed', failed_execution: true, data: { e }, status: types_1.LogEventStatus.FAIL, app_id, action: event.event, start,
|
|
1751
|
+
end }));
|
|
1752
|
+
try {
|
|
1753
|
+
const value = this.addToFailureOutput(e, event, {
|
|
1754
|
+
request_base_url,
|
|
1755
|
+
resource,
|
|
1756
|
+
method,
|
|
1757
|
+
env,
|
|
1758
|
+
payloads,
|
|
1759
|
+
app_id,
|
|
1760
|
+
}, additional_logs, retries);
|
|
1761
|
+
//if (returnValue) {
|
|
1762
|
+
return {
|
|
1763
|
+
process_id: this.process_id,
|
|
1764
|
+
};
|
|
1765
|
+
/*} else {
|
|
1766
|
+
throw e;
|
|
1767
|
+
}*/
|
|
1768
|
+
}
|
|
1769
|
+
catch (err) {
|
|
1770
|
+
throw err;
|
|
1771
|
+
}
|
|
1772
|
+
}
|
|
1773
|
+
}
|
|
1774
|
+
async addToSuccessOutput(event, output, additional_logs) {
|
|
1775
|
+
// Remove event from failed, skipped, and waiting arrays
|
|
1776
|
+
this.processingOutput.failure = this.processingOutput.failure.filter((data) => !(data.event.sequence_tag === event.sequence_tag && data.event.event === event.event));
|
|
1777
|
+
this.processingOutput.skipped = this.processingOutput.skipped.filter((data) => !(data.event.sequence_tag === event.sequence_tag && data.event.event === event.event));
|
|
1778
|
+
this.processingOutput.waiting = this.processingOutput.waiting.filter((data) => !(data.event.sequence_tag === event.sequence_tag && data.event.event === event.event));
|
|
1779
|
+
this.processingOutput.success.push({ event, output });
|
|
1780
|
+
await this.processWaitingEvents(additional_logs);
|
|
1781
|
+
}
|
|
1782
|
+
addToWaitingOutput(event, dependants) {
|
|
1783
|
+
const exists = this.processingOutput.waiting.findIndex((item) => {
|
|
1784
|
+
return item.event.event === event.event && item.event.sequence_tag === event.sequence_tag;
|
|
1785
|
+
});
|
|
1786
|
+
const skipped = this.processingOutput.skipped.findIndex((item) => {
|
|
1787
|
+
return item.event.event === event.event && item.event.sequence_tag === event.sequence_tag;
|
|
1788
|
+
});
|
|
1789
|
+
if (!exists && !skipped) {
|
|
1790
|
+
this.processingOutput.waiting.push({ event, dependants });
|
|
1791
|
+
}
|
|
1792
|
+
// addToSkippedOutput()
|
|
1793
|
+
}
|
|
1794
|
+
addToFailureOutput(e, event, payload, additional_logs, policy = {}) {
|
|
1795
|
+
try {
|
|
1796
|
+
const exists = this.processingOutput.failure.findIndex((item) => {
|
|
1797
|
+
if (item.event.sequence_tag) {
|
|
1798
|
+
return item.event.event === event.event && item.event.sequence_tag === event.sequence_tag;
|
|
1799
|
+
}
|
|
1800
|
+
else {
|
|
1801
|
+
return item.event.event === event.event;
|
|
1802
|
+
}
|
|
1803
|
+
});
|
|
1804
|
+
let error_code = '1000';
|
|
1805
|
+
let retry_at = 500;
|
|
1806
|
+
let max = event.retries || 0;
|
|
1807
|
+
if (event.type === types_1.FeatureEventTypes.ACTION) {
|
|
1808
|
+
e.response && e.response.status ? e.response.status : '500';
|
|
1809
|
+
const metrices = this.generateRetryMetrices(String(error_code), policy);
|
|
1810
|
+
retry_at = metrices.retry_at;
|
|
1811
|
+
max = metrices.max;
|
|
1812
|
+
}
|
|
1813
|
+
const { allow_fail, retries } = event;
|
|
1814
|
+
let retries_left = retries || max;
|
|
1815
|
+
if (exists > -1) {
|
|
1816
|
+
retries_left = this.processingOutput.failure[exists].retries_left - 1;
|
|
1817
|
+
this.processingOutput.failure.splice(exists, 1);
|
|
1818
|
+
}
|
|
1819
|
+
let reason = JSON.stringify((0, processor_utils_1.extractAxiosErrorDetails)(e));
|
|
1820
|
+
if (event.type === types_1.FeatureEventTypes.ACTION) {
|
|
1821
|
+
reason = JSON.stringify((0, processor_utils_1.extractAxiosErrorDetails)(e));
|
|
1822
|
+
}
|
|
1823
|
+
const output = {
|
|
1824
|
+
allow_fail,
|
|
1825
|
+
retry_at,
|
|
1826
|
+
retries_left,
|
|
1827
|
+
payload: payload.payloads || event.input,
|
|
1828
|
+
error_code,
|
|
1829
|
+
reason,
|
|
1830
|
+
event,
|
|
1831
|
+
};
|
|
1832
|
+
if (allow_fail === true && retries === 0) {
|
|
1833
|
+
this.processingOutput.skipped.push(output);
|
|
1834
|
+
}
|
|
1835
|
+
else {
|
|
1836
|
+
this.processingOutput.failure.push(output);
|
|
1837
|
+
}
|
|
1838
|
+
if (retries_left > 0) {
|
|
1839
|
+
setTimeout(() => {
|
|
1840
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Retrying Request', data: Object.assign(Object.assign({}, output), { payload: (0, processor_utils_1.anonymizeObject)(output.payload) }), status: types_1.LogEventStatus.PROCESSING }));
|
|
1841
|
+
if (event.type === types_1.FeatureEventTypes.ACTION) {
|
|
1842
|
+
this.processRequest(payload, event, policy, additional_logs);
|
|
1843
|
+
}
|
|
1844
|
+
if (event.type === types_1.FeatureEventTypes.DB_ACTION) {
|
|
1845
|
+
this.processDBRequest(event, event.input, payload.database_tag, payload.databaseEnv, payload.action_tag, additional_logs);
|
|
1846
|
+
}
|
|
1847
|
+
if (event.type === types_1.FeatureEventTypes.STORAGE) {
|
|
1848
|
+
this.processStorageRequest(event, event.input, payload.storageEnv, additional_logs);
|
|
1849
|
+
}
|
|
1850
|
+
}, retry_at);
|
|
1851
|
+
}
|
|
1852
|
+
if (allow_fail === false && retries_left === 0) {
|
|
1853
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Ran out of retries - failed', data: Object.assign(Object.assign({}, output), { payload: (0, processor_utils_1.anonymizeObject)(output.payload) }), status: types_1.LogEventStatus.FAIL }));
|
|
1854
|
+
//throw new Error("Run out of retries")
|
|
1855
|
+
this.end = Date.now();
|
|
1856
|
+
this.writeResult(types_1.LogEventStatus.FAIL);
|
|
1857
|
+
this.logService.publish();
|
|
1858
|
+
}
|
|
1859
|
+
return output;
|
|
1860
|
+
}
|
|
1861
|
+
catch (e) {
|
|
1862
|
+
throw e;
|
|
1863
|
+
}
|
|
1864
|
+
}
|
|
1865
|
+
generateRetryMetrices(error_code, retries) {
|
|
1866
|
+
var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r;
|
|
1867
|
+
let allow_fail = true;
|
|
1868
|
+
let retry_at = 5000;
|
|
1869
|
+
let max = (retries === null || retries === void 0 ? void 0 : retries.max) || 0;
|
|
1870
|
+
switch (error_code) {
|
|
1871
|
+
case '500':
|
|
1872
|
+
allow_fail = ((_a = retries === null || retries === void 0 ? void 0 : retries.policy[500]) === null || _a === void 0 ? void 0 : _a.available) || false;
|
|
1873
|
+
retry_at = ((_b = retries === null || retries === void 0 ? void 0 : retries.policy[500]) === null || _b === void 0 ? void 0 : _b.lag) || 0;
|
|
1874
|
+
break;
|
|
1875
|
+
case '502':
|
|
1876
|
+
allow_fail = ((_c = retries === null || retries === void 0 ? void 0 : retries.policy[502]) === null || _c === void 0 ? void 0 : _c.available) || false;
|
|
1877
|
+
retry_at = ((_d = retries === null || retries === void 0 ? void 0 : retries.policy[502]) === null || _d === void 0 ? void 0 : _d.lag) || 0;
|
|
1878
|
+
break;
|
|
1879
|
+
case '503':
|
|
1880
|
+
allow_fail = ((_e = retries === null || retries === void 0 ? void 0 : retries.policy[503]) === null || _e === void 0 ? void 0 : _e.available) || false;
|
|
1881
|
+
retry_at = ((_f = retries === null || retries === void 0 ? void 0 : retries.policy[503]) === null || _f === void 0 ? void 0 : _f.lag) || 0;
|
|
1882
|
+
break;
|
|
1883
|
+
case '504':
|
|
1884
|
+
allow_fail = ((_g = retries === null || retries === void 0 ? void 0 : retries.policy[504]) === null || _g === void 0 ? void 0 : _g.available) || false;
|
|
1885
|
+
retry_at = ((_h = retries === null || retries === void 0 ? void 0 : retries.policy[504]) === null || _h === void 0 ? void 0 : _h.lag) || 0;
|
|
1886
|
+
break;
|
|
1887
|
+
case '400':
|
|
1888
|
+
allow_fail = ((_j = retries === null || retries === void 0 ? void 0 : retries.policy[400]) === null || _j === void 0 ? void 0 : _j.available) || false;
|
|
1889
|
+
retry_at = ((_k = retries === null || retries === void 0 ? void 0 : retries.policy[400]) === null || _k === void 0 ? void 0 : _k.lag) || 0;
|
|
1890
|
+
break;
|
|
1891
|
+
case '401':
|
|
1892
|
+
allow_fail = ((_l = retries === null || retries === void 0 ? void 0 : retries.policy[401]) === null || _l === void 0 ? void 0 : _l.available) || false;
|
|
1893
|
+
retry_at = ((_m = retries === null || retries === void 0 ? void 0 : retries.policy[401]) === null || _m === void 0 ? void 0 : _m.lag) || 0;
|
|
1894
|
+
break;
|
|
1895
|
+
case '403':
|
|
1896
|
+
allow_fail = ((_o = retries === null || retries === void 0 ? void 0 : retries.policy[403]) === null || _o === void 0 ? void 0 : _o.available) || false;
|
|
1897
|
+
retry_at = ((_p = retries === null || retries === void 0 ? void 0 : retries.policy[403]) === null || _p === void 0 ? void 0 : _p.lag) || 0;
|
|
1898
|
+
break;
|
|
1899
|
+
case '404':
|
|
1900
|
+
allow_fail = ((_q = retries === null || retries === void 0 ? void 0 : retries.policy[404]) === null || _q === void 0 ? void 0 : _q.available) || false;
|
|
1901
|
+
retry_at = ((_r = retries === null || retries === void 0 ? void 0 : retries.policy[404]) === null || _r === void 0 ? void 0 : _r.lag) || 0;
|
|
1902
|
+
break;
|
|
1903
|
+
case '1000': // all non http errors
|
|
1904
|
+
allow_fail = true;
|
|
1905
|
+
retry_at = 500;
|
|
1906
|
+
default:
|
|
1907
|
+
allow_fail = true;
|
|
1908
|
+
retry_at = 0;
|
|
1909
|
+
max = 0;
|
|
1910
|
+
break;
|
|
1911
|
+
}
|
|
1912
|
+
return { allow_fail, max, retry_at };
|
|
1913
|
+
}
|
|
1914
|
+
async sendActionRequest(base_url, resource, payload, method, env) {
|
|
1915
|
+
try {
|
|
1916
|
+
const { headers, query, body, params } = payload;
|
|
1917
|
+
if (params) {
|
|
1918
|
+
const paramsKeys = Object.keys(params);
|
|
1919
|
+
for (let i = 0; i < paramsKeys.length; i++) {
|
|
1920
|
+
resource = (0, processor_utils_1.parameterizeResource)(resource, paramsKeys[i], params[paramsKeys[i]]);
|
|
1921
|
+
}
|
|
1922
|
+
}
|
|
1923
|
+
const authHeaders = headers;
|
|
1924
|
+
const request = {
|
|
1925
|
+
url: resource,
|
|
1926
|
+
method,
|
|
1927
|
+
data: body,
|
|
1928
|
+
params: query,
|
|
1929
|
+
headers: authHeaders,
|
|
1930
|
+
};
|
|
1931
|
+
if (!base_url.endsWith('/') && !resource.startsWith('/')) {
|
|
1932
|
+
base_url = base_url + '/';
|
|
1933
|
+
}
|
|
1934
|
+
const response = await (0, http_client_1.default)(base_url).request(request);
|
|
1935
|
+
return response.data;
|
|
1936
|
+
}
|
|
1937
|
+
catch (e) {
|
|
1938
|
+
throw e;
|
|
1939
|
+
}
|
|
1940
|
+
}
|
|
1941
|
+
/**
 * Entry point for a storage (file upload) event.
 *
 * Validates the input mapping, initialises per-run processor state
 * (process_id, base logs, clone of the input), resolves and checks the
 * product environment, then delegates to `runStorage`.
 *
 * @param action  Storage request: `{ env, input, retries, event, product, cache }`;
 *                `input.buffer` is expected to hold the file bytes (its Blob
 *                size is logged below).
 * @returns The `runStorage` result on success; `{ process_id }` on failure
 *          (errors are logged and swallowed, not rethrown).
 */
async processStorage(action) {
    //TODO: schema validation
    const { env, input, retries, event, product: product_tag } = action;
    const additional_logs = {
        parent_tag: event,
        type: types_1.LogEventTypes.STORAGE,
        name: 'Process Storage',
    };
    try {
        this.validateActionDataMappingInput(input, types_1.FeatureEventTypes.STORAGE);
        // Per-run processor state used by logging and result writing.
        this.input = action;
        this.start = Date.now();
        this.component = types_1.LogEventTypes.STORAGE;
        // clone the input for logging; cleanBlob strips the raw file bytes so
        // they are not written into log entries
        this.clone = (0, processor_utils_1.structuredClone)(input);
        (0, processor_utils_1.cleanBlob)(this.clone);
        this.productTag = product_tag;
        const process_id = (0, processor_utils_1.generateObjectId)();
        this.baseLogs = Object.assign({ product_tag: this.productTag, product_id: this.productId, workspace_id: this.workspace_id, env,
            process_id, data: this.clone }, additional_logs);
        await this.intializeProduct(additional_logs);
        this.process_id = process_id;
        // Resolve the product environment and refuse to run against an
        // inactive one.
        const productEnv = this.fetchEnv(env, additional_logs);
        this.processEnv = productEnv;
        if (!productEnv.active) {
            throw new Error(`Environment ${env} is not active`);
        }
        // `storage` records the upload size in bytes.
        this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Storing file - initiated', status: types_1.LogEventStatus.PROCESSING, storage: new Blob([action.input.buffer]).size }));
        const payload = {
            type: types_1.FeatureEventTypes.STORAGE,
            event,
            cache: action.cache,
            input,
            env: productEnv,
            retries: retries || 0,
            allow_fail: false,
        };
        const result = await this.runStorage(payload);
        this.end = Date.now();
        this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Storing file - success', data: { input: this.clone, result }, status: types_1.LogEventStatus.SUCCESS }));
        this.writeResult(types_1.LogEventStatus.SUCCESS);
        this.logService.publish();
        return result;
    }
    catch (e) {
        // Failures are reported through the log service; the caller only
        // receives the process id, not the error.
        this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Storing file - failed', data: { e }, status: types_1.LogEventStatus.FAIL }));
        this.end = Date.now();
        this.logService.publish();
        return { process_id: this.process_id };
    }
}
|
|
1992
|
+
/**
 * Entry point for subscribing to a message-broker topic.
 *
 * The event is addressed as "broker_tag:topic_tag". Initialises per-run
 * processor state, resolves and checks the product environment, then
 * delegates to `runBrokerSubscribe`.
 *
 * @param data  Subscribe request: `{ event, input, env, product }`.
 * @returns The `runBrokerSubscribe` result on success; `{ process_id }` on
 *          failure (errors are logged and swallowed, not rethrown).
 * @throws Error before any logging when `event` is not "broker_tag:topic_tag".
 */
async processMessageBrokerSubscribe(data) {
    const [brokerTag, topicTag] = data.event.split(':');
    if (!brokerTag || !topicTag) {
        throw new Error(`message broker events should be in the format broker_tag:event_tag`);
    }
    const additional_logs = {
        parent_tag: brokerTag,
        child_tag: topicTag,
        type: types_1.LogEventTypes.MESSAGEBROKER,
        name: `Subscribe to broker topic`,
    };
    try {
        // NOTE(review): validates with FeatureEventTypes.STORAGE although the
        // payload built below uses SUBSCRIBE — looks like a copy-paste slip
        // from processStorage; confirm the intended validation type.
        this.validateActionDataMappingInput(data.input, types_1.FeatureEventTypes.STORAGE);
        this.start = Date.now();
        this.productTag = data.product;
        const process_id = (0, processor_utils_1.generateObjectId)();
        this.input = data;
        this.baseLogs = Object.assign({ product_tag: this.productTag, product_id: this.productId, workspace_id: this.workspace_id, env: data.env, process_id, data: data.input }, additional_logs);
        await this.intializeProduct(additional_logs);
        // product_id only becomes available after intializeProduct; refresh it.
        this.baseLogs.product_id = this.productId;
        this.process_id = process_id;
        const productEnv = this.fetchEnv(data.env, additional_logs);
        this.processEnv = productEnv;
        if (!productEnv.active) {
            throw new Error(`Environment ${data.env} is not active`);
        }
        this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Subscribe to topic - initiated', status: types_1.LogEventStatus.PROCESSING }));
        const payload = {
            type: types_1.FeatureEventTypes.SUBSCRIBE,
            event: data.event,
            input: data.input,
            env: productEnv,
            retries: 0,
            allow_fail: false,
        };
        const result = await this.runBrokerSubscribe(payload);
        this.end = Date.now();
        // NOTE(review): logs `this.clone`, which this method never sets — it
        // will hold whatever a previous run left behind; confirm intent.
        this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Subscribe to topic - success', data: { input: this.clone, result }, status: types_1.LogEventStatus.SUCCESS }));
        this.writeResult(types_1.LogEventStatus.SUCCESS);
        this.logService.publish();
        return result;
    }
    catch (e) {
        this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Subscribe to topic - failed', data: { e }, status: types_1.LogEventStatus.FAIL }));
        this.end = Date.now();
        this.logService.publish();
        return { process_id: this.process_id };
    }
}
|
|
2041
|
+
async processMessageBrokerPublish(data) {
|
|
2042
|
+
const [brokerTag, topicTag] = data.event.split(':');
|
|
2043
|
+
if (!brokerTag || !topicTag) {
|
|
2044
|
+
throw new Error(`message broker events should be in the format broker_tag:event_tag`);
|
|
2045
|
+
}
|
|
2046
|
+
const additional_logs = {
|
|
2047
|
+
parent_tag: brokerTag,
|
|
2048
|
+
child_tag: topicTag,
|
|
2049
|
+
type: types_1.LogEventTypes.MESSAGEBROKER,
|
|
2050
|
+
name: 'Publish to broker topic',
|
|
2051
|
+
};
|
|
2052
|
+
try {
|
|
2053
|
+
this.validateActionDataMappingInput(data.input, types_1.FeatureEventTypes.PUBLISH);
|
|
2054
|
+
console.log("JAPANESE MIRRORS", data.input);
|
|
2055
|
+
this.start = Date.now();
|
|
2056
|
+
// clone
|
|
2057
|
+
this.clone = (0, processor_utils_1.structuredClone)(data.input);
|
|
2058
|
+
this.input = data;
|
|
2059
|
+
this.productTag = data.product;
|
|
2060
|
+
const process_id = (0, processor_utils_1.generateObjectId)();
|
|
2061
|
+
this.component = types_1.LogEventTypes.MESSAGEBROKER;
|
|
2062
|
+
this.baseLogs = Object.assign({ product_tag: this.productTag, workspace_id: this.workspace_id, env: data.env, process_id, data: this.clone }, additional_logs);
|
|
2063
|
+
await this.intializeProduct(additional_logs);
|
|
2064
|
+
this.baseLogs.product_id = this.productId;
|
|
2065
|
+
this.process_id = process_id;
|
|
2066
|
+
const productEnv = this.fetchEnv(data.env, additional_logs);
|
|
2067
|
+
this.processEnv = productEnv;
|
|
2068
|
+
console.log("JAPANESE MIRRORS 2", productEnv);
|
|
2069
|
+
if (!productEnv.active) {
|
|
2070
|
+
throw new Error(`Environment ${data.env} is not active`);
|
|
2071
|
+
}
|
|
2072
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Publish to topic - initiated', data: Object.assign(Object.assign({}, data), { input: (0, processor_utils_1.anonymizeObject)(data.input) }), status: types_1.LogEventStatus.PROCESSING }));
|
|
2073
|
+
const payload = {
|
|
2074
|
+
type: types_1.FeatureEventTypes.PUBLISH,
|
|
2075
|
+
event: data.event,
|
|
2076
|
+
cache: data.cache,
|
|
2077
|
+
input: data.input,
|
|
2078
|
+
env: productEnv,
|
|
2079
|
+
retries: 0,
|
|
2080
|
+
allow_fail: false,
|
|
2081
|
+
};
|
|
2082
|
+
const result = await this.runBrokerPublish(payload);
|
|
2083
|
+
this.end = Date.now();
|
|
2084
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Publishing to topic - success', data: { input: (0, processor_utils_1.anonymizeObject)(this.clone), result }, status: types_1.LogEventStatus.SUCCESS }));
|
|
2085
|
+
this.writeResult(types_1.LogEventStatus.SUCCESS);
|
|
2086
|
+
this.logService.publish();
|
|
2087
|
+
return result;
|
|
2088
|
+
}
|
|
2089
|
+
catch (e) {
|
|
2090
|
+
console.log(e);
|
|
2091
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Publishing to topic - failed', data: { e }, status: types_1.LogEventStatus.FAIL }));
|
|
2092
|
+
this.end = Date.now();
|
|
2093
|
+
this.logService.publish();
|
|
2094
|
+
return { process_id: this.process_id };
|
|
2095
|
+
}
|
|
2096
|
+
}
|
|
2097
|
+
/** Placeholder for scheduled-job processing — intentionally a no-op. TODO(review): implement or remove. */
async processJob(job) { }
|
|
2098
|
+
async sendExpoNotification(payload, device_tokens) {
|
|
2099
|
+
const message = {
|
|
2100
|
+
to: device_tokens,
|
|
2101
|
+
sound: 'default',
|
|
2102
|
+
title: payload.title,
|
|
2103
|
+
body: payload.body,
|
|
2104
|
+
data: (0, processor_utils_1.convertStringToObject)(payload.data),
|
|
2105
|
+
};
|
|
2106
|
+
console.log("MESSAGE!!!", message);
|
|
2107
|
+
try {
|
|
2108
|
+
await (0, expo_client_1.default)().post('', message, (0, processor_utils_1.generateAxiosConfig)());
|
|
2109
|
+
}
|
|
2110
|
+
catch (e) {
|
|
2111
|
+
throw e;
|
|
2112
|
+
}
|
|
2113
|
+
}
|
|
2114
|
+
async sendFirebaseNotification(payload, device_tokens, credentials) {
|
|
2115
|
+
const message = {
|
|
2116
|
+
notification: {
|
|
2117
|
+
title: payload.title,
|
|
2118
|
+
body: payload.body,
|
|
2119
|
+
},
|
|
2120
|
+
tokens: device_tokens,
|
|
2121
|
+
};
|
|
2122
|
+
try {
|
|
2123
|
+
const admin = require('firebase-admin');
|
|
2124
|
+
const serviceAccount = credentials;
|
|
2125
|
+
console.log("MESSAGE", message);
|
|
2126
|
+
admin.initializeApp({
|
|
2127
|
+
credential: admin.credential.cert(serviceAccount),
|
|
2128
|
+
});
|
|
2129
|
+
await admin.messaging().sendMulticast(message);
|
|
2130
|
+
}
|
|
2131
|
+
catch (e) {
|
|
2132
|
+
if (this.environment === types_1.EnvType.PRODUCTION) {
|
|
2133
|
+
throw e;
|
|
2134
|
+
}
|
|
2135
|
+
}
|
|
2136
|
+
}
|
|
2137
|
+
async ProcessExpoNotification(notification, template, payload, additional_logs) {
|
|
2138
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Validate notification payload - initiated', data: { notification, payload: (0, processor_utils_1.anonymizeObject)(payload) }, status: types_1.LogEventStatus.PROCESSING }));
|
|
2139
|
+
(0, processor_utils_1.validateNotification)(template, payload);
|
|
2140
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Validate notification payload - success', data: {}, status: types_1.LogEventStatus.SUCCESS }));
|
|
2141
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Generate notification template - initiated', data: {}, status: types_1.LogEventStatus.PROCESSING }));
|
|
2142
|
+
const { title, body, data } = (0, processor_utils_1.generateNotificationTemplate)(template, payload);
|
|
2143
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Generate notification template - success', data: { title, body, data: (0, processor_utils_1.anonymizeObject)(data) }, status: types_1.LogEventStatus.SUCCESS }));
|
|
2144
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Send Expo notification - initiated', data: {}, status: types_1.LogEventStatus.PROCESSING }));
|
|
2145
|
+
await this.sendExpoNotification({ title, body, data }, payload.device_tokens);
|
|
2146
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Send Expo notification - success', data: { title, body, data: (0, processor_utils_1.anonymizeObject)(data) }, status: types_1.LogEventStatus.SUCCESS }));
|
|
2147
|
+
}
|
|
2148
|
+
async ProcessFirebaseNotification(notification, template, payload, additional_logs) {
|
|
2149
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Validate Firebase notification payload - initiated', data: { notification, payload: (0, processor_utils_1.anonymizeObject)(payload) }, status: types_1.LogEventStatus.PROCESSING }));
|
|
2150
|
+
(0, processor_utils_1.validateNotification)(template, payload);
|
|
2151
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Validate Firebase notification payload - success', data: {}, status: types_1.LogEventStatus.SUCCESS }));
|
|
2152
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Generate Firebase notification template - initiated', data: {}, status: types_1.LogEventStatus.PROCESSING }));
|
|
2153
|
+
const { title, body, data } = (0, processor_utils_1.generateNotificationTemplate)(template, payload);
|
|
2154
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Generate Firebase notification template - success', data: { title, body: (0, processor_utils_1.anonymizeObject)(data), data: (0, processor_utils_1.anonymizeObject)(data) }, status: types_1.LogEventStatus.SUCCESS }));
|
|
2155
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Send Firebase notification - initiated', data: {}, status: types_1.LogEventStatus.PROCESSING }));
|
|
2156
|
+
await this.sendFirebaseNotification({ title, body, data }, payload.device_tokens, notification.credentials);
|
|
2157
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Send Firebase notification - success', data: { title, body: (0, processor_utils_1.anonymizeObject)(data), data: (0, processor_utils_1.anonymizeObject)(data) }, status: types_1.LogEventStatus.SUCCESS }));
|
|
2158
|
+
}
|
|
2159
|
+
async runNotification(notification, additional_logs) {
|
|
2160
|
+
var _a, _b, _c, _d, _e, _f, _g, _h;
|
|
2161
|
+
const { event } = notification;
|
|
2162
|
+
const input = notification.input;
|
|
2163
|
+
try {
|
|
2164
|
+
//await this.intializeProduct(additional_logs);
|
|
2165
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Attempting notification', data: { notification }, status: types_1.LogEventStatus.PROCESSING }));
|
|
2166
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetching notification details', data: { notification }, status: types_1.LogEventStatus.PROCESSING }));
|
|
2167
|
+
const notificationEvent = this.productBuilderService.fetchNotification(event.split(":")[0]);
|
|
2168
|
+
const message = this.productBuilderService.fetchNotificationMessage(event);
|
|
2169
|
+
const { envs } = notificationEvent;
|
|
2170
|
+
const { push_notifications: notifications, emails, callbacks, sms: smses } = envs.find((data) => data.slug === notification.env.slug);
|
|
2171
|
+
const { push_notification: push, email, callback, sms } = message;
|
|
2172
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetching notification', data: notificationEvent, status: types_1.LogEventStatus.SUCCESS }));
|
|
2173
|
+
if (push) {
|
|
2174
|
+
input.push_notification.title = (0, processor_utils_1.replacePlaceholderString)(push.title, input.push_notification.title || {});
|
|
2175
|
+
input.push_notification.body = await this.generatePayload(input.push_notification.body || {}, notification, additional_logs, message.push_notification_data.filter((data) => data.parent_key === 'body') || []);
|
|
2176
|
+
input.push_notification.data = await this.generatePayload(input.push_notification.data || {}, notification, additional_logs, message.push_notification_data.filter((data) => data.parent_key === 'data') || []);
|
|
2177
|
+
}
|
|
2178
|
+
const validationPayload = (await this.inputService.parseJson({
|
|
2179
|
+
data: Object.assign(Object.assign({}, input.push_notification.title), input.push_notification.body),
|
|
2180
|
+
expected: types_1.ExpectedValues.PARSEINPUT,
|
|
2181
|
+
}));
|
|
2182
|
+
//await this.inputService.validateInput(validationPayload, message.push_notification_data, "Push Notifications");
|
|
2183
|
+
if (push && notifications.type === types_1.Notifiers.FIREBASE) {
|
|
2184
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Processing Expo notification - initiated', data: { notification, input: (0, processor_utils_1.anonymizeObject)(input.push_notification) }, status: types_1.LogEventStatus.PROCESSING }));
|
|
2185
|
+
await this.ProcessExpoNotification(notifications, message, input.push_notification, additional_logs);
|
|
2186
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Processing Expo notification - success', data: {}, status: types_1.LogEventStatus.SUCCESS }));
|
|
2187
|
+
}
|
|
2188
|
+
if (push && notifications.type === types_1.Notifiers.EXPO) {
|
|
2189
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Processing Firebase notification - initiated', data: {}, status: types_1.LogEventStatus.PROCESSING }));
|
|
2190
|
+
await this.ProcessFirebaseNotification(notifications, message, input.push_notification, additional_logs);
|
|
2191
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Processing Firebase notification - success', data: { notification, input: (0, processor_utils_1.anonymizeObject)(input.push_notification) }, status: types_1.LogEventStatus.SUCCESS }));
|
|
2192
|
+
}
|
|
2193
|
+
if (email && emails) {
|
|
2194
|
+
input.email.subject = await this.generatePayload(input.email.subject, notification, additional_logs, message.email_data.filter((data) => data.parent_key === 'subject'));
|
|
2195
|
+
input.email.template = await this.generatePayload(input.email.template, notification, additional_logs, message.email_data.filter((data) => data.parent_key === 'template'));
|
|
2196
|
+
input.email.recipients = await Promise.all(input.email.recipients.map(async (email) => await this.generateStringValues(email, notification.event, additional_logs)));
|
|
2197
|
+
const validationPayload = (await this.inputService.parseJson({
|
|
2198
|
+
data: input.email,
|
|
2199
|
+
expected: types_1.ExpectedValues.PARSEINPUT,
|
|
2200
|
+
}));
|
|
2201
|
+
//await this.inputService.validateInput(validationPayload, message.email_data);
|
|
2202
|
+
input.email.recipients.map((email) => this.inputService.validateEmailString({ key: 'to', value: email }));
|
|
2203
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Process email - initiated', data: { email: (0, processor_utils_1.anonymizeObject)(email), input: (0, processor_utils_1.anonymizeObject)(input.email) }, status: types_1.LogEventStatus.PROCESSING }));
|
|
2204
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Attempt email auth fetch - initiated', data: { emails: (0, processor_utils_1.anonymizeObject)(emails) }, status: types_1.LogEventStatus.PROCESSING }));
|
|
2205
|
+
const { sender_email: from } = emails, auth = __rest(emails, ["sender_email"]);
|
|
2206
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Attempt email auth fetch - success', data: { from: (0, processor_utils_1.anonymizeValue)(from), host: (0, processor_utils_1.anonymizeValue)(auth.host), port: (0, processor_utils_1.anonymizeValue)(auth.port) }, status: types_1.LogEventStatus.SUCCESS }));
|
|
2207
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Generate email template - initiated', data: {
|
|
2208
|
+
template: (0, processor_utils_1.anonymizeValue)(email.template),
|
|
2209
|
+
subject: (0, processor_utils_1.anonymizeValue)(email.subject),
|
|
2210
|
+
input: (0, processor_utils_1.anonymizeObject)(input.email),
|
|
2211
|
+
}, status: types_1.LogEventStatus.PROCESSING }));
|
|
2212
|
+
const templateMaker = (0, handlebars_1.compile)(email.template);
|
|
2213
|
+
const template = templateMaker(input.email.template);
|
|
2214
|
+
const subject = (0, processor_utils_1.replacePlaceholderString)(email.subject, input.email.subject || {});
|
|
2215
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Generate email template - success', data: { template: (0, processor_utils_1.anonymizeValue)(template), subject: (0, processor_utils_1.anonymizeValue)(subject) }, status: types_1.LogEventStatus.SUCCESS }));
|
|
2216
|
+
const mailOptions = {
|
|
2217
|
+
from,
|
|
2218
|
+
to: input.email.recipients,
|
|
2219
|
+
subject,
|
|
2220
|
+
template,
|
|
2221
|
+
};
|
|
2222
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Send email - initiated', data: { template: (0, processor_utils_1.anonymizeValue)(template), subject: (0, processor_utils_1.anonymizeValue)(subject), to: input.email.recipients.map((data) => (0, processor_utils_1.anonymizeValue)(data)) }, status: types_1.LogEventStatus.PROCESSING }));
|
|
2223
|
+
try {
|
|
2224
|
+
const transporter = await (0, processor_utils_1.mailerClient)(auth);
|
|
2225
|
+
const response = await transporter.sendMail(mailOptions);
|
|
2226
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Send email - success', data: { response }, status: types_1.LogEventStatus.SUCCESS }));
|
|
2227
|
+
}
|
|
2228
|
+
catch (e) {
|
|
2229
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Send email - failed', data: { e }, status: types_1.LogEventStatus.FAIL }));
|
|
2230
|
+
}
|
|
2231
|
+
}
|
|
2232
|
+
if (callback && callbacks) {
|
|
2233
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Process callback - initiated', data: Object.assign(Object.assign({}, callbacks), { data: (0, processor_utils_1.anonymizeObject)(callback) }), status: types_1.LogEventStatus.PROCESSING }));
|
|
2234
|
+
const payload = {
|
|
2235
|
+
query: Object.assign(Object.assign({}, (_a = input.callback) === null || _a === void 0 ? void 0 : _a.query), (_b = callbacks.auth) === null || _b === void 0 ? void 0 : _b.query),
|
|
2236
|
+
headers: Object.assign(Object.assign({}, (_c = input.callback) === null || _c === void 0 ? void 0 : _c.headers), (_d = callbacks.auth) === null || _d === void 0 ? void 0 : _d.headers),
|
|
2237
|
+
params: Object.assign(Object.assign({}, (_e = input.callback) === null || _e === void 0 ? void 0 : _e.params), (_f = callbacks.auth) === null || _f === void 0 ? void 0 : _f.params),
|
|
2238
|
+
body: Object.assign(Object.assign({}, (_g = input.callback) === null || _g === void 0 ? void 0 : _g.body), (_h = callbacks.auth) === null || _h === void 0 ? void 0 : _h.body),
|
|
2239
|
+
};
|
|
2240
|
+
input.callback.body = await this.generatePayload(payload.body, notification, additional_logs, message.callback_data.filter((data) => data.parent_key === 'body'));
|
|
2241
|
+
input.callback.query = await this.generatePayload(payload.query, notification, additional_logs, message.callback_data.filter((data) => data.parent_key === 'query'));
|
|
2242
|
+
input.callback.params = await this.generatePayload(payload.body, notification, additional_logs, message.callback_data.filter((data) => data.parent_key === 'params'));
|
|
2243
|
+
input.callback.headers = await this.generatePayload(payload.body, notification, additional_logs, message.callback_data.filter((data) => data.parent_key === 'headers'));
|
|
2244
|
+
const validationPayload = (await this.inputService.parseJson({
|
|
2245
|
+
data: input.callback,
|
|
2246
|
+
expected: types_1.ExpectedValues.PARSEINPUT,
|
|
2247
|
+
}));
|
|
2248
|
+
//this.inputService.validateInput(validationPayload, message.callback_data);
|
|
2249
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Generate callback payload - initiated', data: Object.assign(Object.assign({}, callbacks), { data: (0, processor_utils_1.anonymizeObject)(payload) }), status: types_1.LogEventStatus.PROCESSING }));
|
|
2250
|
+
const url = new URL(callbacks.url);
|
|
2251
|
+
try {
|
|
2252
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Send callback - initiated', data: {}, status: types_1.LogEventStatus.PROCESSING }));
|
|
2253
|
+
console.log("CALLBACK!!!!", {
|
|
2254
|
+
url,
|
|
2255
|
+
payload,
|
|
2256
|
+
method: callbacks.method
|
|
2257
|
+
});
|
|
2258
|
+
await this.sendActionRequest(url.origin, url.pathname, payload, callbacks.method, '');
|
|
2259
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { name: 'Send callback - success', data: {}, status: types_1.LogEventStatus.SUCCESS }));
|
|
2260
|
+
}
|
|
2261
|
+
catch (e) {
|
|
2262
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Send callback - failed', data: { e }, status: types_1.LogEventStatus.FAIL }));
|
|
2263
|
+
}
|
|
2264
|
+
}
|
|
2265
|
+
if (sms && smses) {
|
|
2266
|
+
try {
|
|
2267
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Process sms - initiated', data: { data: sms, config: (0, processor_utils_1.anonymizeObject)(smses) }, status: types_1.LogEventStatus.PROCESSING }));
|
|
2268
|
+
input.sms.body = await (0, processor_utils_1.replacePlaceholderString)(sms, input.sms.body);
|
|
2269
|
+
const SmsClient = await (0, sms_repo_1.loadSMSClient)();
|
|
2270
|
+
const smsClient = new SmsClient(smses);
|
|
2271
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { name: 'Send sms - initiated', data: { message: input.sms.body, config: (0, processor_utils_1.anonymizeObject)(smses) }, status: types_1.LogEventStatus.SUCCESS }));
|
|
2272
|
+
console.log("SMS!!!!", input.sms, smses);
|
|
2273
|
+
const res = await smsClient.sendMessage(input.sms.body, input.sms.recipients);
|
|
2274
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { name: 'Send sms - success', data: res, status: types_1.LogEventStatus.SUCCESS }));
|
|
2275
|
+
}
|
|
2276
|
+
catch (e) {
|
|
2277
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Process sms - failed', data: { e }, status: types_1.LogEventStatus.FAIL }));
|
|
2278
|
+
}
|
|
2279
|
+
}
|
|
2280
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { successful_execution: true, message: 'Attempt notification - success', data: notification, status: types_1.LogEventStatus.SUCCESS }));
|
|
2281
|
+
}
|
|
2282
|
+
catch (e) {
|
|
2283
|
+
console.log(e);
|
|
2284
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { failed_execution: true, message: 'Attempt notification - failed', data: { e }, status: types_1.LogEventStatus.FAIL }));
|
|
2285
|
+
this.logService.publish();
|
|
2286
|
+
}
|
|
2287
|
+
}
|
|
2288
|
+
/**
 * Runs a database migration for one product environment.
 *
 * @param {string} product_tag - Product identifier; stored on the instance.
 * @param {string} tag - Composite identifier, "database_tag:migration_tag".
 * @param {string} env - Environment slug the migration targets.
 * @param {string} type - Migration direction/kind; keys into migration.value
 *   and is matched against each env record's `type`.
 * @returns {Promise<{process_id: string}|undefined>} `{ process_id }` when a
 *   Postgres migration run completes. For other engines the function falls
 *   through and resolves undefined; on error it logs, publishes, and also
 *   resolves undefined (errors are swallowed, not rethrown).
 */
async runMigration(product_tag, tag, env, type) {
    try {
        this.productTag = product_tag;
        // tag must be "database_tag:migration_tag".
        const [dbTag, migrationTag] = tag.split(':');
        if (!dbTag || !migrationTag) {
            throw new Error('tag should be in the format database_tag:migration_tag');
        }
        this.start = Date.now();
        const additional_logs = {
            parent_tag: dbTag,
            child_tag: migrationTag,
            type: types_1.LogEventTypes.DB_MIGRATION,
            name: 'Run Migration',
        };
        await this.intializeProduct(additional_logs);
        const db = this.productBuilderService.fetchDatabase(dbTag);
        if (!db) {
            throw new Error('Database not found');
        }
        // Migrations are schema-based; MongoDB is explicitly unsupported.
        if (db.type === types_1.DatabaseTypes.MONGODB) {
            throw new Error(`${db.type} does not support migrations`);
        }
        const migration = this.productBuilderService.fetchDatabaseMigration(tag);
        if (!migration) {
            throw new Error('Database migration not found');
        }
        const dbEnv = db.envs.find((el) => el.slug === env);
        if (!dbEnv) {
            throw new Error(`Environment ${env} not found`);
        }
        const productEnv = this.fetchEnv(env, additional_logs);
        if (!productEnv.active) {
            throw new Error(`Environment ${env} is not active`);
        }
        const product = this.productBuilderService.fetchProduct();
        // All migrations for this database; getMigrationsToRun below selects the
        // ordered subset up to/including migrationTag for this env and type.
        const migrations = this.productBuilderService.fetchDatabaseMigrations(dbTag);
        //this.processEnv = productEnv;
        /* const check = migration.envs.find((migrationEnv) => migrationEnv.slug === env);
        if (!check) {
            throw new Error(`Migration does not exist for environment ${env}`);
        }*/
        const process_id = (0, processor_utils_1.generateObjectId)();
        this.baseLogs = Object.assign({ product_tag: this.productTag, product_id: this.productId, workspace_id: this.workspace_id, env,
            process_id, data: { tag, env } }, additional_logs);
        const migrationsToRun = (0, processor_utils_1.getMigrationsToRun)((0, processor_utils_1.structuredClone)(migrations), type, migrationTag, env);
        if (db.type === types_1.DatabaseTypes.POSTGRES) {
            const PostgresDBHandler = await (0, postgres_repo_1.loadPostgresHandler)();
            // connection_url is stored encrypted with the product private key.
            const pgHandler = new PostgresDBHandler((0, processor_utils_1.decrypt)(dbEnv.connection_url, product.private_key));
            for (const migrationToRun of migrationsToRun) {
                // NOTE(review): this searches `migration.envs` (the single migration
                // fetched by `tag`) rather than `migrationToRun.envs`, so every loop
                // iteration checks the same env records — confirm this is intentional.
                const envS = migration.envs.find((envT) => envT.slug === env && type === envT.type);
                // Skip runs already marked processed for this env/type.
                if (envS && envS.status === types_1.MigrationStatus.PROCESSED) {
                    continue;
                }
                this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: `Starting migration ${type} name: ${migrationToRun.name}`, data: { migration: migrationToRun, type }, status: types_1.LogEventStatus.PROCESSING }));
                const { type: migType, result, status, processed_at, } = await pgHandler.runMigration(migrationToRun.value[type], type, envS);
                if (!envS) {
                    // First run for this env/type: record a new env entry.
                    migrationToRun.envs.push({ slug: env, results: result, status, type, processed_at });
                }
                else {
                    // Re-run: refresh the matching env record in place. `.map` is used
                    // purely for its side effects here; the returned array is discarded.
                    migrationToRun.envs.map((envT) => {
                        if (envT.slug === env && type === envT.type) {
                            envT.results = [...result];
                            envT.processed_at = processed_at;
                            return envT;
                        }
                        return envT;
                    });
                }
                // Persist the updated env bookkeeping for this migration.
                this.productBuilderService.updateDatabaseMigration(Object.assign(Object.assign({}, migrationToRun), { tag }));
                this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { successful_execution: true, message: `Starting migration ${type} name: ${migrationToRun.name}`, data: { migration: migrationToRun, type }, status: types_1.LogEventStatus.SUCCESS }));
            }
            return { process_id };
        }
        // NOTE(review): non-Postgres engines that pass the checks above fall
        // through and resolve undefined without running anything — confirm.
    }
    catch (e) {
        console.log(e);
        // additional_logs is scoped to the try block, so only baseLogs is merged here.
        this.logService.add(Object.assign(Object.assign({}, this.baseLogs), { failed_execution: true, message: 'Attempt migration - failed', data: e, status: types_1.LogEventStatus.FAIL }));
        this.logService.publish();
    }
}
|
|
2368
|
+
/*async runFunction(data: IFeatureEvent, additional_logs: Partial<ILogData>): Promise<any> {
|
|
2369
|
+
const { product_id, env } = data;
|
|
2370
|
+
const input = data.input as IFunctionRequest;
|
|
2371
|
+
|
|
2372
|
+
this.productId = product_id;
|
|
2373
|
+
|
|
2374
|
+
this.logService = new LogsService({
|
|
2375
|
+
product_id,
|
|
2376
|
+
workspace_id: this.workspace_id,
|
|
2377
|
+
public_key: this.public_key,
|
|
2378
|
+
user_id: this.user_id,
|
|
2379
|
+
token: this.token,
|
|
2380
|
+
env_type: this.environment,
|
|
2381
|
+
});
|
|
2382
|
+
|
|
2383
|
+
const process_id = generateObjectId();
|
|
2384
|
+
this.process_id = process_id;
|
|
2385
|
+
|
|
2386
|
+
|
|
2387
|
+
try {
|
|
2388
|
+
await this.intializeProduct(additional_logs);
|
|
2389
|
+
this.logService.add({
|
|
2390
|
+
...this.baseLogs,
|
|
2391
|
+
...additional_logs,
|
|
2392
|
+
message: 'Attempt function - initiated',
|
|
2393
|
+
data,
|
|
2394
|
+
status: LogEventStatus.PROCESSING,
|
|
2395
|
+
});
|
|
2396
|
+
|
|
2397
|
+
this.logService.add({
|
|
2398
|
+
...this.baseLogs,
|
|
2399
|
+
...additional_logs,
|
|
2400
|
+
message: 'Fetch function - initiated',
|
|
2401
|
+
data: data,
|
|
2402
|
+
status: LogEventStatus.PROCESSING,
|
|
2403
|
+
});
|
|
2404
|
+
|
|
2405
|
+
const cloudFunction = this.productBuilderService.fetchFunction(data.event);
|
|
2406
|
+
|
|
2407
|
+
this.logService.add({
|
|
2408
|
+
...this.baseLogs,
|
|
2409
|
+
...additional_logs,
|
|
2410
|
+
message: 'Fetch function - success',
|
|
2411
|
+
data: data,
|
|
2412
|
+
status: LogEventStatus.SUCCESS,
|
|
2413
|
+
});
|
|
2414
|
+
|
|
2415
|
+
this.logService.add({
|
|
2416
|
+
...this.baseLogs,
|
|
2417
|
+
...additional_logs,
|
|
2418
|
+
message: 'Validate function payload - initiated',
|
|
2419
|
+
data: { data, payload: input.payload },
|
|
2420
|
+
status: LogEventStatus.PROCESSING,
|
|
2421
|
+
});
|
|
2422
|
+
|
|
2423
|
+
validateFunctionInputKeys(cloudFunction.inputs, input.payload);
|
|
2424
|
+
|
|
2425
|
+
this.logService.add({
|
|
2426
|
+
...this.baseLogs,
|
|
2427
|
+
...additional_logs,
|
|
2428
|
+
message: 'Validate function payload - success',
|
|
2429
|
+
data: { data, payload: input.payload },
|
|
2430
|
+
status: LogEventStatus.SUCCESS,
|
|
2431
|
+
});
|
|
2432
|
+
|
|
2433
|
+
this.logService.add({
|
|
2434
|
+
...this.baseLogs,
|
|
2435
|
+
...additional_logs,
|
|
2436
|
+
message: 'Run function - initiated',
|
|
2437
|
+
data: { data, payload: input.payload },
|
|
2438
|
+
status: LogEventStatus.PROCESSING,
|
|
2439
|
+
});
|
|
2440
|
+
|
|
2441
|
+
const response = await makeFunctionsRequest(cloudFunction, input.payload);
|
|
2442
|
+
|
|
2443
|
+
this.logService.add({
|
|
2444
|
+
...this.baseLogs,
|
|
2445
|
+
...additional_logs,
|
|
2446
|
+
message: 'Run function - success',
|
|
2447
|
+
data: { data, payload: input.payload },
|
|
2448
|
+
status: LogEventStatus.SUCCESS,
|
|
2449
|
+
});
|
|
2450
|
+
} catch (e) {
|
|
2451
|
+
this.logService.add({
|
|
2452
|
+
...this.baseLogs,
|
|
2453
|
+
...additional_logs,
|
|
2454
|
+
message: 'Run function - failed',
|
|
2455
|
+
data: e,
|
|
2456
|
+
status: LogEventStatus.FAIL,
|
|
2457
|
+
});
|
|
2458
|
+
this.logService.publish();
|
|
2459
|
+
}
|
|
2460
|
+
}*/
|
|
2461
|
+
async runStorage(data, additional_logs = {}) {
|
|
2462
|
+
const { product_id, env, event, cache: cache_tag } = data;
|
|
2463
|
+
const input = data.input;
|
|
2464
|
+
try {
|
|
2465
|
+
await this.intializeProduct(additional_logs);
|
|
2466
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Attempt storage - initiated', data: this.clone, status: types_1.LogEventStatus.PROCESSING }));
|
|
2467
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch storage details - initiated', data: this.clone, status: types_1.LogEventStatus.PROCESSING }));
|
|
2468
|
+
const storage = await this.productBuilderService.fetchStorage(event);
|
|
2469
|
+
const storageEnv = storage.envs.find((el) => el.slug === env.slug);
|
|
2470
|
+
if (!storageEnv) {
|
|
2471
|
+
throw new Error(`Storage env for ${env.slug} not found`);
|
|
2472
|
+
}
|
|
2473
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch storage details - success', data: { storage }, status: types_1.LogEventStatus.SUCCESS }));
|
|
2474
|
+
const product = this.productBuilderService.fetchProduct();
|
|
2475
|
+
let result;
|
|
2476
|
+
if (cache_tag && this.redisClient) {
|
|
2477
|
+
const productCache = this.productBuilderService.fetchCache(cache_tag);
|
|
2478
|
+
if (!productCache) {
|
|
2479
|
+
throw new Error('Invalid cache tag ');
|
|
2480
|
+
}
|
|
2481
|
+
const inputString = JSON.stringify(input);
|
|
2482
|
+
const check = await this.fetchFromCache({
|
|
2483
|
+
cache_tag,
|
|
2484
|
+
input: inputString,
|
|
2485
|
+
privateKey: product.private_key,
|
|
2486
|
+
expiry: productCache.expiry,
|
|
2487
|
+
});
|
|
2488
|
+
if (check) {
|
|
2489
|
+
result = JSON.parse(check);
|
|
2490
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Store file - return from cache', data: { result }, status: types_1.LogEventStatus.PROCESSING }));
|
|
2491
|
+
return result;
|
|
2492
|
+
}
|
|
2493
|
+
}
|
|
2494
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Store file - initiated', data: {}, status: types_1.LogEventStatus.PROCESSING }));
|
|
2495
|
+
const file = await this.generateStringValues(input.buffer, '', additional_logs, []);
|
|
2496
|
+
input.buffer = file.buffer;
|
|
2497
|
+
input.fileName = file.fileName;
|
|
2498
|
+
input.mimeType = file.mimeType;
|
|
2499
|
+
result = await this.processStorageRequest(data, input, storageEnv, additional_logs);
|
|
2500
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { successful_execution: true, message: 'Store file - success', data: { result }, status: types_1.LogEventStatus.PROCESSING }));
|
|
2501
|
+
if (cache_tag && this.redisClient) {
|
|
2502
|
+
const productCache = this.productBuilderService.fetchCache(cache_tag);
|
|
2503
|
+
if (!productCache) {
|
|
2504
|
+
throw new Error('Invalid cache tag ');
|
|
2505
|
+
}
|
|
2506
|
+
const inputString = JSON.stringify(input);
|
|
2507
|
+
await this.addToCache({
|
|
2508
|
+
input: inputString,
|
|
2509
|
+
privateKey: product.private_key,
|
|
2510
|
+
data: JSON.stringify(result),
|
|
2511
|
+
cache_tag,
|
|
2512
|
+
timestamp: Date.now(),
|
|
2513
|
+
component_tag: event,
|
|
2514
|
+
component_type: types_1.ProductComponents.STORAGE,
|
|
2515
|
+
product_tag: this.productTag,
|
|
2516
|
+
});
|
|
2517
|
+
}
|
|
2518
|
+
return result;
|
|
2519
|
+
}
|
|
2520
|
+
catch (e) {
|
|
2521
|
+
console.log(e);
|
|
2522
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { failed_execution: true, message: 'Attempt storage - failed', data: { e }, status: types_1.LogEventStatus.FAIL }));
|
|
2523
|
+
throw e;
|
|
2524
|
+
}
|
|
2525
|
+
}
|
|
2526
|
+
async runDBAction(db_action, additional_logs = {}) {
|
|
2527
|
+
const { product_id, env, event, cache: cache_tag } = db_action;
|
|
2528
|
+
const input = db_action.input;
|
|
2529
|
+
try {
|
|
2530
|
+
//await this.intializeProduct(additional_logs);
|
|
2531
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Attempt database action - initiated', data: db_action, status: types_1.LogEventStatus.PROCESSING }));
|
|
2532
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch database action - initiated', data: {}, status: types_1.LogEventStatus.PROCESSING }));
|
|
2533
|
+
const [database_tag, action_tag] = event.split(':');
|
|
2534
|
+
const product = this.productBuilderService.fetchProduct();
|
|
2535
|
+
const database = await this.productBuilderService.fetchDatabase(database_tag);
|
|
2536
|
+
const databaseAction = await this.productBuilderService.fetchDatabaseAction(event);
|
|
2537
|
+
const databaseEnv = database.envs.find((el) => el.slug === env.slug);
|
|
2538
|
+
if (!databaseEnv) {
|
|
2539
|
+
throw new Error(`Database env for ${env.slug} not found`);
|
|
2540
|
+
}
|
|
2541
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch database action - success', data: databaseAction, status: types_1.LogEventStatus.SUCCESS }));
|
|
2542
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Validate database action payload - initiated', data: { payload: (0, processor_utils_1.anonymizeObject)(input.data) }, status: types_1.LogEventStatus.PROCESSING }));
|
|
2543
|
+
input.data = await this.generatePayload(input.data, db_action, additional_logs, databaseAction.data);
|
|
2544
|
+
if (Array.isArray(input.data)) {
|
|
2545
|
+
await Promise.all(input.data.map(async (d) => {
|
|
2546
|
+
const dataValidationPayload = (await this.inputService.parseJson({
|
|
2547
|
+
data: d,
|
|
2548
|
+
expected: types_1.ExpectedValues.PARSEINPUT,
|
|
2549
|
+
}));
|
|
2550
|
+
this.inputService.validateInput(dataValidationPayload, databaseAction.data);
|
|
2551
|
+
}));
|
|
2552
|
+
}
|
|
2553
|
+
else {
|
|
2554
|
+
const dataValidationPayload = (await this.inputService.parseJson({
|
|
2555
|
+
data: input.data,
|
|
2556
|
+
expected: types_1.ExpectedValues.PARSEINPUT,
|
|
2557
|
+
}));
|
|
2558
|
+
const filterValidationPayload = (await this.inputService.parseJson({
|
|
2559
|
+
data: input.filter,
|
|
2560
|
+
expected: types_1.ExpectedValues.PARSEINPUT,
|
|
2561
|
+
}));
|
|
2562
|
+
this.inputService.validateInput(dataValidationPayload, databaseAction.data);
|
|
2563
|
+
this.inputService.validateInput(filterValidationPayload, databaseAction.filterData);
|
|
2564
|
+
}
|
|
2565
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Validate database action payload - success', data: { db_action, payload: (0, processor_utils_1.anonymizeObject)(input.data) }, status: types_1.LogEventStatus.SUCCESS }));
|
|
2566
|
+
if (database.type === types_1.DatabaseTypes.MONGODB) {
|
|
2567
|
+
if (databaseAction.type === types_1.DatabaseActionTypes.UPDATE) {
|
|
2568
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Validate database update action filter - initiated', data: { filter: (0, processor_utils_1.anonymizeObject)(input.filter) }, status: types_1.LogEventStatus.PROCESSING }));
|
|
2569
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Validate database update action filter - success', data: {}, status: types_1.LogEventStatus.SUCCESS }));
|
|
2570
|
+
}
|
|
2571
|
+
}
|
|
2572
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Run database action query - initiated', data: { payload: (0, processor_utils_1.anonymizeObject)(input) }, status: types_1.LogEventStatus.PROCESSING }));
|
|
2573
|
+
let result;
|
|
2574
|
+
if (cache_tag && this.redisClient) {
|
|
2575
|
+
const productCache = this.productBuilderService.fetchCache(cache_tag);
|
|
2576
|
+
if (!productCache) {
|
|
2577
|
+
throw new Error('Invalid cache tag ');
|
|
2578
|
+
}
|
|
2579
|
+
const inputString = JSON.stringify(input);
|
|
2580
|
+
const check = await this.fetchFromCache({
|
|
2581
|
+
cache_tag,
|
|
2582
|
+
input: inputString,
|
|
2583
|
+
privateKey: product.private_key,
|
|
2584
|
+
expiry: productCache.expiry,
|
|
2585
|
+
});
|
|
2586
|
+
if (check) {
|
|
2587
|
+
result = JSON.parse(check);
|
|
2588
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Run database action query - return from cache', data: { result: (0, processor_utils_1.anonymizeObject)(result) }, status: types_1.LogEventStatus.SUCCESS }));
|
|
2589
|
+
return result;
|
|
2590
|
+
}
|
|
2591
|
+
}
|
|
2592
|
+
const template = typeof databaseAction.template === 'string' ? databaseAction.template : JSON.stringify(databaseAction.template);
|
|
2593
|
+
if (database.type === types_1.DatabaseTypes.MONGODB) {
|
|
2594
|
+
const MongoDBHandler = await (0, mongo_repo_1.loadMongoDbHandler)();
|
|
2595
|
+
if (!MongoDBHandler) {
|
|
2596
|
+
throw new Error(`Running in browser, mongo handler not loaded.`);
|
|
2597
|
+
}
|
|
2598
|
+
const mongoHandler = new MongoDBHandler((0, processor_utils_1.decrypt)(databaseEnv.connection_url, this.productBuilderService.fetchProduct().private_key));
|
|
2599
|
+
if (databaseAction.type === types_1.DatabaseActionTypes.UPDATE) {
|
|
2600
|
+
const filterTemplate = typeof databaseAction.filterTemplate === 'string'
|
|
2601
|
+
? databaseAction.filterTemplate
|
|
2602
|
+
: JSON.stringify(databaseAction.filterTemplate);
|
|
2603
|
+
result = await mongoHandler[databaseAction.type](databaseAction.data, template, input.data, databaseAction.tableName, databaseAction.filterData, filterTemplate, input.filter);
|
|
2604
|
+
}
|
|
2605
|
+
else {
|
|
2606
|
+
result = await mongoHandler[databaseAction.type](databaseAction.data, template, input.data, databaseAction.tableName);
|
|
2607
|
+
}
|
|
2608
|
+
}
|
|
2609
|
+
else if (database.type === types_1.DatabaseTypes.POSTGRES) {
|
|
2610
|
+
const PostgresDBHandler = await (0, postgres_repo_1.loadPostgresHandler)();
|
|
2611
|
+
if (!PostgresDBHandler) {
|
|
2612
|
+
throw new Error(`Running in browser, postgres handler not loaded.`);
|
|
2613
|
+
}
|
|
2614
|
+
const pgHandler = new PostgresDBHandler((0, processor_utils_1.decrypt)(databaseEnv.connection_url, this.productBuilderService.fetchProduct().private_key));
|
|
2615
|
+
if (databaseAction.type !== types_1.DatabaseActionTypes.AGGREGATE) {
|
|
2616
|
+
result = await pgHandler[databaseAction.type](databaseAction.data, template, input.data);
|
|
2617
|
+
}
|
|
2618
|
+
}
|
|
2619
|
+
//this.logService.publish();
|
|
2620
|
+
// const result = this.processDBRequest(db_action, input, database_tag, databaseEnv, action_tag, additional_logs);
|
|
2621
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { successful_execution: true, message: 'Run database action query - success', data: { result: (0, processor_utils_1.anonymizeObject)(result) }, status: types_1.LogEventStatus.SUCCESS }));
|
|
2622
|
+
await this.addToSuccessOutput(db_action, result, additional_logs);
|
|
2623
|
+
if (cache_tag && this.redisClient) {
|
|
2624
|
+
const productCache = this.productBuilderService.fetchCache(cache_tag);
|
|
2625
|
+
if (!productCache) {
|
|
2626
|
+
throw new Error('Invalid cache tag ');
|
|
2627
|
+
}
|
|
2628
|
+
const inputString = JSON.stringify(input);
|
|
2629
|
+
await this.addToCache({
|
|
2630
|
+
input: inputString,
|
|
2631
|
+
privateKey: product.private_key,
|
|
2632
|
+
data: JSON.stringify(result),
|
|
2633
|
+
cache_tag,
|
|
2634
|
+
timestamp: Date.now(),
|
|
2635
|
+
product_tag: this.productTag,
|
|
2636
|
+
component_tag: database_tag,
|
|
2637
|
+
component_type: types_1.ProductComponents.DATABASE_ACTION,
|
|
2638
|
+
});
|
|
2639
|
+
}
|
|
2640
|
+
return result;
|
|
2641
|
+
}
|
|
2642
|
+
catch (e) {
|
|
2643
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { failed_execution: true, message: 'Attempt database action - failed', data: { e }, status: types_1.LogEventStatus.FAIL }));
|
|
2644
|
+
}
|
|
2645
|
+
}
|
|
2646
|
+
async runBrokerSubscribe(data, additional_logs = {}) {
|
|
2647
|
+
const { env, event } = data;
|
|
2648
|
+
const input = data.input;
|
|
2649
|
+
try {
|
|
2650
|
+
await this.intializeProduct(additional_logs);
|
|
2651
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Attempt broker topic subscription - initiated', data: { event }, status: types_1.LogEventStatus.PROCESSING }));
|
|
2652
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch broker details - initiated', data: { event }, status: types_1.LogEventStatus.PROCESSING }));
|
|
2653
|
+
const [brokerTag, topicTag] = event.split(':');
|
|
2654
|
+
const broker = this.productBuilderService.fetchMessageBroker(brokerTag);
|
|
2655
|
+
if (!broker) {
|
|
2656
|
+
throw new Error(`Message Broker ${brokerTag} not found`);
|
|
2657
|
+
}
|
|
2658
|
+
const brokerEnv = broker.envs.find((el) => el.slug === env.slug);
|
|
2659
|
+
if (!brokerEnv) {
|
|
2660
|
+
throw new Error(`Broker env for ${env.slug} not found`);
|
|
2661
|
+
}
|
|
2662
|
+
const topic = this.productBuilderService.fetchMessageBrokerTopic(event);
|
|
2663
|
+
if (!topic) {
|
|
2664
|
+
throw new Error(`Topic ${topicTag} not found in broker ${brokerTag}`);
|
|
2665
|
+
}
|
|
2666
|
+
let url = topic.name;
|
|
2667
|
+
if (brokerEnv.type === types_1.MessageBrokerTypes.AWS_SQS) {
|
|
2668
|
+
const queueUrl = topic.queueUrls.find((el) => el.env_slug == env.slug);
|
|
2669
|
+
if (!queueUrl) {
|
|
2670
|
+
throw new Error(`AWS SQS topic requires a queue url defined in env ${env.slug}`);
|
|
2671
|
+
}
|
|
2672
|
+
if (!(0, processor_utils_1.isValidSqsUrl)(queueUrl.url)) {
|
|
2673
|
+
throw new Error('AWS SQS queueUrl is expected to be a url in the format "https://sqs.<region>.amazonaws.com/<account-id>/<queue-name>"');
|
|
2674
|
+
}
|
|
2675
|
+
url = queueUrl.url;
|
|
2676
|
+
}
|
|
2677
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch broker details - success', data: { event, broker }, status: types_1.LogEventStatus.SUCCESS }));
|
|
2678
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Subscribe to broker topic - initiated', data: {}, status: types_1.LogEventStatus.PROCESSING }));
|
|
2679
|
+
const createBrokerService = await loadBrokerService();
|
|
2680
|
+
if (createBrokerService) {
|
|
2681
|
+
const brokerService = createBrokerService(brokerEnv.type, brokerEnv.config);
|
|
2682
|
+
await brokerService.subscribe(url, input.callback);
|
|
2683
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { successful_execution: true, message: 'Subscribe to broker topic - success', data: { event }, status: types_1.LogEventStatus.SUCCESS }));
|
|
2684
|
+
return;
|
|
2685
|
+
}
|
|
2686
|
+
else {
|
|
2687
|
+
throw new Error(`Running in browser, broker service not loaded.`);
|
|
2688
|
+
}
|
|
2689
|
+
//return this.processStorageRequest(data, input, storageEnv, additional_logs);
|
|
2690
|
+
}
|
|
2691
|
+
catch (e) {
|
|
2692
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { failed_execution: true, message: 'Attempt broker subcription - failed', data: { e }, status: types_1.LogEventStatus.FAIL }));
|
|
2693
|
+
throw e;
|
|
2694
|
+
}
|
|
2695
|
+
}
|
|
2696
|
+
async runBrokerPublish(data, additional_logs = {}) {
|
|
2697
|
+
const { env, event } = data;
|
|
2698
|
+
const input = data.input;
|
|
2699
|
+
try {
|
|
2700
|
+
await this.intializeProduct(additional_logs);
|
|
2701
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Attempt publish to broker topic - initiated', data: { event }, status: types_1.LogEventStatus.PROCESSING }));
|
|
2702
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch broker details - initiated', data: { event }, status: types_1.LogEventStatus.PROCESSING }));
|
|
2703
|
+
const [brokerTag, topicTag] = event.split(':');
|
|
2704
|
+
const broker = this.productBuilderService.fetchMessageBroker(brokerTag);
|
|
2705
|
+
if (!broker) {
|
|
2706
|
+
throw new Error(`Message Broker ${brokerTag} not found`);
|
|
2707
|
+
}
|
|
2708
|
+
const brokerEnv = broker.envs.find((el) => el.slug === env.slug);
|
|
2709
|
+
if (!brokerEnv) {
|
|
2710
|
+
throw new Error(`Broker env for ${env.slug} not found`);
|
|
2711
|
+
}
|
|
2712
|
+
const topic = this.productBuilderService.fetchMessageBrokerTopic(event);
|
|
2713
|
+
if (!topic) {
|
|
2714
|
+
throw new Error(`Topic ${topicTag} not found in broker ${brokerTag}`);
|
|
2715
|
+
}
|
|
2716
|
+
let url = topic.name;
|
|
2717
|
+
if (brokerEnv.type === types_1.MessageBrokerTypes.AWS_SQS) {
|
|
2718
|
+
const queueUrl = topic.queueUrls.find((el) => el.env_slug == env.slug);
|
|
2719
|
+
if (!queueUrl) {
|
|
2720
|
+
throw new Error(`AWS SQS topic requires a queue url defined in env ${env.slug}`);
|
|
2721
|
+
}
|
|
2722
|
+
if (!(0, processor_utils_1.isValidSqsUrl)(queueUrl.url)) {
|
|
2723
|
+
throw new Error('AWS SQS queueUrl is expected to be a url in the format "https://sqs.<region>.amazonaws.com/<account-id>/<queue-name>"');
|
|
2724
|
+
}
|
|
2725
|
+
url = queueUrl.url;
|
|
2726
|
+
}
|
|
2727
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch broker details - success', data: { event, broker }, status: types_1.LogEventStatus.SUCCESS }));
|
|
2728
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Publish to broker topic initiated', data: {}, status: types_1.LogEventStatus.PROCESSING }));
|
|
2729
|
+
const createBrokerService = await loadBrokerService();
|
|
2730
|
+
if (createBrokerService) {
|
|
2731
|
+
const brokerService = createBrokerService(brokerEnv.type, brokerEnv.config);
|
|
2732
|
+
await brokerService.publish(url, input.message);
|
|
2733
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Publish to broker topic - success', successful_execution: true, data: { event }, status: types_1.LogEventStatus.SUCCESS }));
|
|
2734
|
+
return;
|
|
2735
|
+
}
|
|
2736
|
+
else {
|
|
2737
|
+
throw new Error(`Running in browser, broker service not loaded.`);
|
|
2738
|
+
}
|
|
2739
|
+
//return this.processStorageRequest(data, input, storageEnv, additional_logs);
|
|
2740
|
+
}
|
|
2741
|
+
catch (e) {
|
|
2742
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { failed_execution: true, message: 'Attempt publish to broker topic - failed', data: { e }, status: types_1.LogEventStatus.FAIL }));
|
|
2743
|
+
console.log("JERMOOOOO!!!");
|
|
2744
|
+
throw e;
|
|
2745
|
+
}
|
|
2746
|
+
}
|
|
2747
|
+
async processStorageRequest(data, input, storageEnv, additional_logs) {
|
|
2748
|
+
try {
|
|
2749
|
+
/*const result = await this.processorApiService.processProduct(
|
|
2750
|
+
this.productId,
|
|
2751
|
+
{
|
|
2752
|
+
slug: storageEnv.slug,
|
|
2753
|
+
tag: data.event,
|
|
2754
|
+
component: ProductComponents.STORAGE,
|
|
2755
|
+
destinationPath: input.fileName,
|
|
2756
|
+
input,
|
|
2757
|
+
},
|
|
2758
|
+
this.getUserAccess(),
|
|
2759
|
+
);*/
|
|
2760
|
+
const cloudConfig = storageEnv.config;
|
|
2761
|
+
const config = {
|
|
2762
|
+
provider: storageEnv.type,
|
|
2763
|
+
};
|
|
2764
|
+
if (storageEnv.type === types_1.StorageProviders.AWS) {
|
|
2765
|
+
Object.assign(config, { awsConfig: cloudConfig });
|
|
2766
|
+
}
|
|
2767
|
+
if (storageEnv.type === types_1.StorageProviders.GCP) {
|
|
2768
|
+
Object.assign(config, { gcpConfig: cloudConfig });
|
|
2769
|
+
}
|
|
2770
|
+
if (storageEnv.type === types_1.StorageProviders.AWS) {
|
|
2771
|
+
Object.assign(config, { awsConfig: cloudConfig });
|
|
2772
|
+
}
|
|
2773
|
+
const result = { url: await (0, storage_util_1.uploadBlobToCloud)({ data: input.buffer, destinationPath: input.fileName, config }) };
|
|
2774
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { successful_execution: true, message: 'Run storage request - success', data: { payload: this.clone, result }, status: types_1.LogEventStatus.SUCCESS }));
|
|
2775
|
+
await this.addToSuccessOutput(data, result, additional_logs);
|
|
2776
|
+
return result;
|
|
2777
|
+
}
|
|
2778
|
+
catch (e) {
|
|
2779
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { failed_execution: true, message: 'Run storage request - failed', data: { e }, status: types_1.LogEventStatus.FAIL }));
|
|
2780
|
+
const value = this.addToFailureOutput(e, data, { storageEnv }, additional_logs);
|
|
2781
|
+
throw e;
|
|
2782
|
+
}
|
|
2783
|
+
}
|
|
2784
|
+
async processDBRequest(db_action, input, database_tag, databaseEnv, action_tag, additional_logs, returnValue = false) {
|
|
2785
|
+
try {
|
|
2786
|
+
const result = await this.processorApiService.processProduct(this.productId, {
|
|
2787
|
+
input: input,
|
|
2788
|
+
database_tag: database_tag,
|
|
2789
|
+
database_env_slug: databaseEnv.slug,
|
|
2790
|
+
tag: action_tag,
|
|
2791
|
+
component: types_1.ProductComponents.DATABASE_ACTION,
|
|
2792
|
+
}, {
|
|
2793
|
+
user_id: this.user_id,
|
|
2794
|
+
token: this.token,
|
|
2795
|
+
workspace_id: this.workspace_id,
|
|
2796
|
+
public_key: this.public_key,
|
|
2797
|
+
});
|
|
2798
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { successful_execution: true, message: 'Run database query - success', data: { payload: (0, processor_utils_1.anonymizeObject)(input) }, status: types_1.LogEventStatus.SUCCESS }));
|
|
2799
|
+
return result;
|
|
2800
|
+
}
|
|
2801
|
+
catch (e) {
|
|
2802
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { failed_execution: true, message: 'Running database query - failed', data: { payload: (0, processor_utils_1.anonymizeObject)(input), e }, status: types_1.LogEventStatus.FAIL }));
|
|
2803
|
+
const value = this.addToFailureOutput(e, db_action, {
|
|
2804
|
+
db_action,
|
|
2805
|
+
input,
|
|
2806
|
+
database_tag,
|
|
2807
|
+
databaseEnv,
|
|
2808
|
+
action_tag,
|
|
2809
|
+
}, additional_logs);
|
|
2810
|
+
return value;
|
|
2811
|
+
}
|
|
2812
|
+
}
|
|
2813
|
+
async writeResult(status) {
|
|
2814
|
+
this.processorApiService.saveResult({
|
|
2815
|
+
status,
|
|
2816
|
+
component: this.component,
|
|
2817
|
+
start: this.start,
|
|
2818
|
+
end: this.end,
|
|
2819
|
+
result: this.processingOutput,
|
|
2820
|
+
process_id: this.process_id,
|
|
2821
|
+
feature_id: this.feature ? this.feature._id : null,
|
|
2822
|
+
product_id: this.productId,
|
|
2823
|
+
env: this.processEnv.slug,
|
|
2824
|
+
input: this.input,
|
|
2825
|
+
}, this.getUserAccess());
|
|
2826
|
+
}
|
|
2827
|
+
async validateActionDataMappingInput(input, type) {
|
|
2828
|
+
try {
|
|
2829
|
+
if (type === types_1.FeatureEventTypes.ACTION || type === types_1.WebhookEventTypes.WEBHOOK_REGISTER) {
|
|
2830
|
+
await create_productFeature_validator_1.ActionInputSchema.validateAsync(input);
|
|
2831
|
+
}
|
|
2832
|
+
if (type === types_1.FeatureEventTypes.DB_ACTION) {
|
|
2833
|
+
await create_productFeature_validator_1.DBActionInputSchema.validateAsync(input);
|
|
2834
|
+
}
|
|
2835
|
+
if (type === types_1.FeatureEventTypes.NOTIFICATION) {
|
|
2836
|
+
await create_productFeature_validator_1.NotificationInputschema.validateAsync(input);
|
|
2837
|
+
}
|
|
2838
|
+
(0, processor_utils_1.validateStringsInObject)(input);
|
|
2839
|
+
}
|
|
2840
|
+
catch (e) {
|
|
2841
|
+
throw e;
|
|
2842
|
+
}
|
|
2843
|
+
}
|
|
2844
|
+
async processAction(action) {
|
|
2845
|
+
//TODO: schema validation
|
|
2846
|
+
const { env, input, retries, event, app, product: product_tag } = action;
|
|
2847
|
+
const additional_logs = {
|
|
2848
|
+
parent_tag: (0, string_utils_1.extractOriginAndTag)(app),
|
|
2849
|
+
child_tag: event,
|
|
2850
|
+
type: types_1.LogEventTypes.ACTION,
|
|
2851
|
+
name: 'Process Action',
|
|
2852
|
+
};
|
|
2853
|
+
this.component = types_1.LogEventTypes.ACTION;
|
|
2854
|
+
try {
|
|
2855
|
+
// validate input do not allow $Sequence or $Length and $Size of $Input
|
|
2856
|
+
this.validateActionDataMappingInput(input, types_1.FeatureEventTypes.ACTION);
|
|
2857
|
+
this.input = action;
|
|
2858
|
+
this.start = Date.now();
|
|
2859
|
+
this.productTag = product_tag;
|
|
2860
|
+
const process_id = (0, processor_utils_1.generateObjectId)();
|
|
2861
|
+
this.baseLogs = {
|
|
2862
|
+
product_tag: this.productTag,
|
|
2863
|
+
workspace_id: this.workspace_id,
|
|
2864
|
+
env,
|
|
2865
|
+
type: types_1.LogEventTypes.ACTION,
|
|
2866
|
+
process_id,
|
|
2867
|
+
data: input,
|
|
2868
|
+
};
|
|
2869
|
+
await this.intializeProduct(additional_logs);
|
|
2870
|
+
this.process_id = process_id;
|
|
2871
|
+
this.baseLogs.product_id = this.productId;
|
|
2872
|
+
const productEnv = this.fetchEnv(env, additional_logs);
|
|
2873
|
+
this.processEnv = productEnv;
|
|
2874
|
+
if (!productEnv.active) {
|
|
2875
|
+
throw new Error(`Environment ${env} is not active`);
|
|
2876
|
+
}
|
|
2877
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Execute action initiated', status: types_1.LogEventStatus.PROCESSING }));
|
|
2878
|
+
const result = await this.runAction({
|
|
2879
|
+
type: types_1.FeatureEventTypes.ACTION,
|
|
2880
|
+
event,
|
|
2881
|
+
cache: action.cache,
|
|
2882
|
+
app,
|
|
2883
|
+
input,
|
|
2884
|
+
env: productEnv,
|
|
2885
|
+
retries: retries || 0,
|
|
2886
|
+
allow_fail: false,
|
|
2887
|
+
}, additional_logs, true);
|
|
2888
|
+
this.end = Date.now();
|
|
2889
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Execute action - success', data: { input: (0, processor_utils_1.anonymizeObject)(input), result: (0, processor_utils_1.anonymizeObject)(result) }, status: types_1.LogEventStatus.SUCCESS }));
|
|
2890
|
+
this.writeResult(types_1.LogEventStatus.SUCCESS);
|
|
2891
|
+
this.logService.publish();
|
|
2892
|
+
return result;
|
|
2893
|
+
}
|
|
2894
|
+
catch (e) {
|
|
2895
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Execute action - failed', data: { e }, status: types_1.LogEventStatus.FAIL }));
|
|
2896
|
+
this.end = Date.now();
|
|
2897
|
+
this.logService.publish();
|
|
2898
|
+
return { process_id: this.process_id };
|
|
2899
|
+
}
|
|
2900
|
+
}
|
|
2901
|
+
async processDBAction(action) {
|
|
2902
|
+
//TODO: schema validation
|
|
2903
|
+
const { env, input, retries, event, product: product_tag } = action;
|
|
2904
|
+
const [parent_tag, child_tag] = event.split(':');
|
|
2905
|
+
this.component = types_1.LogEventTypes.DB_ACTION;
|
|
2906
|
+
if (!parent_tag || !child_tag) {
|
|
2907
|
+
throw new Error(`database action events should be in the format notification_tag:message_tag`);
|
|
2908
|
+
}
|
|
2909
|
+
const additional_logs = {
|
|
2910
|
+
parent_tag,
|
|
2911
|
+
child_tag,
|
|
2912
|
+
type: types_1.LogEventTypes.DB_ACTION,
|
|
2913
|
+
name: 'Process database action',
|
|
2914
|
+
};
|
|
2915
|
+
try {
|
|
2916
|
+
this.validateActionDataMappingInput(input, types_1.FeatureEventTypes.DB_ACTION);
|
|
2917
|
+
this.input = action;
|
|
2918
|
+
this.start = Date.now();
|
|
2919
|
+
this.productTag = product_tag;
|
|
2920
|
+
const process_id = (0, processor_utils_1.generateObjectId)();
|
|
2921
|
+
this.baseLogs = Object.assign({ product_tag: this.productTag, workspace_id: this.workspace_id, env,
|
|
2922
|
+
process_id, data: input }, additional_logs);
|
|
2923
|
+
await this.intializeProduct(additional_logs);
|
|
2924
|
+
this.baseLogs.product_id = this.productId;
|
|
2925
|
+
this.process_id = process_id;
|
|
2926
|
+
const productEnv = this.fetchEnv(env, additional_logs);
|
|
2927
|
+
this.processEnv = productEnv;
|
|
2928
|
+
if (!productEnv.active) {
|
|
2929
|
+
throw new Error(`Environment ${env} is not active`);
|
|
2930
|
+
}
|
|
2931
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Execute database action', data: { action: (0, processor_utils_1.anonymizeObject)(action) }, status: types_1.LogEventStatus.PROCESSING }));
|
|
2932
|
+
const payload = {
|
|
2933
|
+
type: types_1.FeatureEventTypes.DB_ACTION,
|
|
2934
|
+
event,
|
|
2935
|
+
input,
|
|
2936
|
+
cache: action.cache,
|
|
2937
|
+
env: productEnv,
|
|
2938
|
+
retries: retries || 0,
|
|
2939
|
+
allow_fail: false,
|
|
2940
|
+
};
|
|
2941
|
+
const result = await this.runDBAction(payload, additional_logs);
|
|
2942
|
+
this.end = Date.now();
|
|
2943
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Execute database action', data: { input: (0, processor_utils_1.anonymizeObject)(input), result: (0, processor_utils_1.anonymizeObject)(result) }, status: types_1.LogEventStatus.SUCCESS }));
|
|
2944
|
+
this.writeResult(types_1.LogEventStatus.SUCCESS);
|
|
2945
|
+
this.logService.publish();
|
|
2946
|
+
return result;
|
|
2947
|
+
}
|
|
2948
|
+
catch (e) {
|
|
2949
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Execute database action', data: { e }, status: types_1.LogEventStatus.FAIL }));
|
|
2950
|
+
this.end = Date.now();
|
|
2951
|
+
this.logService.publish();
|
|
2952
|
+
return { process_id: this.process_id };
|
|
2953
|
+
}
|
|
2954
|
+
}
|
|
2955
|
+
// Placeholder: function processing is not implemented yet — intentionally a no-op.
async processFunction(data) { }
|
|
2956
|
+
async processNotification(action) {
|
|
2957
|
+
//TODO: schema validation
|
|
2958
|
+
const { env, input, retries, event, product: product_tag } = action;
|
|
2959
|
+
const [parent_tag, child_tag] = event.split(':');
|
|
2960
|
+
if (!parent_tag || !child_tag) {
|
|
2961
|
+
throw new Error(`database action events should be in the format notification_tag:message_tag`);
|
|
2962
|
+
}
|
|
2963
|
+
this.component = types_1.LogEventTypes.NOTIFICATIONS;
|
|
2964
|
+
const additional_logs = {
|
|
2965
|
+
parent_tag,
|
|
2966
|
+
child_tag,
|
|
2967
|
+
type: types_1.LogEventTypes.NOTIFICATIONS,
|
|
2968
|
+
name: 'Process Notification',
|
|
2969
|
+
};
|
|
2970
|
+
try {
|
|
2971
|
+
this.validateActionDataMappingInput(input, types_1.FeatureEventTypes.NOTIFICATION);
|
|
2972
|
+
this.input = action;
|
|
2973
|
+
this.start = Date.now();
|
|
2974
|
+
this.productTag = product_tag;
|
|
2975
|
+
const process_id = (0, processor_utils_1.generateObjectId)();
|
|
2976
|
+
this.baseLogs = Object.assign({ product_tag: this.productTag, workspace_id: this.workspace_id, env,
|
|
2977
|
+
process_id, data: input }, additional_logs);
|
|
2978
|
+
await this.intializeProduct(additional_logs);
|
|
2979
|
+
this.process_id = process_id;
|
|
2980
|
+
this.baseLogs.product_id = this.productId;
|
|
2981
|
+
const productEnv = this.fetchEnv(env, additional_logs);
|
|
2982
|
+
this.processEnv = productEnv;
|
|
2983
|
+
if (!productEnv.active) {
|
|
2984
|
+
throw new Error(`Environment ${env} is not active`);
|
|
2985
|
+
}
|
|
2986
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Send notification - initiated', data: { action: (0, processor_utils_1.anonymizeObject)(action) }, status: types_1.LogEventStatus.PROCESSING }));
|
|
2987
|
+
const payload = {
|
|
2988
|
+
type: types_1.FeatureEventTypes.NOTIFICATION,
|
|
2989
|
+
event,
|
|
2990
|
+
input,
|
|
2991
|
+
cache: action.cache,
|
|
2992
|
+
env: productEnv,
|
|
2993
|
+
retries: retries || 0,
|
|
2994
|
+
allow_fail: false,
|
|
2995
|
+
};
|
|
2996
|
+
const result = await this.runNotification(payload, additional_logs);
|
|
2997
|
+
this.end = Date.now();
|
|
2998
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Send notification - success', data: { input: (0, processor_utils_1.anonymizeObject)(input), result: (0, processor_utils_1.anonymizeObject)(result) }, status: types_1.LogEventStatus.SUCCESS }));
|
|
2999
|
+
this.writeResult(types_1.LogEventStatus.SUCCESS);
|
|
3000
|
+
this.logService.publish();
|
|
3001
|
+
return { process_id };
|
|
3002
|
+
}
|
|
3003
|
+
catch (e) {
|
|
3004
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Send notification - failed', data: { e }, status: types_1.LogEventStatus.FAIL }));
|
|
3005
|
+
this.end = Date.now();
|
|
3006
|
+
this.logService.publish();
|
|
3007
|
+
return { process_id: this.process_id };
|
|
3008
|
+
}
|
|
3009
|
+
}
|
|
3010
|
+
async fetchRemoteCaches(payload) {
|
|
3011
|
+
try {
|
|
3012
|
+
const data = await this.processorApiService.fetchRemoteCaches(payload, this.getUserAccess());
|
|
3013
|
+
return data.map((data) => {
|
|
3014
|
+
data.value = (0, processor_utils_1.decrypt)(data.value, this.productBuilderService.fetchProduct().private_key);
|
|
3015
|
+
return data;
|
|
3016
|
+
});
|
|
3017
|
+
}
|
|
3018
|
+
catch (e) {
|
|
3019
|
+
throw e;
|
|
3020
|
+
}
|
|
3021
|
+
}
|
|
3022
|
+
getUserAccess() {
|
|
3023
|
+
return {
|
|
3024
|
+
user_id: this.user_id,
|
|
3025
|
+
workspace_id: this.workspace_id,
|
|
3026
|
+
token: this.token,
|
|
3027
|
+
public_key: this.public_key,
|
|
3028
|
+
};
|
|
3029
|
+
}
|
|
3030
|
+
async addToCache(payload) {
|
|
3031
|
+
if (!this.redisClient) {
|
|
3032
|
+
throw 'redis client not setup';
|
|
3033
|
+
}
|
|
3034
|
+
const { input, privateKey, data, cache_tag, timestamp, expiry, product_tag, component_tag, component_type } = payload;
|
|
3035
|
+
// sha-512 instead of md5, private key is fetch product.privatekey
|
|
3036
|
+
const key = `${cache_tag}-${CryptoJS.SHA512(input)}`;
|
|
3037
|
+
const encryptedData = (0, processor_utils_1.encrypt)(data, privateKey);
|
|
3038
|
+
let expiresAt = null;
|
|
3039
|
+
if (expiry) {
|
|
3040
|
+
expiresAt = timestamp + expiry;
|
|
3041
|
+
}
|
|
3042
|
+
await this.processorApiService.addToRemoteCache(Object.assign(Object.assign({}, (expiresAt && { expiry: new Date(expiresAt) })), { key, value: encryptedData, cache_tag,
|
|
3043
|
+
product_tag,
|
|
3044
|
+
component_tag,
|
|
3045
|
+
component_type }), this.getUserAccess());
|
|
3046
|
+
await this.redisClient.hSet(key, 'data', encryptedData);
|
|
3047
|
+
await this.redisClient.hSet(key, 'lastUpdated', timestamp);
|
|
3048
|
+
}
|
|
3049
|
+
async fetchFromCache(payload) {
    // Look up a cached, encrypted result for (cache_tag, input). Tries the
    // local redis hash first, then falls back to the remote cache API.
    // Returns the decrypted payload, or null on a miss or local expiry.
    var _a;
    const { input, privateKey, cache_tag, expiry } = payload;
    // Key derivation must match addToCache: cache tag + SHA-512 of the input.
    const key = `${cache_tag}-${CryptoJS.SHA512(input)}`;
    if (!this.redisClient) {
        // No local cache configured: treated as a miss rather than an error.
        return null;
    }
    let record = await this.redisClient.hGetAll(key);
    if (!record || !record.data || !record.lastUpdated) {
        // Local miss (or partial record): fall back to the remote cache.
        const checkRemote = await this.processorApiService.fetchRemoteCacheByKey(key, this.getUserAccess());
        if (!checkRemote) {
            return null;
        }
        // NOTE(review): `expiresAt` is captured here but never read below, and
        // the expiry check relies on `lastUpdated`, which remote records lack —
        // so remote hits currently bypass the expiry check. Confirm intent.
        record = { data: checkRemote.value, expiresAt: (_a = checkRemote.expiry) === null || _a === void 0 ? void 0 : _a.getTime().toString() };
    }
    if (record.lastUpdated) {
        const lastUpdated = Number(record.lastUpdated);
        if (expiry) {
            // Entry is stale once lastUpdated + expiry has passed: evict and miss.
            const expiryTime = lastUpdated + expiry;
            if (Date.now() > expiryTime) {
                await this.redisClient.del(key);
                return null;
            }
        }
    }
    return (0, processor_utils_1.decrypt)(record.data, privateKey);
}
|
|
3076
|
+
}
|
|
3077
|
+
exports.default = ProcessorService;
|
|
3078
|
+
//# sourceMappingURL=processor.service.js.map
|