@ductape/sdk 0.0.4-v6 → 0.0.4-v61
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/agents/agent-context.d.ts +98 -0
- package/dist/agents/agent-context.js +588 -0
- package/dist/agents/agent-context.js.map +1 -0
- package/dist/agents/agent-executor.d.ts +180 -0
- package/dist/agents/agent-executor.js +715 -0
- package/dist/agents/agent-executor.js.map +1 -0
- package/dist/agents/agents.service.d.ts +310 -0
- package/dist/agents/agents.service.js +1249 -0
- package/dist/agents/agents.service.js.map +1 -0
- package/dist/agents/index.d.ts +55 -0
- package/dist/agents/index.js +110 -0
- package/dist/agents/index.js.map +1 -0
- package/dist/agents/memory-manager.d.ts +182 -0
- package/dist/agents/memory-manager.js +383 -0
- package/dist/agents/memory-manager.js.map +1 -0
- package/dist/agents/tool-registry.d.ts +141 -0
- package/dist/agents/tool-registry.js +355 -0
- package/dist/agents/tool-registry.js.map +1 -0
- package/dist/agents/types/agents.types.d.ts +1227 -0
- package/dist/agents/types/agents.types.js +12 -0
- package/dist/agents/types/agents.types.js.map +1 -0
- package/dist/agents/types/index.d.ts +6 -0
- package/dist/agents/types/index.js +23 -0
- package/dist/agents/types/index.js.map +1 -0
- package/dist/agents/vector-store-adapter.d.ts +108 -0
- package/dist/agents/vector-store-adapter.js +213 -0
- package/dist/agents/vector-store-adapter.js.map +1 -0
- package/dist/api/services/appApi.service.d.ts +51 -5
- package/dist/api/services/appApi.service.js +101 -3
- package/dist/api/services/appApi.service.js.map +1 -1
- package/dist/api/services/pricingApi.service.d.ts +10 -0
- package/dist/api/services/pricingApi.service.js +34 -0
- package/dist/api/services/pricingApi.service.js.map +1 -0
- package/dist/api/services/processorApi.service.d.ts +334 -2
- package/dist/api/services/processorApi.service.js +264 -2
- package/dist/api/services/processorApi.service.js.map +1 -1
- package/dist/api/services/productsApi.service.d.ts +108 -1
- package/dist/api/services/productsApi.service.js +150 -3
- package/dist/api/services/productsApi.service.js.map +1 -1
- package/dist/api/services/resilienceApi.service.d.ts +106 -0
- package/dist/api/services/resilienceApi.service.js +224 -0
- package/dist/api/services/resilienceApi.service.js.map +1 -0
- package/dist/api/services/secretsApi.service.d.ts +50 -0
- package/dist/api/services/secretsApi.service.js +124 -0
- package/dist/api/services/secretsApi.service.js.map +1 -0
- package/dist/api/services/workflowApi.service.d.ts +199 -0
- package/dist/api/services/workflowApi.service.js +183 -0
- package/dist/api/services/workflowApi.service.js.map +1 -0
- package/dist/api/services/workspaceApi.service.d.ts +8 -0
- package/dist/api/services/workspaceApi.service.js +20 -0
- package/dist/api/services/workspaceApi.service.js.map +1 -1
- package/dist/api/urls.d.ts +65 -1
- package/dist/api/urls.js +90 -18
- package/dist/api/urls.js.map +1 -1
- package/dist/api/utils/auth.utils.d.ts +1 -3
- package/dist/api/utils/auth.utils.js.map +1 -1
- package/dist/api/utils/cache.utils.d.ts +1 -1
- package/dist/api/utils/cache.utils.js +2 -2
- package/dist/api/utils/cache.utils.js.map +1 -1
- package/dist/api/utils/strings.utils.d.ts +2 -0
- package/dist/api/utils/strings.utils.js +14 -0
- package/dist/api/utils/strings.utils.js.map +1 -1
- package/dist/apps/services/app.service.d.ts +41 -33
- package/dist/apps/services/app.service.js +472 -184
- package/dist/apps/services/app.service.js.map +1 -1
- package/dist/apps/utils/auth-context-manager.d.ts +137 -0
- package/dist/apps/utils/auth-context-manager.js +248 -0
- package/dist/apps/utils/auth-context-manager.js.map +1 -0
- package/dist/apps/utils/credential-manager.d.ts +128 -0
- package/dist/apps/utils/credential-manager.js +199 -0
- package/dist/apps/utils/credential-manager.js.map +1 -0
- package/dist/apps/utils/index.d.ts +10 -0
- package/dist/apps/utils/index.js +54 -0
- package/dist/apps/utils/index.js.map +1 -0
- package/dist/apps/utils/input-helpers.d.ts +67 -0
- package/dist/apps/utils/input-helpers.js +185 -0
- package/dist/apps/utils/input-helpers.js.map +1 -0
- package/dist/apps/utils/input-resolver.d.ts +165 -0
- package/dist/apps/utils/input-resolver.js +477 -0
- package/dist/apps/utils/input-resolver.js.map +1 -0
- package/dist/apps/utils/oauth-manager.d.ts +196 -0
- package/dist/apps/utils/oauth-manager.js +429 -0
- package/dist/apps/utils/oauth-manager.js.map +1 -0
- package/dist/apps/validators/joi-validators/create.appAction.validator.d.ts +1 -2
- package/dist/apps/validators/joi-validators/create.appAction.validator.js +21 -2
- package/dist/apps/validators/joi-validators/create.appAction.validator.js.map +1 -1
- package/dist/apps/validators/joi-validators/update.appAction.validator.js +11 -1
- package/dist/apps/validators/joi-validators/update.appAction.validator.js.map +1 -1
- package/dist/apps/validators/joi-validators/update.appActionResponse.validator.d.ts +1 -1
- package/dist/apps/validators/joi-validators/update.appActionResponse.validator.js +34 -1
- package/dist/apps/validators/joi-validators/update.appActionResponse.validator.js.map +1 -1
- package/dist/bin.d.ts +26 -0
- package/dist/bin.js +28 -0
- package/dist/bin.js.map +1 -0
- package/dist/brokers/brokers.service.d.ts +289 -0
- package/dist/brokers/brokers.service.js +722 -0
- package/dist/brokers/brokers.service.js.map +1 -0
- package/dist/brokers/index.d.ts +46 -0
- package/dist/brokers/index.js +83 -0
- package/dist/brokers/index.js.map +1 -0
- package/dist/brokers/types/index.d.ts +314 -0
- package/dist/brokers/types/index.js +8 -0
- package/dist/brokers/types/index.js.map +1 -0
- package/dist/brokers/utils/broker.util.d.ts +33 -0
- package/dist/brokers/utils/broker.util.js +125 -0
- package/dist/brokers/utils/broker.util.js.map +1 -0
- package/dist/brokers/utils/providers/aws-sqs.service.d.ts +16 -0
- package/dist/brokers/utils/providers/aws-sqs.service.js +71 -0
- package/dist/brokers/utils/providers/aws-sqs.service.js.map +1 -0
- package/dist/brokers/utils/providers/google-pubsub.service.d.ts +16 -0
- package/dist/brokers/utils/providers/google-pubsub.service.js +43 -0
- package/dist/brokers/utils/providers/google-pubsub.service.js.map +1 -0
- package/dist/brokers/utils/providers/index.d.ts +6 -0
- package/dist/brokers/utils/providers/index.js +16 -0
- package/dist/brokers/utils/providers/index.js.map +1 -0
- package/dist/brokers/utils/providers/kafka.service.d.ts +23 -0
- package/dist/brokers/utils/providers/kafka.service.js +131 -0
- package/dist/brokers/utils/providers/kafka.service.js.map +1 -0
- package/dist/brokers/utils/providers/nats.service.d.ts +18 -0
- package/dist/brokers/utils/providers/nats.service.js +63 -0
- package/dist/brokers/utils/providers/nats.service.js.map +1 -0
- package/dist/brokers/utils/providers/rabbitmq.service.d.ts +15 -0
- package/dist/brokers/utils/providers/rabbitmq.service.js +151 -0
- package/dist/brokers/utils/providers/rabbitmq.service.js.map +1 -0
- package/dist/brokers/utils/providers/redis.service.d.ts +18 -0
- package/dist/brokers/utils/providers/redis.service.js +93 -0
- package/dist/brokers/utils/providers/redis.service.js.map +1 -0
- package/dist/cache/cache.manager.d.ts +159 -0
- package/dist/cache/cache.manager.js +265 -0
- package/dist/cache/cache.manager.js.map +1 -0
- package/dist/cache/cache.service.d.ts +186 -0
- package/dist/cache/cache.service.js +437 -0
- package/dist/cache/cache.service.js.map +1 -0
- package/dist/cache/index.d.ts +52 -0
- package/dist/cache/index.js +79 -0
- package/dist/cache/index.js.map +1 -0
- package/dist/cache/types/index.d.ts +106 -0
- package/dist/cache/types/index.js +6 -0
- package/dist/cache/types/index.js.map +1 -0
- package/dist/clients/pricing.client.d.ts +3 -0
- package/dist/clients/pricing.client.js +33 -0
- package/dist/clients/pricing.client.js.map +1 -0
- package/dist/database/actions/action-manager.d.ts +170 -0
- package/dist/database/actions/action-manager.js +465 -0
- package/dist/database/actions/action-manager.js.map +1 -0
- package/dist/database/actions/index.d.ts +6 -0
- package/dist/database/actions/index.js +13 -0
- package/dist/database/actions/index.js.map +1 -0
- package/dist/database/adapters/adapter.factory.d.ts +62 -0
- package/dist/database/adapters/adapter.factory.js +97 -0
- package/dist/database/adapters/adapter.factory.js.map +1 -0
- package/dist/database/adapters/base.adapter.d.ts +393 -0
- package/dist/database/adapters/base.adapter.js +150 -0
- package/dist/database/adapters/base.adapter.js.map +1 -0
- package/dist/database/adapters/cassandra.adapter.d.ts +91 -0
- package/dist/database/adapters/cassandra.adapter.js +1075 -0
- package/dist/database/adapters/cassandra.adapter.js.map +1 -0
- package/dist/database/adapters/dynamodb.adapter.d.ts +109 -0
- package/dist/database/adapters/dynamodb.adapter.js +1534 -0
- package/dist/database/adapters/dynamodb.adapter.js.map +1 -0
- package/dist/database/adapters/index.d.ts +11 -0
- package/dist/database/adapters/index.js +27 -0
- package/dist/database/adapters/index.js.map +1 -0
- package/dist/database/adapters/mariadb.adapter.d.ts +100 -0
- package/dist/database/adapters/mariadb.adapter.js +247 -0
- package/dist/database/adapters/mariadb.adapter.js.map +1 -0
- package/dist/database/adapters/mongodb.adapter.d.ts +120 -0
- package/dist/database/adapters/mongodb.adapter.js +1253 -0
- package/dist/database/adapters/mongodb.adapter.js.map +1 -0
- package/dist/database/adapters/mysql.adapter.d.ts +85 -0
- package/dist/database/adapters/mysql.adapter.js +1313 -0
- package/dist/database/adapters/mysql.adapter.js.map +1 -0
- package/dist/database/adapters/postgresql.adapter.d.ts +88 -0
- package/dist/database/adapters/postgresql.adapter.js +1434 -0
- package/dist/database/adapters/postgresql.adapter.js.map +1 -0
- package/dist/database/databases.service.d.ts +1388 -0
- package/dist/database/databases.service.js +2821 -0
- package/dist/database/databases.service.js.map +1 -0
- package/dist/database/index.d.ts +46 -0
- package/dist/database/index.js +109 -0
- package/dist/database/index.js.map +1 -0
- package/dist/database/migrations/index.d.ts +6 -0
- package/dist/database/migrations/index.js +12 -0
- package/dist/database/migrations/index.js.map +1 -0
- package/dist/database/migrations/migration-engine.d.ts +132 -0
- package/dist/database/migrations/migration-engine.js +1356 -0
- package/dist/database/migrations/migration-engine.js.map +1 -0
- package/dist/database/operators/aggregation-builder.d.ts +67 -0
- package/dist/database/operators/aggregation-builder.js +841 -0
- package/dist/database/operators/aggregation-builder.js.map +1 -0
- package/dist/database/operators/index.d.ts +7 -0
- package/dist/database/operators/index.js +15 -0
- package/dist/database/operators/index.js.map +1 -0
- package/dist/database/operators/query-builder.d.ts +59 -0
- package/dist/database/operators/query-builder.js +397 -0
- package/dist/database/operators/query-builder.js.map +1 -0
- package/dist/database/presave/decrypt.d.ts +25 -0
- package/dist/database/presave/decrypt.js +146 -0
- package/dist/database/presave/decrypt.js.map +1 -0
- package/dist/database/presave/index.d.ts +9 -0
- package/dist/database/presave/index.js +18 -0
- package/dist/database/presave/index.js.map +1 -0
- package/dist/database/presave/presave-processor.d.ts +148 -0
- package/dist/database/presave/presave-processor.js +702 -0
- package/dist/database/presave/presave-processor.js.map +1 -0
- package/dist/database/schema/index.d.ts +7 -0
- package/dist/database/schema/index.js +13 -0
- package/dist/database/schema/index.js.map +1 -0
- package/dist/database/schema/schema-manager.d.ts +258 -0
- package/dist/database/schema/schema-manager.js +637 -0
- package/dist/database/schema/schema-manager.js.map +1 -0
- package/dist/database/transactions/index.d.ts +6 -0
- package/dist/database/transactions/index.js +13 -0
- package/dist/database/transactions/index.js.map +1 -0
- package/dist/database/transactions/transaction-manager.d.ts +113 -0
- package/dist/database/transactions/transaction-manager.js +344 -0
- package/dist/database/transactions/transaction-manager.js.map +1 -0
- package/dist/database/triggers/index.d.ts +7 -0
- package/dist/database/triggers/index.js +14 -0
- package/dist/database/triggers/index.js.map +1 -0
- package/dist/database/triggers/trigger-processor.d.ts +239 -0
- package/dist/database/triggers/trigger-processor.js +1034 -0
- package/dist/database/triggers/trigger-processor.js.map +1 -0
- package/dist/database/types/action.interface.d.ts +148 -0
- package/dist/database/types/action.interface.js +6 -0
- package/dist/database/types/action.interface.js.map +1 -0
- package/dist/database/types/aggregation.interface.d.ts +185 -0
- package/dist/database/types/aggregation.interface.js +6 -0
- package/dist/database/types/aggregation.interface.js.map +1 -0
- package/dist/database/types/connection.interface.d.ts +137 -0
- package/dist/database/types/connection.interface.js +6 -0
- package/dist/database/types/connection.interface.js.map +1 -0
- package/dist/database/types/enums.d.ts +195 -0
- package/dist/database/types/enums.js +244 -0
- package/dist/database/types/enums.js.map +1 -0
- package/dist/database/types/index.d.ts +14 -0
- package/dist/database/types/index.js +31 -0
- package/dist/database/types/index.js.map +1 -0
- package/dist/database/types/migration.interface.d.ts +686 -0
- package/dist/database/types/migration.interface.js +9 -0
- package/dist/database/types/migration.interface.js.map +1 -0
- package/dist/database/types/presave.interface.d.ts +292 -0
- package/dist/database/types/presave.interface.js +60 -0
- package/dist/database/types/presave.interface.js.map +1 -0
- package/dist/database/types/query.interface.d.ts +205 -0
- package/dist/database/types/query.interface.js +6 -0
- package/dist/database/types/query.interface.js.map +1 -0
- package/dist/database/types/schema.interface.d.ts +398 -0
- package/dist/database/types/schema.interface.js +6 -0
- package/dist/database/types/schema.interface.js.map +1 -0
- package/dist/database/types/transaction.interface.d.ts +84 -0
- package/dist/database/types/transaction.interface.js +6 -0
- package/dist/database/types/transaction.interface.js.map +1 -0
- package/dist/database/types/trigger.interface.d.ts +612 -0
- package/dist/database/types/trigger.interface.js +121 -0
- package/dist/database/types/trigger.interface.js.map +1 -0
- package/dist/database/types/write.interface.d.ts +216 -0
- package/dist/database/types/write.interface.js +6 -0
- package/dist/database/types/write.interface.js.map +1 -0
- package/dist/database/utils/database-error.d.ts +96 -0
- package/dist/database/utils/database-error.js +221 -0
- package/dist/database/utils/database-error.js.map +1 -0
- package/dist/database/utils/index.d.ts +6 -0
- package/dist/database/utils/index.js +11 -0
- package/dist/database/utils/index.js.map +1 -0
- package/dist/graph/adapters/adapter.factory.d.ts +47 -0
- package/dist/graph/adapters/adapter.factory.js +77 -0
- package/dist/graph/adapters/adapter.factory.js.map +1 -0
- package/dist/graph/adapters/arangodb.adapter.d.ts +86 -0
- package/dist/graph/adapters/arangodb.adapter.js +1522 -0
- package/dist/graph/adapters/arangodb.adapter.js.map +1 -0
- package/dist/graph/adapters/base.adapter.d.ts +245 -0
- package/dist/graph/adapters/base.adapter.js +64 -0
- package/dist/graph/adapters/base.adapter.js.map +1 -0
- package/dist/graph/adapters/index.d.ts +11 -0
- package/dist/graph/adapters/index.js +21 -0
- package/dist/graph/adapters/index.js.map +1 -0
- package/dist/graph/adapters/memgraph.adapter.d.ts +110 -0
- package/dist/graph/adapters/memgraph.adapter.js +1345 -0
- package/dist/graph/adapters/memgraph.adapter.js.map +1 -0
- package/dist/graph/adapters/neo4j.adapter.d.ts +81 -0
- package/dist/graph/adapters/neo4j.adapter.js +1198 -0
- package/dist/graph/adapters/neo4j.adapter.js.map +1 -0
- package/dist/graph/adapters/neptune.adapter.d.ts +82 -0
- package/dist/graph/adapters/neptune.adapter.js +1313 -0
- package/dist/graph/adapters/neptune.adapter.js.map +1 -0
- package/dist/graph/graphs.service.d.ts +546 -0
- package/dist/graph/graphs.service.js +1893 -0
- package/dist/graph/graphs.service.js.map +1 -0
- package/dist/graph/index.d.ts +57 -0
- package/dist/graph/index.js +77 -0
- package/dist/graph/index.js.map +1 -0
- package/dist/graph/transactions/index.d.ts +4 -0
- package/dist/graph/transactions/index.js +9 -0
- package/dist/graph/transactions/index.js.map +1 -0
- package/dist/graph/transactions/transaction-manager.d.ts +61 -0
- package/dist/graph/transactions/transaction-manager.js +126 -0
- package/dist/graph/transactions/transaction-manager.js.map +1 -0
- package/dist/graph/types/connection.interface.d.ts +149 -0
- package/dist/graph/types/connection.interface.js +9 -0
- package/dist/graph/types/connection.interface.js.map +1 -0
- package/dist/graph/types/enums.d.ts +101 -0
- package/dist/graph/types/enums.js +114 -0
- package/dist/graph/types/enums.js.map +1 -0
- package/dist/graph/types/index.d.ts +13 -0
- package/dist/graph/types/index.js +20 -0
- package/dist/graph/types/index.js.map +1 -0
- package/dist/graph/types/node.interface.d.ts +248 -0
- package/dist/graph/types/node.interface.js +9 -0
- package/dist/graph/types/node.interface.js.map +1 -0
- package/dist/graph/types/query.interface.d.ts +175 -0
- package/dist/graph/types/query.interface.js +9 -0
- package/dist/graph/types/query.interface.js.map +1 -0
- package/dist/graph/types/relationship.interface.d.ts +207 -0
- package/dist/graph/types/relationship.interface.js +9 -0
- package/dist/graph/types/relationship.interface.js.map +1 -0
- package/dist/graph/types/schema.interface.d.ts +295 -0
- package/dist/graph/types/schema.interface.js +9 -0
- package/dist/graph/types/schema.interface.js.map +1 -0
- package/dist/graph/types/transaction.interface.d.ts +55 -0
- package/dist/graph/types/transaction.interface.js +9 -0
- package/dist/graph/types/transaction.interface.js.map +1 -0
- package/dist/graph/types/traversal.interface.d.ts +181 -0
- package/dist/graph/types/traversal.interface.js +9 -0
- package/dist/graph/types/traversal.interface.js.map +1 -0
- package/dist/graph/utils/graph-error.d.ts +71 -0
- package/dist/graph/utils/graph-error.js +142 -0
- package/dist/graph/utils/graph-error.js.map +1 -0
- package/dist/graph/utils/index.d.ts +4 -0
- package/dist/graph/utils/index.js +9 -0
- package/dist/graph/utils/index.js.map +1 -0
- package/dist/imports/imports.service.d.ts +3 -3
- package/dist/imports/imports.service.js +8 -7
- package/dist/imports/imports.service.js.map +1 -1
- package/dist/imports/imports.types.d.ts +8 -0
- package/dist/imports/repos/openApi.repo.d.ts +1 -1
- package/dist/imports/repos/openApi.repo.js +414 -47
- package/dist/imports/repos/openApi.repo.js.map +1 -1
- package/dist/imports/repos/postmanV21.repo.d.ts +1 -1
- package/dist/imports/repos/postmanV21.repo.js +126 -83
- package/dist/imports/repos/postmanV21.repo.js.map +1 -1
- package/dist/index.d.ts +3241 -285
- package/dist/index.js +4711 -687
- package/dist/index.js.map +1 -1
- package/dist/init.interface.d.ts +407 -0
- package/dist/init.interface.js +3 -0
- package/dist/init.interface.js.map +1 -0
- package/dist/inputs/inputs.service.d.ts +1 -1
- package/dist/inputs/inputs.service.js +2 -2
- package/dist/inputs/inputs.service.js.map +1 -1
- package/dist/inputs/utils/inputs.utils.create.js +1 -1
- package/dist/inputs/utils/inputs.utils.create.js.map +1 -1
- package/dist/jobs/index.d.ts +38 -0
- package/dist/jobs/index.js +50 -0
- package/dist/jobs/index.js.map +1 -0
- package/dist/jobs/jobs.service.d.ts +154 -0
- package/dist/jobs/jobs.service.js +491 -0
- package/dist/jobs/jobs.service.js.map +1 -0
- package/dist/jobs/jobs.state.d.ts +113 -0
- package/dist/jobs/jobs.state.js +447 -0
- package/dist/jobs/jobs.state.js.map +1 -0
- package/dist/jobs/types.d.ts +449 -0
- package/dist/jobs/types.js +74 -0
- package/dist/jobs/types.js.map +1 -0
- package/dist/logs/logs.types.d.ts +16 -1
- package/dist/logs/logs.types.js +5 -0
- package/dist/logs/logs.types.js.map +1 -1
- package/dist/models/index.d.ts +6 -0
- package/dist/models/index.js +11 -0
- package/dist/models/index.js.map +1 -0
- package/dist/models/models.service.d.ts +137 -0
- package/dist/models/models.service.js +195 -0
- package/dist/models/models.service.js.map +1 -0
- package/dist/notifications/index.d.ts +13 -0
- package/dist/notifications/index.js +26 -0
- package/dist/notifications/index.js.map +1 -0
- package/dist/notifications/notifications.service.d.ts +257 -0
- package/dist/notifications/notifications.service.js +656 -0
- package/dist/notifications/notifications.service.js.map +1 -0
- package/dist/notifications/types/index.d.ts +4 -0
- package/dist/notifications/types/index.js +21 -0
- package/dist/notifications/types/index.js.map +1 -0
- package/dist/notifications/types/notifications.types.d.ts +400 -0
- package/dist/notifications/types/notifications.types.js +49 -0
- package/dist/notifications/types/notifications.types.js.map +1 -0
- package/dist/parsers/index.d.ts +3 -0
- package/dist/parsers/index.js +27 -0
- package/dist/parsers/index.js.map +1 -0
- package/dist/parsers/pipelines/postman.pipelines.d.ts +15 -0
- package/dist/parsers/pipelines/postman.pipelines.js +103 -0
- package/dist/parsers/pipelines/postman.pipelines.js.map +1 -0
- package/dist/parsers/types/postman.types.d.ts +200 -0
- package/dist/parsers/types/postman.types.js +3 -0
- package/dist/parsers/types/postman.types.js.map +1 -0
- package/dist/parsers/utils/postman.utils.d.ts +12 -0
- package/dist/parsers/utils/postman.utils.js +116 -0
- package/dist/parsers/utils/postman.utils.js.map +1 -0
- package/dist/parsers/validators/postman-auth.validators.d.ts +10 -0
- package/dist/parsers/validators/postman-auth.validators.js +127 -0
- package/dist/parsers/validators/postman-auth.validators.js.map +1 -0
- package/dist/parsers/validators/postman-request.validators.d.ts +13 -0
- package/dist/parsers/validators/postman-request.validators.js +139 -0
- package/dist/parsers/validators/postman-request.validators.js.map +1 -0
- package/dist/parsers/validators/postman-response.validators.d.ts +13 -0
- package/dist/parsers/validators/postman-response.validators.js +150 -0
- package/dist/parsers/validators/postman-response.validators.js.map +1 -0
- package/dist/parsers/validators/postman-variable.validators.d.ts +14 -0
- package/dist/parsers/validators/postman-variable.validators.js +163 -0
- package/dist/parsers/validators/postman-variable.validators.js.map +1 -0
- package/dist/pricing/pricing.repo.js +1 -0
- package/dist/pricing/pricing.repo.js.map +1 -0
- package/dist/pricing/pricing.service.d.ts +24 -0
- package/dist/pricing/pricing.service.js +51 -0
- package/dist/pricing/pricing.service.js.map +1 -0
- package/dist/pricing/pricing.types.d.ts +76 -0
- package/dist/pricing/pricing.types.js +21 -0
- package/dist/pricing/pricing.types.js.map +1 -0
- package/dist/pricing/utils/string.utils.d.ts +1 -0
- package/dist/pricing/utils/string.utils.js +9 -0
- package/dist/pricing/utils/string.utils.js.map +1 -0
- package/dist/processor/services/processor.service.d.ts +120 -73
- package/dist/processor/services/processor.service.js +1705 -1294
- package/dist/processor/services/processor.service.js.map +1 -1
- package/dist/processor/services/request.service.d.ts +36 -0
- package/dist/processor/services/request.service.js +304 -0
- package/dist/processor/services/request.service.js.map +1 -0
- package/dist/processor/types/request.types.d.ts +14 -0
- package/dist/processor/types/request.types.js +3 -0
- package/dist/processor/types/request.types.js.map +1 -0
- package/dist/processor/utils/processor.utils.d.ts +3 -0
- package/dist/processor/utils/processor.utils.js +87 -21
- package/dist/processor/utils/processor.utils.js.map +1 -1
- package/dist/processor/utils/request.utils.d.ts +20 -0
- package/dist/processor/utils/request.utils.js +113 -0
- package/dist/processor/utils/request.utils.js.map +1 -0
- package/dist/products/services/products.service.d.ts +330 -77
- package/dist/products/services/products.service.js +2586 -397
- package/dist/products/services/products.service.js.map +1 -1
- package/dist/products/utils/string.utils.d.ts +1 -1
- package/dist/products/utils/string.utils.js +14 -2
- package/dist/products/utils/string.utils.js.map +1 -1
- package/dist/products/validators/index.d.ts +7 -1
- package/dist/products/validators/index.js +16 -1
- package/dist/products/validators/index.js.map +1 -1
- package/dist/products/validators/joi-validators/create.productAgent.validator.d.ts +3 -0
- package/dist/products/validators/joi-validators/create.productAgent.validator.js +266 -0
- package/dist/products/validators/joi-validators/create.productAgent.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/create.productDatabase.validator.js +5 -0
- package/dist/products/validators/joi-validators/create.productDatabase.validator.js.map +1 -1
- package/dist/products/validators/joi-validators/create.productEnv.validator.js +1 -0
- package/dist/products/validators/joi-validators/create.productEnv.validator.js.map +1 -1
- package/dist/products/validators/joi-validators/create.productGraph.validator.js +89 -0
- package/dist/products/validators/joi-validators/create.productGraph.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/create.productHealthcheck.validator.d.ts +4 -0
- package/dist/products/validators/joi-validators/create.productHealthcheck.validator.js +58 -0
- package/dist/products/validators/joi-validators/create.productHealthcheck.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/create.productMessageBrokers.validator.js +81 -25
- package/dist/products/validators/joi-validators/create.productMessageBrokers.validator.js.map +1 -1
- package/dist/products/validators/joi-validators/create.productModel.validator.d.ts +3 -0
- package/dist/products/validators/joi-validators/create.productModel.validator.js +132 -0
- package/dist/products/validators/joi-validators/create.productModel.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/create.productNotification.validator.js +133 -45
- package/dist/products/validators/joi-validators/create.productNotification.validator.js.map +1 -1
- package/dist/products/validators/joi-validators/create.productStorage.validator.js +77 -18
- package/dist/products/validators/joi-validators/create.productStorage.validator.js.map +1 -1
- package/dist/products/validators/joi-validators/create.productVector.validator.d.ts +3 -0
- package/dist/products/validators/joi-validators/create.productVector.validator.js +135 -0
- package/dist/products/validators/joi-validators/create.productVector.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/update.dataValue.validator.js +1 -0
- package/dist/products/validators/joi-validators/update.dataValue.validator.js.map +1 -1
- package/dist/products/validators/joi-validators/update.productDatabase.validator.js +5 -0
- package/dist/products/validators/joi-validators/update.productDatabase.validator.js.map +1 -1
- package/dist/products/validators/joi-validators/update.productEnv.validator.js +3 -0
- package/dist/products/validators/joi-validators/update.productEnv.validator.js.map +1 -1
- package/dist/products/validators/joi-validators/update.productGraph.validator.js +88 -0
- package/dist/products/validators/joi-validators/update.productGraph.validator.js.map +1 -0
- package/dist/resilience/fallback.service.d.ts +140 -0
- package/dist/resilience/fallback.service.js +764 -0
- package/dist/resilience/fallback.service.js.map +1 -0
- package/dist/resilience/healthcheck.service.d.ts +159 -0
- package/dist/resilience/healthcheck.service.js +943 -0
- package/dist/resilience/healthcheck.service.js.map +1 -0
- package/dist/resilience/index.d.ts +104 -0
- package/dist/resilience/index.js +140 -0
- package/dist/resilience/index.js.map +1 -0
- package/dist/resilience/quota.service.d.ts +82 -0
- package/dist/resilience/quota.service.js +516 -0
- package/dist/resilience/quota.service.js.map +1 -0
- package/dist/resilience/resilience.service.d.ts +98 -0
- package/dist/resilience/resilience.service.js +560 -0
- package/dist/resilience/resilience.service.js.map +1 -0
- package/dist/resilience/types/index.d.ts +513 -0
- package/dist/resilience/types/index.js +29 -0
- package/dist/resilience/types/index.js.map +1 -0
- package/dist/secrets/index.d.ts +10 -0
- package/dist/secrets/index.js +33 -0
- package/dist/secrets/index.js.map +1 -0
- package/dist/secrets/secrets.resolver.d.ts +52 -0
- package/dist/secrets/secrets.resolver.js +233 -0
- package/dist/secrets/secrets.resolver.js.map +1 -0
- package/dist/secrets/secrets.service.d.ts +93 -0
- package/dist/secrets/secrets.service.js +258 -0
- package/dist/secrets/secrets.service.js.map +1 -0
- package/dist/secrets/secrets.types.d.ts +188 -0
- package/dist/secrets/secrets.types.js +87 -0
- package/dist/secrets/secrets.types.js.map +1 -0
- package/dist/sessions/index.d.ts +50 -0
- package/dist/sessions/index.js +93 -0
- package/dist/sessions/index.js.map +1 -0
- package/dist/sessions/sessions.helper.d.ts +68 -0
- package/dist/sessions/sessions.helper.js +116 -0
- package/dist/sessions/sessions.helper.js.map +1 -0
- package/dist/sessions/sessions.resolver.d.ts +150 -0
- package/dist/sessions/sessions.resolver.js +356 -0
- package/dist/sessions/sessions.resolver.js.map +1 -0
- package/dist/sessions/sessions.service.d.ts +170 -0
- package/dist/sessions/sessions.service.js +736 -0
- package/dist/sessions/sessions.service.js.map +1 -0
- package/dist/sessions/types/index.d.ts +290 -0
- package/dist/sessions/types/index.js +6 -0
- package/dist/sessions/types/index.js.map +1 -0
- package/dist/storage/index.d.ts +66 -0
- package/dist/storage/index.js +98 -0
- package/dist/storage/index.js.map +1 -0
- package/dist/storage/storage.service.d.ts +151 -0
- package/dist/storage/storage.service.js +522 -0
- package/dist/storage/storage.service.js.map +1 -0
- package/dist/storage/types/index.d.ts +207 -0
- package/dist/storage/types/index.js +6 -0
- package/dist/storage/types/index.js.map +1 -0
- package/dist/storage/utils/storage.util.d.ts +51 -0
- package/dist/storage/utils/storage.util.js +402 -0
- package/dist/storage/utils/storage.util.js.map +1 -0
- package/dist/test/index.d.ts +3 -0
- package/dist/test/index.js +11 -0
- package/dist/test/index.js.map +1 -0
- package/dist/test/test.appBuilder.d.ts +0 -1
- package/dist/test/test.appBuilder.js +0 -15
- package/dist/test/test.appBuilder.js.map +1 -1
- package/dist/test/test.broker.kafka.js +172 -0
- package/dist/test/test.broker.kafka.js.map +1 -0
- package/dist/test/test.broker.nats.js +193 -0
- package/dist/test/test.broker.nats.js.map +1 -0
- package/dist/test/test.broker.pubsub.js +171 -0
- package/dist/test/test.broker.pubsub.js.map +1 -0
- package/dist/test/test.broker.rabbitmq.js +164 -0
- package/dist/test/test.broker.rabbitmq.js.map +1 -0
- package/dist/test/test.broker.redis.js +168 -0
- package/dist/test/test.broker.redis.js.map +1 -0
- package/dist/test/test.broker.sqs.d.ts +1 -0
- package/dist/test/test.broker.sqs.js +158 -0
- package/dist/test/test.broker.sqs.js.map +1 -0
- package/dist/test/test.caches.d.ts +1 -0
- package/dist/test/test.caches.js +231 -0
- package/dist/test/test.caches.js.map +1 -0
- package/dist/test/test.database.d.ts +1 -0
- package/dist/test/test.database.dynamo.d.ts +1 -0
- package/dist/test/test.database.dynamo.js +265 -0
- package/dist/test/test.database.dynamo.js.map +1 -0
- package/dist/test/test.database.js +140 -0
- package/dist/test/test.database.js.map +1 -0
- package/dist/test/test.database.mongo.d.ts +1 -0
- package/dist/test/test.database.mongo.js +371 -0
- package/dist/test/test.database.mongo.js.map +1 -0
- package/dist/test/test.database.mysql.d.ts +1 -0
- package/dist/test/test.database.mysql.js +415 -0
- package/dist/test/test.database.mysql.js.map +1 -0
- package/dist/test/test.database.postgres.d.ts +1 -0
- package/dist/test/test.database.postgres.js +412 -0
- package/dist/test/test.database.postgres.js.map +1 -0
- package/dist/test/test.email.brevo.d.ts +1 -0
- package/dist/test/test.email.brevo.js +326 -0
- package/dist/test/test.email.brevo.js.map +1 -0
- package/dist/test/test.email.mailgun.d.ts +1 -0
- package/dist/test/test.email.mailgun.js +352 -0
- package/dist/test/test.email.mailgun.js.map +1 -0
- package/dist/test/test.email.postmark.d.ts +1 -0
- package/dist/test/test.email.postmark.js +316 -0
- package/dist/test/test.email.postmark.js.map +1 -0
- package/dist/test/test.email.sendgrid.d.ts +1 -0
- package/dist/test/test.email.sendgrid.js +365 -0
- package/dist/test/test.email.sendgrid.js.map +1 -0
- package/dist/test/test.email.smtp.d.ts +1 -0
- package/dist/test/test.email.smtp.js +323 -0
- package/dist/test/test.email.smtp.js.map +1 -0
- package/dist/test/test.graph.arangodb.d.ts +1 -0
- package/dist/test/test.graph.arangodb.js +358 -0
- package/dist/test/test.graph.arangodb.js.map +1 -0
- package/dist/test/test.graph.memgraph.d.ts +1 -0
- package/dist/test/test.graph.memgraph.js +320 -0
- package/dist/test/test.graph.memgraph.js.map +1 -0
- package/dist/test/test.graph.neo4j.d.ts +1 -0
- package/dist/test/test.graph.neo4j.js +218 -0
- package/dist/test/test.graph.neo4j.js.map +1 -0
- package/dist/test/test.graph.neptune.d.ts +1 -0
- package/dist/test/test.graph.neptune.js +331 -0
- package/dist/test/test.graph.neptune.js.map +1 -0
- package/dist/test/test.health.js +1 -0
- package/dist/test/test.health.js.map +1 -0
- package/dist/test/test.import.d.ts +0 -1
- package/dist/test/test.import.js +0 -1459
- package/dist/test/test.import.js.map +1 -1
- package/dist/test/test.import.openapi.d.ts +0 -1
- package/dist/test/test.import.openapi.js +0 -75
- package/dist/test/test.import.openapi.js.map +1 -1
- package/dist/test/test.imports.js +14 -55
- package/dist/test/test.imports.js.map +1 -1
- package/dist/test/test.logs.d.ts +0 -1
- package/dist/test/test.logs.js +0 -17
- package/dist/test/test.logs.js.map +1 -1
- package/dist/test/test.notifications.d.ts +1 -0
- package/dist/test/test.notifications.js +198 -0
- package/dist/test/test.notifications.js.map +1 -0
- package/dist/test/test.notifiers.js +1 -0
- package/dist/test/test.notifiers.js.map +1 -0
- package/dist/test/test.processor.d.ts +0 -1
- package/dist/test/test.processor.js +0 -122
- package/dist/test/test.processor.js.map +1 -1
- package/dist/test/test.productBuilder.d.ts +0 -1
- package/dist/test/test.productBuilder.js +0 -660
- package/dist/test/test.productBuilder.js.map +1 -1
- package/dist/test/test.products.js +1 -0
- package/dist/test/test.products.js.map +1 -0
- package/dist/test/test.push.expo.d.ts +1 -0
- package/dist/test/test.push.expo.js +442 -0
- package/dist/test/test.push.expo.js.map +1 -0
- package/dist/test/test.push.firebase.d.ts +1 -0
- package/dist/test/test.push.firebase.js +409 -0
- package/dist/test/test.push.firebase.js.map +1 -0
- package/dist/test/test.session.d.ts +1 -0
- package/dist/test/test.session.js +299 -0
- package/dist/test/test.session.js.map +1 -0
- package/dist/test/test.sms.nexmo.d.ts +1 -0
- package/dist/test/test.sms.nexmo.js +278 -0
- package/dist/test/test.sms.nexmo.js.map +1 -0
- package/dist/test/test.sms.twilio.d.ts +1 -0
- package/dist/test/test.sms.twilio.js +275 -0
- package/dist/test/test.sms.twilio.js.map +1 -0
- package/dist/test/test.storage.d.ts +1 -0
- package/dist/test/test.storage.js +202 -0
- package/dist/test/test.storage.js.map +1 -0
- package/dist/test/test.triggers.d.ts +1 -0
- package/dist/test/test.triggers.js +314 -0
- package/dist/test/test.triggers.js.map +1 -0
- package/dist/test/test.vector.pinecone.d.ts +1 -0
- package/dist/test/test.vector.pinecone.js +238 -0
- package/dist/test/test.vector.pinecone.js.map +1 -0
- package/dist/test/test.vector.qdrant.d.ts +1 -0
- package/dist/test/test.vector.qdrant.js +307 -0
- package/dist/test/test.vector.qdrant.js.map +1 -0
- package/dist/test/test.vector.weaviate.d.ts +1 -0
- package/dist/test/test.vector.weaviate.js +325 -0
- package/dist/test/test.vector.weaviate.js.map +1 -0
- package/dist/types/appBuilder.types.d.ts +10 -13
- package/dist/types/enums.d.ts +11 -1
- package/dist/types/enums.js +10 -0
- package/dist/types/enums.js.map +1 -1
- package/dist/types/index.types.d.ts +6 -4
- package/dist/types/index.types.js +0 -1
- package/dist/types/index.types.js.map +1 -1
- package/dist/types/pricing.types.d.ts +4 -0
- package/dist/types/pricing.types.js +3 -0
- package/dist/types/pricing.types.js.map +1 -0
- package/dist/types/processor.types.d.ts +245 -20
- package/dist/types/processor.types.js +9 -1
- package/dist/types/processor.types.js.map +1 -1
- package/dist/types/productsBuilder.types.d.ts +906 -24
- package/dist/types/productsBuilder.types.js +173 -2
- package/dist/types/productsBuilder.types.js.map +1 -1
- package/dist/types/request-tracker.interface.js +1 -0
- package/dist/types/request-tracker.interface.js.map +1 -0
- package/dist/types/requests.types.d.ts +2 -0
- package/dist/utils/constants.d.ts +1 -0
- package/dist/utils/constants.js +5 -0
- package/dist/utils/constants.js.map +1 -0
- package/dist/utils/index.d.ts +0 -2
- package/dist/utils/index.js +24 -52
- package/dist/utils/index.js.map +1 -1
- package/dist/vector/adapters/base.adapter.d.ts +152 -0
- package/dist/vector/adapters/base.adapter.js +137 -0
- package/dist/vector/adapters/base.adapter.js.map +1 -0
- package/dist/vector/adapters/index.d.ts +10 -0
- package/dist/vector/adapters/index.js +19 -0
- package/dist/vector/adapters/index.js.map +1 -0
- package/dist/vector/adapters/memory.adapter.d.ts +85 -0
- package/dist/vector/adapters/memory.adapter.js +505 -0
- package/dist/vector/adapters/memory.adapter.js.map +1 -0
- package/dist/vector/adapters/pinecone.adapter.d.ts +52 -0
- package/dist/vector/adapters/pinecone.adapter.js +400 -0
- package/dist/vector/adapters/pinecone.adapter.js.map +1 -0
- package/dist/vector/adapters/qdrant.adapter.d.ts +56 -0
- package/dist/vector/adapters/qdrant.adapter.js +392 -0
- package/dist/vector/adapters/qdrant.adapter.js.map +1 -0
- package/dist/vector/adapters/weaviate.adapter.d.ts +64 -0
- package/dist/vector/adapters/weaviate.adapter.js +478 -0
- package/dist/vector/adapters/weaviate.adapter.js.map +1 -0
- package/dist/vector/index.d.ts +47 -0
- package/dist/vector/index.js +72 -0
- package/dist/vector/index.js.map +1 -0
- package/dist/vector/types/connection.interface.d.ts +151 -0
- package/dist/vector/types/connection.interface.js +8 -0
- package/dist/vector/types/connection.interface.js.map +1 -0
- package/dist/vector/types/embedding.interface.d.ts +144 -0
- package/dist/vector/types/embedding.interface.js +8 -0
- package/dist/vector/types/embedding.interface.js.map +1 -0
- package/dist/vector/types/enums.d.ts +104 -0
- package/dist/vector/types/enums.js +113 -0
- package/dist/vector/types/enums.js.map +1 -0
- package/dist/vector/types/index.d.ts +9 -0
- package/dist/vector/types/index.js +16 -0
- package/dist/vector/types/index.js.map +1 -0
- package/dist/vector/types/vector.interface.d.ts +315 -0
- package/dist/vector/types/vector.interface.js +8 -0
- package/dist/vector/types/vector.interface.js.map +1 -0
- package/dist/vector/utils/index.d.ts +6 -0
- package/dist/vector/utils/index.js +11 -0
- package/dist/vector/utils/index.js.map +1 -0
- package/dist/vector/utils/vector-error.d.ts +69 -0
- package/dist/vector/utils/vector-error.js +116 -0
- package/dist/vector/utils/vector-error.js.map +1 -0
- package/dist/vector/vector-database.service.d.ts +408 -0
- package/dist/vector/vector-database.service.js +545 -0
- package/dist/vector/vector-database.service.js.map +1 -0
- package/dist/vector/vector.service.d.ts +245 -0
- package/dist/vector/vector.service.js +384 -0
- package/dist/vector/vector.service.js.map +1 -0
- package/dist/workflows/index.d.ts +30 -0
- package/dist/workflows/index.js +64 -0
- package/dist/workflows/index.js.map +1 -0
- package/dist/workflows/types/index.d.ts +6 -0
- package/dist/workflows/types/index.js +23 -0
- package/dist/workflows/types/index.js.map +1 -0
- package/dist/workflows/types/workflows.types.d.ts +1035 -0
- package/dist/workflows/types/workflows.types.js +13 -0
- package/dist/workflows/types/workflows.types.js.map +1 -0
- package/dist/workflows/workflow-builder.d.ts +70 -0
- package/dist/workflows/workflow-builder.js +338 -0
- package/dist/workflows/workflow-builder.js.map +1 -0
- package/dist/workflows/workflow-executor.d.ts +208 -0
- package/dist/workflows/workflow-executor.js +1194 -0
- package/dist/workflows/workflow-executor.js.map +1 -0
- package/dist/workflows/workflows.service.d.ts +404 -0
- package/dist/workflows/workflows.service.js +1620 -0
- package/dist/workflows/workflows.service.js.map +1 -0
- package/package.json +54 -11
- package/dist/actions/actions.repo.js +0 -13
- package/dist/actions/actions.repo.js.map +0 -1
- package/dist/actions/actions.service.js +0 -24
- package/dist/actions/actions.service.js.map +0 -1
- package/dist/actions/utils/actions.util.read.js +0 -427
- package/dist/actions/utils/actions.util.read.js.map +0 -1
- package/dist/api/services/integrationsApi.service.d.ts +0 -18
- package/dist/api/services/integrationsApi.service.js +0 -80
- package/dist/api/services/integrationsApi.service.js.map +0 -1
- package/dist/appBuilder/services/app.service.d.ts +0 -111
- package/dist/appBuilder/services/app.service.js +0 -737
- package/dist/appBuilder/services/app.service.js.map +0 -1
- package/dist/appBuilder/services/appBuilder.service.d.ts +0 -111
- package/dist/appBuilder/services/appBuilder.service.js +0 -662
- package/dist/appBuilder/services/appBuilder.service.js.map +0 -1
- package/dist/appBuilder/utils/objects.utils.d.ts +0 -3
- package/dist/appBuilder/utils/objects.utils.js +0 -9
- package/dist/appBuilder/utils/objects.utils.js.map +0 -1
- package/dist/appBuilder/utils/string.utils.d.ts +0 -2
- package/dist/appBuilder/utils/string.utils.js +0 -57
- package/dist/appBuilder/utils/string.utils.js.map +0 -1
- package/dist/appBuilder/validators/index.d.ts +0 -19
- package/dist/appBuilder/validators/index.js +0 -40
- package/dist/appBuilder/validators/index.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/create.app.validator.js +0 -10
- package/dist/appBuilder/validators/joi-validators/create.app.validator.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/create.appAction.validator.d.ts +0 -4
- package/dist/appBuilder/validators/joi-validators/create.appAction.validator.js +0 -20
- package/dist/appBuilder/validators/joi-validators/create.appAction.validator.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/create.appActionResponse.validator.d.ts +0 -7
- package/dist/appBuilder/validators/joi-validators/create.appActionResponse.validator.js +0 -44
- package/dist/appBuilder/validators/joi-validators/create.appActionResponse.validator.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/create.appAuth.validator.d.ts +0 -3
- package/dist/appBuilder/validators/joi-validators/create.appAuth.validator.js +0 -31
- package/dist/appBuilder/validators/joi-validators/create.appAuth.validator.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/create.appBody.validators.d.ts +0 -4
- package/dist/appBuilder/validators/joi-validators/create.appBody.validators.js +0 -11
- package/dist/appBuilder/validators/joi-validators/create.appBody.validators.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/create.appConstants.validator.d.ts +0 -4
- package/dist/appBuilder/validators/joi-validators/create.appConstants.validator.js +0 -12
- package/dist/appBuilder/validators/joi-validators/create.appConstants.validator.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/create.appEnv.validator.d.ts +0 -4
- package/dist/appBuilder/validators/joi-validators/create.appEnv.validator.js +0 -17
- package/dist/appBuilder/validators/joi-validators/create.appEnv.validator.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/create.appEvent.validator.d.ts +0 -5
- package/dist/appBuilder/validators/joi-validators/create.appEvent.validator.js +0 -30
- package/dist/appBuilder/validators/joi-validators/create.appEvent.validator.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/create.appVariable.validator.d.ts +0 -4
- package/dist/appBuilder/validators/joi-validators/create.appVariable.validator.js +0 -14
- package/dist/appBuilder/validators/joi-validators/create.appVariable.validator.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/sample.validator.d.ts +0 -5
- package/dist/appBuilder/validators/joi-validators/sample.validator.js +0 -26
- package/dist/appBuilder/validators/joi-validators/sample.validator.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/update.app.validator.d.ts +0 -4
- package/dist/appBuilder/validators/joi-validators/update.app.validator.js +0 -34
- package/dist/appBuilder/validators/joi-validators/update.app.validator.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/update.appAction.validator.d.ts +0 -4
- package/dist/appBuilder/validators/joi-validators/update.appAction.validator.js +0 -23
- package/dist/appBuilder/validators/joi-validators/update.appAction.validator.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/update.appActionResponse.validator.d.ts +0 -3
- package/dist/appBuilder/validators/joi-validators/update.appActionResponse.validator.js +0 -21
- package/dist/appBuilder/validators/joi-validators/update.appActionResponse.validator.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/update.appAuth.validator.d.ts +0 -4
- package/dist/appBuilder/validators/joi-validators/update.appAuth.validator.js +0 -19
- package/dist/appBuilder/validators/joi-validators/update.appAuth.validator.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/update.appConstants.validator.d.ts +0 -4
- package/dist/appBuilder/validators/joi-validators/update.appConstants.validator.js +0 -12
- package/dist/appBuilder/validators/joi-validators/update.appConstants.validator.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/update.appEnv.validator.d.ts +0 -4
- package/dist/appBuilder/validators/joi-validators/update.appEnv.validator.js +0 -17
- package/dist/appBuilder/validators/joi-validators/update.appEnv.validator.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/update.appEvent.validator.d.ts +0 -4
- package/dist/appBuilder/validators/joi-validators/update.appEvent.validator.js +0 -16
- package/dist/appBuilder/validators/joi-validators/update.appEvent.validator.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/update.appVariables.validator.d.ts +0 -4
- package/dist/appBuilder/validators/joi-validators/update.appVariables.validator.js +0 -14
- package/dist/appBuilder/validators/joi-validators/update.appVariables.validator.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/update.validation.entityData.validator.js +0 -27
- package/dist/appBuilder/validators/joi-validators/update.validation.entityData.validator.js.map +0 -1
- package/dist/apps/validators/joi-validators/create.appEvent.validator.d.ts +0 -5
- package/dist/apps/validators/joi-validators/create.appEvent.validator.js +0 -30
- package/dist/apps/validators/joi-validators/create.appEvent.validator.js.map +0 -1
- package/dist/apps/validators/joi-validators/update.appEvent.validator.d.ts +0 -4
- package/dist/apps/validators/joi-validators/update.appEvent.validator.js +0 -16
- package/dist/apps/validators/joi-validators/update.appEvent.validator.js.map +0 -1
- package/dist/clients/integrations.client.d.ts +0 -2
- package/dist/clients/integrations.client.js +0 -26
- package/dist/clients/integrations.client.js.map +0 -1
- package/dist/integrationsBuilder/services/integration.service.d.ts +0 -138
- package/dist/integrationsBuilder/services/integration.service.js +0 -1148
- package/dist/integrationsBuilder/services/integration.service.js.map +0 -1
- package/dist/integrationsBuilder/services/integrationBuilder.service.d.ts +0 -130
- package/dist/integrationsBuilder/services/integrationBuilder.service.js +0 -1017
- package/dist/integrationsBuilder/services/integrationBuilder.service.js.map +0 -1
- package/dist/integrationsBuilder/utils/objects.utils.d.ts +0 -2
- package/dist/integrationsBuilder/utils/objects.utils.js +0 -48
- package/dist/integrationsBuilder/utils/objects.utils.js.map +0 -1
- package/dist/integrationsBuilder/utils/string.utils.d.ts +0 -1
- package/dist/integrationsBuilder/utils/string.utils.js +0 -9
- package/dist/integrationsBuilder/utils/string.utils.js.map +0 -1
- package/dist/integrationsBuilder/validators/index.d.ts +0 -18
- package/dist/integrationsBuilder/validators/index.js +0 -38
- package/dist/integrationsBuilder/validators/index.js.map +0 -1
- package/dist/integrationsBuilder/validators/joi-validators/create.integration.validator.d.ts +0 -3
- package/dist/integrationsBuilder/validators/joi-validators/create.integration.validator.js +0 -10
- package/dist/integrationsBuilder/validators/joi-validators/create.integration.validator.js.map +0 -1
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationApp.validator.d.ts +0 -4
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationApp.validator.js +0 -26
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationApp.validator.js.map +0 -1
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationCache.validator.d.ts +0 -3
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationCache.validator.js +0 -8
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationCache.validator.js.map +0 -1
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationDatabase.validator.d.ts +0 -3
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationDatabase.validator.js +0 -8
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationDatabase.validator.js.map +0 -1
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationEnv.validator.d.ts +0 -3
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationEnv.validator.js +0 -10
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationEnv.validator.js.map +0 -1
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationFeature.validator.d.ts +0 -3
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationFeature.validator.js +0 -60
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationFeature.validator.js.map +0 -1
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationFunction.validator.d.ts +0 -3
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationFunction.validator.js +0 -8
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationFunction.validator.js.map +0 -1
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationJob.validator.d.ts +0 -3
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationJob.validator.js +0 -8
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationJob.validator.js.map +0 -1
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationNotification.validator.d.ts +0 -3
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationNotification.validator.js +0 -8
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationNotification.validator.js.map +0 -1
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationApp.validator.d.ts +0 -3
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationApp.validator.js +0 -9
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationApp.validator.js.map +0 -1
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationCache.validator.d.ts +0 -3
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationCache.validator.js +0 -8
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationCache.validator.js.map +0 -1
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationDatabase.validator.d.ts +0 -3
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationDatabase.validator.js +0 -8
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationDatabase.validator.js.map +0 -1
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationEnv.validator.d.ts +0 -3
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationEnv.validator.js +0 -8
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationEnv.validator.js.map +0 -1
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationFeature.validator.d.ts +0 -3
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationFeature.validator.js +0 -8
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationFeature.validator.js.map +0 -1
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationFunction.validator copy.d.ts +0 -3
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationFunction.validator copy.js +0 -8
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationFunction.validator copy.js.map +0 -1
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationJob.validator.d.ts +0 -3
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationJob.validator.js +0 -8
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationJob.validator.js.map +0 -1
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationNotification.validator.d.ts +0 -3
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationNotification.validator.js +0 -8
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationNotification.validator.js.map +0 -1
- package/dist/postman.d.ts +0 -1
- package/dist/postman.js +0 -21674
- package/dist/postman.js.map +0 -1
- package/dist/processor/utils/mongo.util.js +0 -152
- package/dist/processor/utils/mongo.util.js.map +0 -1
- package/dist/processor/utils/postgres.util.d.ts +0 -14
- package/dist/processor/utils/postgres.util.js +0 -83
- package/dist/processor/utils/postgres.util.js.map +0 -1
- package/dist/products/validators/joi-validators/create.product.validator.d.ts +0 -3
- package/dist/products/validators/joi-validators/create.product.validator.js +0 -10
- package/dist/products/validators/joi-validators/create.product.validator.js.map +0 -1
- package/dist/test.appBuilder.js +0 -14
- package/dist/test.appBuilder.js.map +0 -1
- package/dist/test.import.js +0 -24
- package/dist/test.import.js.map +0 -1
- package/dist/test.imports.js +0 -28
- package/dist/test.imports.js.map +0 -1
- package/dist/test.integrationBuilder.js +0 -276
- package/dist/test.integrationBuilder.js.map +0 -1
- package/dist/test.processor.js +0 -23
- package/dist/test.processor.js.map +0 -1
- package/dist/test.utils.js +0 -25
- package/dist/test.utils.js.map +0 -1
- package/dist/types/integrationsBuilder.types.d.ts +0 -276
- package/dist/types/integrationsBuilder.types.js +0 -51
- package/dist/types/integrationsBuilder.types.js.map +0 -1
- /package/dist/{actions/actions.repo.d.ts → pricing/pricing.repo.d.ts} +0 -0
- /package/dist/{appBuilder/validators/joi-validators/create.app.validator.d.ts → products/validators/joi-validators/create.productGraph.validator.d.ts} +0 -0
- /package/dist/{appBuilder/validators/joi-validators/update.validation.entityData.validator.d.ts → products/validators/joi-validators/update.productGraph.validator.d.ts} +0 -0
- /package/dist/{test.appBuilder.d.ts → test/test.broker.kafka.d.ts} +0 -0
- /package/dist/{test.imports.d.ts → test/test.broker.nats.d.ts} +0 -0
- /package/dist/{test.integrationBuilder.d.ts → test/test.broker.pubsub.d.ts} +0 -0
- /package/dist/{test.processor.d.ts → test/test.broker.rabbitmq.d.ts} +0 -0
- /package/dist/{test.utils.d.ts → test/test.broker.redis.d.ts} +0 -0
- /package/dist/{actions/actions.service.d.ts → test/test.health.d.ts} +0 -0
- /package/dist/{actions/utils/actions.util.read.d.ts → test/test.notifiers.d.ts} +0 -0
- /package/dist/{processor/utils/mongo.util.d.ts → test/test.products.d.ts} +0 -0
- /package/dist/{test.import.d.ts → types/request-tracker.interface.d.ts} +0 -0
@@ -56,38 +56,43 @@ const axios_1 = __importDefault(require("axios"));
 const processorApi_service_1 = require("../../api/services/processorApi.service");
 const expo_client_1 = __importDefault(require("../../clients/expo.client"));
 const handlebars_1 = require("handlebars");
-const functions_utils_1 = require("../../products/utils/functions.utils");
 const string_utils_1 = require("../../products/utils/string.utils");
 const create_productFeature_validator_1 = require("../../products/validators/joi-validators/create.productFeature.validator");
 const validators_1 = require("../../products/validators");
 const uuid_1 = require("uuid");
 const urls_1 = require("../../api/urls");
-//import { createBrokerService } from './messagebrokers';
 const date_fns_1 = require("date-fns");
-const mongo_repo_1 = require("../repos/mongo.repo");
-const postgres_repo_1 = require("../repos/postgres.repo");
 const storage_util_1 = require("../utils/storage.util");
 const sms_repo_1 = require("../repos/sms.repo");
-const
+const pricing_service_1 = __importDefault(require("../../pricing/pricing.service"));
+const request_utils_1 = require("../utils/request.utils");
+const request_service_1 = __importDefault(require("./request.service"));
+const app_service_1 = __importDefault(require("../../apps/services/app.service"));
+const utils_1 = require("../../apps/utils");
+const credential_manager_1 = require("../../apps/utils/credential-manager");
+const oauth_manager_1 = require("../../apps/utils/oauth-manager");
+const secrets_1 = require("../../secrets");
 async function loadBrokerService() {
     if (typeof window === 'undefined') {
-        const {
-        return
+        const { loadBrokerService: loadBroker } = await Promise.resolve().then(() => __importStar(require('../../brokers')));
+        return loadBroker();
     }
     return null;
 }
 async function loadJWT() {
-    if (typeof window === undefined) {
-        const JWT = await Promise.resolve().then(() => __importStar(require(
+    if (typeof window === 'undefined') {
+        const JWT = await Promise.resolve().then(() => __importStar(require('jsonwebtoken')));
         return JWT;
     }
     return null;
 }
 class ProcessorService {
-    constructor({ workspace_id, public_key, user_id, token, env_type, redis_client }) {
+    constructor({ workspace_id, public_key, user_id, token, env_type, private_key, access_key, redis_client, queues }) {
         this.workspace_id = workspace_id;
         this.public_key = public_key;
         this.user_id = user_id;
+        this._privateKey = private_key;
+        this.accessKey = access_key;
         this.token = token;
         this.published = false;
         this.productBuilderService = new products_service_1.default({
@@ -96,7 +101,23 @@ class ProcessorService {
             user_id,
             token,
             env_type,
-            redis_client
+            redis_client,
+        });
+        this.appBuilderService = new app_service_1.default({
+            workspace_id,
+            public_key,
+            user_id,
+            token,
+            env_type,
+            redis_client,
+        });
+        this.pricingService = new pricing_service_1.default({
+            workspace_id,
+            public_key,
+            user_id,
+            token,
+            env_type,
+            redis_client,
         });
         this.inputService = new inputs_service_1.default();
         this.requestTime = 0;
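
For orientation, this is the shape of the constructor options after this release. The field names come from the diff above; the values below are placeholders, not real credentials, and the exact shape of `queues` is not shown in the package, so this is only a hedged sketch:

    // TypeScript sketch of the ProcessorService constructor options; values are illustrative placeholders.
    const options = {
        workspace_id: 'ws_123',
        public_key: 'pk_xxx',
        private_key: 'sk_xxx',              // new in this version
        access_key: 'ak_xxx',               // new in this version
        user_id: 'user_1',
        token: '<jwt>',
        env_type: 'prd',                    // placeholder env label
        redis_client: undefined as unknown, // pass a connected Redis client to enable job state and healthcheck workers
        queues: undefined as unknown,       // shape not shown in the diff; assumption
    };
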
@@ -108,37 +129,135 @@ class ProcessorService {
             skipped: [],
         };
         this.apps = [];
+        this.requestTrackerService = request_service_1.default.getInstance(redis_client);
         this.processorApiService = new processorApi_service_1.ProcessorApiService(env_type);
         this.environment = env_type;
+        // Store redis client for job state management
         if (redis_client) {
             this.redisClient = redis_client;
         }
+        // Start healthcheck workers only if product tag is available
+        if (redis_client && this.productTag) {
+            this.startHealthcheckWorkers();
+        }
+        if (queues) {
+            this.queues = queues;
+        }
+    }
+    /**
+     * Start healthcheck workers for all products/environments after Redis is connected.
+     * This is called automatically in the constructor if redisClient is present.
+     */
+    async startHealthcheckWorkers() {
+        // Fetch all products (or the current product if context is single-tenant)
+        // For demo, we use the current product only
+        await this.productBuilderService.initializeProductByTag(this.productTag);
+        const healthchecks = await this.productBuilderService.fetchProductHealthchecks();
+        const privateKey = this.productBuilderService.fetchPrivateKey();
+        for (const healthcheck of healthchecks) {
+            for (const env of healthcheck.envs) {
+                // Each env gets its own worker (setInterval)
+                const interval = healthcheck.interval || 60000; // default 60s
+                setInterval(async () => {
+                    try {
+                        // Decrypt input for this env
+                        let decryptedInput = env.input;
+                        if (typeof decryptedInput === 'string') {
+                            decryptedInput = JSON.parse((0, processor_utils_1.decrypt)(decryptedInput, privateKey));
+                        }
+                        // Prepare action input
+                        const actionInput = {
+                            env: env.slug,
+                            product: this.productTag,
+                            app: healthcheck.app,
+                            input: decryptedInput,
+                            action: healthcheck.event,
+                            retries: healthcheck.retries || 0,
+                        };
+                        // Process the action
+                        const result = await this.processAction(actionInput);
+                        // Log result (success/failure)
+                        this.logService.add(Object.assign(Object.assign({}, this.baseLogs), { message: `Healthcheck processed for ${healthcheck.tag} in env ${env.slug}`, data: { result }, status: types_1.LogEventStatus.SUCCESS }));
+                    }
+                    catch (e) {
+                        this.logService.add(Object.assign(Object.assign({}, this.baseLogs), { message: `Healthcheck failed for ${healthcheck.tag} in env ${env.slug}`, data: { error: e.toString() }, status: types_1.LogEventStatus.FAIL }));
+                    }
+                }, interval);
+            }
+        }
+    }
+    /**
+     * Manually trigger healthcheck processing for all healthchecks (can be called externally if needed)
+     */
+    async processAllHealthchecksForProduct(productTag) {
+        await this.productBuilderService.initializeProductByTag(productTag);
+        const healthchecks = await this.productBuilderService.fetchProductHealthchecks();
+        const privateKey = this.productBuilderService.fetchPrivateKey();
+        for (const healthcheck of healthchecks) {
+            for (const env of healthcheck.envs) {
+                try {
+                    let decryptedInput = env.input;
+                    if (typeof decryptedInput === 'string') {
+                        decryptedInput = JSON.parse((0, processor_utils_1.decrypt)(decryptedInput, privateKey));
+                    }
+                    const actionInput = {
+                        env: env.slug,
+                        product: productTag,
+                        app: healthcheck.app,
+                        input: decryptedInput,
+                        action: healthcheck.event,
+                        retries: healthcheck.retries || 0,
+                    };
+                    const result = await this.processAction(actionInput);
+                    this.logService.add(Object.assign(Object.assign({}, this.baseLogs), { message: `Manual healthcheck processed for ${healthcheck.tag} in env ${env.slug}`, data: { result }, status: types_1.LogEventStatus.SUCCESS }));
+                }
+                catch (e) {
+                    this.logService.add(Object.assign(Object.assign({}, this.baseLogs), { message: `Manual healthcheck failed for ${healthcheck.tag} in env ${env.slug}`, data: { error: e.toString() }, status: types_1.LogEventStatus.FAIL }));
+                }
+            }
+        }
     }
     async generateSession(payload) {
         try {
             const { product: product_tag, env: slug, tag, data } = payload;
-            const input = await this.inputService.parseJson({
+            const input = (await this.inputService.parseJson({
                 data,
                 expected: types_1.ExpectedValues.PARSEINPUT,
-            });
+            }));
             await this.productBuilderService.initializeProductByTag(product_tag);
-
-            const session = this.productBuilderService.fetchSession(tag);
+            const privateKey = this.productBuilderService.fetchPrivateKey();
+            const session = await this.productBuilderService.fetchSession(tag);
             if (!session) {
                 throw new Error(`Session with tag ${tag} does not exist`);
             }
-            const env = this.productBuilderService.fetchEnv(slug);
+            const env = await this.productBuilderService.fetchEnv(slug);
             if (!env) {
                 throw new Error(`Env with slug ${slug} does not exist`);
             }
             await this.inputService.validateInput(input, session.schema_data);
-            const
+            const stages = (0, string_utils_1.extractStages)(session.selector);
+            // Convert stages to string[] for functions expecting string[]
+            const stringStages = stages.map((stage) => String(stage));
+            const user = (0, processor_utils_1.extractSelectorValue)(data, stringStages, session.selector);
+            const expiry = (0, processor_utils_1.calculateJWTExpiry)(session.expiry, session.period);
+            const end_at = (0, processor_utils_1.calculateExpiry)(session.expiry, session.period);
             const JWT = await loadJWT();
             if (JWT) {
-                const
-                const
+                const session_id = (0, uuid_1.v4)();
+                const token = JWT.sign({ session: payload.tag, env: payload.env, session_id, data }, privateKey, {
+                    expiresIn: expiry,
+                });
+                const refreshToken = (0, processor_utils_1.encrypt)(JSON.stringify(data), privateKey);
                 // WRITE REFRESH TOKEN TO DATABASE... TO INVALIDATE DELETE FROM DATABASE
-
+                const details = {
+                    identifier: user,
+                    start_at: Date.now(),
+                    end_at,
+                    session_tag: tag,
+                    data: (0, processor_utils_1.encrypt)(JSON.stringify(JSON.stringify(data)), privateKey),
+                    session_id,
+                };
+                await this.processorApiService.createSessionInfo(Object.assign({ product_tag, env: slug, refreshToken }, details), this.getUserAccess());
                 return {
                     token,
                     refreshToken,
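
The new startHealthcheckWorkers method above polls each configured healthcheck on its own per-environment timer. A stripped-down sketch of that polling pattern, with the Ductape-specific pieces replaced by placeholder types and a caller-supplied runAction function (names here are illustrative assumptions, not SDK API):

    // TypeScript sketch of the per-env healthcheck polling loop; runAction is a placeholder, not an SDK call.
    type HealthcheckEnv = { slug: string; input: unknown };
    type Healthcheck = { tag: string; app: string; event: string; interval?: number; retries?: number; envs: HealthcheckEnv[] };

    function startWorkers(healthchecks: Healthcheck[], runAction: (input: object) => Promise<unknown>): void {
        for (const hc of healthchecks) {
            for (const env of hc.envs) {
                const interval = hc.interval ?? 60_000; // default 60s, matching the diff
                setInterval(async () => {
                    try {
                        await runAction({ env: env.slug, app: hc.app, action: hc.event, input: env.input, retries: hc.retries ?? 0 });
                    } catch (err) {
                        console.error(`healthcheck ${hc.tag} failed in ${env.slug}`, err);
                    }
                }, interval);
            }
        }
    }
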
@@ -152,12 +271,46 @@ class ProcessorService {
             throw e;
         }
     }
-    async
+    async refreshSession(payload) {
+        await this.productBuilderService.initializeProductByTag(payload.product);
+        const privateKey = this.productBuilderService.fetchPrivateKey();
+        // validate token validity
+        const { refreshToken } = payload, payloadData = __rest(payload, ["refreshToken"]);
+        const valid = await this.processorApiService.validateRefreshToken({ refreshToken, product: payload.product, env: payload.env }, this.getUserAccess());
+        if (valid) {
+            const data = JSON.parse((0, processor_utils_1.decrypt)(refreshToken, privateKey));
+            return await this.generateSession(Object.assign(Object.assign({}, payloadData), { data }));
+        }
+        else {
+            throw new Error(`Invalid refresh token`);
+        }
+    }
+    async decryptSession(data) {
+        await this.productBuilderService.initializeProductByTag(data.product);
+        const privateKey = this.productBuilderService.fetchPrivateKey();
+        const JWT = await loadJWT();
+        if (!JWT) {
+            throw new Error(`Running in browser, token service not loaded.`);
+        }
+        try {
+            const res = (await JWT.verify(data.token, privateKey));
+            if (res.session !== data.tag) {
+                throw new Error(`Invalid token for session ${data.tag}`);
+            }
+            if (res.env !== data.env) {
+                throw new Error(`Invalid session env`);
+            }
+            return res.data;
+        }
+        catch (e) {
+            console.log(e);
+            throw new Error('Invalid/Expired token');
+        }
+    }
     async registerWebhook(data) {
         const { product: product_tag, access_tag, webhook_tag, envs } = data;
         await this.productBuilderService.initializeProductByTag(product_tag);
-        const
-        const { version, envs: appEnvs } = this.productBuilderService.fetchApp(access_tag);
+        const { version, envs: appEnvs } = await this.productBuilderService.fetchApp(access_tag);
         const app = await this.productBuilderService.fetchThirdPartyAppByAccessTag(access_tag);
         const { webhooks } = app.versions.find((data) => data.tag === version);
         if (!webhooks) {
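
In the hunk above, refreshSession validates the refresh token server-side and then replays generateSession with the decrypted payload, while decryptSession verifies the JWT against the product's private key and checks that the session tag and env claims match. A hedged sketch of the same verify-and-check step using the jsonwebtoken package directly (claim field names follow the diff; the helper itself is illustrative, not part of the SDK):

    import jwt from 'jsonwebtoken';

    interface SessionClaims { session: string; env: string; session_id: string; data: unknown }

    // Illustrative only: `privateKey` is whatever key the product signs sessions with.
    function verifySession(token: string, privateKey: string, expected: { tag: string; env: string }): unknown {
        const claims = jwt.verify(token, privateKey) as SessionClaims;
        if (claims.session !== expected.tag) throw new Error(`Invalid token for session ${expected.tag}`);
        if (claims.env !== expected.env) throw new Error('Invalid session env');
        return claims.data;
    }
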
@@ -168,7 +321,7 @@ class ProcessorService {
         if (!webhook) {
             throw new Error(`Webhook tag ${webhook_tag} not found`);
         }
-        const productEnvs = this.productBuilderService.fetchEnvs();
+        const productEnvs = await this.productBuilderService.fetchEnvs();
         productEnvs.map((env) => {
             const exists = envs.findIndex((dbEnv) => dbEnv.slug === env.slug);
             if (exists === -1) {
@@ -212,7 +365,7 @@ class ProcessorService {
            if (replacedUrl && replacedUrl !== env.url && replacedUrl) {
                throw new Error(`Ductape expects the url ${replacedUrl} in request body to match inputted url ${env.url}`);
            }
-           const exists = this.fetchEnv(env.slug, {});
+           const exists = await this.fetchEnv(env.slug, {});
            if (!exists) {
                throw new Error(`Env ${env.slug} does not exist`);
            }
@@ -234,7 +387,7 @@ class ProcessorService {
            webhook_tag,
            version,
            sender_workspace_id: app.workspace_id,
-           receiver_workspace_id:
+           receiver_workspace_id: this.getUserAccess().workspace_id,
            app_tag: app.tag,
            product_tag,
            active,
@@ -245,8 +398,7 @@ class ProcessorService {
     async generateWebhookLink(data) {
         const { product: product_tag, access_tag, webhook_tag, env: product_env, url, method } = data;
         await this.productBuilderService.initializeProductByTag(product_tag);
-        const
-        const { version, envs: appEnvs } = this.productBuilderService.fetchApp(access_tag);
+        const { version, envs: appEnvs } = await this.productBuilderService.fetchApp(access_tag);
         const app = await this.productBuilderService.fetchThirdPartyAppByAccessTag(access_tag);
         const { webhooks } = app.versions.find((data) => data.tag === version);
         if (!webhooks) {
@@ -267,7 +419,7 @@ class ProcessorService {
            webhook_tag,
            version,
            sender_workspace_id: app.workspace_id,
-           receiver_workspace_id:
+           receiver_workspace_id: this.getUserAccess().workspace_id,
            app_tag: app.tag,
            product_tag,
            active: false,
@@ -275,239 +427,6 @@ class ProcessorService {
|
|
|
275
427
|
return await this.webhookApi.generateLink(payload, this.getUserAccess());
|
|
276
428
|
//return res.link;
|
|
277
429
|
}
|
|
278
|
-
async processQuota(data) {
|
|
279
|
-
this.start = Date.now();
|
|
280
|
-
const { product: product_tag, env, input, tag: quota_tag } = data;
|
|
281
|
-
try {
|
|
282
|
-
this.productTag = product_tag;
|
|
283
|
-
const additional_logs = {
|
|
284
|
-
name: 'Process Quota',
|
|
285
|
-
type: types_1.LogEventTypes.QUOTA,
|
|
286
|
-
parent_tag: quota_tag,
|
|
287
|
-
};
|
|
288
|
-
await this.intializeProduct(additional_logs);
|
|
289
|
-
const process_id = this.process_id || (0, processor_utils_1.generateObjectId)();
|
|
290
|
-
this.process_id = process_id;
|
|
291
|
-
this.baseLogs = {
|
|
292
|
-
product_tag: this.productTag,
|
|
293
|
-
product_id: this.productId,
|
|
294
|
-
workspace_id: this.workspace_id,
|
|
295
|
-
env,
|
|
296
|
-
type: types_1.LogEventTypes.QUOTA,
|
|
297
|
-
process_id,
|
|
298
|
-
data: input,
|
|
299
|
-
};
|
|
300
|
-
this.quota = this.fetchQuota(quota_tag, additional_logs);
|
|
301
|
-
if (!this.quota) {
|
|
302
|
-
throw new Error(`Quota ${quota_tag} not found`);
|
|
303
|
-
}
|
|
304
|
-
this.logService.setFeatureId(this.quota._id);
|
|
305
|
-
this.processEnv = this.fetchEnv(env, additional_logs);
|
|
306
|
-
if (!this.processEnv.active) {
|
|
307
|
-
throw new Error(`Environment ${data.env} is not active`);
|
|
308
|
-
}
|
|
309
|
-
const { input: quotaInput, options } = this.quota;
|
|
310
|
-
// validate feature input and log failure
|
|
311
|
-
this.validateJSONFeatureInput(input, quotaInput, additional_logs);
|
|
312
|
-
// split processes
|
|
313
|
-
//this.sequenceLevels = this.splitSequenceIntoLevels(sequence, additional_logs);
|
|
314
|
-
await this.logService.publish();
|
|
315
|
-
return await this.runQuotaOptions(options, input, additional_logs);
|
|
316
|
-
}
|
|
317
|
-
catch (e) {
|
|
318
|
-
this.end = Date.now();
|
|
319
|
-
await this.logService.publish();
|
|
320
|
-
throw e;
|
|
321
|
-
}
|
|
322
|
-
}
|
|
323
|
-
async runQuotaOptions(options, input, additional_logs) {
|
|
324
|
-
try {
|
|
325
|
-
const quotaManager = new quota_service_1.default(options, this.redisClient);
|
|
326
|
-
const getNextProvider = quotaManager.getNextProvider();
|
|
327
|
-
const quotaInput = await this.mapQuotaFallbackInput(getNextProvider.input, input, getNextProvider.app);
|
|
328
|
-
const result = await this.processEvent({
|
|
329
|
-
app: getNextProvider.app,
|
|
330
|
-
type: getNextProvider.type,
|
|
331
|
-
event: getNextProvider.event,
|
|
332
|
-
input: quotaInput,
|
|
333
|
-
retries: getNextProvider.retries,
|
|
334
|
-
allow_fail: false
|
|
335
|
-
});
|
|
336
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Process quota successful', successful_execution: true, data: { result: (0, processor_utils_1.anonymizeObject)(result) }, status: types_1.LogEventStatus.PROCESSING }));
|
|
337
|
-
return result;
|
|
338
|
-
}
|
|
339
|
-
catch (e) {
|
|
340
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Process quota failed', failed_execution: true, data: { e }, status: types_1.LogEventStatus.PROCESSING }));
|
|
341
|
-
throw e;
|
|
342
|
-
}
|
|
343
|
-
}
|
|
344
|
-
async mapQuotaFallbackInput(providerInput, quotaInput, app) {
|
|
345
|
-
if (Array.isArray(providerInput)) {
|
|
346
|
-
return providerInput.map(async (item) => await this.mapQuotaFallbackInput(item, quotaInput, app));
|
|
347
|
-
}
|
|
348
|
-
else if (providerInput && typeof providerInput === 'object') {
|
|
349
|
-
const result = {};
|
|
350
|
-
for (const key in providerInput) {
|
|
351
|
-
result[key] = await this.mapQuotaFallbackInput(providerInput[key], quotaInput, app);
|
|
352
|
-
}
|
|
353
|
-
return result;
|
|
354
|
-
}
|
|
355
|
-
else {
|
|
356
|
-
// Base case: primitive value
|
|
357
|
-
return await this.overrideQuotaFallbackInput(providerInput, quotaInput, app);
|
|
358
|
-
}
|
|
359
|
-
}
|
|
360
|
-
async overrideQuotaFallbackInput(value, quotaInput, app) {
|
|
361
|
-
if (value.startsWith('$Input{')) {
|
|
362
|
-
return quotaInput[(0, string_utils_1.extractStages)(value)[0]];
|
|
363
|
-
}
|
|
364
|
-
else if (value.startsWith('$Auth')) {
|
|
365
|
-
return await this.generateAuthValue((0, string_utils_1.extractStages)(value), app, [], {});
|
|
366
|
-
}
|
|
367
|
-
else if (value.startsWith('$')) {
|
|
368
|
-
this.input = Object.assign(Object.assign({}, this.input), { input: quotaInput });
|
|
369
|
-
return await this.generateOperatorValues(value, '', {});
|
|
370
|
-
}
|
|
371
|
-
else {
|
|
372
|
-
return value;
|
|
373
|
-
}
|
|
374
|
-
}
|
|
375
|
-
async runFallbackOptions(options, input, additional_logs) {
|
|
376
|
-
return await this.executeActionWithFallback(options, input);
|
|
377
|
-
}
|
|
378
|
-
async executeActionWithFallback(providers, input) {
|
|
379
|
-
let index = 0;
|
|
380
|
-
for (const provider of providers) {
|
|
381
|
-
try {
|
|
382
|
-
const payload = {
|
|
383
|
-
app: provider.app,
|
|
384
|
-
type: provider.type,
|
|
385
|
-
event: provider.event,
|
|
386
|
-
input: await this.mapQuotaFallbackInput(provider.input, input, provider.app),
|
|
387
|
-
retries: provider.retries,
|
|
388
|
-
allow_fail: false
|
|
389
|
-
};
|
|
390
|
-
const result = await this.processEvent(payload);
|
|
391
|
-
if (result.status === types_1.LogEventStatus.FAIL) {
|
|
392
|
-
throw new Error(result.errors);
|
|
393
|
-
}
|
|
394
|
-
index++;
|
|
395
|
-
}
|
|
396
|
-
catch (error) {
|
|
397
|
-
if (index > 0) {
|
|
398
|
-
throw error;
|
|
399
|
-
}
|
|
400
|
-
console.warn(`${provider.app || provider.event} failed: ${error}`);
|
|
401
|
-
}
|
|
402
|
-
}
|
|
403
|
-
//throw new Error("All providers failed.");
|
|
404
|
-
}
|
|
405
|
-
async processFallback(data) {
|
|
406
|
-
this.start = Date.now();
|
|
407
|
-
const { product: product_tag, env, input, tag: fallback_tag } = data;
|
|
408
|
-
try {
|
|
409
|
-
this.productTag = product_tag;
|
|
410
|
-
const additional_logs = {
|
|
411
|
-
name: 'Process Fallback',
|
|
412
|
-
type: types_1.LogEventTypes.FALLBACK,
|
|
413
|
-
parent_tag: fallback_tag,
|
|
414
|
-
};
|
|
415
|
-
await this.intializeProduct(additional_logs);
|
|
416
|
-
const process_id = this.process_id || (0, processor_utils_1.generateObjectId)();
|
|
417
|
-
this.process_id = process_id;
|
|
418
|
-
this.baseLogs = {
|
|
419
|
-
product_tag: this.productTag,
|
|
420
|
-
product_id: this.productId,
|
|
421
|
-
workspace_id: this.workspace_id,
|
|
422
|
-
env,
|
|
423
|
-
type: types_1.LogEventTypes.FALLBACK,
|
|
424
|
-
process_id,
|
|
425
|
-
data: input,
|
|
426
|
-
};
|
|
427
|
-
this.fallback = this.fetchFallback(fallback_tag, additional_logs);
|
|
428
|
-
if (!this.fallback) {
|
|
429
|
-
throw new Error(`Fallback "${fallback_tag}" not found`);
|
|
430
|
-
}
|
|
431
|
-
this.logService.setFeatureId(this.fallback._id);
|
|
432
|
-
this.processEnv = this.fetchEnv(env, additional_logs);
|
|
433
|
-
if (!this.processEnv.active) {
|
|
434
|
-
throw new Error(`Environment ${data.env} is not active`);
|
|
435
|
-
}
|
|
436
|
-
const { input: fallbackInput, options } = this.fallback;
|
|
437
|
-
// validate feature input and log failure
|
|
438
|
-
this.validateJSONFeatureInput(input, fallbackInput, additional_logs);
|
|
439
|
-
// split processes
|
|
440
|
-
//this.sequenceLevels = this.splitSequenceIntoLevels(sequence, additional_logs);
|
|
441
|
-
return await this.runFallbackOptions(options, input, additional_logs);
|
|
442
|
-
}
|
|
443
|
-
catch (e) {
|
|
444
|
-
this.end = Date.now();
|
|
445
|
-
await this.logService.publish();
|
|
446
|
-
throw e;
|
|
447
|
-
}
|
|
448
|
-
}
|
|
449
|
-
async processFeature(data, awaitResolution = false) {
|
|
450
|
-
this.start = Date.now();
|
|
451
|
-
this.input = data;
|
|
452
|
-
const { product: product_tag, env, input, tag: feature_tag } = data;
|
|
453
|
-
let additional_logs;
|
|
454
|
-
let passedValidation;
|
|
455
|
-
try {
|
|
456
|
-
this.productTag = product_tag;
|
|
457
|
-
additional_logs = {
|
|
458
|
-
name: 'Process feature',
|
|
459
|
-
type: types_1.LogEventTypes.FEATURE,
|
|
460
|
-
};
|
|
461
|
-
await this.intializeProduct(additional_logs);
|
|
462
|
-
this.component = types_1.LogEventTypes.FEATURE;
|
|
463
|
-
const process_id = this.process_id || (0, processor_utils_1.generateObjectId)();
|
|
464
|
-
this.process_id = process_id;
|
|
465
|
-
this.feature = this.fetchFeature(feature_tag, additional_logs);
|
|
466
|
-
if (!this.feature) {
|
|
467
|
-
throw new Error(`Feature "${feature_tag}" not found`);
|
|
468
|
-
}
|
|
469
|
-
this.baseLogs = {
|
|
470
|
-
product_tag: this.productTag,
|
|
471
|
-
product_id: this.productId,
|
|
472
|
-
workspace_id: this.workspace_id,
|
|
473
|
-
env,
|
|
474
|
-
type: types_1.LogEventTypes.FEATURE,
|
|
475
|
-
process_id,
|
|
476
|
-
data: input,
|
|
477
|
-
feature_tag: feature_tag,
|
|
478
|
-
feature_id: this.feature._id,
|
|
479
|
-
};
|
|
480
|
-
this.logService.setFeatureId(this.feature._id);
|
|
481
|
-
this.processEnv = this.fetchEnv(env, additional_logs);
|
|
482
|
-
if (!this.processEnv.active) {
|
|
483
|
-
throw new Error(`Environment ${data.env} is not active`);
|
|
484
|
-
}
|
|
485
|
-
const { input: featureInput, sequence, output } = this.feature;
|
|
486
|
-
// validate feature input and log failure
|
|
487
|
-
this.validateJSONFeatureInput(input, featureInput, additional_logs);
|
|
488
|
-
// split processes
|
|
489
|
-
this.sequenceLevels = await this.splitSequenceIntoLevels(sequence, additional_logs);
|
|
490
|
-
await this.processSequenceLevels(additional_logs);
|
|
491
|
-
return { process_id };
|
|
492
|
-
//return this.generateOutput(output as unknown as Record<string, IFeatureOutput>);
|
|
493
|
-
}
|
|
494
|
-
catch (e) {
|
|
495
|
-
this.end = Date.now();
|
|
496
|
-
if (this.logService) {
|
|
497
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Process feature - failed', data: Object.assign(Object.assign({}, data), { input: (0, processor_utils_1.anonymizeObject)(data.input) }), status: types_1.LogEventStatus.PROCESSING }));
|
|
498
|
-
await this.logService.publish();
|
|
499
|
-
if (passedValidation) {
|
|
500
|
-
return { process_id: this.process_id };
|
|
501
|
-
}
|
|
502
|
-
else {
|
|
503
|
-
throw e;
|
|
504
|
-
}
|
|
505
|
-
}
|
|
506
|
-
else {
|
|
507
|
-
throw e;
|
|
508
|
-
}
|
|
509
|
-
}
|
|
510
|
-
}
|
|
511
430
|
async intializeProduct(additional_logs) {
|
|
512
431
|
if (!this.logService) {
|
|
513
432
|
this.logService = new logs_service_1.default({
|
|
@@ -526,8 +445,8 @@ class ProcessorService {
            else {
                await this.productBuilderService.initializeProduct(this.productId);
            }
-
-
+           this.productId = this.productBuilderService.fetchProductId();
+           const workspace_id = this.productBuilderService.fetchWorkspaceId();
            if (workspace_id !== this.workspace_id) {
                throw new Error('Access Denied');
            }
@@ -538,42 +457,35 @@ class ProcessorService {
            throw e;
        }
    }
-
-
-
-
-
-
-
-
-
-
-    }
-    fetchQuota(tag, additional_logs) {
-        try {
-            const quota = this.productBuilderService.fetchQuota(tag); // validate feature exists
-            this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch quota - success', data: { tag, quota }, status: types_1.LogEventStatus.SUCCESS }));
-            return quota;
-        }
-        catch (e) {
-            this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch quota - failed', data: { tag, e }, status: types_1.LogEventStatus.FAIL }));
-            throw e;
+    async initializePricing(additional_logs, access_tag) {
+        if (!this.logService) {
+            this.logService = new logs_service_1.default({
+                product_id: this.productId,
+                workspace_id: this.workspace_id,
+                public_key: this.public_key,
+                user_id: this.user_id,
+                token: this.token,
+                env_type: this.environment,
+            });
        }
-    }
-    fetchFallback(tag, additional_logs) {
        try {
-
-            this.
-
+            console.log(`Initializing pricing for access tag: ${access_tag}`);
+            const product_app = await this.productBuilderService.fetchApp(access_tag); // validate app exists
+            console.log(`Found product app: ${JSON.stringify(product_app)}`);
+            const app = await this.productBuilderService.fetchThirdPartyAppByAccessTag(product_app.access_tag);
+            await this.pricingService.initializePricingByTag(product_app.pricing_tag, app._id);
+            const { pricing_tag } = this.pricingService.fetchPricing();
+            this.pricingTag = pricing_tag;
+            this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Pricing initialize - success', data: { pricing_tag: this.pricingTag }, status: types_1.LogEventStatus.SUCCESS }));
        }
        catch (e) {
-            this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: '
+            this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Pricing initialize - failed', data: e, status: types_1.LogEventStatus.FAIL }));
            throw e;
        }
    }
-    fetchEnv(env, additional_logs) {
+    async fetchEnv(env, additional_logs) {
        try {
-            const product_env = this.productBuilderService.fetchEnv(env); // validate env exists
+            const product_env = await this.productBuilderService.fetchEnv(env); // validate env exists
            if (!product_env) {
                throw new Error(`Env ${env} not found`);
            }
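
The new initializePricing method above resolves the product's app entry by access tag, then the underlying third-party app record, and only then loads pricing by the app's pricing_tag. A hedged sketch of that call order with the two services reduced to minimal interfaces (the interfaces and helper are illustrative, not the SDK's own types):

    // TypeScript sketch of the pricing initialization order shown in the diff; service shapes are assumptions.
    interface ProductBuilderLike {
        fetchApp(accessTag: string): Promise<{ access_tag: string; pricing_tag: string }>;
        fetchThirdPartyAppByAccessTag(accessTag: string): Promise<{ _id: string }>;
    }
    interface PricingLike {
        initializePricingByTag(pricingTag: string, appId: string): Promise<void>;
        fetchPricing(): { pricing_tag: string };
    }

    async function initPricingForApp(builder: ProductBuilderLike, pricing: PricingLike, accessTag: string): Promise<string> {
        const productApp = await builder.fetchApp(accessTag);                                 // the product's app entry
        const app = await builder.fetchThirdPartyAppByAccessTag(productApp.access_tag);       // the third-party app record
        await pricing.initializePricingByTag(productApp.pricing_tag, app._id);                // load pricing for that app
        return pricing.fetchPricing().pricing_tag;                                            // the active pricing tag
    }
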
@@ -585,271 +497,6 @@ class ProcessorService {
|
|
|
585
497
|
throw e;
|
|
586
498
|
}
|
|
587
499
|
}
|
|
588
|
-
validateJSONFeatureInput(input, feature_input, additional_logs) {
|
|
589
|
-
try {
|
|
590
|
-
(0, processor_utils_1.validateFeatureJSONInput)(input, feature_input);
|
|
591
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Input validation - success', data: {}, status: types_1.LogEventStatus.SUCCESS }));
|
|
592
|
-
}
|
|
593
|
-
catch (e) {
|
|
594
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Input validation - failed', data: { e }, status: types_1.LogEventStatus.FAIL }));
|
|
595
|
-
throw e;
|
|
596
|
-
}
|
|
597
|
-
}
|
|
598
|
-
async splitSequenceIntoLevels(data, additional_logs) {
|
|
599
|
-
try {
|
|
600
|
-
const levels = {};
|
|
601
|
-
const tagMap = new Map(data.map((seq) => [seq.tag, seq]));
|
|
602
|
-
const assignedLevels = new Map();
|
|
603
|
-
let currentLevel = 1;
|
|
604
|
-
let remainingSequences = [...data];
|
|
605
|
-
while (remainingSequences.length > 0) {
|
|
606
|
-
const currentLevelSequences = [];
|
|
607
|
-
remainingSequences = remainingSequences.filter((seq) => {
|
|
608
|
-
var _a;
|
|
609
|
-
const parentLevels = ((_a = seq.parents) === null || _a === void 0 ? void 0 : _a.map((parent) => { var _a; return (_a = assignedLevels.get(parent)) !== null && _a !== void 0 ? _a : -1; })) || [];
|
|
610
|
-
const isCurrentLevel = parentLevels.length === 0 || Math.max(...parentLevels) === currentLevel - 1;
|
|
611
|
-
if (isCurrentLevel) {
|
|
612
|
-
currentLevelSequences.push(seq);
|
|
613
|
-
assignedLevels.set(seq.tag, currentLevel);
|
|
614
|
-
return false; // Remove from remainingSequences
|
|
615
|
-
}
|
|
616
|
-
return true;
|
|
617
|
-
});
|
|
618
|
-
if (currentLevelSequences.length > 0) {
|
|
619
|
-
levels[currentLevel] = currentLevelSequences;
|
|
620
|
-
currentLevel++;
|
|
621
|
-
}
|
|
622
|
-
else {
|
|
623
|
-
break; // Prevent infinite loop if there's a cycle
|
|
624
|
-
}
|
|
625
|
-
}
|
|
626
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Split sequence - success', data: { levels }, status: types_1.LogEventStatus.SUCCESS }));
|
|
627
|
-
return levels;
|
|
628
|
-
}
|
|
629
|
-
catch (e) {
|
|
630
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Split sequence - failed', data: { e }, status: types_1.LogEventStatus.FAIL }));
|
|
631
|
-
throw e;
|
|
632
|
-
}
|
|
633
|
-
}
|
|
634
|
-
async processSequenceLevels(additional_logs) {
|
|
635
|
-
try {
|
|
636
|
-
const levelEvents = {};
|
|
637
|
-
Object.entries(this.sequenceLevels).forEach(([level, sequences]) => {
|
|
638
|
-
levelEvents[parseInt(level)] = this.fetchLevelEvents(sequences, parseInt(level));
|
|
639
|
-
});
|
|
640
|
-
let previousLevelComplete = true;
|
|
641
|
-
for (const level of Object.keys(levelEvents)
|
|
642
|
-
.map(Number)
|
|
643
|
-
.sort((a, b) => a - b)) {
|
|
644
|
-
if (previousLevelComplete) {
|
|
645
|
-
previousLevelComplete = await this.processLevelEvents(levelEvents[level], additional_logs);
|
|
646
|
-
}
|
|
647
|
-
else {
|
|
648
|
-
break;
|
|
649
|
-
}
|
|
650
|
-
}
|
|
651
|
-
this.doneWithProcessing = true;
|
|
652
|
-
if (previousLevelComplete && !this.published) {
|
|
653
|
-
let message;
|
|
654
|
-
let status;
|
|
655
|
-
let successful_feature_execution;
|
|
656
|
-
let failed_feature_execution;
|
|
657
|
-
if (this.processingOutput.failure.length === 0) {
|
|
658
|
-
message = 'Process feature - success';
|
|
659
|
-
status = types_1.LogEventStatus.SUCCESS;
|
|
660
|
-
successful_feature_execution = true;
|
|
661
|
-
}
|
|
662
|
-
else if (this.processingFailure) {
|
|
663
|
-
message = 'Process feature - processing';
|
|
664
|
-
status = types_1.LogEventStatus.PROCESSING;
|
|
665
|
-
successful_feature_execution = false;
|
|
666
|
-
failed_feature_execution = false;
|
|
667
|
-
}
|
|
668
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { successful_feature_execution,
|
|
669
|
-
failed_feature_execution,
|
|
670
|
-
message, data: {}, status }));
|
|
671
|
-
await this.logService.publish();
|
|
672
|
-
this.end = Date.now();
|
|
673
|
-
await this.writeResult(status);
|
|
674
|
-
}
|
|
675
|
-
return true;
|
|
676
|
-
}
|
|
677
|
-
catch (e) {
|
|
678
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Process sequence levels - failed', data: { e }, status: types_1.LogEventStatus.FAIL }));
|
|
679
|
-
throw e;
|
|
680
|
-
}
|
|
681
|
-
}
|
|
682
|
-
async processLevelEvents(events, additional_logs) {
|
|
683
|
-
try {
|
|
684
|
-
const promises = events.map((event) => {
|
|
685
|
-
const dependants = this.fetchActionRequestDependents(event.input, additional_logs);
|
|
686
|
-
const passed = this.checkDependentsSuccess(dependants);
|
|
687
|
-
if (passed) {
|
|
688
|
-
// TODO: comparison to see if all depending events are in success || dependants is empty
|
|
689
|
-
return this.processEvent(event);
|
|
690
|
-
}
|
|
691
|
-
else {
|
|
692
|
-
this.addToWaitingOutput(event, dependants);
|
|
693
|
-
}
|
|
694
|
-
});
|
|
695
|
-
return Promise.all(promises);
|
|
696
|
-
}
|
|
697
|
-
catch (e) {
|
|
698
|
-
throw e;
|
|
699
|
-
}
|
|
700
|
-
}
|
|
701
|
-
async processFailedEvents(additional_logs) {
|
|
702
|
-
try {
|
|
703
|
-
const { failure } = this.processingOutput;
|
|
704
|
-
const promises = failure.map((failed) => {
|
|
705
|
-
if (failed.retries_left > 0 && new Date().getTime() > failed.retry_at) {
|
|
706
|
-
return this.processEvent(failed.event); // process events should also take care of this.processingOutput
|
|
707
|
-
}
|
|
708
|
-
if (failed.retries_left === 0 && !failed.allow_fail) {
|
|
709
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Reprocess failed events - failed', data: Object.assign(Object.assign({}, failed), { reason: 'Ran out of Retries' }), status: types_1.LogEventStatus.FAIL }));
|
|
710
|
-
throw new Error(`Event ${failed.event.event} failed in sequence ${failed.event.sequence_tag}, ran out of retries and the feature cannot run without it succeeding`);
|
|
711
|
-
}
|
|
712
|
-
});
|
|
713
|
-
Promise.all(promises);
|
|
714
|
-
}
|
|
715
|
-
catch (e) {
|
|
716
|
-
throw e;
|
|
717
|
-
}
|
|
718
|
-
}
|
|
719
|
-
async processWaitingEvents(additional_logs) {
|
|
720
|
-
try {
|
|
721
|
-
const { waiting } = this.processingOutput;
|
|
722
|
-
const promises = waiting.map((waiting) => {
|
|
723
|
-
const { dependants } = waiting;
|
|
724
|
-
if (this.checkDependentsSuccess(dependants)) {
|
|
725
|
-
// TODO: comparison to see if all depending events are in success || dependants is empty
|
|
726
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Reprocess waiting events - initiated', data: Object.assign({}, waiting), status: types_1.LogEventStatus.PROCESSING }));
|
|
727
|
-
return this.processEvent(waiting.event);
|
|
728
|
-
}
|
|
729
|
-
else {
|
|
730
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Reprocess waiting events - waiting', data: Object.assign({}, waiting), status: types_1.LogEventStatus.WAITING }));
|
|
731
|
-
}
|
|
732
|
-
});
|
|
733
|
-
return Promise.all(promises);
|
|
734
|
-
}
|
|
735
|
-
catch (e) {
|
|
736
|
-
throw e;
|
|
737
|
-
}
|
|
738
|
-
}
|
|
739
|
-
checkDependentsSuccess(dependants) {
|
|
740
|
-
let pass = true;
|
|
741
|
-
for (let i = 0; i < dependants.length; i++) {
|
|
742
|
-
if (!this.processingOutput.success.find((item) => item.event.sequence_tag === dependants[i].sequence_tag && item.event.event === dependants[i].event_tag)) {
|
|
743
|
-
pass = false;
|
|
744
|
-
}
|
|
745
|
-
}
|
|
746
|
-
return pass;
|
|
747
|
-
}
|
|
748
|
-
fetchActionRequestDependents(input, additional_logs) {
|
|
749
|
-
try {
|
|
750
|
-
const dependents = [];
|
|
751
|
-
if (input.query) {
|
|
752
|
-
dependents.push(...this.fetchDependents(input.query, additional_logs));
|
|
753
|
-
}
|
|
754
|
-
if (input.body) {
|
|
755
|
-
dependents.push(...this.fetchDependents(input.body, additional_logs));
|
|
756
|
-
}
|
|
757
|
-
if (input.headers) {
|
|
758
|
-
dependents.push(...this.fetchDependents(input.headers, additional_logs));
|
|
759
|
-
}
|
|
760
|
-
if (input.params) {
|
|
761
|
-
dependents.push(...this.fetchDependents(input.params, additional_logs));
|
|
762
|
-
}
|
|
763
|
-
if (input.data) {
|
|
764
|
-
dependents.push(...this.fetchDependents(input.data, additional_logs));
|
|
765
|
-
}
|
|
766
|
-
if (input.fileName) {
|
|
767
|
-
dependents.push(...this.valueStringDepsCheck(input.fileName));
|
|
768
|
-
}
|
|
769
|
-
if (input.buffer) {
|
|
770
|
-
dependents.push(...this.valueStringDepsCheck(input.buffer));
|
|
771
|
-
}
|
|
772
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch request dependencies - success', data: { input: (0, processor_utils_1.anonymizeObject)(input), dependents }, status: types_1.LogEventStatus.SUCCESS }));
|
|
773
|
-
return dependents;
|
|
774
|
-
}
|
|
775
|
-
catch (e) {
|
|
776
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch request dependents - failed', data: { e }, status: types_1.LogEventStatus.FAIL }));
|
|
777
|
-
}
|
|
778
|
-
}
|
|
779
|
-
fetchDependents(obj, additional_logs) {
|
|
780
|
-
try {
|
|
781
|
-
const dependants = [];
|
|
782
|
-
for (const key in obj) {
|
|
783
|
-
const value = obj[key];
|
|
784
|
-
if (typeof value === 'object') {
|
|
785
|
-
if ('function' in value && 'values' in value) {
|
|
786
|
-
const { function: func, values } = value;
|
|
787
|
-
for (let i = 0; i < values.length; i++) {
|
|
788
|
-
if (values[i].startsWith('$Sequence')) {
|
|
789
|
-
const stages = this.productBuilderService.extractStages(values[i]);
|
|
790
|
-
dependants.push({
|
|
791
|
-
sequence_tag: stages[0],
|
|
792
|
-
event_tag: stages[1],
|
|
793
|
-
});
|
|
794
|
-
}
|
|
795
|
-
else if (values[i].startsWith('$')) {
|
|
796
|
-
const funcArgs = (0, functions_utils_1.extractFunctionAndArgs)(values[i]);
|
|
797
|
-
if (funcArgs.args.length) {
|
|
798
|
-
funcArgs.args.map((arg) => {
|
|
799
|
-
if (arg.startsWith('$Sequence')) {
|
|
800
|
-
const stages = this.productBuilderService.extractStages(arg);
|
|
801
|
-
dependants.push({
|
|
802
|
-
sequence_tag: stages[0],
|
|
803
|
-
event_tag: stages[1],
|
|
804
|
-
});
|
|
805
|
-
}
|
|
806
|
-
});
|
|
807
|
-
}
|
|
808
|
-
}
|
|
809
|
-
}
|
|
810
|
-
}
|
|
811
|
-
else {
|
|
812
|
-
dependants.push(...this.fetchDependents(value, additional_logs));
|
|
813
|
-
}
|
|
814
|
-
}
|
|
815
|
-
else if (typeof value === 'string') {
|
|
816
|
-
dependants.push(...this.valueStringDepsCheck(value.trim()));
|
|
817
|
-
}
|
|
818
|
-
}
|
|
819
|
-
return dependants;
|
|
820
|
-
}
|
|
821
|
-
catch (e) {
|
|
822
|
-
throw e;
|
|
823
|
-
}
|
|
824
|
-
}
|
|
825
|
-
valueStringDepsCheck(value) {
|
|
826
|
-
const dependants = [];
|
|
827
|
-
if (value.startsWith('$Sequence')) {
|
|
828
|
-
const stages = this.productBuilderService.extractStages(value);
|
|
829
|
-
dependants.push({ sequence_tag: stages[0], event_tag: stages[1] });
|
|
830
|
-
}
|
|
831
|
-
else if (value.startsWith('$')) {
|
|
832
|
-
const funcArgs = (0, functions_utils_1.extractFunctionAndArgs)(value);
|
|
833
|
-
if (funcArgs && funcArgs.args.length) {
|
|
834
|
-
funcArgs.args.map((arg) => {
|
|
835
|
-
if (arg.startsWith('$Sequence')) {
|
|
836
|
-
const stages = this.productBuilderService.extractStages(arg);
|
|
837
|
-
dependants.push({
|
|
838
|
-
sequence_tag: stages[0],
|
|
839
|
-
event_tag: stages[1],
|
|
840
|
-
});
|
|
841
|
-
}
|
|
842
|
-
else {
|
|
843
|
-
const args = arg.split(',');
|
|
844
|
-
args.map((arg) => {
|
|
845
|
-
dependants.push(...this.valueStringDepsCheck(arg.trim()));
|
|
846
|
-
});
|
|
847
|
-
}
|
|
848
|
-
});
|
|
849
|
-
}
|
|
850
|
-
}
|
|
851
|
-
return dependants;
|
|
852
|
-
}
|
|
853
500
|
async constructJSONDataPayloads(object, additional_logs, samples, event, loopIndex = 0) {
|
|
854
501
|
try {
|
|
855
502
|
const payload = {};
|
|
@@ -909,6 +556,7 @@ class ProcessorService {
    async generatePayload(obj, event, additional_logs, sample = [], index = {}, loopIndex = null) {
        try {
            const payload = {};
+            console.log('Payload Construction', { obj, event, sample, index, loopIndex });
            const keys = Object.keys(obj);
            for (let i = 0; i < keys.length; i++) {
                const key = keys[i];
@@ -954,22 +602,32 @@ class ProcessorService {
        const locatorFor$Index = (0, string_utils_1.validateAndLocateTag)(value);
        if (value.startsWith('$Auth{') && value.endsWith('}')) {
            // should only be allowed in apps
-
+            // Convert stages to string[] for functions expecting string[]
+            const stringStages = stages.map((stage) => String(stage));
+            return await this.generateAuthValue(stringStages, app, sample, additional_logs);
        }
        else if (value.startsWith('$Sequence{') && value.endsWith('}')) {
-
+            // Convert stages to string[] for functions expecting string[]
+            const stringStages = stages.map((stage) => String(stage));
+            return await this.generateSequenceValue(stringStages, locatorFor$Index, loopIndex); // pass
        }
        else if (value.startsWith('$Input{') && value.endsWith('}')) {
-
+            // Convert stages to string[] for functions expecting string[]
+            const stringStages = stages.map((stage) => String(stage));
+            return await this.generateInputValue(this.input.input, stringStages);
        }
        else if (value === '$Default') {
            return await this.generateDefaultValue(sample, Object.assign(Object.assign({}, index), { key }));
        }
        else if (value.startsWith('$Variable{') && value.endsWith('}')) {
-
+            // Convert stages to string[] for functions expecting string[]
+            const stringStages = stages.map((stage) => String(stage));
+            return await this.generateVariableValue(stringStages);
        }
        else if (value.startsWith('$Constant{') && value.endsWith('}')) {
-
+            // Convert stages to string[] for functions expecting string[]
+            const stringStages = stages.map((stage) => String(stage));
+            return await this.generateConstantValue(stringStages);
        }
        else if (value.startsWith('$Size{') || value.startsWith('$Length{')) {
            const { matchLength, matchSize } = (0, string_utils_1.checkLengthAndSizeMatches)(value);
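
The hunk above shows generatePayload dispatching on the placeholder prefix ($Auth{}, $Sequence{}, $Input{}, $Variable{}, $Constant{}) and, in this release, coercing the extracted stages to string[] before calling the value generators. A small hedged sketch of that dispatch shape, with the stage extractor and resolvers supplied by the caller (all names here are placeholders, not SDK internals):

    // Illustrative placeholder dispatch; extractStages and the resolver map are assumptions for the sketch.
    type Resolver = (stages: string[]) => Promise<unknown>;

    async function resolvePlaceholder(
        value: string,
        extractStages: (v: string) => unknown[],
        resolvers: Record<string, Resolver>,
    ): Promise<unknown> {
        const stages = extractStages(value).map(String); // coerce to string[], as the diff now does
        for (const prefix of ['$Auth{', '$Sequence{', '$Input{', '$Variable{', '$Constant{']) {
            if (value.startsWith(prefix) && value.endsWith('}')) {
                return resolvers[prefix](stages);
            }
        }
        return value; // plain literal, passed through unchanged
    }
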
@@ -1094,6 +752,7 @@ class ProcessorService {
            const placeholdersStr = match[1];
            const separator = match[2];
            const placeHolders = placeholdersStr.split(',').map((data) => data.trim());
+            console.log('placeHolders', { placeHolders, separator });
            const values = await Promise.all(placeHolders.map(async (holder) => {
                return await this.generateStringValues(holder, app, additional_logs, sample, index, key, loopIndex);
            }));
@@ -1366,7 +1025,7 @@ class ProcessorService {
    }
    async generateVariableValue(stages) {
        try {
-            const app = this.productBuilderService.fetchApp(stages[0]);
+            const app = await this.productBuilderService.fetchApp(stages[0]);
            const env = app.envs.find((items) => items.product_env_slug === this.processEnv.slug);
            if (!env) {
                throw new Error(`App ${stages[0]} variables needs to have a definition for env: ${this.processEnv.slug}`);
@@ -1412,6 +1071,7 @@ class ProcessorService {
    }
    async generateAuthValue(stages, app, sample, additional_logs) {
        try {
+            console.log('Generate Auth Data', { stages, app, sample });
            let auth_data = await this.fetchAuthData(app, additional_logs); //TODO: should use stages[0]
            // take the app tag in index 0..
            if (!auth_data) {
@@ -1427,7 +1087,7 @@ class ProcessorService {
    }
    async fetchAuthData(app_tag, additional_logs) {
        try {
-            const app = this.productBuilderService.fetchApp(app_tag);
+            const app = await this.productBuilderService.fetchApp(app_tag);
            if (!app) {
                throw new Error(`App ${app_tag} not found in $Auth value`);
            }
@@ -1438,6 +1098,7 @@ class ProcessorService {
            if (!env.auth) {
                throw new Error(`App ${app_tag} in auth needs to have a definition for auth in env: ${this.processEnv.slug}`);
            }
+            console.log('Envroment', env.auth);
            let values = env.auth.values;
            if (!values) {
                // no auth values
@@ -1445,9 +1106,10 @@ class ProcessorService {
            }
            if (!env.auth.expiry || (env.auth.expiry && Date.now() > new Date(env.auth.expiry).getTime())) {
                // refresh
+                console.log('REFRESH DATA', env, app_tag);
                values = await this.getAndStoreAuth(env, app_tag);
            }
-            const decrypted = (0, processor_utils_1.decrypt)(values, this.productBuilderService.
+            const decrypted = (0, processor_utils_1.decrypt)(values, this.productBuilderService.fetchPrivateKey());
            this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch auth data - success', data: { auth: (0, processor_utils_1.anonymizeValue)(decrypted) }, status: types_1.LogEventStatus.SUCCESS }));
            return JSON.parse(decrypted);
        }
@@ -1499,16 +1161,6 @@ class ProcessorService {
            };
            return this.runAction(event, additional_logs);
        }
-        if (event.type === types_1.FeatureEventTypes.DB_ACTION) {
-            const [parent_tag, child_tag] = event.event.split(':');
-            const additional_logs = {
-                parent_tag,
-                child_tag,
-                type: types_1.LogEventTypes.DB_ACTION,
-                name: 'Process feature database action',
-            };
-            return this.runDBAction(event, additional_logs);
-        }
        if (event.type === types_1.FeatureEventTypes.STORAGE) {
            this.clone = (0, processor_utils_1.structuredClone)(event);
            (0, processor_utils_1.cleanBlob)(this.clone);
@@ -1519,14 +1171,6 @@ class ProcessorService {
            };
            return this.runStorage(event, additional_logs);
        }
-        if (event.type === types_1.FeatureEventTypes.FEATURE) {
-            const additional_logs = {
-                parent_tag: event.event,
-                type: types_1.LogEventTypes.FEATURE,
-                name: 'Process sub-feature',
-            };
-            return this.runFeature(event, additional_logs);
-        }
        if (event.type === types_1.FeatureEventTypes.NOTIFICATION) {
            const [parent_tag, child_tag] = event.event.split(':');
            const additional_logs = {
@@ -1548,8 +1192,7 @@ class ProcessorService {
            try {
                return this.runBrokerPublish(event, additional_logs);
            }
-            catch (e) {
-            }
+            catch (e) { }
        }
        if (event.type === types_1.FeatureEventTypes.JOB) {
            const additional_logs = {
@@ -1557,193 +1200,592 @@ class ProcessorService {
|
|
|
1557
1200
|
parent_tag: event.event,
|
|
1558
1201
|
name: 'Process feature job',
|
|
1559
1202
|
};
|
|
1560
|
-
|
|
1203
|
+
const input = {
|
|
1204
|
+
env: event.env.slug,
|
|
1205
|
+
product: this.productTag,
|
|
1206
|
+
event: event.event,
|
|
1207
|
+
retries: event.retries,
|
|
1208
|
+
input: event.input,
|
|
1209
|
+
start_at: event.start_at || 0,
|
|
1210
|
+
cache: event.cache,
|
|
1211
|
+
session: this.input.session,
|
|
1212
|
+
};
|
|
1213
|
+
// TODO: fix this
|
|
1214
|
+
return this.processJob(input, additional_logs);
|
|
1561
1215
|
}
|
|
1562
1216
|
}
|
|
1563
1217
|
catch (e) {
|
|
1564
1218
|
throw e;
|
|
1565
1219
|
}
|
|
1566
1220
|
}
|
|
1567
|
-
async
|
|
1568
|
-
|
|
1569
|
-
const
|
|
1570
|
-
|
|
1571
|
-
|
|
1572
|
-
|
|
1573
|
-
|
|
1574
|
-
}, true);
|
|
1575
|
-
const output = await freshInstance.generateOutput(result.process_id);
|
|
1576
|
-
if (output.status === types_1.LogEventStatus.FAIL) {
|
|
1577
|
-
await this.addToFailureOutput(output.errors, event, { process_id: result.process_id }, Object.assign(Object.assign({}, additional_logs), { process_id: result.process_id }));
|
|
1578
|
-
}
|
|
1579
|
-
else if (output.status === types_1.LogEventStatus.SUCCESS) {
|
|
1580
|
-
await this.addToSuccessOutput(event, output.data, Object.assign(Object.assign({}, additional_logs), { process_id: result.process_id }));
|
|
1581
|
-
}
|
|
1582
|
-
return output;
|
|
1583
|
-
}
|
|
1584
|
-
runJob(event, additional_logs) {
|
|
1585
|
-
throw new Error('Method not implemented.');
|
|
1586
|
-
}
|
|
1587
|
-
async processFailedAndWaiting() { }
|
|
1588
|
-
async fetchResult(process_id) {
|
|
1589
|
-
const result = await this.processorApiService.fetchResult(process_id, this.getUserAccess());
|
|
1590
|
-
if (!result) {
|
|
1591
|
-
throw new Error(`Invalid process id ${process_id}`);
|
|
1221
|
+
async runJobs(job, additional_logs = {}) {
|
|
1222
|
+
var _a;
|
|
1223
|
+
const jobId = (_a = job.data) === null || _a === void 0 ? void 0 : _a._job_id;
|
|
1224
|
+
const jobType = job.name;
|
|
1225
|
+
// Update job status to running
|
|
1226
|
+
if (jobId && this.redisClient) {
|
|
1227
|
+
await this.updateJobStatus(jobId, 'running', { started_at: Date.now() });
|
|
1592
1228
|
}
|
|
1593
|
-
|
|
1594
|
-
|
|
1595
|
-
|
|
1596
|
-
|
|
1597
|
-
}
|
|
1598
|
-
  async generateOutput(process_id) {
- var _a, _b, _c, _d;
- const result = await this.fetchResult(process_id);
- if (result.component === types_1.LogEventTypes.FEATURE) {
- const additional_logs = {
- parent_tag: result.input.tag,
- type: types_1.LogEventTypes.FEATURE,
- name: 'Fetching Process Result',
- };
- if (result.status === types_1.LogEventStatus.PROCESSING) {
- return { process_id, status: result.status };
+ try {
+ let result;
+ if (jobType === types_1.JobEventTypes.ACTION || jobType === types_1.FeatureEventTypes.ACTION) {
+ result = await this.processAction(job.data);
  }
- else if (
-
- await this.intializeProduct(additional_logs);
- this.processingOutput = result.result;
- this.process_id = process_id;
- this.input = result.input;
- this.feature = await this.fetchFeature(result.input.tag, additional_logs);
- const { input: featureInput, sequence, output } = this.feature;
- const data = await this.generatePayload(output, null, additional_logs, []);
- return { process_id, status: result.status, data };
+ else if (jobType === types_1.JobEventTypes.NOTIFICATION) {
+ result = await this.processNotification(job.data);
  }
- else if (
-
- return { process_id, status: result.status, errors };
+ else if (jobType === types_1.JobEventTypes.STORAGE) {
+ result = await this.processStorage(job.data);
  }
-
-
-
-
+ else if (jobType === types_1.JobEventTypes.PUBLISH) {
+ result = await this.processMessageBrokerPublish(job.data);
+ }
+ else if (jobType === types_1.JobEventTypes.DATABASE_ACTION) {
+ // Database actions use processAction with database-specific input
+ result = await this.processDatabaseAction(job.data);
+ }
+ else if (jobType === types_1.JobEventTypes.DATABASE_OPERATION) {
+ // Handle database operations
+ result = await this.processDatabaseOperation(job.data);
  }
- else if (
-
+ else if (jobType === types_1.JobEventTypes.GRAPH_ACTION) {
+ result = await this.processGraphAction(job.data);
+ }
+ else if (jobType === types_1.JobEventTypes.GRAPH_OPERATION) {
+ result = await this.processGraphOperation(job.data);
+ }
+ else if (jobType === types_1.JobEventTypes.WORKFLOW) {
+ result = await this.processWorkflow(job.data);
  }
  else {
-
+ throw new Error(`Unknown job type: ${jobType}`);
  }
-
-
-
-
-
-
- }
- const additional_logs = {
- parent_tag: result.input.tag,
- type: result.component,
- name: 'Resume Process',
- };
- this.productTag = result.input.product;
- await this.intializeProduct(additional_logs);
- this.processingOutput = result.result;
- this.process_id = process_id;
- await this.processFailedEvents(additional_logs);
- await this.processWaitingEvents(additional_logs);
- this.input = result.input;
- this.start = Date.now();
- if (result.component === types_1.LogEventTypes.FEATURE) {
- this.feature = await this.fetchFeature(result.input.tag, additional_logs);
- const { input: featureInput, sequence, output } = this.feature;
- this.processEnv = this.fetchEnv(result.env, additional_logs);
- if (!this.processEnv.active) {
- throw new Error(`Environment ${result.env} is not active`);
- }
- // validate feature input and log failure
- this.validateJSONFeatureInput(result.input.input, featureInput, additional_logs);
- // split processes
- this.sequenceLevels = await this.splitSequenceIntoLevels(sequence, additional_logs);
- await this.processSequenceLevels(additional_logs);
- }
- else {
- this.end = Date.now();
- let status = types_1.LogEventStatus.SUCCESS;
- if (this.processingOutput.failure.length > 0) {
- status = types_1.LogEventStatus.FAIL;
+ // Update job status to completed
+ if (jobId && this.redisClient) {
+ await this.updateJobStatus(jobId, 'completed', {
+ completed_at: Date.now(),
+ result,
+ });
  }
-
- }
- return { process_id };
- }
- async replayProcess(process_id) {
- var _a, _b, _c;
- const result = await this.fetchResult(process_id);
- if (!result) {
- throw new Error(`Invalid process id ${process_id}`);
- }
- this.productTag = result.input.product;
- this.process_id = process_id;
- this.input = result.input;
- this.start = Date.now();
- this.component = result.component;
- const additional_logs = {
- parent_tag: result.input.tag,
- type: result.component,
- name: 'Replay Process',
- };
- await this.intializeProduct(additional_logs);
- //await this.processFailedEvents(additional_logs);
- if (result.component === types_1.LogEventTypes.FEATURE) {
- //await this.processWaitingEvents(additional_logs);
- this.feature = await this.fetchFeature(result.input.tag, additional_logs);
- const { input: featureInput, sequence, output } = this.feature;
- this.processEnv = this.fetchEnv(result.env, additional_logs);
- if (!this.processEnv.active) {
- throw new Error(`Environment ${result.env} is not active`);
- }
- // validate feature input and log failure
- this.validateJSONFeatureInput(result.input.input, featureInput, additional_logs);
- // split processes
- this.sequenceLevels = await this.splitSequenceIntoLevels(sequence, additional_logs);
- await this.processSequenceLevels(additional_logs);
+ return result;
  }
-
-
-
-
-
-
-
-
-
-
-
+ catch (error) {
+ // Update job status to failed or schedule retry
+ if (jobId && this.redisClient) {
+ const jobData = await this.getJobData(jobId);
+ if (jobData) {
+ const { shouldRetry, delay } = this.calculateJobRetry(jobData, error.code);
+ if (shouldRetry) {
+ await this.updateJobStatus(jobId, 'scheduled', {
+ retry_count: (jobData.retry_count || 0) + 1,
+ last_error: error.message,
+ last_error_code: error.code,
+ scheduled_at: Date.now() + delay,
+ });
+ // Re-queue the job with delay
+ await this.queues.jobs.add(jobType, job.data, {
+ jobId: `${jobId}_retry_${jobData.retry_count + 1}`,
+ delay,
+ });
+ }
+ else {
+ await this.updateJobStatus(jobId, 'failed', {
+ completed_at: Date.now(),
+ last_error: error.message,
+ last_error_code: error.code,
+ });
+ }
+ // Add to execution history
+ await this.addJobExecution(jobId, {
+ number: (jobData.execution_count || 0) + 1,
+ started_at: jobData.started_at || Date.now(),
+ completed_at: Date.now(),
+ duration_ms: Date.now() - (jobData.started_at || Date.now()),
+ status: 'failed',
+ error: error.message,
+ error_code: error.code,
+ });
+ }
+ }
+ throw error;
  }
- return { process_id };
  }
-
-
-
-
-
-
+ /**
+ * Get job data from Redis
+ */
+ async getJobData(jobId) {
+ if (!this.redisClient)
+ return null;
+ const redis = this.redisClient;
+ const jobKey = `job:${this.workspace_id}:${jobId}`;
+ const data = await redis.get(jobKey);
+ if (!data)
+ return null;
+ return JSON.parse(data);
+ }
+ /**
+ * Update job status in Redis
+ */
+ async updateJobStatus(jobId, status, updates = {}) {
+ if (!this.redisClient)
+ return;
+ const redis = this.redisClient;
+ const jobKey = `job:${this.workspace_id}:${jobId}`;
+ const data = await redis.get(jobKey);
+ if (!data)
+ return;
+ const jobData = JSON.parse(data);
+ const oldStatus = jobData.status;
+ const updatedJob = Object.assign(Object.assign(Object.assign({}, jobData), updates), { status, updated_at: Date.now(), execution_count: status === 'completed' || status === 'failed'
+ ? (jobData.execution_count || 0) + 1
+ : jobData.execution_count });
+ // Update job data
+ await redis.setex(jobKey, 90 * 24 * 60 * 60, JSON.stringify(updatedJob));
+ // Update status indices
+ if (oldStatus !== status) {
+ const oldStatusKey = `job_status:${this.workspace_id}:${oldStatus}`;
+ const newStatusKey = `job_status:${this.workspace_id}:${status}`;
+ await redis.srem(oldStatusKey, jobId);
+ await redis.sadd(newStatusKey, jobId);
+ }
+ }
+ /**
+ * Add job execution record to history
+ */
+ async addJobExecution(jobId, execution) {
+ if (!this.redisClient)
+ return;
+ const redis = this.redisClient;
+ const historyKey = `job_history:${this.workspace_id}:${jobId}`;
+ await redis.lpush(historyKey, JSON.stringify(execution));
+ await redis.ltrim(historyKey, 0, 99); // Keep last 100 executions
+ await redis.expire(historyKey, 30 * 24 * 60 * 60); // 30 days TTL
+ }
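
Note: the job-state helpers added above keep everything in Redis under per-workspace keys (`job:`, `job_status:`, `job_history:`). A minimal TypeScript sketch of that key layout, using an assumed ioredis client and illustrative names only (not part of the published package):

```ts
import Redis from "ioredis";

// Illustrative only: mirrors the key scheme used by the added helpers.
const redis = new Redis();

async function moveJobStatus(workspaceId: string, jobId: string, from: string, to: string) {
  const jobKey = `job:${workspaceId}:${jobId}`;             // full job record (JSON), 90-day TTL in the diff
  const historyKey = `job_history:${workspaceId}:${jobId}`; // capped list of execution records
  // Status indices are plain sets, one per status value.
  await redis.srem(`job_status:${workspaceId}:${from}`, jobId);
  await redis.sadd(`job_status:${workspaceId}:${to}`, jobId);
  // Touch the record the same way updateJobStatus does.
  const raw = await redis.get(jobKey);
  if (raw) {
    const job = JSON.parse(raw);
    await redis.setex(jobKey, 90 * 24 * 60 * 60, JSON.stringify({ ...job, status: to, updated_at: Date.now() }));
  }
  return historyKey; // callers LPUSH/LTRIM execution entries here
}
```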
+ /**
+ * Calculate retry delay based on job configuration
+ */
+ calculateJobRetry(jobData, errorCode) {
+ const retryConfig = jobData.retry_config || {};
+ const { initialDelay = 1000, maxDelay = 300000, backoffMultiplier = 2, retryableErrors, nonRetryableErrors, jitter = false, jitterPercent = 0.3, } = retryConfig;
+ // Check if we've exceeded max retries
+ if ((jobData.retry_count || 0) >= (jobData.retries || 0)) {
+ return { shouldRetry: false, delay: 0 };
+ }
+ // Check error-based retry rules
+ if (errorCode) {
+ if (nonRetryableErrors === null || nonRetryableErrors === void 0 ? void 0 : nonRetryableErrors.includes(errorCode)) {
+ return { shouldRetry: false, delay: 0 };
+ }
+ if (retryableErrors && !retryableErrors.includes(errorCode)) {
+ return { shouldRetry: false, delay: 0 };
+ }
+ }
+ // Calculate exponential backoff delay
+ let delay = initialDelay * Math.pow(backoffMultiplier, jobData.retry_count || 0);
+ delay = Math.min(delay, maxDelay);
+ // Add jitter if enabled
+ if (jitter) {
+ const jitterAmount = delay * jitterPercent * Math.random();
+ delay = delay + jitterAmount;
+ }
+ return { shouldRetry: true, delay: Math.round(delay) };
+ }
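
Note: `calculateJobRetry` above implements capped exponential backoff with optional jitter. A minimal standalone TypeScript sketch of the same arithmetic (types assumed; defaults taken from the diff: 1 s initial delay, 5 min cap, multiplier 2, 30% jitter):

```ts
interface RetryConfig {
  initialDelay?: number;      // ms
  maxDelay?: number;          // ms
  backoffMultiplier?: number;
  jitter?: boolean;
  jitterPercent?: number;
}

// delay(n) = min(initialDelay * multiplier^n, maxDelay), plus up to jitterPercent of random jitter.
function backoffDelay(retryCount: number, cfg: RetryConfig = {}): number {
  const {
    initialDelay = 1000,
    maxDelay = 300000,
    backoffMultiplier = 2,
    jitter = false,
    jitterPercent = 0.3,
  } = cfg;
  let delay = Math.min(initialDelay * Math.pow(backoffMultiplier, retryCount), maxDelay);
  if (jitter) delay += delay * jitterPercent * Math.random();
  return Math.round(delay);
}

// With defaults, retries 0..4 give 1000, 2000, 4000, 8000, 16000 ms before jitter.
```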
+ /**
+ * Process database action job (predefined database actions)
+ * Integrates with the database action manager to execute predefined operations
+ */
+ async processDatabaseAction(data) {
+ var _a;
+ const { env, product, database, event, input } = data;
+ // Initialize product builder if not already done
+ if (product) {
+ await this.productBuilderService.initializeProductByTag(product);
+ }
+ // Fetch the database action configuration
+ const databaseAction = await this.productBuilderService.fetchDatabaseAction(`${database}:${event}`);
+ if (!databaseAction) {
+ throw new Error(`Database action '${event}' not found on database '${database}'`);
+ }
+ // Get the database environment configuration
+ const databaseConfig = await this.productBuilderService.fetchDatabase(database);
+ if (!databaseConfig) {
+ throw new Error(`Database '${database}' not found`);
+ }
+ const databaseEnv = (_a = databaseConfig.envs) === null || _a === void 0 ? void 0 : _a.find((e) => e.slug === env);
+ if (!databaseEnv) {
+ throw new Error(`Environment '${env}' not found for database '${database}'`);
+ }
+ // Execute the database action using the database service
+ const { DatabaseService } = await Promise.resolve().then(() => __importStar(require('../../database/databases.service')));
+ const dbService = new DatabaseService({
+ workspace_id: this.workspace_id,
+ public_key: this.public_key,
+ user_id: this.user_id,
+ token: this.token,
+ env_type: this.environment,
+ private_key: this._privateKey,
+ access_key: this.accessKey,
+ });
+ const result = await dbService.execute({
+ product,
+ env,
+ database,
+ action: event,
+ input: input || {},
+ });
+ return { success: true, data: result };
  }
-
-
-
-
-
-
- }
-
+ /**
+ * Process database operation job (direct CRUD operations)
+ * Handles operations like insert, find, update, delete, aggregate
+ */
+ async processDatabaseOperation(data) {
+ var _a;
+ const { env, product, database, operation, input } = data;
+ // Initialize product builder if not already done
+ if (product) {
+ await this.productBuilderService.initializeProductByTag(product);
+ }
+ // Get the database configuration
+ const databaseConfig = await this.productBuilderService.fetchDatabase(database);
+ if (!databaseConfig) {
+ throw new Error(`Database '${database}' not found`);
+ }
+ const databaseEnv = (_a = databaseConfig.envs) === null || _a === void 0 ? void 0 : _a.find((e) => e.slug === env);
+ if (!databaseEnv) {
+ throw new Error(`Environment '${env}' not found for database '${database}'`);
+ }
+ // Execute the database operation using the database service
+ const { DatabaseService } = await Promise.resolve().then(() => __importStar(require('../../database/databases.service')));
+ const dbService = new DatabaseService({
+ workspace_id: this.workspace_id,
+ public_key: this.public_key,
+ user_id: this.user_id,
+ token: this.token,
+ env_type: this.environment,
+ private_key: this._privateKey,
+ access_key: this.accessKey,
+ });
+ // Connect to the database
+ await dbService.connect({
+ product,
+ env,
+ database,
+ });
+ let result;
+ // Execute the appropriate operation based on the operation type
+ switch (operation) {
+ case 'insert':
+ result = await dbService.insert({
+ product,
+ env,
+ database,
+ table: input.table,
+ data: input.data,
+ returning: input.returning,
+ });
+ break;
+ case 'find':
+ case 'query':
+ result = await dbService.query({
+ product,
+ env,
+ database,
+ table: input.table,
+ where: input.where,
+ select: input.select,
+ include: input.include,
+ orderBy: input.orderBy,
+ limit: input.limit,
+ offset: input.offset,
+ });
+ break;
+ case 'update':
+ result = await dbService.update({
+ product,
+ env,
+ database,
+ table: input.table,
+ data: input.data,
+ where: input.where,
+ returning: input.returning,
+ });
+ break;
+ case 'delete':
+ result = await dbService.delete({
+ product,
+ env,
+ database,
+ table: input.table,
+ where: input.where,
+ });
+ break;
+ case 'upsert':
+ result = await dbService.upsert({
+ product,
+ env,
+ database,
+ table: input.table,
+ data: input.data,
+ conflictKeys: input.conflictKeys || input.conflictFields,
+ updateColumns: input.updateColumns,
+ returning: input.returning,
+ });
+ break;
+ case 'aggregate':
+ result = await dbService.aggregate({
+ product,
+ env,
+ database,
+ table: input.table,
+ operations: input.operations,
+ where: input.where,
+ });
+ break;
+ case 'groupBy':
+ result = await dbService.groupBy({
+ product,
+ env,
+ database,
+ table: input.table,
+ groupBy: input.groupBy || input.by,
+ operations: input.operations,
+ where: input.where,
+ having: input.having,
+ orderBy: input.orderBy,
+ limit: input.limit,
+ offset: input.offset,
+ });
+ break;
+ case 'count':
+ result = await dbService.count({
+ product,
+ env,
+ database,
+ table: input.table,
+ where: input.where,
+ });
+ break;
+ default:
+ throw new Error(`Unknown database operation: ${operation}`);
+ }
+ return { success: true, data: result };
+ }
+ /**
+ * Process graph action job (predefined graph actions)
+ * Integrates with the graph service to execute predefined graph operations
+ */
+ async processGraphAction(data) {
+ const { env, product, graph, event, input } = data;
+ // Initialize product builder if not already done
+ if (product) {
+ await this.productBuilderService.initializeProductByTag(product);
+ }
+ // Execute the graph action using the graph service
+ const { GraphService } = await Promise.resolve().then(() => __importStar(require('../../graph/graphs.service')));
+ const graphService = new GraphService({
+ workspace_id: this.workspace_id,
+ public_key: this.public_key,
+ user_id: this.user_id,
+ token: this.token,
+ env_type: this.environment,
+ private_key: this._privateKey,
+ access_key: this.accessKey,
+ });
+ // Connect to the graph database
+ await graphService.connect({
+ product,
+ env,
+ graph,
+ });
+ // Execute the graph action
+ const result = await graphService.execute({
+ product,
+ env,
+ graph,
+ action: event,
+ input: input || {},
+ });
+ return { success: result.success, data: result.data, error: result.error };
+ }
+ /**
+ * Process graph operation job (direct graph operations)
+ * Handles operations like createNode, findNodes, createRelationship, traverse, etc.
+ */
+ async processGraphOperation(data) {
+ const { env, product, graph, operation, input } = data;
+ // Initialize product builder if not already done
+ if (product) {
+ await this.productBuilderService.initializeProductByTag(product);
+ }
+ // Execute the graph operation using the graph service
+ const { GraphService } = await Promise.resolve().then(() => __importStar(require('../../graph/graphs.service')));
+ const graphService = new GraphService({
+ workspace_id: this.workspace_id,
+ public_key: this.public_key,
+ user_id: this.user_id,
+ token: this.token,
+ env_type: this.environment,
+ private_key: this._privateKey,
+ access_key: this.accessKey,
+ });
+ // Connect to the graph database
+ await graphService.connect({
+ product,
+ env,
+ graph,
+ });
+ let result;
+ // Execute the appropriate operation based on the operation type
+ switch (operation) {
+ case 'createNode':
+ result = await graphService.createNode({
+ labels: input.labels,
+ properties: input.properties,
+ });
+ break;
+ case 'findNodes':
+ result = await graphService.findNodes({
+ labels: input.labels,
+ where: input.where,
+ limit: input.limit,
+ offset: input.offset,
+ orderBy: input.orderBy,
+ });
+ break;
+ case 'findNodeById':
+ result = await graphService.findNodeById(input.id);
+ break;
+ case 'updateNode':
+ result = await graphService.updateNode({
+ id: input.id,
+ properties: input.properties,
+ labels: input.labels,
+ });
+ break;
+ case 'deleteNode':
+ result = await graphService.deleteNode({
+ id: input.id,
+ detach: input.detach,
+ });
+ break;
+ case 'mergeNode':
+ result = await graphService.mergeNode({
+ labels: input.labels,
+ matchProperties: input.matchProperties,
+ onCreate: input.onCreate || input.setProperties,
+ onMatch: input.onMatch,
+ });
+ break;
+ case 'createRelationship':
+ result = await graphService.createRelationship({
+ type: input.type,
+ startNodeId: input.startNodeId || input.from,
+ endNodeId: input.endNodeId || input.to,
+ properties: input.properties,
+ });
+ break;
+ case 'findRelationships':
+ result = await graphService.findRelationships({
+ types: input.types,
+ startNodeId: input.startNodeId,
+ endNodeId: input.endNodeId,
+ where: input.where,
+ limit: input.limit,
+ });
+ break;
+ case 'updateRelationship':
+ result = await graphService.updateRelationship({
+ id: input.id,
+ properties: input.properties,
+ });
+ break;
+ case 'deleteRelationship':
+ result = await graphService.deleteRelationship({
+ id: input.id,
+ });
+ break;
+ case 'traverse':
+ result = await graphService.traverse({
+ startNodeId: input.startNodeId,
+ direction: input.direction,
+ relationshipTypes: input.relationshipTypes,
+ maxDepth: input.maxDepth,
+ minDepth: input.minDepth,
+ nodeFilter: input.nodeFilter || input.where,
+ relationshipFilter: input.relationshipFilter,
+ limit: input.limit,
+ });
+ break;
+ case 'shortestPath':
+ result = await graphService.shortestPath({
+ startNodeId: input.startNodeId,
+ endNodeId: input.endNodeId,
+ relationshipTypes: input.relationshipTypes,
+ maxDepth: input.maxDepth,
+ });
+ break;
+ case 'query':
+ result = await graphService.query(input.query, input.params);
+ break;
+ case 'countNodes':
+ result = await graphService.countNodes(input.labels, input.where);
+ break;
+ case 'countRelationships':
+ result = await graphService.countRelationships(input.types, input.where);
+ break;
+ case 'getStatistics':
+ result = await graphService.getStatistics();
+ break;
+ default:
+ throw new Error(`Unknown graph operation: ${operation}`);
+ }
+ return { success: true, data: result };
+ }
+ /**
+ * Process workflow job
+ * Executes a workflow using the workflow service
+ */
+ async processWorkflow(data) {
+ const { env, product, workflow, input, idempotency_key } = data;
+ // Initialize product builder if not already done
+ if (product) {
+ await this.productBuilderService.initializeProductByTag(product);
+ }
+ // Execute the workflow using the workflow service
+ const { WorkflowService } = await Promise.resolve().then(() => __importStar(require('../../workflows/workflows.service')));
+ const workflowService = new WorkflowService({
+ workspace_id: this.workspace_id,
+ public_key: this.public_key,
+ user_id: this.user_id,
+ token: this.token,
+ env_type: this.environment,
+ private_key: this._privateKey,
+ access_key: this.accessKey
+ });
+ // Execute the workflow
+ const result = await workflowService.execute({
+ product,
+ env,
+ tag: workflow,
+ input: input || {},
+ idempotency_key,
+ });
+ return {
+ success: result.status === 'completed',
+ data: result.output,
+ workflow_id: result.workflow_id,
+ status: result.status,
+ error: result.error,
+ };
  }
  async getAndStoreAuth(appEnv, access_tag) {
  try {
- // const payload = JSON.parse(decrypt(env.auth.data, this.productBuilderService.
- const payload = appEnv.auth.data;
+ // const payload = JSON.parse(decrypt(env.auth.data, this.productBuilderService.fetchPrivateKey()));
+ const payload = JSON.parse((0, processor_utils_1.decrypt)(String(appEnv.auth.data), this.productBuilderService.fetchPrivateKey()));
  let app = await this.fetchThirdPartyApp(access_tag);
  const auth = app.auths.find((item) => item.tag === appEnv.auth.auth_tag);
+ console.log('JAMESY', auth);
  if (!auth) {
  // throw an error
  throw new Error(`Cannot find auth ${appEnv.auth.auth_tag} on environment ${appEnv.product_env_slug}`);
@@ -1758,9 +1800,16 @@ class ProcessorService {
  if (env.base_url) {
  request_base_url = env.base_url;
  }
+ if (action.envs && action.envs.length) {
+ const env = action.envs.find((item) => item.slug === appEnv.app_env_slug);
+ if (env && env.base_url) {
+ request_base_url = env.base_url;
+ }
+ }
+ console.log('payloadabi!!!!', payload);
  const results = await this.sendActionRequest(request_base_url, url, payload, method, appEnv.app_env_slug);
- const values = (0, processor_utils_1.encrypt)(JSON.stringify(results), this.productBuilderService.
- const productApp = this.productBuilderService.fetchApp(access_tag);
+ const values = (0, processor_utils_1.encrypt)(JSON.stringify(results), this.productBuilderService.fetchPrivateKey());
+ const productApp = await this.productBuilderService.fetchApp(access_tag);
  for (let i = 0; i < productApp.envs.length; i++) {
  if (productApp.envs[i].app_env_slug === env.slug) {
  productApp.envs[i].auth.values = values; // write new values
@@ -1856,7 +1905,7 @@ class ProcessorService {
  // generate indexes
  return (0, processor_utils_1.generateIndexes)(operator, iter, init, valueValue);
  }
- async runAction(event, additional_logs, returnValue = true) {
+ async runAction(event, additional_logs, returnValue = true, bootstrapData) {
  try {
  const { event: action_tag, app: access_tag, condition, cache: cache_tag } = event;
  let indexes = [];
@@ -1869,28 +1918,59 @@ class ProcessorService {
  if (condition && condition.type === types_1.Conditions.LOOP) {
  indexes = await this.extractLoopIndexes(event, additional_logs);
  }
- let
-
-
-
-
-
-
- if
-
+ let action;
+ let env;
+ let retries;
+ let recipient_workspace_id;
+ let app_active;
+ let app_id = '';
+ let app_env_slug = '';
+ // Use bootstrap data if provided, otherwise fetch via API
+ if (bootstrapData) {
+ action = bootstrapData.action;
+ env = bootstrapData.app_env;
+ retries = bootstrapData.retries;
+ recipient_workspace_id = bootstrapData.recipient_workspace_id;
+ app_active = bootstrapData.app_active;
+ app_env_slug = env.slug;
+ additional_logs.app_env = app_env_slug;
  }
-
-
+ else {
+ // Fallback to original API-based fetching (for features/workflows that don't use bootstrap)
+ const appData = await this.fetchThirdPartyApp(access_tag);
+ const { actions, envs: appEnvs, retries: appRetries, workspace_id: appWorkspaceId, active } = appData;
+ const productApp = await this.productBuilderService.fetchApp(access_tag);
+ const { envs: productEnvs } = productApp;
+ const envMapping = productEnvs.find((item) => item.product_env_slug === this.processEnv.slug);
+ app_env_slug = (envMapping === null || envMapping === void 0 ? void 0 : envMapping.app_env_slug) || '';
+ additional_logs.app_env = app_env_slug;
+ env = appEnvs.find((item) => item.slug === app_env_slug);
+ action = actions.find((item) => item.tag === action_tag);
+ retries = appRetries;
+ recipient_workspace_id = appWorkspaceId;
+ app_active = active;
+ app_id = appData._id;
+ if (!action) {
+ throw new Error(`Action ${action_tag} not found in ${access_tag}`);
+ }
+ }
+ if (!app_active && recipient_workspace_id !== this.baseLogs.workspace_id) {
+ throw new Error(`App ${event.app} is not active`);
  }
-
-
- throw new Error(`Action ${action_tag} not found in ${access_tag}`);
+ if (!env.active) {
+ throw new Error(`Action environment ${env.slug} is not active`);
  }
  const { query, headers, body, params, request_type, method, base_url, resource } = action;
  let request_base_url = base_url;
  if (env.base_url) {
  request_base_url = env.base_url;
  }
+ if (action.envs && action.envs.length) {
+ const actionEnv = action.envs.find((item) => item.slug === app_env_slug);
+ if (actionEnv && actionEnv.base_url) {
+ request_base_url = actionEnv.base_url;
+ }
+ }
  const samples = {
  query: (query === null || query === void 0 ? void 0 : query.data) || [],
  headers: (headers === null || headers === void 0 ? void 0 : headers.data) || [],
@@ -1899,9 +1979,9 @@ class ProcessorService {
  };
  let payloads;
  let result;
- const product = this.productBuilderService.fetchProduct();
+ //const product = this.productBuilderService.fetchProduct();
  if (cache_tag && this.redisClient) {
- const productCache = this.productBuilderService.fetchCache(cache_tag);
+ const productCache = await this.productBuilderService.fetchCache(cache_tag);
  if (!productCache) {
  throw new Error('Invalid cache tag ');
  }
@@ -1909,7 +1989,7 @@ class ProcessorService {
  const check = await this.fetchFromCache({
  cache_tag,
  input: inputString,
- privateKey:
+ privateKey: this.productBuilderService.fetchPrivateKey(),
  expiry: productCache.expiry,
  }, additional_logs);
  if (check) {
@@ -1922,31 +2002,31 @@ class ProcessorService {
  }
  if (request_type === types_1.DataFormats.JSON || request_type === types_1.DataFormats.URLENCODED || !request_type) {
  if (indexes.length == 0) {
- payloads = await this.constructJSONDataPayloads(event.input, additional_logs, samples, event);
+ payloads = (await this.constructJSONDataPayloads(event.input, additional_logs, samples, event));
  if (request_type === types_1.DataFormats.URLENCODED) {
  payloads.body = (0, processor_utils_1.toFormUrlEncoded)(payloads.body);
  }
  additional_logs.recipient_workspace_id = recipient_workspace_id;
- result = await this.processRequest({ request_base_url, resource, method, env, payloads, app_id
+ result = await this.processRequest({ request_base_url, resource, method, env, payloads, app_id }, event, { retries }, additional_logs, returnValue);
  }
  else {
  const promises = indexes.map(async (index) => {
  payloads = await this.constructJSONDataPayloads(event.input, additional_logs, samples, event, index);
  additional_logs.recipient_workspace_id = recipient_workspace_id;
- await this.processRequest({ request_base_url, resource, method, env, payloads, app_id
+ await this.processRequest({ request_base_url, resource, method, env, payloads, app_id }, event, { retries }, additional_logs, returnValue);
  });
  result = await Promise.all(promises);
  }
  }
  if (cache_tag && this.redisClient && result) {
- const productCache = this.productBuilderService.fetchCache(cache_tag);
+ const productCache = await this.productBuilderService.fetchCache(cache_tag);
  if (!productCache) {
  throw new Error('Invalid cache tag ');
  }
  const inputString = JSON.stringify(event.input);
  await this.addToCache({
  input: inputString,
- privateKey:
+ privateKey: this.productBuilderService.fetchPrivateKey(),
  data: JSON.stringify(result),
  cache_tag,
  timestamp: Date.now(),
@@ -1963,6 +2043,7 @@ class ProcessorService {
  }
  }
  async processRequest(payload, event, retries, additional_logs, returnValue = false) {
+ var _a;
  const { request_base_url, resource, payloads, method, env, app_id } = payload;
  const start = Date.now();
  try {
@@ -1973,7 +2054,11 @@ class ProcessorService {
  const end = Date.now();
  this.requestTime += end - start;
  this.totalRequests += 1;
- this.
+ const { pricing_tag, pricing_cost, is_overage, currency } = await this.processPricingCost(Object.assign(Object.assign({}, additional_logs), { app_id, workspace_id: this.workspace_id }));
+ this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { pricing_tag,
+ pricing_cost,
+ currency,
+ is_overage, message: 'Process http request - success', successful_execution: true, data: { response: (0, processor_utils_1.anonymizeObject)(results) }, status: types_1.LogEventStatus.SUCCESS, app_id, action: event.event, start,
  end }));
  await this.addToSuccessOutput(event, results, additional_logs);
  if (returnValue) {
@@ -1986,7 +2071,7 @@ class ProcessorService {
  catch (e) {
  const end = Date.now();
  let error = e;
- if (e.response.data) {
+ if ((_a = e === null || e === void 0 ? void 0 : e.response) === null || _a === void 0 ? void 0 : _a.data) {
  error = e.response.data;
  }
  this.requestTime += end - start;
@@ -2019,6 +2104,67 @@ class ProcessorService {
  }
  }
  }
+ async processPricingCost(additional_logs) {
+ try {
+ const { app_id, workspace_id } = additional_logs;
+ if (!app_id || !workspace_id) {
+ throw new Error('app_id and workspace_id are required in additional_logs');
+ }
+ const primaryPricing = this.pricingService.fetchPricing();
+ const overagePricing = this.pricingService.fetchOveragePricing();
+ const requests = await this.requestTrackerService.incrementRequest(app_id, workspace_id);
+ const limitCheck = (0, request_utils_1.checkLimitExceeded)(requests, primaryPricing.limits);
+ let finalCost = 0;
+ let usedPricing = primaryPricing;
+ let isOverage = false;
+ let finalCurrency = primaryPricing.currency;
+ if ((0, request_utils_1.isFreeTag)(primaryPricing.pricing_tag)) {
+ if (limitCheck.exceeded) {
+ const overageRequests = (0, request_utils_1.calculateOverageRequests)(requests, primaryPricing.limits);
+ finalCost = (0, request_utils_1.calculateCost)(overagePricing.pricing_mode, overagePricing.unit_price, overageRequests);
+ usedPricing = overagePricing;
+ isOverage = true;
+ finalCurrency = overagePricing.currency;
+ }
+ else {
+ finalCost = 0;
+ }
+ }
+ else {
+ if (limitCheck.exceeded) {
+ const overageRequests = (0, request_utils_1.calculateOverageRequests)(requests, primaryPricing.limits);
+ const overageCost = (0, request_utils_1.calculateCost)(overagePricing.pricing_mode, overagePricing.unit_price, overageRequests);
+ finalCost = overageCost;
+ isOverage = true;
+ }
+ else {
+ const pricingRequests = (0, request_utils_1.calculateRequests)(requests);
+ finalCost = (0, request_utils_1.calculateCost)(primaryPricing.pricing_mode, primaryPricing.unit_price, pricingRequests);
+ }
+ }
+ this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Process Pricing Cost - success', data: {
+ pricing_tag: usedPricing.pricing_tag,
+ cost: finalCost,
+ currency: finalCurrency,
+ is_overage: isOverage,
+ requests: requests,
+ limit_exceeded: limitCheck.exceeded,
+ }, status: types_1.LogEventStatus.SUCCESS }));
+ return {
+ pricing_tag: usedPricing.pricing_tag,
+ pricing_cost: Math.round(finalCost * 100) / 100,
+ currency: finalCurrency,
+ is_overage: isOverage,
+ requests_made: requests,
+ limit_exceeded: limitCheck.exceeded,
+ exceeded_limits: limitCheck.exceededLimits,
+ };
+ }
+ catch (e) {
+ this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Process Pricing Cost - failed', data: { error: e }, status: types_1.LogEventStatus.FAIL }));
+ throw e;
+ }
+ }
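
Note: the added `processPricingCost` decides between plan and overage pricing: free plans bill only the requests beyond their limits, paid plans bill either plan-rate requests or overage requests once a limit is exceeded. A rough TypeScript sketch of that decision (simplified and with assumed shapes; the real helpers live in `request_utils_1` and are not reproduced here):

```ts
interface Pricing { pricing_tag: string; unit_price: number; currency: string; }

// Simplified: unit_price * request count stands in for calculateCost().
function pickCost(
  isFreePlan: boolean,
  limitExceeded: boolean,
  requests: number,
  overageRequests: number,
  plan: Pricing,
  overage: Pricing,
) {
  if (limitExceeded) {
    // Both free and paid plans fall through to overage pricing once limits are hit.
    return { cost: overage.unit_price * overageRequests, pricing: overage, isOverage: true };
  }
  if (isFreePlan) {
    return { cost: 0, pricing: plan, isOverage: false };
  }
  return { cost: plan.unit_price * requests, pricing: plan, isOverage: false };
}
```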
|
|
2022
2168
|
async addToSuccessOutput(event, output, additional_logs) {
|
|
2023
2169
|
// Remove event from failed, skipped, and waiting arrays
|
|
2024
2170
|
this.processingOutput.failure = this.processingOutput.failure.filter((data) => !(data.event.sequence_tag === event.sequence_tag && data.event.event === event.event));
|
|
@@ -2028,7 +2174,6 @@ class ProcessorService {
|
|
|
2028
2174
|
event = (0, processor_utils_1.cleanBlob)(event);
|
|
2029
2175
|
}
|
|
2030
2176
|
this.processingOutput.success.push({ event, output });
|
|
2031
|
-
await this.processWaitingEvents(additional_logs);
|
|
2032
2177
|
if (this.checkIsSuccessful() && this.doneWithProcessing) {
|
|
2033
2178
|
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { successful_feature_execution: true, message: 'Process feature - success', data: {}, status: types_1.LogEventStatus.SUCCESS }));
|
|
2034
2179
|
await this.logService.publish();
|
|
@@ -2048,18 +2193,6 @@ class ProcessorService {
|
|
|
2048
2193
|
}
|
|
2049
2194
|
return success;
|
|
2050
2195
|
}
|
|
2051
|
-
addToWaitingOutput(event, dependants) {
|
|
2052
|
-
const exists = this.processingOutput.waiting.findIndex((item) => {
|
|
2053
|
-
return item.event.event === event.event && item.event.sequence_tag === event.sequence_tag;
|
|
2054
|
-
});
|
|
2055
|
-
const skipped = this.processingOutput.skipped.findIndex((item) => {
|
|
2056
|
-
return item.event.event === event.event && item.event.sequence_tag === event.sequence_tag;
|
|
2057
|
-
});
|
|
2058
|
-
if (!exists && !skipped) {
|
|
2059
|
-
this.processingOutput.waiting.push({ event, dependants });
|
|
2060
|
-
}
|
|
2061
|
-
// addToSkippedOutput()
|
|
2062
|
-
}
|
|
2063
2196
|
async addToFailureOutput(e, event, payload, additional_logs, policy = {}) {
|
|
2064
2197
|
try {
|
|
2065
2198
|
this.processingFailure = true;
|
|
@@ -2084,7 +2217,7 @@ class ProcessorService {
|
|
|
2084
2217
|
let retryable = true;
|
|
2085
2218
|
if (event.type === types_1.FeatureEventTypes.STORAGE) {
|
|
2086
2219
|
event = (0, processor_utils_1.cleanBlob)(event);
|
|
2087
|
-
if (!
|
|
2220
|
+
if (!event.allow_fail)
|
|
2088
2221
|
retryable = false;
|
|
2089
2222
|
}
|
|
2090
2223
|
let retries_left = retries || max;
|
|
@@ -2117,9 +2250,6 @@ class ProcessorService {
|
|
|
2117
2250
|
if (event.type === types_1.FeatureEventTypes.ACTION) {
|
|
2118
2251
|
this.processRequest(payload, event, policy, additional_logs);
|
|
2119
2252
|
}
|
|
2120
|
-
if (event.type === types_1.FeatureEventTypes.DB_ACTION) {
|
|
2121
|
-
this.processDBRequest(event, event.input, payload.database_tag, payload.databaseEnv, payload.action_tag, additional_logs);
|
|
2122
|
-
}
|
|
2123
2253
|
if (event.type === types_1.FeatureEventTypes.STORAGE) {
|
|
2124
2254
|
this.processStorageRequest(event, event.input, payload.storageEnv, additional_logs);
|
|
2125
2255
|
}
|
|
@@ -2127,9 +2257,6 @@ class ProcessorService {
|
|
|
2127
2257
|
}
|
|
2128
2258
|
if (allow_fail === false && retries_left === 0) {
|
|
2129
2259
|
this.published = true;
|
|
2130
|
-
if (this.feature) {
|
|
2131
|
-
additional_logs.failed_feature_execution = true;
|
|
2132
|
-
}
|
|
2133
2260
|
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Ran out of retries - failed', data: Object.assign(Object.assign({}, output), { payload: (0, processor_utils_1.anonymizeObject)(output.payload) }), status: types_1.LogEventStatus.FAIL }));
|
|
2134
2261
|
//throw new Error("Run out of retries")
|
|
2135
2262
|
this.end = Date.now();
|
|
@@ -2144,52 +2271,15 @@ class ProcessorService {
|
|
|
2144
2271
|
}
|
|
2145
2272
|
}
|
|
2146
2273
|
generateRetryMetrices(error_code, retries) {
|
|
2147
|
-
var _a
|
|
2148
|
-
|
|
2149
|
-
|
|
2150
|
-
|
|
2151
|
-
|
|
2152
|
-
|
|
2153
|
-
|
|
2154
|
-
|
|
2155
|
-
|
|
2156
|
-
case '502':
|
|
2157
|
-
allow_fail = ((_c = retries === null || retries === void 0 ? void 0 : retries.policy[502]) === null || _c === void 0 ? void 0 : _c.available) || false;
|
|
2158
|
-
retry_at = ((_d = retries === null || retries === void 0 ? void 0 : retries.policy[502]) === null || _d === void 0 ? void 0 : _d.lag) || 0;
|
|
2159
|
-
break;
|
|
2160
|
-
case '503':
|
|
2161
|
-
allow_fail = ((_e = retries === null || retries === void 0 ? void 0 : retries.policy[503]) === null || _e === void 0 ? void 0 : _e.available) || false;
|
|
2162
|
-
retry_at = ((_f = retries === null || retries === void 0 ? void 0 : retries.policy[503]) === null || _f === void 0 ? void 0 : _f.lag) || 0;
|
|
2163
|
-
break;
|
|
2164
|
-
case '504':
|
|
2165
|
-
allow_fail = ((_g = retries === null || retries === void 0 ? void 0 : retries.policy[504]) === null || _g === void 0 ? void 0 : _g.available) || false;
|
|
2166
|
-
retry_at = ((_h = retries === null || retries === void 0 ? void 0 : retries.policy[504]) === null || _h === void 0 ? void 0 : _h.lag) || 0;
|
|
2167
|
-
break;
|
|
2168
|
-
case '400':
|
|
2169
|
-
allow_fail = ((_j = retries === null || retries === void 0 ? void 0 : retries.policy[400]) === null || _j === void 0 ? void 0 : _j.available) || false;
|
|
2170
|
-
retry_at = ((_k = retries === null || retries === void 0 ? void 0 : retries.policy[400]) === null || _k === void 0 ? void 0 : _k.lag) || 0;
|
|
2171
|
-
break;
|
|
2172
|
-
case '401':
|
|
2173
|
-
allow_fail = ((_l = retries === null || retries === void 0 ? void 0 : retries.policy[401]) === null || _l === void 0 ? void 0 : _l.available) || false;
|
|
2174
|
-
retry_at = ((_m = retries === null || retries === void 0 ? void 0 : retries.policy[401]) === null || _m === void 0 ? void 0 : _m.lag) || 0;
|
|
2175
|
-
break;
|
|
2176
|
-
case '403':
|
|
2177
|
-
allow_fail = ((_o = retries === null || retries === void 0 ? void 0 : retries.policy[403]) === null || _o === void 0 ? void 0 : _o.available) || false;
|
|
2178
|
-
retry_at = ((_p = retries === null || retries === void 0 ? void 0 : retries.policy[403]) === null || _p === void 0 ? void 0 : _p.lag) || 0;
|
|
2179
|
-
break;
|
|
2180
|
-
case '404':
|
|
2181
|
-
allow_fail = ((_q = retries === null || retries === void 0 ? void 0 : retries.policy[404]) === null || _q === void 0 ? void 0 : _q.available) || false;
|
|
2182
|
-
retry_at = ((_r = retries === null || retries === void 0 ? void 0 : retries.policy[404]) === null || _r === void 0 ? void 0 : _r.lag) || 0;
|
|
2183
|
-
break;
|
|
2184
|
-
case '1000': // all non http errors
|
|
2185
|
-
allow_fail = true;
|
|
2186
|
-
retry_at = 500;
|
|
2187
|
-
default:
|
|
2188
|
-
allow_fail = true;
|
|
2189
|
-
retry_at = 0;
|
|
2190
|
-
max = 0;
|
|
2191
|
-
break;
|
|
2192
|
-
}
|
|
2274
|
+
var _a;
|
|
2275
|
+
const isSpecial = error_code === '1000';
|
|
2276
|
+
if (isSpecial) {
|
|
2277
|
+
return { allow_fail: true, max: (retries === null || retries === void 0 ? void 0 : retries.max) || 0, retry_at: 500 };
|
|
2278
|
+
}
|
|
2279
|
+
const policy = (_a = retries === null || retries === void 0 ? void 0 : retries.policy) === null || _a === void 0 ? void 0 : _a[error_code];
|
|
2280
|
+
const allow_fail = policy ? policy.available : true;
|
|
2281
|
+
const retry_at = policy ? policy.lag : 0;
|
|
2282
|
+
const max = (retries === null || retries === void 0 ? void 0 : retries.max) || 0;
|
|
2193
2283
|
return { allow_fail, max, retry_at };
|
|
2194
2284
|
}
|
|
2195
2285
|
async sendActionRequest(base_url, resource, payload, method, env) {
|
|
@@ -2213,8 +2303,9 @@ class ProcessorService {
|
|
|
2213
2303
|
params: query,
|
|
2214
2304
|
headers: authHeaders,
|
|
2215
2305
|
timeout: 15000,
|
|
2216
|
-
withCredentials: false
|
|
2306
|
+
withCredentials: false,
|
|
2217
2307
|
};
|
|
2308
|
+
console.log('REQUEST!!!!', request);
|
|
2218
2309
|
const response = await axios_1.default.request(request);
|
|
2219
2310
|
return response.data;
|
|
2220
2311
|
}
|
|
@@ -2225,14 +2316,17 @@ class ProcessorService {
|
|
|
2225
2316
|
}
|
|
2226
2317
|
async processStorage(action) {
|
|
2227
2318
|
//TODO: schema validation
|
|
2228
|
-
const { env, input, retries, event, product: product_tag } = action;
|
|
2319
|
+
const { env, input, retries, event, product: product_tag, session, cache } = action;
|
|
2229
2320
|
const additional_logs = {
|
|
2230
2321
|
parent_tag: event,
|
|
2231
2322
|
type: types_1.LogEventTypes.STORAGE,
|
|
2232
2323
|
name: 'Process Storage',
|
|
2233
2324
|
};
|
|
2325
|
+
// Session log fields (will be populated if session is provided)
|
|
2326
|
+
let sessionLogFields = {};
|
|
2327
|
+
let resolvedInput = input;
|
|
2234
2328
|
try {
|
|
2235
|
-
this.validateActionDataMappingInput(input, types_1.FeatureEventTypes.STORAGE);
|
|
2329
|
+
await this.validateActionDataMappingInput(input, types_1.FeatureEventTypes.STORAGE);
|
|
2236
2330
|
this.input = input;
|
|
2237
2331
|
this.start = Date.now();
|
|
2238
2332
|
this.component = types_1.LogEventTypes.STORAGE;
|
|
@@ -2243,23 +2337,57 @@ class ProcessorService {
|
|
|
2243
2337
|
const process_id = (0, processor_utils_1.generateObjectId)();
|
|
2244
2338
|
this.baseLogs = Object.assign({ product_tag: this.productTag, product_id: this.productId, workspace_id: this.workspace_id, env,
|
|
2245
2339
|
process_id, data: this.clone }, additional_logs);
|
|
2246
|
-
|
|
2340
|
+
// Single bootstrap call to fetch all storage data
|
|
2341
|
+
const bootstrapData = await this.productBuilderService.bootstrapStorage({
|
|
2342
|
+
product_tag,
|
|
2343
|
+
env_slug: env,
|
|
2344
|
+
storage_tag: event,
|
|
2345
|
+
});
|
|
2346
|
+
// Initialize from bootstrap data
|
|
2347
|
+
this.productId = bootstrapData.product_id;
|
|
2348
|
+
this.processEnv = bootstrapData.env;
|
|
2349
|
+
// Process session if provided - verify and resolve $Session{} references
|
|
2350
|
+
if (session && bootstrapData.private_key) {
|
|
2351
|
+
const { processSessionForExecution } = await Promise.resolve().then(() => __importStar(require('../../sessions')));
|
|
2352
|
+
const sessionResult = await processSessionForExecution(session, bootstrapData.private_key, input, env);
|
|
2353
|
+
if (sessionResult.error) {
|
|
2354
|
+
throw new Error(`Session validation failed: ${sessionResult.error}`);
|
|
2355
|
+
}
|
|
2356
|
+
resolvedInput = sessionResult.input;
|
|
2357
|
+
sessionLogFields = sessionResult.logFields;
|
|
2358
|
+
}
|
|
2359
|
+
// Initialize log service if needed
|
|
2360
|
+
if (!this.logService) {
|
|
2361
|
+
this.logService = new logs_service_1.default({
|
|
2362
|
+
product_id: this.productId,
|
|
2363
|
+
workspace_id: this.workspace_id,
|
|
2364
|
+
public_key: this.public_key,
|
|
2365
|
+
user_id: this.user_id,
|
|
2366
|
+
token: this.token,
|
|
2367
|
+
env_type: this.environment,
|
|
2368
|
+
});
|
|
2369
|
+
}
|
|
2247
2370
|
this.process_id = process_id;
|
|
2248
|
-
|
|
2249
|
-
|
|
2371
|
+
this.baseLogs.product_id = this.productId;
|
|
2372
|
+
// Add session fields to base logs
|
|
2373
|
+
this.baseLogs = Object.assign(Object.assign({}, this.baseLogs), sessionLogFields);
|
|
2374
|
+
const productEnv = bootstrapData.env;
|
|
2250
2375
|
if (!productEnv.active) {
|
|
2251
2376
|
throw new Error(`Environment ${env} is not active`);
|
|
2252
2377
|
}
|
|
2253
2378
|
const payload = {
|
|
2254
2379
|
type: types_1.FeatureEventTypes.STORAGE,
|
|
2255
2380
|
event,
|
|
2256
|
-
cache:
|
|
2257
|
-
input,
|
|
2381
|
+
cache: cache,
|
|
2382
|
+
input: resolvedInput,
|
|
2258
2383
|
env: productEnv,
|
|
2259
2384
|
retries: retries || 0,
|
|
2260
2385
|
allow_fail: false,
|
|
2261
2386
|
};
|
|
2262
|
-
const result = await this.runStorage(payload
|
|
2387
|
+
const result = await this.runStorage(payload, additional_logs, {
|
|
2388
|
+
storage: bootstrapData.storage,
|
|
2389
|
+
storage_env: bootstrapData.storage_env,
|
|
2390
|
+
});
|
|
2263
2391
|
this.end = Date.now();
|
|
2264
2392
|
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Storing file - success', data: { input: this.clone, result }, status: types_1.LogEventStatus.SUCCESS }));
|
|
2265
2393
|
await this.writeResult(types_1.LogEventStatus.SUCCESS);
|
|
@@ -2285,7 +2413,7 @@ class ProcessorService {
|
|
|
2285
2413
|
name: `Subscribe to broker topic`,
|
|
2286
2414
|
};
|
|
2287
2415
|
try {
|
|
2288
|
-
this.validateActionDataMappingInput(data.input, types_1.FeatureEventTypes.STORAGE);
|
|
2416
|
+
await this.validateActionDataMappingInput(data.input, types_1.FeatureEventTypes.STORAGE);
|
|
2289
2417
|
this.start = Date.now();
|
|
2290
2418
|
this.productTag = data.product;
|
|
2291
2419
|
const process_id = (0, processor_utils_1.generateObjectId)();
|
|
@@ -2294,7 +2422,7 @@ class ProcessorService {
|
|
|
2294
2422
|
await this.intializeProduct(additional_logs);
|
|
2295
2423
|
this.baseLogs.product_id = this.productId;
|
|
2296
2424
|
this.process_id = process_id;
|
|
2297
|
-
const productEnv = this.fetchEnv(data.env, additional_logs);
|
|
2425
|
+
const productEnv = await this.fetchEnv(data.env, additional_logs);
|
|
2298
2426
|
this.processEnv = productEnv;
|
|
2299
2427
|
if (!productEnv.active) {
|
|
2300
2428
|
throw new Error(`Environment ${data.env} is not active`);
|
|
@@ -2333,8 +2461,11 @@ class ProcessorService {
|
|
|
2333
2461
|
type: types_1.LogEventTypes.MESSAGEBROKER,
|
|
2334
2462
|
name: 'Publish to broker topic',
|
|
2335
2463
|
};
|
|
2464
|
+
// Session log fields (will be populated if session is provided)
|
|
2465
|
+
let sessionLogFields = {};
|
|
2466
|
+
let resolvedInput = data.input;
|
|
2336
2467
|
try {
|
|
2337
|
-
this.validateActionDataMappingInput(data.input, types_1.FeatureEventTypes.PUBLISH);
|
|
2468
|
+
await this.validateActionDataMappingInput(data.input, types_1.FeatureEventTypes.PUBLISH);
|
|
2338
2469
|
this.start = Date.now();
|
|
2339
2470
|
// clone
|
|
2340
2471
|
this.clone = (0, processor_utils_1.structuredClone)(data.input);
|
|
@@ -2346,7 +2477,20 @@ class ProcessorService {
|
|
|
2346
2477
|
await this.intializeProduct(additional_logs);
|
|
2347
2478
|
this.baseLogs.product_id = this.productId;
|
|
2348
2479
|
this.process_id = process_id;
|
|
2349
|
-
|
|
2480
|
+
// Process session if provided - verify and resolve $Session{} references
|
|
2481
|
+
const privateKey = this.productBuilderService.fetchPrivateKey();
|
|
2482
|
+
if (data.session && privateKey) {
|
|
2483
|
+
const { processSessionForExecution } = await Promise.resolve().then(() => __importStar(require('../../sessions')));
|
|
2484
|
+
const sessionResult = await processSessionForExecution(data.session, privateKey, data.input, data.env);
|
|
2485
|
+
if (sessionResult.error) {
|
|
2486
|
+
throw new Error(`Session validation failed: ${sessionResult.error}`);
|
|
2487
|
+
}
|
|
2488
|
+
resolvedInput = sessionResult.input;
|
|
2489
|
+
sessionLogFields = sessionResult.logFields;
|
|
2490
|
+
}
|
|
2491
|
+
// Add session fields to base logs
|
|
2492
|
+
this.baseLogs = Object.assign(Object.assign({}, this.baseLogs), sessionLogFields);
|
|
2493
|
+
const productEnv = await this.fetchEnv(data.env, additional_logs);
|
|
2350
2494
|
this.processEnv = productEnv;
|
|
2351
2495
|
if (!productEnv.active) {
|
|
2352
2496
|
throw new Error(`Environment ${data.env} is not active`);
|
|
@@ -2355,7 +2499,7 @@ class ProcessorService {
|
|
|
2355
2499
|
type: types_1.FeatureEventTypes.PUBLISH,
|
|
2356
2500
|
event: data.event,
|
|
2357
2501
|
cache: data.cache,
|
|
2358
|
-
input:
|
|
2502
|
+
input: resolvedInput,
|
|
2359
2503
|
env: productEnv,
|
|
2360
2504
|
retries: 0,
|
|
2361
2505
|
allow_fail: false,
|
|
@@ -2374,7 +2518,242 @@ class ProcessorService {
  return { process_id: this.process_id };
  }
  }
- async processJob(job) {
+ async processJob(job, additional_logs = {}) {
+ var _a, _b, _c, _d, _e, _f, _g, _h;
+ const productJob = await this.productBuilderService.fetchJob(job.event);
+ if (!productJob) {
+ throw new Error(`Job ${job.event} not found`);
+ }
+ await this.validateActionDataMappingInput(job.input, productJob.type);
+ const NOW = Date.now();
+ // Treat anything above Jan 1, 2023 as a timestamp (to be safe and future-proof)
+ const IS_PROBABLY_TIMESTAMP = job.start_at > 1672531200000;
+ const scheduled_at = IS_PROBABLY_TIMESTAMP ? job.start_at : NOW + job.start_at;
+ const delay = Math.max(0, scheduled_at - NOW);
+ let jobInput;
+ let namespace = productJob.type;
+ if (productJob.type === types_1.JobEventTypes.ACTION) {
+ const input = {
+ env: job.env,
+ product: job.product,
+ app: productJob.app,
+ cache: job.cache,
+ input: job.input,
+ action: job.event,
+ session: job.session,
+ };
+ jobInput = input;
+ namespace = 'actions';
+ }
+ else if (productJob.type === types_1.JobEventTypes.NOTIFICATION) {
+ const input = {
+ env: job.env,
+ product: job.product,
+ event: job.event,
+ input: job.input,
+ session: job.session,
+ cache: job.cache,
+ };
+ jobInput = input;
+ namespace = 'notifications';
+ }
+ else if (productJob.type === types_1.JobEventTypes.PUBLISH) {
+ const input = {
+ env: job.env,
+ product: job.product,
+ event: job.event,
+ cache: job.cache,
+ session: job.session,
+ input: job.input,
+ };
+ jobInput = input;
+ namespace = 'events';
+ }
+ else if (productJob.type === types_1.JobEventTypes.STORAGE) {
+ const input = {
+ env: job.env,
+ product: job.product,
+ event: job.event,
+ cache: job.cache,
+ session: job.session,
+ input: job.input,
+ };
+ jobInput = input;
+ namespace = 'storage';
+ }
+ else if (productJob.type === types_1.JobEventTypes.DATABASE_ACTION) {
+ // Database action job - predefined database actions
+ const input = {
+ env: job.env,
+ product: job.product,
+ database: productJob.app,
+ event: productJob.event,
+ cache: job.cache,
+ session: job.session,
+ input: job.input,
+ };
+ jobInput = input;
+ namespace = 'database';
+ }
+ else if (productJob.type === types_1.JobEventTypes.DATABASE_OPERATION) {
+ // Database operation job - direct database operations like createOne, findMany, etc.
+ const input = {
+ env: job.env,
+ product: job.product,
+ database: productJob.app,
+ operation: productJob.event,
+ cache: job.cache,
+ session: job.session,
+ input: job.input,
+ };
+ jobInput = input;
+ namespace = 'database';
+ }
+ else if (productJob.type === types_1.JobEventTypes.GRAPH_ACTION) {
+ // Graph action job - predefined graph actions
+ const input = {
+ env: job.env,
+ product: job.product,
+ graph: productJob.app,
+ event: productJob.event,
+ cache: job.cache,
+ session: job.session,
+ input: job.input,
+ };
+ jobInput = input;
+ namespace = 'graphs';
+ }
+ else if (productJob.type === types_1.JobEventTypes.GRAPH_OPERATION) {
+ // Graph operation job - direct graph operations like createNode, traverse, etc.
+ const input = {
+ env: job.env,
+ product: job.product,
+ graph: productJob.app,
+ operation: productJob.event,
+ cache: job.cache,
+ session: job.session,
+ input: job.input,
+ };
+ jobInput = input;
+ namespace = 'graphs';
+ }
+ else if (productJob.type === types_1.JobEventTypes.WORKFLOW) {
+ // Workflow job - execute a workflow
+ const input = {
+ env: job.env,
+ product: job.product,
+ workflow: productJob.app,
+ cache: job.cache,
+ session: job.session,
+ input: job.input,
+ };
+ jobInput = input;
+ namespace = 'workflows';
+ }
+ else {
+ throw new Error(`Job type ${productJob.type} not supported`);
+ }
+ // Determine if this is a recurring job
+ const isRecurring = !!(((_a = job.repeat) === null || _a === void 0 ? void 0 : _a.cron) || ((_b = job.repeat) === null || _b === void 0 ? void 0 : _b.every));
+ // Generate a unique job ID
+ const jobId = `job_${(0, uuid_1.v4)().replace(/-/g, '').substring(0, 16)}`;
+ // Build queue options
+ const options = {
+ jobId,
+ };
+ // Handle `delay` only if repeat.every is not defined
+ if (!((_c = job.repeat) === null || _c === void 0 ? void 0 : _c.every) && delay > 0) {
+ options.delay = delay;
+ }
+ // Add repeat config if defined
+ let next_run_at;
+ if (job.repeat) {
+ const { every, cron, tz, limit, endDate } = job.repeat;
+ if (cron) {
+ options.repeat = {
+ cron,
+ tz,
+ limit,
+ endDate,
+ };
+ // Calculate next run time from cron
+ try {
+ const cronParser = require('cron-parser');
+ const cronOptions = {};
+ if (tz)
+ cronOptions.tz = tz;
+ const interval = cronParser.parseExpression(cron, cronOptions);
+ next_run_at = interval.next().getTime();
+ }
+ catch (err) {
+ next_run_at = NOW;
+ }
+ }
+ else if (every) {
+ options.repeat = {
+ every,
+ limit,
+ endDate,
+ };
+ next_run_at = NOW + every;
+ }
+ }
+ // Store job metadata in Redis if redisClient is available
+ if (this.redisClient) {
+ const jobData = {
+ id: jobId,
+ status: delay > 0 ? 'scheduled' : 'queued',
+ type: productJob.type,
+ namespace,
+ product: job.product,
+ env: job.env,
+ event: job.event,
+ app: productJob.app,
+ scheduled_at,
+ recurring: isRecurring,
+ cron: (_d = job.repeat) === null || _d === void 0 ? void 0 : _d.cron,
+ every: (_e = job.repeat) === null || _e === void 0 ? void 0 : _e.every,
+ next_run_at,
+ execution_count: 0,
+ limit: (_f = job.repeat) === null || _f === void 0 ? void 0 : _f.limit,
+ end_date: ((_g = job.repeat) === null || _g === void 0 ? void 0 : _g.endDate)
+ ? typeof job.repeat.endDate === 'string'
+ ? new Date(job.repeat.endDate).getTime()
+ : job.repeat.endDate
+ : undefined,
+ tz: (_h = job.repeat) === null || _h === void 0 ? void 0 : _h.tz,
+ retries: job.retries || 0,
+ retry_count: 0,
+ input: jobInput,
+ session: job.session,
+ cache: job.cache,
+ workspace_id: this.workspace_id,
+ created_at: NOW,
+ updated_at: NOW,
+ };
+ // Store in Redis with TTL of 90 days
+ const jobKey = `job:${this.workspace_id}:${jobId}`;
+ const redis = this.redisClient;
+ await redis.setex(jobKey, 90 * 24 * 60 * 60, JSON.stringify(jobData));
+ // Add to status index
+ const statusKey = `job_status:${this.workspace_id}:${jobData.status}`;
+ await redis.sadd(statusKey, jobId);
+ // Add to product index
+ const productKey = `job_product:${this.workspace_id}:${job.product}`;
+ await redis.sadd(productKey, jobId);
+ }
+ // Add job input with the job ID for tracking
+ jobInput._job_id = jobId;
+ // Add job to queue
+ await this.queues.jobs.add(productJob.type, jobInput, options);
+ return {
+ job_id: jobId,
+ status: delay > 0 ? 'scheduled' : 'queued',
+ scheduled_at,
+ recurring: isRecurring,
+ next_run_at,
+ };
+ }
  async sendExpoNotification(payload, device_tokens) {
  const message = {
  to: device_tokens,
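The new processJob treats `start_at` as either an absolute epoch timestamp or a relative delay, and it derives `next_run_at` for cron repeats with the cron-parser package. The sketch below is illustrative only (it is not part of the SDK); `resolveSchedule` is a hypothetical helper that mirrors the heuristic and the cron-parser call seen in the diff.

```ts
// Minimal sketch, assuming cron-parser is available as in the compiled code above.
import { parseExpression } from 'cron-parser';

// Any value after Jan 1, 2023 (1672531200000 ms) is treated as an absolute timestamp.
const TIMESTAMP_CUTOFF = 1672531200000;

export function resolveSchedule(
  startAt: number,
  repeat?: { cron?: string; every?: number; tz?: string },
) {
  const now = Date.now();
  const scheduledAt = startAt > TIMESTAMP_CUTOFF ? startAt : now + startAt;
  const delay = Math.max(0, scheduledAt - now);

  let nextRunAt: number | undefined;
  if (repeat?.cron) {
    try {
      // cron-parser computes the next occurrence, optionally in a timezone
      const interval = parseExpression(repeat.cron, repeat.tz ? { tz: repeat.tz } : {});
      nextRunAt = interval.next().getTime();
    } catch {
      nextRunAt = now; // fall back to "now" when the expression cannot be parsed
    }
  } else if (repeat?.every) {
    nextRunAt = now + repeat.every;
  }
  return { scheduledAt, delay, nextRunAt };
}
```

A relative call such as `resolveSchedule(60000)` yields a one-minute delay, while `resolveSchedule(1735689600000)` is taken as an absolute timestamp because it lies past the 2023 cutoff.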
@@ -2428,19 +2807,53 @@ class ProcessorService {
  await this.sendFirebaseNotification({ title, body, data }, payload.device_tokens, notification.credentials);
  this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Send Firebase notification - success', data: { title, body: (0, processor_utils_1.anonymizeObject)(data), data: (0, processor_utils_1.anonymizeObject)(data) }, status: types_1.LogEventStatus.SUCCESS }));
  }
- async runNotification(notification, additional_logs) {
- var _a, _b, _c, _d, _e, _f, _g, _h;
+ async runNotification(notification, additional_logs, bootstrapData) {
+ var _a, _b, _c, _d, _e, _f, _g, _h, _j;
  const { event } = notification;
  const input = notification.input;
  try {
-
-
-
- if
-
-
-
-
+ let notificationEvent;
+ let message;
+ let envConfig;
+ // Use bootstrap data if provided, otherwise fetch via API
+ if (bootstrapData) {
+ notificationEvent = bootstrapData.notification;
+ message = bootstrapData.message;
+ envConfig = bootstrapData.env_config;
+ }
+ else {
+ // Fallback to original API-based fetching
+ notificationEvent = (await this.productBuilderService.fetchNotification(event.split(':')[0]));
+ message = await this.productBuilderService.fetchNotificationMessage(event);
+ if (!message) {
+ throw new Error(`Message ${event} not found`);
+ }
+ envConfig = (_a = notificationEvent.envs) === null || _a === void 0 ? void 0 : _a.find((data) => data.slug === notification.env.slug);
+ }
+ if (!envConfig) {
+ throw new Error(`Notification env config for ${notification.env.slug} not found`);
+ }
+ let { push_notifications: notifications, emails, callbacks, sms: smses, } = envConfig;
+ // Resolve any $Secret{} references in notification configs
+ const secretsService = (0, secrets_1.getSecretsService)();
+ if (secretsService) {
+ if (notifications && (0, secrets_1.mightContainSecrets)(notifications)) {
+ const resolved = await secretsService.resolve(notifications, { env: notification.env.slug });
+ notifications = resolved.value;
+ }
+ if (emails && (0, secrets_1.mightContainSecrets)(emails)) {
+ const resolved = await secretsService.resolve(emails, { env: notification.env.slug });
+ emails = resolved.value;
+ }
+ if (callbacks && (0, secrets_1.mightContainSecrets)(callbacks)) {
+ const resolved = await secretsService.resolve(callbacks, { env: notification.env.slug });
+ callbacks = resolved.value;
+ }
+ if (smses && (0, secrets_1.mightContainSecrets)(smses)) {
+ const resolved = await secretsService.resolve(smses, { env: notification.env.slug });
+ smses = resolved.value;
+ }
+ }
  const { push_notification: push, email, callback, sms } = message;
  this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetching notification', data: notificationEvent, status: types_1.LogEventStatus.SUCCESS }));
  if (push) {
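Each notification channel config is passed through the same guard-then-resolve pattern for `$Secret{}` references. The sketch below only mirrors how the compiled code calls the secrets helpers; the import path and generic typing are assumptions, not part of the published API.

```ts
// Minimal sketch, assuming getSecretsService/mightContainSecrets behave as used above.
import { getSecretsService, mightContainSecrets } from './secrets'; // assumed path

async function resolveIfNeeded<T>(value: T, env: string): Promise<T> {
  const secretsService = getSecretsService();
  // Skip the round trip entirely when nothing looks like a $Secret{} reference.
  if (!secretsService || !value || !mightContainSecrets(value)) return value;
  const resolved = await secretsService.resolve(value, { env });
  return resolved.value as T;
}

// e.g. emails = await resolveIfNeeded(envConfig.emails, notification.env.slug);
```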
@@ -2486,7 +2899,11 @@ class ProcessorService {
  const templateMaker = (0, handlebars_1.compile)(email.template);
  const template = templateMaker(input.email.template);
  const subject = (0, processor_utils_1.replacePlaceholderString)(email.subject, input.email.subject || {});
- this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Generate email template - success', data: {
+ this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Generate email template - success', data: {
+ template: (0, processor_utils_1.anonymizeValue)(template),
+ subject: (0, processor_utils_1.anonymizeValue)(subject),
+ input: (0, processor_utils_1.anonymizeObject)(input.email),
+ }, status: types_1.LogEventStatus.SUCCESS }));
  const mailOptions = {
  from,
  to: input.email.recipients,
@@ -2504,10 +2921,10 @@ class ProcessorService {
  }
  if (callback && callbacks) {
  const payload = {
- query: Object.assign(Object.assign({}, (
- headers: Object.assign(Object.assign({}, (
- params: Object.assign(Object.assign({}, (
- body: Object.assign(Object.assign({}, (
+ query: Object.assign(Object.assign({}, (_b = input.callback) === null || _b === void 0 ? void 0 : _b.query), (_c = callbacks.auth) === null || _c === void 0 ? void 0 : _c.query),
+ headers: Object.assign(Object.assign({}, (_d = input.callback) === null || _d === void 0 ? void 0 : _d.headers), (_e = callbacks.auth) === null || _e === void 0 ? void 0 : _e.headers),
+ params: Object.assign(Object.assign({}, (_f = input.callback) === null || _f === void 0 ? void 0 : _f.params), (_g = callbacks.auth) === null || _g === void 0 ? void 0 : _g.params),
+ body: Object.assign(Object.assign({}, (_h = input.callback) === null || _h === void 0 ? void 0 : _h.body), (_j = callbacks.auth) === null || _j === void 0 ? void 0 : _j.body),
  };
  input.callback.body = await this.generatePayload(payload.body, notification, additional_logs, message.callback_data.filter((data) => data.parent_key === 'body'));
  input.callback.query = await this.generatePayload(payload.query, notification, additional_logs, message.callback_data.filter((data) => data.parent_key === 'query'));
@@ -2529,7 +2946,7 @@ class ProcessorService {
  }
  if (sms && smses) {
  try {
- input.sms.body = await (0, processor_utils_1.replacePlaceholderString)(sms, input.sms.body);
+ input.sms.body = (await (0, processor_utils_1.replacePlaceholderString)(sms, input.sms.body));
  const SmsClient = await (0, sms_repo_1.loadSMSClient)();
  const smsClient = new SmsClient(smses);
  this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { name: 'Send sms - initiated', data: { message: input.sms.body, config: (0, processor_utils_1.anonymizeObject)(smses) }, status: types_1.LogEventStatus.SUCCESS }));
@@ -2548,193 +2965,123 @@ class ProcessorService {
|
|
|
2548
2965
|
throw e;
|
|
2549
2966
|
}
|
|
2550
2967
|
}
|
|
2551
|
-
async runMigration(product_tag, tag, env, type) {
|
|
2552
|
-
try {
|
|
2553
|
-
this.productTag = product_tag;
|
|
2554
|
-
const [dbTag, migrationTag] = tag.split(':');
|
|
2555
|
-
if (!dbTag || !migrationTag) {
|
|
2556
|
-
throw new Error('tag should be in the format database_tag:migration_tag');
|
|
2557
|
-
}
|
|
2558
|
-
this.start = Date.now();
|
|
2559
|
-
const additional_logs = {
|
|
2560
|
-
parent_tag: dbTag,
|
|
2561
|
-
child_tag: migrationTag,
|
|
2562
|
-
type: types_1.LogEventTypes.DB_MIGRATION,
|
|
2563
|
-
name: 'Run Migration',
|
|
2564
|
-
};
|
|
2565
|
-
await this.intializeProduct(additional_logs);
|
|
2566
|
-
const db = this.productBuilderService.fetchDatabase(dbTag);
|
|
2567
|
-
if (!db) {
|
|
2568
|
-
throw new Error('Database not found');
|
|
2569
|
-
}
|
|
2570
|
-
if (db.type === types_1.DatabaseTypes.MONGODB) {
|
|
2571
|
-
throw new Error(`${db.type} does not support migrations`);
|
|
2572
|
-
}
|
|
2573
|
-
const migration = this.productBuilderService.fetchDatabaseMigration(tag);
|
|
2574
|
-
if (!migration) {
|
|
2575
|
-
throw new Error('Database migration not found');
|
|
2576
|
-
}
|
|
2577
|
-
const dbEnv = db.envs.find((el) => el.slug === env);
|
|
2578
|
-
if (!dbEnv) {
|
|
2579
|
-
throw new Error(`Environment ${env} not found`);
|
|
2580
|
-
}
|
|
2581
|
-
const productEnv = this.fetchEnv(env, additional_logs);
|
|
2582
|
-
if (!productEnv.active) {
|
|
2583
|
-
throw new Error(`Environment ${env} is not active`);
|
|
2584
|
-
}
|
|
2585
|
-
const product = this.productBuilderService.fetchProduct();
|
|
2586
|
-
const migrations = this.productBuilderService.fetchDatabaseMigrations(dbTag);
|
|
2587
|
-
//this.processEnv = productEnv;
|
|
2588
|
-
/* const check = migration.envs.find((migrationEnv) => migrationEnv.slug === env);
|
|
2589
|
-
if (!check) {
|
|
2590
|
-
throw new Error(`Migration does not exist for environment ${env}`);
|
|
2591
|
-
}*/
|
|
2592
|
-
const process_id = (0, processor_utils_1.generateObjectId)();
|
|
2593
|
-
this.baseLogs = Object.assign({ product_tag: this.productTag, product_id: this.productId, workspace_id: this.workspace_id, env,
|
|
2594
|
-
process_id, data: { tag, env } }, additional_logs);
|
|
2595
|
-
const migrationsToRun = (0, processor_utils_1.getMigrationsToRun)((0, processor_utils_1.structuredClone)(migrations), type, migrationTag, env);
|
|
2596
|
-
if (db.type === types_1.DatabaseTypes.POSTGRES) {
|
|
2597
|
-
const PostgresDBHandler = await (0, postgres_repo_1.loadPostgresHandler)();
|
|
2598
|
-
const pgHandler = new PostgresDBHandler((0, processor_utils_1.decrypt)(dbEnv.connection_url, product.private_key));
|
|
2599
|
-
for (const migrationToRun of migrationsToRun) {
|
|
2600
|
-
const envS = migration.envs.find((envT) => envT.slug === env && type === envT.type);
|
|
2601
|
-
if (envS && envS.status === types_1.MigrationStatus.PROCESSED) {
|
|
2602
|
-
continue;
|
|
2603
|
-
}
|
|
2604
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: `Starting migration ${type} name: ${migrationToRun.name}`, data: { migration: migrationToRun, type }, status: types_1.LogEventStatus.PROCESSING }));
|
|
2605
|
-
const { type: migType, result, status, processed_at, } = await pgHandler.runMigration(migrationToRun.value[type], type, envS);
|
|
2606
|
-
if (!envS) {
|
|
2607
|
-
migrationToRun.envs.push({ slug: env, results: result, status, type, processed_at });
|
|
2608
|
-
}
|
|
2609
|
-
else {
|
|
2610
|
-
migrationToRun.envs.map((envT) => {
|
|
2611
|
-
if (envT.slug === env && type === envT.type) {
|
|
2612
|
-
envT.results = [...result];
|
|
2613
|
-
envT.processed_at = processed_at;
|
|
2614
|
-
return envT;
|
|
2615
|
-
}
|
|
2616
|
-
return envT;
|
|
2617
|
-
});
|
|
2618
|
-
}
|
|
2619
|
-
this.productBuilderService.updateDatabaseMigration(Object.assign(Object.assign({}, migrationToRun), { tag }));
|
|
2620
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { successful_execution: true, message: `Starting migration ${type} name: ${migrationToRun.name}`, data: { migration: migrationToRun, type }, status: types_1.LogEventStatus.SUCCESS }));
|
|
2621
|
-
}
|
|
2622
|
-
return { process_id };
|
|
2623
|
-
}
|
|
2624
|
-
}
|
|
2625
|
-
catch (e) {
|
|
2626
|
-
this.logService.add(Object.assign(Object.assign({}, this.baseLogs), { failed_execution: true, message: 'Attempt migration - failed', data: e, status: types_1.LogEventStatus.FAIL }));
|
|
2627
|
-
await this.logService.publish();
|
|
2628
|
-
}
|
|
2629
|
-
}
|
|
2630
2968
|
/*async runFunction(data: IFeatureEvent, additional_logs: Partial<ILogData>): Promise<any> {
|
|
2631
|
-
|
|
2632
|
-
|
|
2633
|
-
|
|
2634
|
-
|
|
2635
|
-
|
|
2636
|
-
|
|
2637
|
-
|
|
2638
|
-
|
|
2639
|
-
|
|
2640
|
-
|
|
2641
|
-
|
|
2642
|
-
|
|
2643
|
-
});
|
|
2644
|
-
|
|
2645
|
-
const process_id = generateObjectId();
|
|
2646
|
-
this.process_id = process_id;
|
|
2647
|
-
|
|
2648
|
-
|
|
2649
|
-
try {
|
|
2650
|
-
await this.intializeProduct(additional_logs);
|
|
2651
|
-
this.logService.add({
|
|
2652
|
-
...this.baseLogs,
|
|
2653
|
-
...additional_logs,
|
|
2654
|
-
message: 'Attempt function - initiated',
|
|
2655
|
-
data,
|
|
2656
|
-
status: LogEventStatus.PROCESSING,
|
|
2657
|
-
});
|
|
2658
|
-
|
|
2659
|
-
this.logService.add({
|
|
2660
|
-
...this.baseLogs,
|
|
2661
|
-
...additional_logs,
|
|
2662
|
-
message: 'Fetch function - initiated',
|
|
2663
|
-
data: data,
|
|
2664
|
-
status: LogEventStatus.PROCESSING,
|
|
2665
|
-
});
|
|
2666
|
-
|
|
2667
|
-
const cloudFunction = this.productBuilderService.fetchFunction(data.event);
|
|
2668
|
-
|
|
2669
|
-
this.logService.add({
|
|
2670
|
-
...this.baseLogs,
|
|
2671
|
-
...additional_logs,
|
|
2672
|
-
message: 'Fetch function - success',
|
|
2673
|
-
data: data,
|
|
2674
|
-
status: LogEventStatus.SUCCESS,
|
|
2675
|
-
});
|
|
2676
|
-
|
|
2677
|
-
this.logService.add({
|
|
2678
|
-
...this.baseLogs,
|
|
2679
|
-
...additional_logs,
|
|
2680
|
-
message: 'Validate function payload - initiated',
|
|
2681
|
-
data: { data, payload: input.payload },
|
|
2682
|
-
status: LogEventStatus.PROCESSING,
|
|
2683
|
-
});
|
|
2684
|
-
|
|
2685
|
-
validateFunctionInputKeys(cloudFunction.inputs, input.payload);
|
|
2686
|
-
|
|
2687
|
-
this.logService.add({
|
|
2688
|
-
...this.baseLogs,
|
|
2689
|
-
...additional_logs,
|
|
2690
|
-
message: 'Validate function payload - success',
|
|
2691
|
-
data: { data, payload: input.payload },
|
|
2692
|
-
status: LogEventStatus.SUCCESS,
|
|
2693
|
-
});
|
|
2694
|
-
|
|
2695
|
-
this.logService.add({
|
|
2696
|
-
...this.baseLogs,
|
|
2697
|
-
...additional_logs,
|
|
2698
|
-
message: 'Run function - initiated',
|
|
2699
|
-
data: { data, payload: input.payload },
|
|
2700
|
-
status: LogEventStatus.PROCESSING,
|
|
2701
|
-
});
|
|
2702
|
-
|
|
2703
|
-
const response = await makeFunctionsRequest(cloudFunction, input.payload);
|
|
2704
|
-
|
|
2705
|
-
this.logService.add({
|
|
2706
|
-
...this.baseLogs,
|
|
2707
|
-
...additional_logs,
|
|
2708
|
-
message: 'Run function - success',
|
|
2709
|
-
data: { data, payload: input.payload },
|
|
2710
|
-
status: LogEventStatus.SUCCESS,
|
|
2711
|
-
});
|
|
2712
|
-
} catch (e) {
|
|
2713
|
-
this.logService.add({
|
|
2714
|
-
...this.baseLogs,
|
|
2715
|
-
...additional_logs,
|
|
2716
|
-
message: 'Run function - failed',
|
|
2717
|
-
data: e,
|
|
2718
|
-
status: LogEventStatus.FAIL,
|
|
2969
|
+
const { product_id, env } = data;
|
|
2970
|
+
const input = data.input as IFunctionRequest;
|
|
2971
|
+
|
|
2972
|
+
this.productId = product_id;
|
|
2973
|
+
|
|
2974
|
+
this.logService = new LogsService({
|
|
2975
|
+
product_id,
|
|
2976
|
+
workspace_id: this.workspace_id,
|
|
2977
|
+
public_key: this.public_key,
|
|
2978
|
+
user_id: this.user_id,
|
|
2979
|
+
token: this.token,
|
|
2980
|
+
env_type: this.environment,
|
|
2719
2981
|
});
|
|
2720
|
-
|
|
2721
|
-
|
|
2722
|
-
|
|
2723
|
-
|
|
2982
|
+
|
|
2983
|
+
const process_id = generateObjectId();
|
|
2984
|
+
this.process_id = process_id;
|
|
2985
|
+
|
|
2986
|
+
|
|
2987
|
+
try {
|
|
2988
|
+
await this.intializeProduct(additional_logs);
|
|
2989
|
+
this.logService.add({
|
|
2990
|
+
...this.baseLogs,
|
|
2991
|
+
...additional_logs,
|
|
2992
|
+
message: 'Attempt function - initiated',
|
|
2993
|
+
data,
|
|
2994
|
+
status: LogEventStatus.PROCESSING,
|
|
2995
|
+
});
|
|
2996
|
+
|
|
2997
|
+
this.logService.add({
|
|
2998
|
+
...this.baseLogs,
|
|
2999
|
+
...additional_logs,
|
|
3000
|
+
message: 'Fetch function - initiated',
|
|
3001
|
+
data: data,
|
|
3002
|
+
status: LogEventStatus.PROCESSING,
|
|
3003
|
+
});
|
|
3004
|
+
|
|
3005
|
+
const cloudFunction = await this.productBuilderService.fetchFunction(data.event);
|
|
3006
|
+
|
|
3007
|
+
this.logService.add({
|
|
3008
|
+
...this.baseLogs,
|
|
3009
|
+
...additional_logs,
|
|
3010
|
+
message: 'Fetch function - success',
|
|
3011
|
+
data: data,
|
|
3012
|
+
status: LogEventStatus.SUCCESS,
|
|
3013
|
+
});
|
|
3014
|
+
|
|
3015
|
+
this.logService.add({
|
|
3016
|
+
...this.baseLogs,
|
|
3017
|
+
...additional_logs,
|
|
3018
|
+
message: 'Validate function payload - initiated',
|
|
3019
|
+
data: { data, payload: input.payload },
|
|
3020
|
+
status: LogEventStatus.PROCESSING,
|
|
3021
|
+
});
|
|
3022
|
+
|
|
3023
|
+
validateFunctionInputKeys(cloudFunction.inputs, input.payload);
|
|
3024
|
+
|
|
3025
|
+
this.logService.add({
|
|
3026
|
+
...this.baseLogs,
|
|
3027
|
+
...additional_logs,
|
|
3028
|
+
message: 'Validate function payload - success',
|
|
3029
|
+
data: { data, payload: input.payload },
|
|
3030
|
+
status: LogEventStatus.SUCCESS,
|
|
3031
|
+
});
|
|
3032
|
+
|
|
3033
|
+
this.logService.add({
|
|
3034
|
+
...this.baseLogs,
|
|
3035
|
+
...additional_logs,
|
|
3036
|
+
message: 'Run function - initiated',
|
|
3037
|
+
data: { data, payload: input.payload },
|
|
3038
|
+
status: LogEventStatus.PROCESSING,
|
|
3039
|
+
});
|
|
3040
|
+
|
|
3041
|
+
const response = await makeFunctionsRequest(cloudFunction, input.payload);
|
|
3042
|
+
|
|
3043
|
+
this.logService.add({
|
|
3044
|
+
...this.baseLogs,
|
|
3045
|
+
...additional_logs,
|
|
3046
|
+
message: 'Run function - success',
|
|
3047
|
+
data: { data, payload: input.payload },
|
|
3048
|
+
status: LogEventStatus.SUCCESS,
|
|
3049
|
+
});
|
|
3050
|
+
} catch (e) {
|
|
3051
|
+
this.logService.add({
|
|
3052
|
+
...this.baseLogs,
|
|
3053
|
+
...additional_logs,
|
|
3054
|
+
message: 'Run function - failed',
|
|
3055
|
+
data: e,
|
|
3056
|
+
status: LogEventStatus.FAIL,
|
|
3057
|
+
});
|
|
3058
|
+
await this.logService.publish();
|
|
3059
|
+
}
|
|
3060
|
+
}*/
|
|
3061
|
+
+ async runStorage(data, additional_logs = {}, bootstrapData) {
  const { product_id, env, event, cache: cache_tag } = data;
  const input = data.input;
  try {
-
-
-
+ let storage;
+ let storageEnv;
+ // Use bootstrap data if provided, otherwise fetch via API
+ if (bootstrapData) {
+ storage = bootstrapData.storage;
+ storageEnv = bootstrapData.storage_env;
+ }
+ else {
+ // Fallback to original API-based fetching
+ await this.intializeProduct(additional_logs);
+ storage = await this.productBuilderService.fetchStorage(event);
+ storageEnv = storage.envs.find((el) => el.slug === env.slug);
+ }
  if (!storageEnv) {
  throw new Error(`Storage env for ${env.slug} not found`);
  }
  this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch storage details - success', data: { storage }, status: types_1.LogEventStatus.SUCCESS }));
- const product = this.productBuilderService.fetchProduct();
  let result;
  if (cache_tag && this.redisClient) {
- const productCache = this.productBuilderService.fetchCache(cache_tag);
+ const productCache = await this.productBuilderService.fetchCache(cache_tag);
  if (!productCache) {
  throw new Error('Invalid cache tag ');
  }
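The storage path follows a read-through cache: look up the serialized input under the cache tag, otherwise run the storage request and write the result back. The wrapper below is a hedged sketch with assumed helper shapes (the SDK's fetchFromCache/addToCache are instance methods with more fields); it only illustrates the control flow.

```ts
// Sketch under assumptions: helper signatures are simplified stand-ins.
type CacheOps = {
  fetchFromCache(args: { cache_tag: string; input: string; privateKey: string; expiry: number }): Promise<string | null>;
  addToCache(args: { cache_tag: string; input: string; privateKey: string; data: string; timestamp: number }): Promise<void>;
};

async function withCache<T>(
  ops: CacheOps,
  cacheTag: string,
  privateKey: string,
  expiry: number,
  input: unknown,
  compute: () => Promise<T>,
): Promise<T> {
  const inputString = JSON.stringify(input);
  // Cache hit: return the stored result without touching the storage provider.
  const hit = await ops.fetchFromCache({ cache_tag: cacheTag, input: inputString, privateKey, expiry });
  if (hit) return JSON.parse(hit) as T;
  // Cache miss: compute, then store the serialized result for next time.
  const result = await compute();
  await ops.addToCache({ cache_tag: cacheTag, input: inputString, privateKey, data: JSON.stringify(result), timestamp: Date.now() });
  return result;
}
```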
@@ -2742,7 +3089,7 @@ class ProcessorService {
  const check = await this.fetchFromCache({
  cache_tag,
  input: inputString,
- privateKey:
+ privateKey: this.productBuilderService.fetchPrivateKey(),
  expiry: productCache.expiry,
  }, additional_logs);
  if (check) {
@@ -2752,18 +3099,22 @@ class ProcessorService {
  }
  }
  input.buffer = input.buffer ? await this.generateStringValues(input.buffer, '', additional_logs, []) : undefined;
- input.fileName = input.fileName
-
+ input.fileName = input.fileName
+ ? await this.generateStringValues(input.fileName, '', additional_logs, [])
+ : undefined;
+ input.mimeType = input.mimeType
+ ? await this.generateStringValues(input.mimeType, '', additional_logs, [])
+ : undefined;
  result = await this.processStorageRequest(data, input, storageEnv, additional_logs);
  if (cache_tag && this.redisClient) {
- const productCache = this.productBuilderService.fetchCache(cache_tag);
+ const productCache = await this.productBuilderService.fetchCache(cache_tag);
  if (!productCache) {
  throw new Error('Invalid cache tag ');
  }
  const inputString = JSON.stringify(input);
  await this.addToCache({
  input: inputString,
- privateKey:
+ privateKey: this.productBuilderService.fetchPrivateKey(),
  data: JSON.stringify(result),
  cache_tag,
  timestamp: Date.now(),
@@ -2778,131 +3129,13 @@ class ProcessorService {
|
|
|
2778
3129
|
throw e;
|
|
2779
3130
|
}
|
|
2780
3131
|
}
|
|
2781
|
-
async runDBAction(db_action, additional_logs = {}) {
|
|
2782
|
-
const { product_id, env, event, cache: cache_tag } = db_action;
|
|
2783
|
-
const input = db_action.input;
|
|
2784
|
-
try {
|
|
2785
|
-
//await this.intializeProduct(additional_logs);
|
|
2786
|
-
const [database_tag, action_tag] = event.split(':');
|
|
2787
|
-
const product = this.productBuilderService.fetchProduct();
|
|
2788
|
-
const database = await this.productBuilderService.fetchDatabase(database_tag);
|
|
2789
|
-
const databaseAction = await this.productBuilderService.fetchDatabaseAction(event);
|
|
2790
|
-
const databaseEnv = database.envs.find((el) => el.slug === env.slug);
|
|
2791
|
-
if (!databaseEnv) {
|
|
2792
|
-
throw new Error(`Database env for ${env.slug} not found`);
|
|
2793
|
-
}
|
|
2794
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch database action - success', data: databaseAction, status: types_1.LogEventStatus.SUCCESS }));
|
|
2795
|
-
input.data = await this.generatePayload(input.data, db_action, additional_logs, databaseAction.data);
|
|
2796
|
-
if (Array.isArray(input.data)) {
|
|
2797
|
-
await Promise.all(input.data.map(async (d) => {
|
|
2798
|
-
const dataValidationPayload = (await this.inputService.parseJson({
|
|
2799
|
-
data: d,
|
|
2800
|
-
expected: types_1.ExpectedValues.PARSEINPUT,
|
|
2801
|
-
}));
|
|
2802
|
-
this.inputService.validateInput(dataValidationPayload, databaseAction.data);
|
|
2803
|
-
}));
|
|
2804
|
-
}
|
|
2805
|
-
else {
|
|
2806
|
-
const dataValidationPayload = (await this.inputService.parseJson({
|
|
2807
|
-
data: input.data,
|
|
2808
|
-
expected: types_1.ExpectedValues.PARSEINPUT,
|
|
2809
|
-
}));
|
|
2810
|
-
this.inputService.validateInput(dataValidationPayload, databaseAction.data);
|
|
2811
|
-
if (input.filter) {
|
|
2812
|
-
const filterValidationPayload = (await this.inputService.parseJson({
|
|
2813
|
-
data: input.filter,
|
|
2814
|
-
expected: types_1.ExpectedValues.PARSEINPUT,
|
|
2815
|
-
}));
|
|
2816
|
-
this.inputService.validateInput(filterValidationPayload, databaseAction.filterData);
|
|
2817
|
-
}
|
|
2818
|
-
}
|
|
2819
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Validate database action payload - success', data: { db_action, payload: (0, processor_utils_1.anonymizeObject)(input.data) }, status: types_1.LogEventStatus.SUCCESS }));
|
|
2820
|
-
if (database.type === types_1.DatabaseTypes.MONGODB) {
|
|
2821
|
-
if (databaseAction.type === types_1.DatabaseActionTypes.UPDATE) {
|
|
2822
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Validate database update action filter - success', data: {}, status: types_1.LogEventStatus.SUCCESS }));
|
|
2823
|
-
}
|
|
2824
|
-
}
|
|
2825
|
-
let result;
|
|
2826
|
-
if (cache_tag && this.redisClient) {
|
|
2827
|
-
const productCache = this.productBuilderService.fetchCache(cache_tag);
|
|
2828
|
-
if (!productCache) {
|
|
2829
|
-
throw new Error('Invalid cache tag ');
|
|
2830
|
-
}
|
|
2831
|
-
const inputString = JSON.stringify(input);
|
|
2832
|
-
const check = await this.fetchFromCache({
|
|
2833
|
-
cache_tag,
|
|
2834
|
-
input: inputString,
|
|
2835
|
-
privateKey: product.private_key,
|
|
2836
|
-
expiry: productCache.expiry,
|
|
2837
|
-
}, additional_logs);
|
|
2838
|
-
if (check) {
|
|
2839
|
-
result = JSON.parse(check);
|
|
2840
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Run database action query - return from cache', data: { result: (0, processor_utils_1.anonymizeObject)(result) }, status: types_1.LogEventStatus.SUCCESS }));
|
|
2841
|
-
return result;
|
|
2842
|
-
}
|
|
2843
|
-
}
|
|
2844
|
-
const template = typeof databaseAction.template === 'string' ? databaseAction.template : JSON.stringify(databaseAction.template);
|
|
2845
|
-
if (database.type === types_1.DatabaseTypes.MONGODB) {
|
|
2846
|
-
const MongoDBHandler = await (0, mongo_repo_1.loadMongoDbHandler)();
|
|
2847
|
-
if (!MongoDBHandler) {
|
|
2848
|
-
throw new Error(`Running in browser, mongo handler not loaded.`);
|
|
2849
|
-
}
|
|
2850
|
-
const mongoHandler = new MongoDBHandler(databaseEnv.connection_url);
|
|
2851
|
-
if (databaseAction.type === types_1.DatabaseActionTypes.UPDATE) {
|
|
2852
|
-
const filterTemplate = typeof databaseAction.filterTemplate === 'string'
|
|
2853
|
-
? databaseAction.filterTemplate
|
|
2854
|
-
: JSON.stringify(databaseAction.filterTemplate);
|
|
2855
|
-
result = await mongoHandler[databaseAction.type](databaseAction.data, template, input.data, databaseAction.tableName, databaseAction.filterData, filterTemplate, input.filter);
|
|
2856
|
-
}
|
|
2857
|
-
else {
|
|
2858
|
-
result = await mongoHandler[databaseAction.type](databaseAction.data, template, input.data, databaseAction.tableName);
|
|
2859
|
-
}
|
|
2860
|
-
}
|
|
2861
|
-
else if (database.type === types_1.DatabaseTypes.POSTGRES) {
|
|
2862
|
-
const PostgresDBHandler = await (0, postgres_repo_1.loadPostgresHandler)();
|
|
2863
|
-
if (!PostgresDBHandler) {
|
|
2864
|
-
throw new Error(`Running in browser, postgres handler not loaded.`);
|
|
2865
|
-
}
|
|
2866
|
-
const pgHandler = new PostgresDBHandler((0, processor_utils_1.decrypt)(databaseEnv.connection_url, this.productBuilderService.fetchProduct().private_key));
|
|
2867
|
-
if (databaseAction.type !== types_1.DatabaseActionTypes.AGGREGATE) {
|
|
2868
|
-
result = await pgHandler[databaseAction.type](databaseAction.data, template, input.data);
|
|
2869
|
-
}
|
|
2870
|
-
}
|
|
2871
|
-
//await this.logService.publish();
|
|
2872
|
-
// const result = this.processDBRequest(db_action, input, database_tag, databaseEnv, action_tag, additional_logs);
|
|
2873
|
-
await this.addToSuccessOutput(db_action, result, additional_logs);
|
|
2874
|
-
if (cache_tag && this.redisClient) {
|
|
2875
|
-
const productCache = this.productBuilderService.fetchCache(cache_tag);
|
|
2876
|
-
if (!productCache) {
|
|
2877
|
-
throw new Error('Invalid cache tag ');
|
|
2878
|
-
}
|
|
2879
|
-
const inputString = JSON.stringify(input);
|
|
2880
|
-
await this.addToCache({
|
|
2881
|
-
input: inputString,
|
|
2882
|
-
privateKey: product.private_key,
|
|
2883
|
-
data: JSON.stringify(result),
|
|
2884
|
-
cache_tag,
|
|
2885
|
-
timestamp: Date.now(),
|
|
2886
|
-
product_tag: this.productTag,
|
|
2887
|
-
component_tag: database_tag,
|
|
2888
|
-
component_type: types_1.ProductComponents.DATABASE_ACTION,
|
|
2889
|
-
}, additional_logs);
|
|
2890
|
-
}
|
|
2891
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { successful_execution: true, message: 'Attempt database action - successful', data: { result: (0, processor_utils_1.anonymizeObject)(result) }, status: types_1.LogEventStatus.SUCCESS }));
|
|
2892
|
-
return result;
|
|
2893
|
-
}
|
|
2894
|
-
catch (e) {
|
|
2895
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { failed_execution: true, message: 'Attempt database action - failed', data: { e: e.toString() }, status: types_1.LogEventStatus.FAIL }));
|
|
2896
|
-
throw e;
|
|
2897
|
-
}
|
|
2898
|
-
}
|
|
2899
3132
|
async runBrokerSubscribe(data, additional_logs = {}) {
|
|
2900
3133
|
const { env, event } = data;
|
|
2901
3134
|
const input = data.input;
|
|
2902
3135
|
try {
|
|
2903
3136
|
await this.intializeProduct(additional_logs);
|
|
2904
3137
|
const [brokerTag, topicTag] = event.split(':');
|
|
2905
|
-
const broker = this.productBuilderService.fetchMessageBroker(brokerTag);
|
|
3138
|
+
const broker = await this.productBuilderService.fetchMessageBroker(brokerTag);
|
|
2906
3139
|
if (!broker) {
|
|
2907
3140
|
throw new Error(`Message Broker ${brokerTag} not found`);
|
|
2908
3141
|
}
|
|
@@ -2910,7 +3143,7 @@ class ProcessorService {
  if (!brokerEnv) {
  throw new Error(`Broker env for ${env.slug} not found`);
  }
- const topic = this.productBuilderService.fetchMessageBrokerTopic(event);
+ const topic = await this.productBuilderService.fetchMessageBrokerTopic(event);
  if (!topic) {
  throw new Error(`Topic ${topicTag} not found in broker ${brokerTag}`);
  }
@@ -2930,9 +3163,7 @@ class ProcessorService {
  if (createBrokerService) {
  const brokerService = createBrokerService(brokerEnv.type, brokerEnv.config);
  await brokerService.subscribe(url, input.callback);
-
- this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { successful_execution: true, message: 'Subscribe to broker topic - success', data: { event }, status: types_1.LogEventStatus.SUCCESS }));
- }
+ this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { successful_execution: true, message: 'Subscribe to broker topic - success', data: { event }, status: types_1.LogEventStatus.SUCCESS }));
  return;
  }
  else {
@@ -2951,7 +3182,7 @@ class ProcessorService {
  try {
  await this.intializeProduct(additional_logs);
  const [brokerTag, topicTag] = event.split(':');
- const broker = this.productBuilderService.fetchMessageBroker(brokerTag);
+ const broker = await this.productBuilderService.fetchMessageBroker(brokerTag);
  if (!broker) {
  throw new Error(`Message Broker ${brokerTag} not found`);
  }
@@ -2959,7 +3190,7 @@ class ProcessorService {
  if (!brokerEnv) {
  throw new Error(`Broker env for ${env.slug} not found`);
  }
- const topic = this.productBuilderService.fetchMessageBrokerTopic(event);
+ const topic = await this.productBuilderService.fetchMessageBrokerTopic(event);
  if (!topic) {
  throw new Error(`Topic ${topicTag} not found in broker ${brokerTag}`);
  }
@@ -2979,9 +3210,7 @@ class ProcessorService {
  if (createBrokerService) {
  const brokerService = createBrokerService(brokerEnv.type, brokerEnv.config);
  await brokerService.publish(url, input.message);
-
- this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Publish to broker topic - success', successful_execution: true, data: { event }, status: types_1.LogEventStatus.SUCCESS }));
- }
+ this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Publish to broker topic - success', successful_execution: true, data: { event }, status: types_1.LogEventStatus.SUCCESS }));
  return;
  }
  else {
@@ -3023,7 +3252,7 @@ class ProcessorService {
  const result = { url: await (0, storage_util_1.uploadBlobToCloud)({ data: input.buffer, destinationPath: input.fileName, config }) };
  try {
  await this.processorApiService.saveFileURL({
- url: (0, processor_utils_1.encrypt)(result.url, this.productBuilderService.
+ url: (0, processor_utils_1.encrypt)(result.url, this.productBuilderService.fetchPrivateKey()),
  provider: storageEnv.type,
  product: this.productTag,
  process_id: this.process_id,
@@ -3031,7 +3260,7 @@ class ProcessorService {
  type: input.mimeType,
  event: data.event,
  env: data.env.slug,
- size: Buffer.from(input.buffer).length
+ size: Buffer.from(input.buffer).length,
  }, this.getUserAccess());
  }
  catch (e) { }
@@ -3048,35 +3277,6 @@ class ProcessorService {
  throw e;
  }
  }
- async processDBRequest(db_action, input, database_tag, databaseEnv, action_tag, additional_logs, returnValue = false) {
- try {
- const result = await this.processorApiService.processProduct(this.productId, {
- input: input,
- database_tag: database_tag,
- database_env_slug: databaseEnv.slug,
- tag: action_tag,
- component: types_1.ProductComponents.DATABASE_ACTION,
- }, {
- user_id: this.user_id,
- token: this.token,
- workspace_id: this.workspace_id,
- public_key: this.public_key,
- });
- this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { successful_execution: true, message: 'Run database query - success', data: { payload: (0, processor_utils_1.anonymizeObject)(input) }, status: types_1.LogEventStatus.SUCCESS }));
- return result;
- }
- catch (e) {
- this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { failed_execution: true, message: 'Running database query - failed', data: { payload: (0, processor_utils_1.anonymizeObject)(input), e }, status: types_1.LogEventStatus.FAIL }));
- const value = await this.addToFailureOutput(e, db_action, {
- db_action,
- input,
- database_tag,
- databaseEnv,
- action_tag,
- }, additional_logs);
- return value;
- }
- }
  async writeResult(status, retryable = true) {
  this.processorApiService.saveResult({
  status,
@@ -3084,14 +3284,108 @@ class ProcessorService {
  start: this.start,
  end: this.end,
  retryable,
- result: (0, processor_utils_1.encrypt)(JSON.stringify(this.processingOutput), this.productBuilderService.
+ result: (0, processor_utils_1.encrypt)(JSON.stringify(this.processingOutput), this.productBuilderService.fetchPrivateKey()),
  process_id: this.process_id,
- feature_id:
+ feature_id: null,
  product_id: this.productId,
  env: this.processEnv.slug,
- input: (0, processor_utils_1.encrypt)(JSON.stringify(this.input), this.productBuilderService.
+ input: (0, processor_utils_1.encrypt)(JSON.stringify(this.input), this.productBuilderService.fetchPrivateKey()),
  }, this.getUserAccess());
  }
+ /**
+ * Separate credentials into prefixed (e.g., 'headers:Authorization') and non-prefixed (e.g., 'api_key').
+ * Prefixed credentials are applied directly to the correct section after resolution.
+ * Non-prefixed credentials go through InputResolver to determine their placement.
+ */
+ separateCredentials(credentials) {
+ const prefixed = {};
+ const nonPrefixed = {};
+ for (const [key, value] of Object.entries(credentials)) {
+ if (key.startsWith('headers:') || key.startsWith('body:') ||
+ key.startsWith('params:') || key.startsWith('query:')) {
+ prefixed[key] = value;
+ }
+ else {
+ nonPrefixed[key] = value;
+ }
+ }
+ return { prefixed, nonPrefixed };
+ }
+ /**
+ * Check if a key exists in the action schema for a given section (headers, body, params, query).
+ * Returns true if the key is defined in the schema, false otherwise.
+ */
+ isKeyInActionSchema(action, section, key) {
+ if (!action)
+ return false;
+ const sectionSchema = action[section];
+ if (!sectionSchema || !sectionSchema.data)
+ return false;
+ // Check if the key exists in the schema's data array
+ return sectionSchema.data.some(item => item.key === key);
+ }
+ /**
+ * Apply prefixed credentials (e.g., 'headers:Authorization') to resolved input.
+ * Credentials are applied with lower priority - existing values in resolvedInput take precedence.
+ * Only applies credentials if the action schema defines the corresponding field.
+ */
+ applyPrefixedCredentials(credentials, resolvedInput, action) {
+ const result = {
+ body: Object.assign({}, (resolvedInput.body || {})),
+ params: Object.assign({}, (resolvedInput.params || {})),
+ query: Object.assign({}, (resolvedInput.query || {})),
+ headers: Object.assign({}, (resolvedInput.headers || {})),
+ };
+ for (const [key, value] of Object.entries(credentials)) {
+ if (key.startsWith('headers:')) {
+ const headerKey = key.substring(8); // Remove 'headers:' prefix
+ // Only apply if the action schema expects this header field
+ if (!this.isKeyInActionSchema(action, 'headers', headerKey)) {
+ continue; // Skip - action doesn't expect this credential
+ }
+ // Only set if not already present (user input takes precedence)
+ if (!(headerKey in (result.headers || {}))) {
+ result.headers = result.headers || {};
+ result.headers[headerKey] = value;
+ }
+ }
+ else if (key.startsWith('body:')) {
+ const bodyKey = key.substring(5); // Remove 'body:' prefix
+ // Only apply if the action schema expects this body field
+ if (!this.isKeyInActionSchema(action, 'body', bodyKey)) {
+ continue; // Skip - action doesn't expect this credential
+ }
+ if (!(bodyKey in (result.body || {}))) {
+ result.body = result.body || {};
+ result.body[bodyKey] = value;
+ }
+ }
+ else if (key.startsWith('params:')) {
+ const paramsKey = key.substring(7); // Remove 'params:' prefix
+ // Only apply if the action schema expects this params field
+ if (!this.isKeyInActionSchema(action, 'params', paramsKey)) {
+ continue; // Skip - action doesn't expect this credential
+ }
+ if (!(paramsKey in (result.params || {}))) {
+ result.params = result.params || {};
+ result.params[paramsKey] = value;
+ }
+ }
+ else if (key.startsWith('query:')) {
+ const queryKey = key.substring(6); // Remove 'query:' prefix
+ // Only apply if the action schema expects this query field
+ if (!this.isKeyInActionSchema(action, 'query', queryKey)) {
+ continue; // Skip - action doesn't expect this credential
+ }
+ if (!(queryKey in (result.query || {}))) {
+ result.query = result.query || {};
+ result.query[queryKey] = value;
+ }
+ }
+ // Ignore keys without recognized prefixes - credentials should always be prefixed
+ }
+ return result;
+ }
  async validateActionDataMappingInput(input, type) {
  try {
  if (type === types_1.FeatureEventTypes.ACTION || type === types_1.WebhookEventTypes.WEBHOOK_REGISTER) {
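The new credential helpers split keys by prefix and let user-supplied values win over injected credentials. The standalone sketch below illustrates the same precedence rules with assumed types; it deliberately omits the isKeyInActionSchema check that the SDK methods perform against the action schema.

```ts
// Hedged sketch: shapes are stand-ins, not the SDK's instance methods.
type Sections = {
  headers?: Record<string, unknown>;
  body?: Record<string, unknown>;
  params?: Record<string, unknown>;
  query?: Record<string, unknown>;
};
const PREFIXES = ['headers:', 'body:', 'params:', 'query:'] as const;

function splitCredentials(creds: Record<string, unknown>) {
  const prefixed: Record<string, unknown> = {};
  const flat: Record<string, unknown> = {};
  for (const [key, value] of Object.entries(creds)) {
    // A recognised prefix routes the key straight to its section; everything else is flat.
    (PREFIXES.some((p) => key.startsWith(p)) ? prefixed : flat)[key] = value;
  }
  return { prefixed, flat };
}

function applyPrefixed(prefixed: Record<string, unknown>, input: Sections): Sections {
  const out: Sections = {
    headers: { ...input.headers },
    body: { ...input.body },
    params: { ...input.params },
    query: { ...input.query },
  };
  for (const [key, value] of Object.entries(prefixed)) {
    const idx = key.indexOf(':');
    const section = key.slice(0, idx) as keyof Sections;
    const field = key.slice(idx + 1);
    const target = (out[section] ??= {});
    if (!(field in target)) target[field] = value; // existing (user) values take precedence
  }
  return out;
}

// applyPrefixed({ 'headers:Authorization': 'Bearer token' }, { headers: {} })
//   -> { headers: { Authorization: 'Bearer token' }, body: {}, params: {}, query: {} }
```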
@@ -3111,7 +3405,7 @@ class ProcessorService {
  }
  async processAction(action) {
  //TODO: schema validation
- const { env, input, retries, event, app, product: product_tag } = action;
+ const { env, input, retries, action: event, app, product: product_tag, session, cache } = action;
  const additional_logs = {
  parent_tag: (0, string_utils_1.extractOriginAndTag)(app),
  child_tag: event,
@@ -3119,39 +3413,141 @@ class ProcessorService {
  name: 'Process Action',
  };
  this.component = types_1.LogEventTypes.ACTION;
+ // Session log fields (will be populated if session is provided)
+ let sessionLogFields = {};
+ let resolvedInput = input;
  try {
- // validate input do not allow $Sequence or $Length and $Size of $Input
- this.validateActionDataMappingInput(input, types_1.FeatureEventTypes.ACTION);
  this.input = action;
  this.start = Date.now();
- this.productTag = product_tag;
  const process_id = (0, processor_utils_1.generateObjectId)();
+ if (product_tag) {
+ this.productTag = product_tag;
+ }
  this.baseLogs = {
- product_tag: this.productTag,
+ product_tag: this.productTag || '',
  workspace_id: this.workspace_id,
  env,
  type: types_1.LogEventTypes.ACTION,
  process_id,
  data: input,
  };
-
+ // Single ultra-lightweight bootstrap call - returns action data directly
+ const bootstrapData = await this.productBuilderService.bootstrapAction({
+ product_tag,
+ env_slug: env,
+ access_tag: app,
+ action_tag: event,
+ });
+ // Initialize from bootstrap data
+ if (bootstrapData.product_id) {
+ this.productId = bootstrapData.product_id;
+ }
+ // Resolve flat input to structured format using action schema
+ // This must happen AFTER bootstrap when we have the action schema
+ if (bootstrapData.action && input && typeof input === 'object') {
+ if (!(0, utils_1.isStructuredInput)(input)) {
+ // Flat input detected - resolve using action schema
+ let flatInput = Object.assign({}, input);
+ // Gather all credentials (static + OAuth)
+ // Priority: user input > OAuth > static config
+ const allCredentials = {};
+ // Static credentials (lowest priority)
+ const sharedCredentials = credential_manager_1.credentialManager.get({ product: product_tag, app, env });
+ if (sharedCredentials) {
+ Object.assign(allCredentials, sharedCredentials);
+ }
+ // OAuth credentials (higher priority, overwrites static)
+ if (product_tag && oauth_manager_1.oauthManager.has(product_tag, app, env)) {
+ const oauthCredentials = await oauth_manager_1.oauthManager.getCredentials(product_tag, app, env);
+ if (oauthCredentials) {
+ Object.assign(allCredentials, oauthCredentials);
+ }
+ }
+ // Separate credentials into prefixed and non-prefixed
+ const { prefixed, nonPrefixed } = this.separateCredentials(allCredentials);
+ const inputResolver = new utils_1.InputResolver();
+ // Build location map to check which keys exist in the schema
+ const locationMap = inputResolver.buildLocationMap(bootstrapData.action);
+ // Merge non-prefixed credentials into flatInput BEFORE resolution
+ // Only include credentials that exist in the action schema
+ // User input takes precedence over credentials
+ for (const [key, value] of Object.entries(nonPrefixed)) {
+ // Skip if user already provided this key
+ if (key in flatInput) {
+ continue;
+ }
+ // Only include if the key exists in the action schema
+ if (locationMap.allValidKeys.has(key)) {
+ flatInput[key] = value;
+ }
+ }
+ try {
+ resolvedInput = inputResolver.resolve(flatInput, bootstrapData.action, { strict: true });
+ }
+ catch (e) {
+ if (e instanceof utils_1.InputResolutionError) {
+ throw new Error(`Input resolution failed for action '${event}': ${e.message}`);
+ }
+ throw e;
+ }
+ // Apply prefixed credentials AFTER resolution (directly to correct section)
+ // Only credentials that match fields in the action schema are applied
+ if (Object.keys(prefixed).length > 0) {
+ resolvedInput = this.applyPrefixedCredentials(prefixed, resolvedInput, bootstrapData.action);
+ }
+ }
+ else {
+ // Already structured - validate with existing schema
+ await this.validateActionDataMappingInput(input, types_1.FeatureEventTypes.ACTION);
+ }
+ }
+ else {
+ // Fallback: validate with existing schema if no action bootstrap data
+ await this.validateActionDataMappingInput(input, types_1.FeatureEventTypes.ACTION);
+ }
+ // Process session if provided - verify and resolve $Session{} references
+ if (session && bootstrapData.private_key) {
+ const { processSessionForExecution } = await Promise.resolve().then(() => __importStar(require('../../sessions')));
+ const sessionResult = await processSessionForExecution(session, bootstrapData.private_key, resolvedInput, env);
+ if (sessionResult.error) {
+ throw new Error(`Session validation failed: ${sessionResult.error}`);
+ }
+ resolvedInput = sessionResult.input;
+ sessionLogFields = sessionResult.logFields;
+ }
+ // Initialize log service if needed
+ if (!this.logService) {
+ this.logService = new logs_service_1.default({
+ product_id: this.productId,
+ workspace_id: this.workspace_id,
+ public_key: this.public_key,
+ user_id: this.user_id,
+ token: this.token,
+ env_type: this.environment,
+ });
+ }
  this.process_id = process_id;
  this.baseLogs.product_id = this.productId;
-
- this.
-
- throw new Error(`Environment ${env} is not active`);
- }
+ // Add session fields to base logs
+ this.baseLogs = Object.assign(Object.assign({}, this.baseLogs), sessionLogFields);
+ this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Bootstrap action - success', data: { product_id: this.productId, env: env }, status: types_1.LogEventStatus.SUCCESS }));
  const result = await this.runAction({
  type: types_1.FeatureEventTypes.ACTION,
  event,
- cache:
+ cache: cache,
  app,
- input,
- env:
+ input: resolvedInput,
+ env: this.processEnv,
  retries: retries || 0,
  allow_fail: false,
-
+ hasProduct: !!product_tag,
+ }, additional_logs, true, {
+ action: bootstrapData.action,
+ app_env: bootstrapData.app_env,
+ retries: bootstrapData.retries,
+ app_active: bootstrapData.app_active,
+ recipient_workspace_id: bootstrapData.recipient_workspace_id,
+ });
  this.end = Date.now();
  this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Execute action - success', data: { input: (0, processor_utils_1.anonymizeObject)(input), result: (0, processor_utils_1.anonymizeObject)(result) }, status: types_1.LogEventStatus.SUCCESS }));
  await this.writeResult(types_1.LogEventStatus.SUCCESS);
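processAction now merges injected credentials into the caller's flat input with a fixed priority: user input beats OAuth credentials, which beat static config, and only keys the action schema knows about are admitted. The sketch below restates that rule with assumed stand-in types; it is not the SDK's InputResolver.

```ts
// Minimal sketch of the priority merge, assuming a precomputed set of valid schema keys.
type Creds = Record<string, unknown>;

function gatherCredentials(staticCreds: Creds | undefined, oauthCreds: Creds | undefined): Creds {
  // Later Object.assign arguments win, so OAuth overwrites static values.
  return Object.assign({}, staticCreds ?? {}, oauthCreds ?? {});
}

function mergeIntoFlatInput(flatInput: Creds, creds: Creds, validKeys: Set<string>): Creds {
  const merged = { ...flatInput };
  for (const [key, value] of Object.entries(creds)) {
    if (key in merged) continue;       // user input has the highest priority
    if (!validKeys.has(key)) continue; // only keys defined in the action schema
    merged[key] = value;
  }
  return merged;
}

// mergeIntoFlatInput({ amount: 100 }, { api_key: 'sk_test', amount: 1 }, new Set(['api_key', 'amount']))
//   -> { amount: 100, api_key: 'sk_test' }
```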
@@ -3159,70 +3555,18 @@ class ProcessorService {
  return result;
  }
  catch (e) {
-
+ console.log('ERRRRROORRRRR!!!!!!', e);
+ this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Execute action - failed', data: { e: e.toString() }, status: types_1.LogEventStatus.FAIL }));
  this.end = Date.now();
  await this.writeResult(types_1.LogEventStatus.FAIL);
  await this.logService.publish();
  return { process_id: this.process_id };
  }
  }
3169
|
-
async processDBAction(action) {
|
|
3170
|
-
//TODO: schema validation
|
|
3171
|
-
const { env, input, retries, event, product: product_tag } = action;
|
|
3172
|
-
const [parent_tag, child_tag] = event.split(':');
|
|
3173
|
-
this.component = types_1.LogEventTypes.DB_ACTION;
|
|
3174
|
-
if (!parent_tag || !child_tag) {
|
|
3175
|
-
throw new Error(`database action events should be in the format notification_tag:message_tag`);
|
|
3176
|
-
}
|
|
3177
|
-
const additional_logs = {
|
|
3178
|
-
parent_tag,
|
|
3179
|
-
child_tag,
|
|
3180
|
-
type: types_1.LogEventTypes.DB_ACTION,
|
|
3181
|
-
name: 'Process database action',
|
|
3182
|
-
};
|
|
3183
|
-
try {
|
|
3184
|
-
this.validateActionDataMappingInput(input, types_1.FeatureEventTypes.DB_ACTION);
|
|
3185
|
-
this.input = action;
|
|
3186
|
-
this.start = Date.now();
|
|
3187
|
-
this.productTag = product_tag;
|
|
3188
|
-
const process_id = (0, processor_utils_1.generateObjectId)();
|
|
3189
|
-
this.baseLogs = Object.assign({ product_tag: this.productTag, workspace_id: this.workspace_id, env,
|
|
3190
|
-
process_id, data: input }, additional_logs);
|
|
3191
|
-
await this.intializeProduct(additional_logs);
|
|
3192
|
-
this.baseLogs.product_id = this.productId;
|
|
3193
|
-
this.process_id = process_id;
|
|
3194
|
-
const productEnv = this.fetchEnv(env, additional_logs);
|
|
3195
|
-
this.processEnv = productEnv;
|
|
3196
|
-
if (!productEnv.active) {
|
|
3197
|
-
throw new Error(`Environment ${env} is not active`);
|
|
3198
|
-
}
|
|
3199
|
-
const payload = {
|
|
3200
|
-
type: types_1.FeatureEventTypes.DB_ACTION,
|
|
3201
|
-
event,
|
|
3202
|
-
input,
|
|
3203
|
-
cache: action.cache,
|
|
3204
|
-
env: productEnv,
|
|
3205
|
-
retries: retries || 0,
|
|
3206
|
-
allow_fail: false,
|
|
3207
|
-
};
|
|
3208
|
-
const result = await this.runDBAction(payload, additional_logs);
|
|
3209
|
-
this.end = Date.now();
|
|
3210
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Execute database action', data: { input: (0, processor_utils_1.anonymizeObject)(input), result: (0, processor_utils_1.anonymizeObject)(result) }, status: types_1.LogEventStatus.SUCCESS }));
|
|
3211
|
-
await this.writeResult(types_1.LogEventStatus.SUCCESS);
|
|
3212
|
-
await this.logService.publish();
|
|
3213
|
-
return result;
|
|
3214
|
-
}
|
|
3215
|
-
catch (e) {
|
|
3216
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Execute database action', data: { e }, status: types_1.LogEventStatus.FAIL }));
|
|
3217
|
-
this.end = Date.now();
|
|
3218
|
-
await this.logService.publish();
|
|
3219
|
-
return { process_id: this.process_id };
|
|
3220
|
-
}
|
|
3221
|
-
}
|
|
3222
|
-
async processFunction(data) { }
|
|
3223
3566
|
async processNotification(action) {
|
|
3224
3567
|
//TODO: schema validation
|
|
3225
|
-
|
|
3568
|
+
var _a;
|
|
3569
|
+
const { env, input, retries, event, product: product_tag, session, cache } = action;
|
|
3226
3570
|
const [parent_tag, child_tag] = event.split(':');
|
|
3227
3571
|
if (!parent_tag || !child_tag) {
|
|
3228
3572
|
throw new Error(`database action events should be in the format notification_tag:message_tag`);
|
|
@@ -3234,32 +3578,72 @@ class ProcessorService {
|
|
|
3234
3578
|
type: types_1.LogEventTypes.NOTIFICATIONS,
|
|
3235
3579
|
name: 'Process Notification',
|
|
3236
3580
|
};
|
|
3581
|
+
// Session log fields (will be populated if session is provided)
|
|
3582
|
+
let sessionLogFields = {};
|
|
3583
|
+
let resolvedInput = input;
|
|
3237
3584
|
try {
|
|
3238
|
-
this.validateActionDataMappingInput(input, types_1.FeatureEventTypes.NOTIFICATION);
|
|
3585
|
+
await this.validateActionDataMappingInput(input, types_1.FeatureEventTypes.NOTIFICATION);
|
|
3239
3586
|
this.input = action;
|
|
3240
3587
|
this.start = Date.now();
|
|
3241
3588
|
this.productTag = product_tag;
|
|
3242
3589
|
const process_id = (0, processor_utils_1.generateObjectId)();
|
|
3243
3590
|
this.baseLogs = Object.assign({ product_tag: this.productTag, workspace_id: this.workspace_id, env,
|
|
3244
3591
|
process_id, data: input }, additional_logs);
|
|
3245
|
-
|
|
3592
|
+
// Single bootstrap call to fetch all notification data
|
|
3593
|
+
const bootstrapData = await this.productBuilderService.bootstrapNotification({
|
|
3594
|
+
product_tag,
|
|
3595
|
+
env_slug: env,
|
|
3596
|
+
notification_tag: parent_tag,
|
|
3597
|
+
message_tag: child_tag,
|
|
3598
|
+
});
|
|
3599
|
+
// Initialize from bootstrap data
|
|
3600
|
+
this.productId = bootstrapData.product_id;
|
|
3601
|
+
this.processEnv = bootstrapData.env;
|
|
3602
|
+
// Process session if provided - verify and resolve $Session{} references
|
|
3603
|
+
if (session && bootstrapData.private_key) {
|
|
3604
|
+
const { processSessionForExecution } = await Promise.resolve().then(() => __importStar(require('../../sessions')));
|
|
3605
|
+
const sessionResult = await processSessionForExecution(session, bootstrapData.private_key, input, env);
|
|
3606
|
+
if (sessionResult.error) {
|
|
3607
|
+
throw new Error(`Session validation failed: ${sessionResult.error}`);
|
|
3608
|
+
}
|
|
3609
|
+
resolvedInput = sessionResult.input;
|
|
3610
|
+
sessionLogFields = sessionResult.logFields;
|
|
3611
|
+
}
|
|
3612
|
+
// Initialize log service if needed
|
|
3613
|
+
if (!this.logService) {
|
|
3614
|
+
this.logService = new logs_service_1.default({
|
|
3615
|
+
product_id: this.productId,
|
|
3616
|
+
workspace_id: this.workspace_id,
|
|
3617
|
+
public_key: this.public_key,
|
|
3618
|
+
user_id: this.user_id,
|
|
3619
|
+
token: this.token,
|
|
3620
|
+
env_type: this.environment,
|
|
3621
|
+
});
|
|
3622
|
+
}
|
|
3246
3623
|
this.process_id = process_id;
|
|
3247
3624
|
this.baseLogs.product_id = this.productId;
|
|
3248
|
-
|
|
3249
|
-
this.
|
|
3625
|
+
// Add session fields to base logs
|
|
3626
|
+
this.baseLogs = Object.assign(Object.assign({}, this.baseLogs), sessionLogFields);
|
|
3627
|
+
const productEnv = bootstrapData.env;
|
|
3250
3628
|
if (!productEnv.active) {
|
|
3251
3629
|
throw new Error(`Environment ${env} is not active`);
|
|
3252
3630
|
}
|
|
3253
3631
|
const payload = {
|
|
3254
3632
|
type: types_1.FeatureEventTypes.NOTIFICATION,
|
|
3255
3633
|
event,
|
|
3256
|
-
input,
|
|
3257
|
-
cache:
|
|
3634
|
+
input: resolvedInput,
|
|
3635
|
+
cache: cache,
|
|
3258
3636
|
env: productEnv,
|
|
3259
3637
|
retries: retries || 0,
|
|
3260
3638
|
allow_fail: false,
|
|
3261
3639
|
};
|
|
3262
|
-
|
|
3640
|
+
// Find the env config for the notification
|
|
3641
|
+
const envConfig = (_a = bootstrapData.notification.envs) === null || _a === void 0 ? void 0 : _a.find((data) => data.slug === env);
|
|
3642
|
+
const result = await this.runNotification(payload, additional_logs, {
|
|
3643
|
+
notification: bootstrapData.notification,
|
|
3644
|
+
message: bootstrapData.message,
|
|
3645
|
+
env_config: envConfig,
|
|
3646
|
+
});
|
|
3263
3647
|
this.end = Date.now();
|
|
3264
3648
|
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Send notification - success', data: { input: (0, processor_utils_1.anonymizeObject)(input), result: (0, processor_utils_1.anonymizeObject)(result) }, status: types_1.LogEventStatus.SUCCESS }));
|
|
3265
3649
|
await this.writeResult(types_1.LogEventStatus.SUCCESS);
|
|
@@ -3277,7 +3661,7 @@ class ProcessorService {
|
|
|
3277
3661
|
try {
|
|
3278
3662
|
const data = await this.processorApiService.fetchRemoteCaches(payload, this.getUserAccess());
|
|
3279
3663
|
return data.map((data) => {
|
|
3280
|
-
data.value = (0, processor_utils_1.decrypt)(data.value, this.productBuilderService.
|
|
3664
|
+
data.value = (0, processor_utils_1.decrypt)(data.value, this.productBuilderService.fetchPrivateKey());
|
|
3281
3665
|
return data;
|
|
3282
3666
|
});
|
|
3283
3667
|
}
|
|
@@ -3345,6 +3729,33 @@ class ProcessorService {
|
|
|
3345
3729
|
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Cache Found', data: { key }, successful_execution: true, status: types_1.LogEventStatus.SUCCESS, cache_tag }));
|
|
3346
3730
|
return (0, processor_utils_1.decrypt)(record.data, privateKey);
|
|
3347
3731
|
}
|
|
3732
|
+
/**
|
|
3733
|
+
* Writes the healthcheck result to Redis cache for fast status retrieval.
|
|
3734
|
+
*/
|
|
3735
|
+
async writeHealthcheckResultToCache(data, result) {
|
|
3736
|
+
if (!this.redisClient)
|
|
3737
|
+
return;
|
|
3738
|
+
const key = `healthcheck:${data.product}:${data.healthcheck}:${data.env}`;
|
|
3739
|
+
console.log('LOG TO CACHE', key, JSON.stringify(result));
|
|
3740
|
+
await this.redisClient.set(key, JSON.stringify(result));
|
|
3741
|
+
console.log();
|
|
3742
|
+
}
|
|
3743
|
+
/**
|
|
3744
|
+
* Fetches the latest healthcheck status for a product/env from Redis cache.
|
|
3745
|
+
*/
|
|
3746
|
+
async getHealthcheckStatusFromCache(productTag, envSlug) {
|
|
3747
|
+
if (!this.redisClient)
|
|
3748
|
+
return null;
|
|
3749
|
+
const key = `healthcheck:${productTag}:${envSlug}`;
|
|
3750
|
+
const cached = await this.redisClient.get(key);
|
|
3751
|
+
return cached ? JSON.parse(cached) : null;
|
|
3752
|
+
}
|
|
3753
|
+
/**
|
|
3754
|
+
* Updates the healthcheck in the remote DB for a product with all envs' results.
|
|
3755
|
+
*/
|
|
3756
|
+
async updateHealthcheckOnProcessor(productTag, envs) {
|
|
3757
|
+
return this.productBuilderService.updateHealthcheck(productTag, { envs });
|
|
3758
|
+
}
|
|
3348
3759
|
}
|
|
3349
3760
|
exports.default = ProcessorService;
|
|
3350
3761
|
//# sourceMappingURL=processor.service.js.map
|
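For orientation, the sketch below shows how a caller might exercise the session-aware `processNotification` path introduced in this version. It is a minimal sketch under stated assumptions: how a `ProcessorService` instance is obtained, what the `session` value contains, and the `$Session{...}` placeholder syntax are not shown in this diff and are assumed here; only the destructured action fields (`env`, `input`, `retries`, `event`, `product`, `session`, `cache`) and the `notification_tag:message_tag` event format come from the code above.

```ts
// Hypothetical usage sketch -- not the SDK's documented API surface.
// ASSUMED: how `processor` is constructed and what `sessionToken` contains;
// the `$Session{...}` placeholder is inferred from the
// "resolve $Session{} references" comment in the diff and may differ.
declare const processor: {
  processNotification(action: Record<string, unknown>): Promise<unknown>;
};
declare const sessionToken: string;

async function sendShipmentNotification(): Promise<unknown> {
  return processor.processNotification({
    product: 'my_product',                // product tag (example value)
    env: 'prd',                           // environment slug (example value)
    event: 'order_updates:shipment_sent', // notification_tag:message_tag
    input: {
      user_id: '$Session{user_id}',       // resolved when session + private key are present
      title: 'Your order has shipped',
    },
    session: sessionToken,                // triggers processSessionForExecution
    cache: undefined,                     // optional cache tag
    retries: 2,
  });
}
```

Per the diff, when `session` is provided and the bootstrap payload carries a private key, the method replaces `input` with `sessionResult.input` and merges the session's log fields into `baseLogs`; a failed check raises a `Session validation failed: ...` error inside the method's try block.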