@ductape/sdk 0.0.4-v9 → 0.0.4-v90
This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
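
If the package was pulled from the npm registry, a comparable file-level summary can be reproduced locally with npm's built-in `npm diff` command (available in npm 7 and later). This is a minimal sketch, not part of the original diff output; the version specifiers simply mirror the header above.

```bash
# List only the names of files that changed between the two published versions (npm 7+).
npm diff --diff=@ductape/sdk@0.0.4-v9 --diff=@ductape/sdk@0.0.4-v90 --diff-name-only

# Print the full unified diff (large for a change set of this size), paged for readability.
npm diff --diff=@ductape/sdk@0.0.4-v9 --diff=@ductape/sdk@0.0.4-v90 | less
```
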
- package/dist/agents/agent-context.d.ts +98 -0
- package/dist/agents/agent-context.js +588 -0
- package/dist/agents/agent-context.js.map +1 -0
- package/dist/agents/agent-executor.d.ts +180 -0
- package/dist/agents/agent-executor.js +715 -0
- package/dist/agents/agent-executor.js.map +1 -0
- package/dist/agents/agents.service.d.ts +310 -0
- package/dist/agents/agents.service.js +1249 -0
- package/dist/agents/agents.service.js.map +1 -0
- package/dist/agents/index.d.ts +55 -0
- package/dist/agents/index.js +110 -0
- package/dist/agents/index.js.map +1 -0
- package/dist/agents/memory-manager.d.ts +182 -0
- package/dist/agents/memory-manager.js +383 -0
- package/dist/agents/memory-manager.js.map +1 -0
- package/dist/agents/tool-registry.d.ts +141 -0
- package/dist/agents/tool-registry.js +355 -0
- package/dist/agents/tool-registry.js.map +1 -0
- package/dist/agents/types/agents.types.d.ts +1227 -0
- package/dist/agents/types/agents.types.js +12 -0
- package/dist/agents/types/agents.types.js.map +1 -0
- package/dist/agents/types/index.d.ts +6 -0
- package/dist/agents/types/index.js +23 -0
- package/dist/agents/types/index.js.map +1 -0
- package/dist/agents/vector-store-adapter.d.ts +108 -0
- package/dist/agents/vector-store-adapter.js +213 -0
- package/dist/agents/vector-store-adapter.js.map +1 -0
- package/dist/api/services/appApi.service.d.ts +51 -5
- package/dist/api/services/appApi.service.js +101 -3
- package/dist/api/services/appApi.service.js.map +1 -1
- package/dist/api/services/pricingApi.service.d.ts +10 -0
- package/dist/api/services/pricingApi.service.js +34 -0
- package/dist/api/services/pricingApi.service.js.map +1 -0
- package/dist/api/services/processorApi.service.d.ts +322 -0
- package/dist/api/services/processorApi.service.js +252 -0
- package/dist/api/services/processorApi.service.js.map +1 -1
- package/dist/api/services/productsApi.service.d.ts +105 -0
- package/dist/api/services/productsApi.service.js +139 -3
- package/dist/api/services/productsApi.service.js.map +1 -1
- package/dist/api/services/resilienceApi.service.d.ts +106 -0
- package/dist/api/services/resilienceApi.service.js +224 -0
- package/dist/api/services/resilienceApi.service.js.map +1 -0
- package/dist/api/services/secretsApi.service.d.ts +50 -0
- package/dist/api/services/secretsApi.service.js +124 -0
- package/dist/api/services/secretsApi.service.js.map +1 -0
- package/dist/api/services/workflowApi.service.d.ts +199 -0
- package/dist/api/services/workflowApi.service.js +183 -0
- package/dist/api/services/workflowApi.service.js.map +1 -0
- package/dist/api/services/workspaceApi.service.d.ts +8 -0
- package/dist/api/services/workspaceApi.service.js +20 -0
- package/dist/api/services/workspaceApi.service.js.map +1 -1
- package/dist/api/urls.d.ts +62 -0
- package/dist/api/urls.js +87 -17
- package/dist/api/urls.js.map +1 -1
- package/dist/api/utils/auth.utils.d.ts +1 -3
- package/dist/api/utils/auth.utils.js.map +1 -1
- package/dist/api/utils/cache.utils.d.ts +1 -1
- package/dist/api/utils/cache.utils.js +2 -2
- package/dist/api/utils/cache.utils.js.map +1 -1
- package/dist/api/utils/strings.utils.d.ts +2 -0
- package/dist/api/utils/strings.utils.js +14 -0
- package/dist/api/utils/strings.utils.js.map +1 -1
- package/dist/apps/services/app.service.d.ts +41 -33
- package/dist/apps/services/app.service.js +472 -184
- package/dist/apps/services/app.service.js.map +1 -1
- package/dist/apps/utils/auth-context-manager.d.ts +137 -0
- package/dist/apps/utils/auth-context-manager.js +248 -0
- package/dist/apps/utils/auth-context-manager.js.map +1 -0
- package/dist/apps/utils/credential-manager.d.ts +128 -0
- package/dist/apps/utils/credential-manager.js +199 -0
- package/dist/apps/utils/credential-manager.js.map +1 -0
- package/dist/apps/utils/index.d.ts +10 -0
- package/dist/apps/utils/index.js +54 -0
- package/dist/apps/utils/index.js.map +1 -0
- package/dist/apps/utils/input-helpers.d.ts +67 -0
- package/dist/apps/utils/input-helpers.js +185 -0
- package/dist/apps/utils/input-helpers.js.map +1 -0
- package/dist/apps/utils/input-resolver.d.ts +165 -0
- package/dist/apps/utils/input-resolver.js +477 -0
- package/dist/apps/utils/input-resolver.js.map +1 -0
- package/dist/apps/utils/oauth-manager.d.ts +196 -0
- package/dist/apps/utils/oauth-manager.js +429 -0
- package/dist/apps/utils/oauth-manager.js.map +1 -0
- package/dist/apps/validators/joi-validators/create.appAction.validator.d.ts +1 -2
- package/dist/apps/validators/joi-validators/create.appAction.validator.js +21 -2
- package/dist/apps/validators/joi-validators/create.appAction.validator.js.map +1 -1
- package/dist/apps/validators/joi-validators/update.appAction.validator.js +11 -1
- package/dist/apps/validators/joi-validators/update.appAction.validator.js.map +1 -1
- package/dist/apps/validators/joi-validators/update.appActionResponse.validator.d.ts +1 -1
- package/dist/apps/validators/joi-validators/update.appActionResponse.validator.js +34 -1
- package/dist/apps/validators/joi-validators/update.appActionResponse.validator.js.map +1 -1
- package/dist/bin.d.ts +26 -0
- package/dist/bin.js +28 -0
- package/dist/bin.js.map +1 -0
- package/dist/brokers/brokers.service.d.ts +297 -0
- package/dist/brokers/brokers.service.js +797 -0
- package/dist/brokers/brokers.service.js.map +1 -0
- package/dist/brokers/index.d.ts +46 -0
- package/dist/brokers/index.js +83 -0
- package/dist/brokers/index.js.map +1 -0
- package/dist/brokers/types/index.d.ts +316 -0
- package/dist/brokers/types/index.js +8 -0
- package/dist/brokers/types/index.js.map +1 -0
- package/dist/brokers/utils/broker.util.d.ts +33 -0
- package/dist/brokers/utils/broker.util.js +125 -0
- package/dist/brokers/utils/broker.util.js.map +1 -0
- package/dist/brokers/utils/providers/aws-sqs.service.d.ts +16 -0
- package/dist/brokers/utils/providers/aws-sqs.service.js +71 -0
- package/dist/brokers/utils/providers/aws-sqs.service.js.map +1 -0
- package/dist/brokers/utils/providers/google-pubsub.service.d.ts +16 -0
- package/dist/brokers/utils/providers/google-pubsub.service.js +43 -0
- package/dist/brokers/utils/providers/google-pubsub.service.js.map +1 -0
- package/dist/brokers/utils/providers/index.d.ts +6 -0
- package/dist/brokers/utils/providers/index.js +16 -0
- package/dist/brokers/utils/providers/index.js.map +1 -0
- package/dist/brokers/utils/providers/kafka.service.d.ts +23 -0
- package/dist/brokers/utils/providers/kafka.service.js +131 -0
- package/dist/brokers/utils/providers/kafka.service.js.map +1 -0
- package/dist/brokers/utils/providers/nats.service.d.ts +18 -0
- package/dist/brokers/utils/providers/nats.service.js +63 -0
- package/dist/brokers/utils/providers/nats.service.js.map +1 -0
- package/dist/brokers/utils/providers/rabbitmq.service.d.ts +15 -0
- package/dist/brokers/utils/providers/rabbitmq.service.js +151 -0
- package/dist/brokers/utils/providers/rabbitmq.service.js.map +1 -0
- package/dist/brokers/utils/providers/redis.service.d.ts +18 -0
- package/dist/brokers/utils/providers/redis.service.js +93 -0
- package/dist/brokers/utils/providers/redis.service.js.map +1 -0
- package/dist/cache/cache.manager.d.ts +229 -0
- package/dist/cache/cache.manager.js +460 -0
- package/dist/cache/cache.manager.js.map +1 -0
- package/dist/cache/cache.service.d.ts +186 -0
- package/dist/cache/cache.service.js +437 -0
- package/dist/cache/cache.service.js.map +1 -0
- package/dist/cache/index.d.ts +52 -0
- package/dist/cache/index.js +79 -0
- package/dist/cache/index.js.map +1 -0
- package/dist/cache/types/index.d.ts +106 -0
- package/dist/cache/types/index.js +6 -0
- package/dist/cache/types/index.js.map +1 -0
- package/dist/clients/pricing.client.d.ts +3 -0
- package/dist/clients/pricing.client.js +33 -0
- package/dist/clients/pricing.client.js.map +1 -0
- package/dist/database/actions/action-manager.d.ts +170 -0
- package/dist/database/actions/action-manager.js +465 -0
- package/dist/database/actions/action-manager.js.map +1 -0
- package/dist/database/actions/index.d.ts +6 -0
- package/dist/database/actions/index.js +13 -0
- package/dist/database/actions/index.js.map +1 -0
- package/dist/database/adapters/adapter.factory.d.ts +62 -0
- package/dist/database/adapters/adapter.factory.js +97 -0
- package/dist/database/adapters/adapter.factory.js.map +1 -0
- package/dist/database/adapters/base.adapter.d.ts +423 -0
- package/dist/database/adapters/base.adapter.js +260 -0
- package/dist/database/adapters/base.adapter.js.map +1 -0
- package/dist/database/adapters/cassandra.adapter.d.ts +92 -0
- package/dist/database/adapters/cassandra.adapter.js +1091 -0
- package/dist/database/adapters/cassandra.adapter.js.map +1 -0
- package/dist/database/adapters/dynamodb.adapter.d.ts +110 -0
- package/dist/database/adapters/dynamodb.adapter.js +1564 -0
- package/dist/database/adapters/dynamodb.adapter.js.map +1 -0
- package/dist/database/adapters/index.d.ts +11 -0
- package/dist/database/adapters/index.js +27 -0
- package/dist/database/adapters/index.js.map +1 -0
- package/dist/database/adapters/mariadb.adapter.d.ts +100 -0
- package/dist/database/adapters/mariadb.adapter.js +247 -0
- package/dist/database/adapters/mariadb.adapter.js.map +1 -0
- package/dist/database/adapters/mongodb.adapter.d.ts +121 -0
- package/dist/database/adapters/mongodb.adapter.js +1284 -0
- package/dist/database/adapters/mongodb.adapter.js.map +1 -0
- package/dist/database/adapters/mysql.adapter.d.ts +86 -0
- package/dist/database/adapters/mysql.adapter.js +1371 -0
- package/dist/database/adapters/mysql.adapter.js.map +1 -0
- package/dist/database/adapters/postgresql.adapter.d.ts +90 -0
- package/dist/database/adapters/postgresql.adapter.js +1487 -0
- package/dist/database/adapters/postgresql.adapter.js.map +1 -0
- package/dist/database/databases.service.d.ts +1408 -0
- package/dist/database/databases.service.js +2953 -0
- package/dist/database/databases.service.js.map +1 -0
- package/dist/database/index.d.ts +46 -0
- package/dist/database/index.js +109 -0
- package/dist/database/index.js.map +1 -0
- package/dist/database/migrations/index.d.ts +6 -0
- package/dist/database/migrations/index.js +12 -0
- package/dist/database/migrations/index.js.map +1 -0
- package/dist/database/migrations/migration-engine.d.ts +136 -0
- package/dist/database/migrations/migration-engine.js +1421 -0
- package/dist/database/migrations/migration-engine.js.map +1 -0
- package/dist/database/operators/aggregation-builder.d.ts +67 -0
- package/dist/database/operators/aggregation-builder.js +841 -0
- package/dist/database/operators/aggregation-builder.js.map +1 -0
- package/dist/database/operators/index.d.ts +7 -0
- package/dist/database/operators/index.js +15 -0
- package/dist/database/operators/index.js.map +1 -0
- package/dist/database/operators/query-builder.d.ts +69 -0
- package/dist/database/operators/query-builder.js +447 -0
- package/dist/database/operators/query-builder.js.map +1 -0
- package/dist/database/presave/decrypt.d.ts +25 -0
- package/dist/database/presave/decrypt.js +146 -0
- package/dist/database/presave/decrypt.js.map +1 -0
- package/dist/database/presave/index.d.ts +9 -0
- package/dist/database/presave/index.js +18 -0
- package/dist/database/presave/index.js.map +1 -0
- package/dist/database/presave/presave-processor.d.ts +148 -0
- package/dist/database/presave/presave-processor.js +702 -0
- package/dist/database/presave/presave-processor.js.map +1 -0
- package/dist/database/schema/index.d.ts +7 -0
- package/dist/database/schema/index.js +13 -0
- package/dist/database/schema/index.js.map +1 -0
- package/dist/database/schema/schema-manager.d.ts +258 -0
- package/dist/database/schema/schema-manager.js +638 -0
- package/dist/database/schema/schema-manager.js.map +1 -0
- package/dist/database/transactions/index.d.ts +6 -0
- package/dist/database/transactions/index.js +13 -0
- package/dist/database/transactions/index.js.map +1 -0
- package/dist/database/transactions/transaction-manager.d.ts +113 -0
- package/dist/database/transactions/transaction-manager.js +344 -0
- package/dist/database/transactions/transaction-manager.js.map +1 -0
- package/dist/database/triggers/index.d.ts +7 -0
- package/dist/database/triggers/index.js +14 -0
- package/dist/database/triggers/index.js.map +1 -0
- package/dist/database/triggers/trigger-processor.d.ts +239 -0
- package/dist/database/triggers/trigger-processor.js +1034 -0
- package/dist/database/triggers/trigger-processor.js.map +1 -0
- package/dist/database/types/action.interface.d.ts +148 -0
- package/dist/database/types/action.interface.js +6 -0
- package/dist/database/types/action.interface.js.map +1 -0
- package/dist/database/types/aggregation.interface.d.ts +185 -0
- package/dist/database/types/aggregation.interface.js +6 -0
- package/dist/database/types/aggregation.interface.js.map +1 -0
- package/dist/database/types/connection.interface.d.ts +137 -0
- package/dist/database/types/connection.interface.js +6 -0
- package/dist/database/types/connection.interface.js.map +1 -0
- package/dist/database/types/enums.d.ts +195 -0
- package/dist/database/types/enums.js +244 -0
- package/dist/database/types/enums.js.map +1 -0
- package/dist/database/types/index.d.ts +14 -0
- package/dist/database/types/index.js +31 -0
- package/dist/database/types/index.js.map +1 -0
- package/dist/database/types/migration.interface.d.ts +686 -0
- package/dist/database/types/migration.interface.js +9 -0
- package/dist/database/types/migration.interface.js.map +1 -0
- package/dist/database/types/presave.interface.d.ts +292 -0
- package/dist/database/types/presave.interface.js +60 -0
- package/dist/database/types/presave.interface.js.map +1 -0
- package/dist/database/types/query.interface.d.ts +205 -0
- package/dist/database/types/query.interface.js +6 -0
- package/dist/database/types/query.interface.js.map +1 -0
- package/dist/database/types/schema.interface.d.ts +412 -0
- package/dist/database/types/schema.interface.js +6 -0
- package/dist/database/types/schema.interface.js.map +1 -0
- package/dist/database/types/transaction.interface.d.ts +84 -0
- package/dist/database/types/transaction.interface.js +6 -0
- package/dist/database/types/transaction.interface.js.map +1 -0
- package/dist/database/types/trigger.interface.d.ts +612 -0
- package/dist/database/types/trigger.interface.js +121 -0
- package/dist/database/types/trigger.interface.js.map +1 -0
- package/dist/database/types/write.interface.d.ts +216 -0
- package/dist/database/types/write.interface.js +6 -0
- package/dist/database/types/write.interface.js.map +1 -0
- package/dist/database/utils/database-error.d.ts +96 -0
- package/dist/database/utils/database-error.js +221 -0
- package/dist/database/utils/database-error.js.map +1 -0
- package/dist/database/utils/index.d.ts +6 -0
- package/dist/database/utils/index.js +11 -0
- package/dist/database/utils/index.js.map +1 -0
- package/dist/graph/adapters/adapter.factory.d.ts +47 -0
- package/dist/graph/adapters/adapter.factory.js +77 -0
- package/dist/graph/adapters/adapter.factory.js.map +1 -0
- package/dist/graph/adapters/arangodb.adapter.d.ts +86 -0
- package/dist/graph/adapters/arangodb.adapter.js +1588 -0
- package/dist/graph/adapters/arangodb.adapter.js.map +1 -0
- package/dist/graph/adapters/base.adapter.d.ts +264 -0
- package/dist/graph/adapters/base.adapter.js +156 -0
- package/dist/graph/adapters/base.adapter.js.map +1 -0
- package/dist/graph/adapters/index.d.ts +11 -0
- package/dist/graph/adapters/index.js +21 -0
- package/dist/graph/adapters/index.js.map +1 -0
- package/dist/graph/adapters/memgraph.adapter.d.ts +110 -0
- package/dist/graph/adapters/memgraph.adapter.js +1452 -0
- package/dist/graph/adapters/memgraph.adapter.js.map +1 -0
- package/dist/graph/adapters/neo4j.adapter.d.ts +81 -0
- package/dist/graph/adapters/neo4j.adapter.js +1317 -0
- package/dist/graph/adapters/neo4j.adapter.js.map +1 -0
- package/dist/graph/adapters/neptune.adapter.d.ts +82 -0
- package/dist/graph/adapters/neptune.adapter.js +1369 -0
- package/dist/graph/adapters/neptune.adapter.js.map +1 -0
- package/dist/graph/graphs.service.d.ts +568 -0
- package/dist/graph/graphs.service.js +1948 -0
- package/dist/graph/graphs.service.js.map +1 -0
- package/dist/graph/index.d.ts +57 -0
- package/dist/graph/index.js +77 -0
- package/dist/graph/index.js.map +1 -0
- package/dist/graph/transactions/index.d.ts +4 -0
- package/dist/graph/transactions/index.js +9 -0
- package/dist/graph/transactions/index.js.map +1 -0
- package/dist/graph/transactions/transaction-manager.d.ts +61 -0
- package/dist/graph/transactions/transaction-manager.js +126 -0
- package/dist/graph/transactions/transaction-manager.js.map +1 -0
- package/dist/graph/types/connection.interface.d.ts +149 -0
- package/dist/graph/types/connection.interface.js +9 -0
- package/dist/graph/types/connection.interface.js.map +1 -0
- package/dist/graph/types/enums.d.ts +101 -0
- package/dist/graph/types/enums.js +114 -0
- package/dist/graph/types/enums.js.map +1 -0
- package/dist/graph/types/index.d.ts +13 -0
- package/dist/graph/types/index.js +20 -0
- package/dist/graph/types/index.js.map +1 -0
- package/dist/graph/types/node.interface.d.ts +248 -0
- package/dist/graph/types/node.interface.js +9 -0
- package/dist/graph/types/node.interface.js.map +1 -0
- package/dist/graph/types/query.interface.d.ts +175 -0
- package/dist/graph/types/query.interface.js +9 -0
- package/dist/graph/types/query.interface.js.map +1 -0
- package/dist/graph/types/relationship.interface.d.ts +207 -0
- package/dist/graph/types/relationship.interface.js +9 -0
- package/dist/graph/types/relationship.interface.js.map +1 -0
- package/dist/graph/types/schema.interface.d.ts +295 -0
- package/dist/graph/types/schema.interface.js +9 -0
- package/dist/graph/types/schema.interface.js.map +1 -0
- package/dist/graph/types/transaction.interface.d.ts +55 -0
- package/dist/graph/types/transaction.interface.js +9 -0
- package/dist/graph/types/transaction.interface.js.map +1 -0
- package/dist/graph/types/traversal.interface.d.ts +181 -0
- package/dist/graph/types/traversal.interface.js +9 -0
- package/dist/graph/types/traversal.interface.js.map +1 -0
- package/dist/graph/utils/graph-error.d.ts +71 -0
- package/dist/graph/utils/graph-error.js +142 -0
- package/dist/graph/utils/graph-error.js.map +1 -0
- package/dist/graph/utils/index.d.ts +4 -0
- package/dist/graph/utils/index.js +9 -0
- package/dist/graph/utils/index.js.map +1 -0
- package/dist/imports/imports.service.d.ts +3 -3
- package/dist/imports/imports.service.js +8 -7
- package/dist/imports/imports.service.js.map +1 -1
- package/dist/imports/imports.types.d.ts +8 -0
- package/dist/imports/repos/openApi.repo.d.ts +1 -1
- package/dist/imports/repos/openApi.repo.js +414 -47
- package/dist/imports/repos/openApi.repo.js.map +1 -1
- package/dist/imports/repos/postmanV21.repo.d.ts +1 -1
- package/dist/imports/repos/postmanV21.repo.js +126 -83
- package/dist/imports/repos/postmanV21.repo.js.map +1 -1
- package/dist/index.d.ts +3654 -289
- package/dist/index.js +5066 -669
- package/dist/index.js.map +1 -1
- package/dist/init.interface.d.ts +407 -0
- package/dist/init.interface.js +3 -0
- package/dist/init.interface.js.map +1 -0
- package/dist/inputs/inputs.service.d.ts +1 -1
- package/dist/inputs/utils/inputs.utils.create.js +1 -1
- package/dist/inputs/utils/inputs.utils.create.js.map +1 -1
- package/dist/jobs/index.d.ts +38 -0
- package/dist/jobs/index.js +50 -0
- package/dist/jobs/index.js.map +1 -0
- package/dist/jobs/jobs.service.d.ts +154 -0
- package/dist/jobs/jobs.service.js +491 -0
- package/dist/jobs/jobs.service.js.map +1 -0
- package/dist/jobs/jobs.state.d.ts +113 -0
- package/dist/jobs/jobs.state.js +447 -0
- package/dist/jobs/jobs.state.js.map +1 -0
- package/dist/jobs/types.d.ts +449 -0
- package/dist/jobs/types.js +74 -0
- package/dist/jobs/types.js.map +1 -0
- package/dist/logs/logs.service.js +6 -2
- package/dist/logs/logs.service.js.map +1 -1
- package/dist/logs/logs.types.d.ts +19 -1
- package/dist/logs/logs.types.js +6 -0
- package/dist/logs/logs.types.js.map +1 -1
- package/dist/models/index.d.ts +6 -0
- package/dist/models/index.js +11 -0
- package/dist/models/index.js.map +1 -0
- package/dist/models/models.service.d.ts +137 -0
- package/dist/models/models.service.js +195 -0
- package/dist/models/models.service.js.map +1 -0
- package/dist/notifications/index.d.ts +13 -0
- package/dist/notifications/index.js +26 -0
- package/dist/notifications/index.js.map +1 -0
- package/dist/notifications/notifications.service.d.ts +265 -0
- package/dist/notifications/notifications.service.js +862 -0
- package/dist/notifications/notifications.service.js.map +1 -0
- package/dist/notifications/types/index.d.ts +4 -0
- package/dist/notifications/types/index.js +21 -0
- package/dist/notifications/types/index.js.map +1 -0
- package/dist/notifications/types/notifications.types.d.ts +402 -0
- package/dist/notifications/types/notifications.types.js +49 -0
- package/dist/notifications/types/notifications.types.js.map +1 -0
- package/dist/parsers/index.d.ts +3 -0
- package/dist/parsers/index.js +27 -0
- package/dist/parsers/index.js.map +1 -0
- package/dist/parsers/pipelines/postman.pipelines.d.ts +15 -0
- package/dist/parsers/pipelines/postman.pipelines.js +103 -0
- package/dist/parsers/pipelines/postman.pipelines.js.map +1 -0
- package/dist/parsers/types/postman.types.d.ts +200 -0
- package/dist/parsers/types/postman.types.js +3 -0
- package/dist/parsers/types/postman.types.js.map +1 -0
- package/dist/parsers/utils/postman.utils.d.ts +12 -0
- package/dist/parsers/utils/postman.utils.js +116 -0
- package/dist/parsers/utils/postman.utils.js.map +1 -0
- package/dist/parsers/validators/postman-auth.validators.d.ts +10 -0
- package/dist/parsers/validators/postman-auth.validators.js +127 -0
- package/dist/parsers/validators/postman-auth.validators.js.map +1 -0
- package/dist/parsers/validators/postman-request.validators.d.ts +13 -0
- package/dist/parsers/validators/postman-request.validators.js +139 -0
- package/dist/parsers/validators/postman-request.validators.js.map +1 -0
- package/dist/parsers/validators/postman-response.validators.d.ts +13 -0
- package/dist/parsers/validators/postman-response.validators.js +150 -0
- package/dist/parsers/validators/postman-response.validators.js.map +1 -0
- package/dist/parsers/validators/postman-variable.validators.d.ts +14 -0
- package/dist/parsers/validators/postman-variable.validators.js +163 -0
- package/dist/parsers/validators/postman-variable.validators.js.map +1 -0
- package/dist/pricing/pricing.repo.js +1 -0
- package/dist/pricing/pricing.repo.js.map +1 -0
- package/dist/pricing/pricing.service.d.ts +24 -0
- package/dist/pricing/pricing.service.js +51 -0
- package/dist/pricing/pricing.service.js.map +1 -0
- package/dist/pricing/pricing.types.d.ts +76 -0
- package/dist/pricing/pricing.types.js +21 -0
- package/dist/pricing/pricing.types.js.map +1 -0
- package/dist/pricing/utils/string.utils.d.ts +1 -0
- package/dist/pricing/utils/string.utils.js +9 -0
- package/dist/pricing/utils/string.utils.js.map +1 -0
- package/dist/processor/services/processor.service.d.ts +117 -73
- package/dist/processor/services/processor.service.js +1557 -1276
- package/dist/processor/services/processor.service.js.map +1 -1
- package/dist/processor/services/request.service.d.ts +36 -0
- package/dist/processor/services/request.service.js +304 -0
- package/dist/processor/services/request.service.js.map +1 -0
- package/dist/processor/types/request.types.d.ts +14 -0
- package/dist/processor/types/request.types.js +3 -0
- package/dist/processor/types/request.types.js.map +1 -0
- package/dist/processor/utils/processor.utils.js +32 -20
- package/dist/processor/utils/processor.utils.js.map +1 -1
- package/dist/processor/utils/request.utils.d.ts +20 -0
- package/dist/processor/utils/request.utils.js +113 -0
- package/dist/processor/utils/request.utils.js.map +1 -0
- package/dist/products/services/products.service.d.ts +365 -75
- package/dist/products/services/products.service.js +2809 -414
- package/dist/products/services/products.service.js.map +1 -1
- package/dist/products/utils/string.utils.d.ts +1 -1
- package/dist/products/utils/string.utils.js +14 -2
- package/dist/products/utils/string.utils.js.map +1 -1
- package/dist/products/validators/index.d.ts +7 -1
- package/dist/products/validators/index.js +16 -1
- package/dist/products/validators/index.js.map +1 -1
- package/dist/products/validators/joi-validators/create.productAgent.validator.d.ts +3 -0
- package/dist/products/validators/joi-validators/create.productAgent.validator.js +266 -0
- package/dist/products/validators/joi-validators/create.productAgent.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/create.productDatabase.validator.js +5 -0
- package/dist/products/validators/joi-validators/create.productDatabase.validator.js.map +1 -1
- package/dist/products/validators/joi-validators/create.productEnv.validator.js +1 -0
- package/dist/products/validators/joi-validators/create.productEnv.validator.js.map +1 -1
- package/dist/products/validators/joi-validators/create.productGraph.validator.js +89 -0
- package/dist/products/validators/joi-validators/create.productGraph.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/create.productHealthcheck.validator.d.ts +4 -0
- package/dist/products/validators/joi-validators/create.productHealthcheck.validator.js +58 -0
- package/dist/products/validators/joi-validators/create.productHealthcheck.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/create.productMessageBrokers.validator.js +81 -25
- package/dist/products/validators/joi-validators/create.productMessageBrokers.validator.js.map +1 -1
- package/dist/products/validators/joi-validators/create.productModel.validator.d.ts +3 -0
- package/dist/products/validators/joi-validators/create.productModel.validator.js +132 -0
- package/dist/products/validators/joi-validators/create.productModel.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/create.productNotification.validator.js +133 -45
- package/dist/products/validators/joi-validators/create.productNotification.validator.js.map +1 -1
- package/dist/products/validators/joi-validators/create.productStorage.validator.js +77 -18
- package/dist/products/validators/joi-validators/create.productStorage.validator.js.map +1 -1
- package/dist/products/validators/joi-validators/create.productVector.validator.d.ts +3 -0
- package/dist/products/validators/joi-validators/create.productVector.validator.js +135 -0
- package/dist/products/validators/joi-validators/create.productVector.validator.js.map +1 -0
- package/dist/products/validators/joi-validators/update.dataValue.validator.js +1 -0
- package/dist/products/validators/joi-validators/update.dataValue.validator.js.map +1 -1
- package/dist/products/validators/joi-validators/update.productDatabase.validator.js +5 -0
- package/dist/products/validators/joi-validators/update.productDatabase.validator.js.map +1 -1
- package/dist/products/validators/joi-validators/update.productEnv.validator.js +3 -0
- package/dist/products/validators/joi-validators/update.productEnv.validator.js.map +1 -1
- package/dist/products/validators/joi-validators/update.productGraph.validator.js +88 -0
- package/dist/products/validators/joi-validators/update.productGraph.validator.js.map +1 -0
- package/dist/resilience/fallback.service.d.ts +140 -0
- package/dist/resilience/fallback.service.js +764 -0
- package/dist/resilience/fallback.service.js.map +1 -0
- package/dist/resilience/healthcheck.service.d.ts +159 -0
- package/dist/resilience/healthcheck.service.js +943 -0
- package/dist/resilience/healthcheck.service.js.map +1 -0
- package/dist/resilience/index.d.ts +104 -0
- package/dist/resilience/index.js +140 -0
- package/dist/resilience/index.js.map +1 -0
- package/dist/resilience/quota.service.d.ts +82 -0
- package/dist/resilience/quota.service.js +516 -0
- package/dist/resilience/quota.service.js.map +1 -0
- package/dist/resilience/resilience.service.d.ts +98 -0
- package/dist/resilience/resilience.service.js +560 -0
- package/dist/resilience/resilience.service.js.map +1 -0
- package/dist/resilience/types/index.d.ts +513 -0
- package/dist/resilience/types/index.js +29 -0
- package/dist/resilience/types/index.js.map +1 -0
- package/dist/secrets/index.d.ts +10 -0
- package/dist/secrets/index.js +33 -0
- package/dist/secrets/index.js.map +1 -0
- package/dist/secrets/secrets.resolver.d.ts +52 -0
- package/dist/secrets/secrets.resolver.js +233 -0
- package/dist/secrets/secrets.resolver.js.map +1 -0
- package/dist/secrets/secrets.service.d.ts +93 -0
- package/dist/secrets/secrets.service.js +258 -0
- package/dist/secrets/secrets.service.js.map +1 -0
- package/dist/secrets/secrets.types.d.ts +188 -0
- package/dist/secrets/secrets.types.js +87 -0
- package/dist/secrets/secrets.types.js.map +1 -0
- package/dist/sessions/index.d.ts +50 -0
- package/dist/sessions/index.js +93 -0
- package/dist/sessions/index.js.map +1 -0
- package/dist/sessions/sessions.helper.d.ts +68 -0
- package/dist/sessions/sessions.helper.js +116 -0
- package/dist/sessions/sessions.helper.js.map +1 -0
- package/dist/sessions/sessions.resolver.d.ts +157 -0
- package/dist/sessions/sessions.resolver.js +374 -0
- package/dist/sessions/sessions.resolver.js.map +1 -0
- package/dist/sessions/sessions.service.d.ts +178 -0
- package/dist/sessions/sessions.service.js +923 -0
- package/dist/sessions/sessions.service.js.map +1 -0
- package/dist/sessions/types/index.d.ts +298 -0
- package/dist/sessions/types/index.js +6 -0
- package/dist/sessions/types/index.js.map +1 -0
- package/dist/storage/index.d.ts +66 -0
- package/dist/storage/index.js +99 -0
- package/dist/storage/index.js.map +1 -0
- package/dist/storage/storage.service.d.ts +174 -0
- package/dist/storage/storage.service.js +757 -0
- package/dist/storage/storage.service.js.map +1 -0
- package/dist/storage/types/index.d.ts +267 -0
- package/dist/storage/types/index.js +6 -0
- package/dist/storage/types/index.js.map +1 -0
- package/dist/storage/utils/storage.util.d.ts +62 -0
- package/dist/storage/utils/storage.util.js +593 -0
- package/dist/storage/utils/storage.util.js.map +1 -0
- package/dist/test/index.d.ts +3 -0
- package/dist/test/index.js +11 -0
- package/dist/test/index.js.map +1 -0
- package/dist/test/test.appBuilder.d.ts +0 -1
- package/dist/test/test.appBuilder.js +0 -15
- package/dist/test/test.appBuilder.js.map +1 -1
- package/dist/test/test.broker.kafka.js +172 -0
- package/dist/test/test.broker.kafka.js.map +1 -0
- package/dist/test/test.broker.nats.js +193 -0
- package/dist/test/test.broker.nats.js.map +1 -0
- package/dist/test/test.broker.pubsub.js +171 -0
- package/dist/test/test.broker.pubsub.js.map +1 -0
- package/dist/test/test.broker.rabbitmq.js +164 -0
- package/dist/test/test.broker.rabbitmq.js.map +1 -0
- package/dist/test/test.broker.redis.js +168 -0
- package/dist/test/test.broker.redis.js.map +1 -0
- package/dist/test/test.broker.sqs.d.ts +1 -0
- package/dist/test/test.broker.sqs.js +158 -0
- package/dist/test/test.broker.sqs.js.map +1 -0
- package/dist/test/test.caches.d.ts +1 -0
- package/dist/test/test.caches.js +231 -0
- package/dist/test/test.caches.js.map +1 -0
- package/dist/test/test.database.d.ts +1 -0
- package/dist/test/test.database.dynamo.d.ts +1 -0
- package/dist/test/test.database.dynamo.js +265 -0
- package/dist/test/test.database.dynamo.js.map +1 -0
- package/dist/test/test.database.js +140 -0
- package/dist/test/test.database.js.map +1 -0
- package/dist/test/test.database.mongo.d.ts +1 -0
- package/dist/test/test.database.mongo.js +371 -0
- package/dist/test/test.database.mongo.js.map +1 -0
- package/dist/test/test.database.mysql.d.ts +1 -0
- package/dist/test/test.database.mysql.js +415 -0
- package/dist/test/test.database.mysql.js.map +1 -0
- package/dist/test/test.database.postgres.d.ts +1 -0
- package/dist/test/test.database.postgres.js +412 -0
- package/dist/test/test.database.postgres.js.map +1 -0
- package/dist/test/test.email.brevo.d.ts +1 -0
- package/dist/test/test.email.brevo.js +326 -0
- package/dist/test/test.email.brevo.js.map +1 -0
- package/dist/test/test.email.mailgun.d.ts +1 -0
- package/dist/test/test.email.mailgun.js +352 -0
- package/dist/test/test.email.mailgun.js.map +1 -0
- package/dist/test/test.email.postmark.d.ts +1 -0
- package/dist/test/test.email.postmark.js +316 -0
- package/dist/test/test.email.postmark.js.map +1 -0
- package/dist/test/test.email.sendgrid.d.ts +1 -0
- package/dist/test/test.email.sendgrid.js +365 -0
- package/dist/test/test.email.sendgrid.js.map +1 -0
- package/dist/test/test.email.smtp.d.ts +1 -0
- package/dist/test/test.email.smtp.js +323 -0
- package/dist/test/test.email.smtp.js.map +1 -0
- package/dist/test/test.graph.arangodb.d.ts +1 -0
- package/dist/test/test.graph.arangodb.js +358 -0
- package/dist/test/test.graph.arangodb.js.map +1 -0
- package/dist/test/test.graph.memgraph.d.ts +1 -0
- package/dist/test/test.graph.memgraph.js +320 -0
- package/dist/test/test.graph.memgraph.js.map +1 -0
- package/dist/test/test.graph.neo4j.d.ts +1 -0
- package/dist/test/test.graph.neo4j.js +218 -0
- package/dist/test/test.graph.neo4j.js.map +1 -0
- package/dist/test/test.graph.neptune.d.ts +1 -0
- package/dist/test/test.graph.neptune.js +331 -0
- package/dist/test/test.graph.neptune.js.map +1 -0
- package/dist/test/test.health.js +1 -0
- package/dist/test/test.health.js.map +1 -0
- package/dist/test/test.import.d.ts +0 -1
- package/dist/test/test.import.js +0 -1459
- package/dist/test/test.import.js.map +1 -1
- package/dist/test/test.import.openapi.d.ts +0 -1
- package/dist/test/test.import.openapi.js +0 -75
- package/dist/test/test.import.openapi.js.map +1 -1
- package/dist/test/test.imports.js +14 -55
- package/dist/test/test.imports.js.map +1 -1
- package/dist/test/test.logs.d.ts +0 -1
- package/dist/test/test.logs.js +0 -17
- package/dist/test/test.logs.js.map +1 -1
- package/dist/test/test.notifications.d.ts +1 -0
- package/dist/test/test.notifications.js +198 -0
- package/dist/test/test.notifications.js.map +1 -0
- package/dist/test/test.notifiers.js +1 -0
- package/dist/test/test.notifiers.js.map +1 -0
- package/dist/test/test.processor.d.ts +0 -1
- package/dist/test/test.processor.js +0 -122
- package/dist/test/test.processor.js.map +1 -1
- package/dist/test/test.productBuilder.d.ts +0 -1
- package/dist/test/test.productBuilder.js +0 -660
- package/dist/test/test.productBuilder.js.map +1 -1
- package/dist/test/test.products.js +1 -0
- package/dist/test/test.products.js.map +1 -0
- package/dist/test/test.push.expo.d.ts +1 -0
- package/dist/test/test.push.expo.js +442 -0
- package/dist/test/test.push.expo.js.map +1 -0
- package/dist/test/test.push.firebase.d.ts +1 -0
- package/dist/test/test.push.firebase.js +409 -0
- package/dist/test/test.push.firebase.js.map +1 -0
- package/dist/test/test.session.d.ts +1 -0
- package/dist/test/test.session.js +299 -0
- package/dist/test/test.session.js.map +1 -0
- package/dist/test/test.sms.nexmo.d.ts +1 -0
- package/dist/test/test.sms.nexmo.js +278 -0
- package/dist/test/test.sms.nexmo.js.map +1 -0
- package/dist/test/test.sms.twilio.d.ts +1 -0
- package/dist/test/test.sms.twilio.js +275 -0
- package/dist/test/test.sms.twilio.js.map +1 -0
- package/dist/test/test.storage.d.ts +1 -0
- package/dist/test/test.storage.js +202 -0
- package/dist/test/test.storage.js.map +1 -0
- package/dist/test/test.triggers.d.ts +1 -0
- package/dist/test/test.triggers.js +314 -0
- package/dist/test/test.triggers.js.map +1 -0
- package/dist/test/test.vector.pinecone.d.ts +1 -0
- package/dist/test/test.vector.pinecone.js +238 -0
- package/dist/test/test.vector.pinecone.js.map +1 -0
- package/dist/test/test.vector.qdrant.d.ts +1 -0
- package/dist/test/test.vector.qdrant.js +307 -0
- package/dist/test/test.vector.qdrant.js.map +1 -0
- package/dist/test/test.vector.weaviate.d.ts +1 -0
- package/dist/test/test.vector.weaviate.js +325 -0
- package/dist/test/test.vector.weaviate.js.map +1 -0
- package/dist/types/appBuilder.types.d.ts +9 -2
- package/dist/types/enums.d.ts +11 -1
- package/dist/types/enums.js +10 -0
- package/dist/types/enums.js.map +1 -1
- package/dist/types/index.types.d.ts +4 -7
- package/dist/types/index.types.js +0 -1
- package/dist/types/index.types.js.map +1 -1
- package/dist/types/inputs.types.js +1 -1
- package/dist/types/inputs.types.js.map +1 -1
- package/dist/types/pricing.types.d.ts +4 -0
- package/dist/types/pricing.types.js +3 -0
- package/dist/types/pricing.types.js.map +1 -0
- package/dist/types/processor.types.d.ts +214 -33
- package/dist/types/processor.types.js +9 -1
- package/dist/types/processor.types.js.map +1 -1
- package/dist/types/productsBuilder.types.d.ts +978 -23
- package/dist/types/productsBuilder.types.js +210 -3
- package/dist/types/productsBuilder.types.js.map +1 -1
- package/dist/types/request-tracker.interface.js +1 -0
- package/dist/types/request-tracker.interface.js.map +1 -0
- package/dist/types/requests.types.d.ts +2 -0
- package/dist/utils/constants.d.ts +1 -0
- package/dist/utils/constants.js +5 -0
- package/dist/utils/constants.js.map +1 -0
- package/dist/utils/index.d.ts +0 -2
- package/dist/utils/index.js +24 -52
- package/dist/utils/index.js.map +1 -1
- package/dist/vector/actions/action-manager.d.ts +140 -0
- package/dist/vector/actions/action-manager.js +356 -0
- package/dist/vector/actions/action-manager.js.map +1 -0
- package/dist/vector/adapters/base.adapter.d.ts +169 -0
- package/dist/vector/adapters/base.adapter.js +218 -0
- package/dist/vector/adapters/base.adapter.js.map +1 -0
- package/dist/vector/adapters/index.d.ts +10 -0
- package/dist/vector/adapters/index.js +19 -0
- package/dist/vector/adapters/index.js.map +1 -0
- package/dist/vector/adapters/memory.adapter.d.ts +85 -0
- package/dist/vector/adapters/memory.adapter.js +505 -0
- package/dist/vector/adapters/memory.adapter.js.map +1 -0
- package/dist/vector/adapters/pinecone.adapter.d.ts +52 -0
- package/dist/vector/adapters/pinecone.adapter.js +433 -0
- package/dist/vector/adapters/pinecone.adapter.js.map +1 -0
- package/dist/vector/adapters/qdrant.adapter.d.ts +56 -0
- package/dist/vector/adapters/qdrant.adapter.js +442 -0
- package/dist/vector/adapters/qdrant.adapter.js.map +1 -0
- package/dist/vector/adapters/weaviate.adapter.d.ts +68 -0
- package/dist/vector/adapters/weaviate.adapter.js +645 -0
- package/dist/vector/adapters/weaviate.adapter.js.map +1 -0
- package/dist/vector/index.d.ts +36 -0
- package/dist/vector/index.js +70 -0
- package/dist/vector/index.js.map +1 -0
- package/dist/vector/types/action.interface.d.ts +195 -0
- package/dist/vector/types/action.interface.js +100 -0
- package/dist/vector/types/action.interface.js.map +1 -0
- package/dist/vector/types/connection.interface.d.ts +151 -0
- package/dist/vector/types/connection.interface.js +8 -0
- package/dist/vector/types/connection.interface.js.map +1 -0
- package/dist/vector/types/embedding.interface.d.ts +144 -0
- package/dist/vector/types/embedding.interface.js +8 -0
- package/dist/vector/types/embedding.interface.js.map +1 -0
- package/dist/vector/types/enums.d.ts +104 -0
- package/dist/vector/types/enums.js +113 -0
- package/dist/vector/types/enums.js.map +1 -0
- package/dist/vector/types/index.d.ts +11 -0
- package/dist/vector/types/index.js +23 -0
- package/dist/vector/types/index.js.map +1 -0
- package/dist/vector/types/vector.interface.d.ts +315 -0
- package/dist/vector/types/vector.interface.js +8 -0
- package/dist/vector/types/vector.interface.js.map +1 -0
- package/dist/vector/utils/index.d.ts +6 -0
- package/dist/vector/utils/index.js +11 -0
- package/dist/vector/utils/index.js.map +1 -0
- package/dist/vector/utils/vector-error.d.ts +69 -0
- package/dist/vector/utils/vector-error.js +116 -0
- package/dist/vector/utils/vector-error.js.map +1 -0
- package/dist/vector/vector-database.service.d.ts +474 -0
- package/dist/vector/vector-database.service.js +850 -0
- package/dist/vector/vector-database.service.js.map +1 -0
- package/dist/vector/vector.service.d.ts +283 -0
- package/dist/vector/vector.service.js +544 -0
- package/dist/vector/vector.service.js.map +1 -0
- package/dist/warehouse/executor/index.d.ts +5 -0
- package/dist/warehouse/executor/index.js +12 -0
- package/dist/warehouse/executor/index.js.map +1 -0
- package/dist/warehouse/executor/joins/index.d.ts +5 -0
- package/dist/warehouse/executor/joins/index.js +11 -0
- package/dist/warehouse/executor/joins/index.js.map +1 -0
- package/dist/warehouse/executor/joins/join-executor.d.ts +101 -0
- package/dist/warehouse/executor/joins/join-executor.js +493 -0
- package/dist/warehouse/executor/joins/join-executor.js.map +1 -0
- package/dist/warehouse/executor/joins/semantic-join.d.ts +64 -0
- package/dist/warehouse/executor/joins/semantic-join.js +241 -0
- package/dist/warehouse/executor/joins/semantic-join.js.map +1 -0
- package/dist/warehouse/executor/single-source-executor.d.ts +155 -0
- package/dist/warehouse/executor/single-source-executor.js +573 -0
- package/dist/warehouse/executor/single-source-executor.js.map +1 -0
- package/dist/warehouse/index.d.ts +79 -0
- package/dist/warehouse/index.js +111 -0
- package/dist/warehouse/index.js.map +1 -0
- package/dist/warehouse/parser/index.d.ts +4 -0
- package/dist/warehouse/parser/index.js +10 -0
- package/dist/warehouse/parser/index.js.map +1 -0
- package/dist/warehouse/parser/query-parser.d.ts +181 -0
- package/dist/warehouse/parser/query-parser.js +415 -0
- package/dist/warehouse/parser/query-parser.js.map +1 -0
- package/dist/warehouse/registry/data-source-registry.d.ts +207 -0
- package/dist/warehouse/registry/data-source-registry.js +396 -0
- package/dist/warehouse/registry/data-source-registry.js.map +1 -0
- package/dist/warehouse/registry/index.d.ts +4 -0
- package/dist/warehouse/registry/index.js +9 -0
- package/dist/warehouse/registry/index.js.map +1 -0
- package/dist/warehouse/transactions/index.d.ts +4 -0
- package/dist/warehouse/transactions/index.js +9 -0
- package/dist/warehouse/transactions/index.js.map +1 -0
- package/dist/warehouse/transactions/saga-orchestrator.d.ts +92 -0
- package/dist/warehouse/transactions/saga-orchestrator.js +383 -0
- package/dist/warehouse/transactions/saga-orchestrator.js.map +1 -0
- package/dist/warehouse/types/index.d.ts +9 -0
- package/dist/warehouse/types/index.js +33 -0
- package/dist/warehouse/types/index.js.map +1 -0
- package/dist/warehouse/types/join.interface.d.ts +225 -0
- package/dist/warehouse/types/join.interface.js +87 -0
- package/dist/warehouse/types/join.interface.js.map +1 -0
- package/dist/warehouse/types/query.interface.d.ts +232 -0
- package/dist/warehouse/types/query.interface.js +9 -0
- package/dist/warehouse/types/query.interface.js.map +1 -0
- package/dist/warehouse/types/transaction.interface.d.ts +236 -0
- package/dist/warehouse/types/transaction.interface.js +74 -0
- package/dist/warehouse/types/transaction.interface.js.map +1 -0
- package/dist/warehouse/types/where.interface.d.ts +208 -0
- package/dist/warehouse/types/where.interface.js +89 -0
- package/dist/warehouse/types/where.interface.js.map +1 -0
- package/dist/warehouse/warehouse.service.d.ts +200 -0
- package/dist/warehouse/warehouse.service.js +470 -0
- package/dist/warehouse/warehouse.service.js.map +1 -0
- package/dist/workflows/index.d.ts +30 -0
- package/dist/workflows/index.js +64 -0
- package/dist/workflows/index.js.map +1 -0
- package/dist/workflows/types/index.d.ts +6 -0
- package/dist/workflows/types/index.js +23 -0
- package/dist/workflows/types/index.js.map +1 -0
- package/dist/workflows/types/workflows.types.d.ts +1037 -0
- package/dist/workflows/types/workflows.types.js +13 -0
- package/dist/workflows/types/workflows.types.js.map +1 -0
- package/dist/workflows/workflow-builder.d.ts +70 -0
- package/dist/workflows/workflow-builder.js +338 -0
- package/dist/workflows/workflow-builder.js.map +1 -0
- package/dist/workflows/workflow-executor.d.ts +208 -0
- package/dist/workflows/workflow-executor.js +1194 -0
- package/dist/workflows/workflow-executor.js.map +1 -0
- package/dist/workflows/workflows.service.d.ts +410 -0
- package/dist/workflows/workflows.service.js +1724 -0
- package/dist/workflows/workflows.service.js.map +1 -0
- package/package.json +65 -12
- package/dist/actions/actions.repo.js +0 -13
- package/dist/actions/actions.repo.js.map +0 -1
- package/dist/actions/actions.service.js +0 -24
- package/dist/actions/actions.service.js.map +0 -1
- package/dist/actions/utils/actions.util.read.js +0 -427
- package/dist/actions/utils/actions.util.read.js.map +0 -1
- package/dist/api/services/integrationsApi.service.d.ts +0 -18
- package/dist/api/services/integrationsApi.service.js +0 -80
- package/dist/api/services/integrationsApi.service.js.map +0 -1
- package/dist/appBuilder/services/app.service.d.ts +0 -111
- package/dist/appBuilder/services/app.service.js +0 -737
- package/dist/appBuilder/services/app.service.js.map +0 -1
- package/dist/appBuilder/services/appBuilder.service.d.ts +0 -111
- package/dist/appBuilder/services/appBuilder.service.js +0 -662
- package/dist/appBuilder/services/appBuilder.service.js.map +0 -1
- package/dist/appBuilder/utils/objects.utils.d.ts +0 -3
- package/dist/appBuilder/utils/objects.utils.js +0 -9
- package/dist/appBuilder/utils/objects.utils.js.map +0 -1
- package/dist/appBuilder/utils/string.utils.d.ts +0 -2
- package/dist/appBuilder/utils/string.utils.js +0 -57
- package/dist/appBuilder/utils/string.utils.js.map +0 -1
- package/dist/appBuilder/validators/index.d.ts +0 -19
- package/dist/appBuilder/validators/index.js +0 -40
- package/dist/appBuilder/validators/index.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/create.app.validator.js +0 -10
- package/dist/appBuilder/validators/joi-validators/create.app.validator.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/create.appAction.validator.d.ts +0 -4
- package/dist/appBuilder/validators/joi-validators/create.appAction.validator.js +0 -20
- package/dist/appBuilder/validators/joi-validators/create.appAction.validator.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/create.appActionResponse.validator.d.ts +0 -7
- package/dist/appBuilder/validators/joi-validators/create.appActionResponse.validator.js +0 -44
- package/dist/appBuilder/validators/joi-validators/create.appActionResponse.validator.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/create.appAuth.validator.d.ts +0 -3
- package/dist/appBuilder/validators/joi-validators/create.appAuth.validator.js +0 -31
- package/dist/appBuilder/validators/joi-validators/create.appAuth.validator.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/create.appBody.validators.d.ts +0 -4
- package/dist/appBuilder/validators/joi-validators/create.appBody.validators.js +0 -11
- package/dist/appBuilder/validators/joi-validators/create.appBody.validators.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/create.appConstants.validator.d.ts +0 -4
- package/dist/appBuilder/validators/joi-validators/create.appConstants.validator.js +0 -12
- package/dist/appBuilder/validators/joi-validators/create.appConstants.validator.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/create.appEnv.validator.d.ts +0 -4
- package/dist/appBuilder/validators/joi-validators/create.appEnv.validator.js +0 -17
- package/dist/appBuilder/validators/joi-validators/create.appEnv.validator.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/create.appEvent.validator.d.ts +0 -5
- package/dist/appBuilder/validators/joi-validators/create.appEvent.validator.js +0 -30
- package/dist/appBuilder/validators/joi-validators/create.appEvent.validator.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/create.appVariable.validator.d.ts +0 -4
- package/dist/appBuilder/validators/joi-validators/create.appVariable.validator.js +0 -14
- package/dist/appBuilder/validators/joi-validators/create.appVariable.validator.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/sample.validator.d.ts +0 -5
- package/dist/appBuilder/validators/joi-validators/sample.validator.js +0 -26
- package/dist/appBuilder/validators/joi-validators/sample.validator.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/update.app.validator.d.ts +0 -4
- package/dist/appBuilder/validators/joi-validators/update.app.validator.js +0 -34
- package/dist/appBuilder/validators/joi-validators/update.app.validator.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/update.appAction.validator.d.ts +0 -4
- package/dist/appBuilder/validators/joi-validators/update.appAction.validator.js +0 -23
- package/dist/appBuilder/validators/joi-validators/update.appAction.validator.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/update.appActionResponse.validator.d.ts +0 -3
- package/dist/appBuilder/validators/joi-validators/update.appActionResponse.validator.js +0 -21
- package/dist/appBuilder/validators/joi-validators/update.appActionResponse.validator.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/update.appAuth.validator.d.ts +0 -4
- package/dist/appBuilder/validators/joi-validators/update.appAuth.validator.js +0 -19
- package/dist/appBuilder/validators/joi-validators/update.appAuth.validator.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/update.appConstants.validator.d.ts +0 -4
- package/dist/appBuilder/validators/joi-validators/update.appConstants.validator.js +0 -12
- package/dist/appBuilder/validators/joi-validators/update.appConstants.validator.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/update.appEnv.validator.d.ts +0 -4
- package/dist/appBuilder/validators/joi-validators/update.appEnv.validator.js +0 -17
- package/dist/appBuilder/validators/joi-validators/update.appEnv.validator.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/update.appEvent.validator.d.ts +0 -4
- package/dist/appBuilder/validators/joi-validators/update.appEvent.validator.js +0 -16
- package/dist/appBuilder/validators/joi-validators/update.appEvent.validator.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/update.appVariables.validator.d.ts +0 -4
- package/dist/appBuilder/validators/joi-validators/update.appVariables.validator.js +0 -14
- package/dist/appBuilder/validators/joi-validators/update.appVariables.validator.js.map +0 -1
- package/dist/appBuilder/validators/joi-validators/update.validation.entityData.validator.js +0 -27
- package/dist/appBuilder/validators/joi-validators/update.validation.entityData.validator.js.map +0 -1
- package/dist/apps/validators/joi-validators/create.appEvent.validator.d.ts +0 -5
- package/dist/apps/validators/joi-validators/create.appEvent.validator.js +0 -30
- package/dist/apps/validators/joi-validators/create.appEvent.validator.js.map +0 -1
- package/dist/apps/validators/joi-validators/update.appEvent.validator.d.ts +0 -4
- package/dist/apps/validators/joi-validators/update.appEvent.validator.js +0 -16
- package/dist/apps/validators/joi-validators/update.appEvent.validator.js.map +0 -1
- package/dist/clients/integrations.client.d.ts +0 -2
- package/dist/clients/integrations.client.js +0 -26
- package/dist/clients/integrations.client.js.map +0 -1
- package/dist/integrationsBuilder/services/integration.service.d.ts +0 -138
- package/dist/integrationsBuilder/services/integration.service.js +0 -1148
- package/dist/integrationsBuilder/services/integration.service.js.map +0 -1
- package/dist/integrationsBuilder/services/integrationBuilder.service.d.ts +0 -130
- package/dist/integrationsBuilder/services/integrationBuilder.service.js +0 -1017
- package/dist/integrationsBuilder/services/integrationBuilder.service.js.map +0 -1
- package/dist/integrationsBuilder/utils/objects.utils.d.ts +0 -2
- package/dist/integrationsBuilder/utils/objects.utils.js +0 -48
- package/dist/integrationsBuilder/utils/objects.utils.js.map +0 -1
- package/dist/integrationsBuilder/utils/string.utils.d.ts +0 -1
- package/dist/integrationsBuilder/utils/string.utils.js +0 -9
- package/dist/integrationsBuilder/utils/string.utils.js.map +0 -1
- package/dist/integrationsBuilder/validators/index.d.ts +0 -18
- package/dist/integrationsBuilder/validators/index.js +0 -38
- package/dist/integrationsBuilder/validators/index.js.map +0 -1
- package/dist/integrationsBuilder/validators/joi-validators/create.integration.validator.d.ts +0 -3
- package/dist/integrationsBuilder/validators/joi-validators/create.integration.validator.js +0 -10
- package/dist/integrationsBuilder/validators/joi-validators/create.integration.validator.js.map +0 -1
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationApp.validator.d.ts +0 -4
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationApp.validator.js +0 -26
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationApp.validator.js.map +0 -1
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationCache.validator.d.ts +0 -3
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationCache.validator.js +0 -8
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationCache.validator.js.map +0 -1
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationDatabase.validator.d.ts +0 -3
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationDatabase.validator.js +0 -8
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationDatabase.validator.js.map +0 -1
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationEnv.validator.d.ts +0 -3
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationEnv.validator.js +0 -10
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationEnv.validator.js.map +0 -1
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationFeature.validator.d.ts +0 -3
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationFeature.validator.js +0 -60
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationFeature.validator.js.map +0 -1
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationFunction.validator.d.ts +0 -3
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationFunction.validator.js +0 -8
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationFunction.validator.js.map +0 -1
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationJob.validator.d.ts +0 -3
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationJob.validator.js +0 -8
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationJob.validator.js.map +0 -1
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationNotification.validator.d.ts +0 -3
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationNotification.validator.js +0 -8
- package/dist/integrationsBuilder/validators/joi-validators/create.integrationNotification.validator.js.map +0 -1
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationApp.validator.d.ts +0 -3
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationApp.validator.js +0 -9
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationApp.validator.js.map +0 -1
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationCache.validator.d.ts +0 -3
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationCache.validator.js +0 -8
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationCache.validator.js.map +0 -1
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationDatabase.validator.d.ts +0 -3
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationDatabase.validator.js +0 -8
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationDatabase.validator.js.map +0 -1
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationEnv.validator.d.ts +0 -3
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationEnv.validator.js +0 -8
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationEnv.validator.js.map +0 -1
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationFeature.validator.d.ts +0 -3
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationFeature.validator.js +0 -8
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationFeature.validator.js.map +0 -1
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationFunction.validator copy.d.ts +0 -3
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationFunction.validator copy.js +0 -8
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationFunction.validator copy.js.map +0 -1
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationJob.validator.d.ts +0 -3
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationJob.validator.js +0 -8
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationJob.validator.js.map +0 -1
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationNotification.validator.d.ts +0 -3
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationNotification.validator.js +0 -8
- package/dist/integrationsBuilder/validators/joi-validators/update.integrationNotification.validator.js.map +0 -1
- package/dist/postman.d.ts +0 -1
- package/dist/postman.js +0 -21674
- package/dist/postman.js.map +0 -1
- package/dist/processor/utils/mongo.util.js +0 -152
- package/dist/processor/utils/mongo.util.js.map +0 -1
- package/dist/processor/utils/postgres.util.d.ts +0 -14
- package/dist/processor/utils/postgres.util.js +0 -83
- package/dist/processor/utils/postgres.util.js.map +0 -1
- package/dist/products/validators/joi-validators/create.product.validator.d.ts +0 -3
- package/dist/products/validators/joi-validators/create.product.validator.js +0 -10
- package/dist/products/validators/joi-validators/create.product.validator.js.map +0 -1
- package/dist/test.appBuilder.js +0 -14
- package/dist/test.appBuilder.js.map +0 -1
- package/dist/test.import.js +0 -24
- package/dist/test.import.js.map +0 -1
- package/dist/test.imports.js +0 -28
- package/dist/test.imports.js.map +0 -1
- package/dist/test.integrationBuilder.js +0 -276
- package/dist/test.integrationBuilder.js.map +0 -1
- package/dist/test.processor.js +0 -23
- package/dist/test.processor.js.map +0 -1
- package/dist/test.utils.js +0 -25
- package/dist/test.utils.js.map +0 -1
- package/dist/types/integrationsBuilder.types.d.ts +0 -276
- package/dist/types/integrationsBuilder.types.js +0 -51
- package/dist/types/integrationsBuilder.types.js.map +0 -1
- /package/dist/{actions/actions.repo.d.ts → pricing/pricing.repo.d.ts} +0 -0
- /package/dist/{appBuilder/validators/joi-validators/create.app.validator.d.ts → products/validators/joi-validators/create.productGraph.validator.d.ts} +0 -0
- /package/dist/{appBuilder/validators/joi-validators/update.validation.entityData.validator.d.ts → products/validators/joi-validators/update.productGraph.validator.d.ts} +0 -0
- /package/dist/{test.appBuilder.d.ts → test/test.broker.kafka.d.ts} +0 -0
- /package/dist/{test.imports.d.ts → test/test.broker.nats.d.ts} +0 -0
- /package/dist/{test.integrationBuilder.d.ts → test/test.broker.pubsub.d.ts} +0 -0
- /package/dist/{test.processor.d.ts → test/test.broker.rabbitmq.d.ts} +0 -0
- /package/dist/{test.utils.d.ts → test/test.broker.redis.d.ts} +0 -0
- /package/dist/{actions/actions.service.d.ts → test/test.health.d.ts} +0 -0
- /package/dist/{actions/utils/actions.util.read.d.ts → test/test.notifiers.d.ts} +0 -0
- /package/dist/{processor/utils/mongo.util.d.ts → test/test.products.d.ts} +0 -0
- /package/dist/{test.import.d.ts → types/request-tracker.interface.d.ts} +0 -0
@@ -56,38 +56,43 @@ const axios_1 = __importDefault(require("axios"));
 const processorApi_service_1 = require("../../api/services/processorApi.service");
 const expo_client_1 = __importDefault(require("../../clients/expo.client"));
 const handlebars_1 = require("handlebars");
-const functions_utils_1 = require("../../products/utils/functions.utils");
 const string_utils_1 = require("../../products/utils/string.utils");
 const create_productFeature_validator_1 = require("../../products/validators/joi-validators/create.productFeature.validator");
 const validators_1 = require("../../products/validators");
 const uuid_1 = require("uuid");
 const urls_1 = require("../../api/urls");
-//import { createBrokerService } from './messagebrokers';
 const date_fns_1 = require("date-fns");
-const mongo_repo_1 = require("../repos/mongo.repo");
-const postgres_repo_1 = require("../repos/postgres.repo");
 const storage_util_1 = require("../utils/storage.util");
 const sms_repo_1 = require("../repos/sms.repo");
-const
+const pricing_service_1 = __importDefault(require("../../pricing/pricing.service"));
+const request_utils_1 = require("../utils/request.utils");
+const request_service_1 = __importDefault(require("./request.service"));
+const app_service_1 = __importDefault(require("../../apps/services/app.service"));
+const utils_1 = require("../../apps/utils");
+const credential_manager_1 = require("../../apps/utils/credential-manager");
+const oauth_manager_1 = require("../../apps/utils/oauth-manager");
+const secrets_1 = require("../../secrets");
 async function loadBrokerService() {
 if (typeof window === 'undefined') {
-const {
-return
+const { loadBrokerService: loadBroker } = await Promise.resolve().then(() => __importStar(require('../../brokers')));
+return loadBroker();
 }
 return null;
 }
 async function loadJWT() {
 if (typeof window === 'undefined') {
-const JWT = await Promise.resolve().then(() => __importStar(require(
+const JWT = await Promise.resolve().then(() => __importStar(require('jsonwebtoken')));
 return JWT;
 }
 return null;
 }
 class ProcessorService {
-constructor({ workspace_id, public_key, user_id, token, env_type, redis_client, queues }) {
+constructor({ workspace_id, public_key, user_id, token, env_type, private_key, access_key, redis_client, queues }) {
 this.workspace_id = workspace_id;
 this.public_key = public_key;
 this.user_id = user_id;
+this._privateKey = private_key;
+this.accessKey = access_key;
 this.token = token;
 this.published = false;
 this.productBuilderService = new products_service_1.default({
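The new loadBrokerService/loadJWT helpers above lazy-load server-only modules behind a `typeof window === 'undefined'` guard, so browser bundles never pull in `jsonwebtoken` or the broker clients. A minimal TypeScript sketch of that pattern, assuming only that the dependency stays optional on the server side (the helper name here is illustrative, not part of the SDK):

```ts
// Sketch of the browser-guarded lazy import used by loadJWT/loadBrokerService.
// 'jsonwebtoken' mirrors the diff; any Node-only dependency could be swapped in.
async function loadJwtModule(): Promise<typeof import('jsonwebtoken') | null> {
  if (typeof window === 'undefined') {
    // Only evaluated under Node.js, so the module never ends up in browser bundles.
    return await import('jsonwebtoken');
  }
  return null; // running in a browser: token features are unavailable
}
```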
@@ -96,7 +101,23 @@ class ProcessorService {
 user_id,
 token,
 env_type,
-redis_client
+redis_client,
+});
+this.appBuilderService = new app_service_1.default({
+workspace_id,
+public_key,
+user_id,
+token,
+env_type,
+redis_client,
+});
+this.pricingService = new pricing_service_1.default({
+workspace_id,
+public_key,
+user_id,
+token,
+env_type,
+redis_client,
 });
 this.inputService = new inputs_service_1.default();
 this.requestTime = 0;
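With this hunk the constructor also spins up an app builder service and a pricing service alongside the product builder, and it now accepts `private_key` and `access_key`. A hedged sketch of the widened options object from the caller's side; the field names come from the diff, while the interface, variable names, and env-var names are illustrative only:

```ts
// Illustrative shape of the widened ProcessorService options (not an SDK export).
interface ProcessorServiceOptions {
  workspace_id: string;
  public_key: string;
  user_id: string;
  token: string;
  env_type: string;
  private_key?: string;   // new: used for encryption and session signing
  access_key?: string;    // new: per-workspace access key
  redis_client?: unknown; // enables job-state tracking and healthcheck workers
  queues?: unknown;
}

const options: ProcessorServiceOptions = {
  workspace_id: 'wsp_123',
  public_key: process.env.DUCTAPE_PUBLIC_KEY ?? '',
  user_id: 'usr_456',
  token: process.env.DUCTAPE_TOKEN ?? '',
  env_type: 'prd',
  private_key: process.env.DUCTAPE_PRIVATE_KEY,
  access_key: process.env.DUCTAPE_ACCESS_KEY,
};
```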
@@ -108,50 +129,133 @@ class ProcessorService {
 skipped: [],
 };
 this.apps = [];
+this.requestTrackerService = request_service_1.default.getInstance(redis_client);
 this.processorApiService = new processorApi_service_1.ProcessorApiService(env_type);
 this.environment = env_type;
+// Store redis client for job state management
 if (redis_client) {
 this.redisClient = redis_client;
 }
+// Start healthcheck workers only if product tag is available
+if (redis_client && this.productTag) {
+this.startHealthcheckWorkers();
+}
 if (queues) {
 this.queues = queues;
 }
 }
+/**
+ * Start healthcheck workers for all products/environments after Redis is connected.
+ * This is called automatically in the constructor if redisClient is present.
+ */
+async startHealthcheckWorkers() {
+// Fetch all products (or the current product if context is single-tenant)
+// For demo, we use the current product only
+await this.productBuilderService.initializeProductByTag(this.productTag);
+const healthchecks = await this.productBuilderService.fetchProductHealthchecks();
+const privateKey = this.productBuilderService.fetchPrivateKey();
+for (const healthcheck of healthchecks) {
+for (const env of healthcheck.envs) {
+// Each env gets its own worker (setInterval)
+const interval = healthcheck.interval || 60000; // default 60s
+setInterval(async () => {
+try {
+// Decrypt input for this env
+let decryptedInput = env.input;
+if (typeof decryptedInput === 'string') {
+decryptedInput = JSON.parse((0, processor_utils_1.decrypt)(decryptedInput, privateKey));
+}
+// Prepare action input
+const actionInput = {
+env: env.slug,
+product: this.productTag,
+app: healthcheck.app,
+input: decryptedInput,
+action: healthcheck.event,
+retries: healthcheck.retries || 0,
+};
+// Process the action
+const result = await this.processAction(actionInput);
+// Log result (success/failure)
+this.logService.add(Object.assign(Object.assign({}, this.baseLogs), { message: `Healthcheck processed for ${healthcheck.tag} in env ${env.slug}`, data: { result }, status: types_1.LogEventStatus.SUCCESS }));
+}
+catch (e) {
+this.logService.add(Object.assign(Object.assign({}, this.baseLogs), { message: `Healthcheck failed for ${healthcheck.tag} in env ${env.slug}`, data: { error: e.toString() }, status: types_1.LogEventStatus.FAIL }));
+}
+}, interval);
+}
+}
+}
+/**
+ * Manually trigger healthcheck processing for all healthchecks (can be called externally if needed)
+ */
+async processAllHealthchecksForProduct(productTag) {
+await this.productBuilderService.initializeProductByTag(productTag);
+const healthchecks = await this.productBuilderService.fetchProductHealthchecks();
+const privateKey = this.productBuilderService.fetchPrivateKey();
+for (const healthcheck of healthchecks) {
+for (const env of healthcheck.envs) {
+try {
+let decryptedInput = env.input;
+if (typeof decryptedInput === 'string') {
+decryptedInput = JSON.parse((0, processor_utils_1.decrypt)(decryptedInput, privateKey));
+}
+const actionInput = {
+env: env.slug,
+product: productTag,
+app: healthcheck.app,
+input: decryptedInput,
+action: healthcheck.event,
+retries: healthcheck.retries || 0,
+};
+const result = await this.processAction(actionInput);
+this.logService.add(Object.assign(Object.assign({}, this.baseLogs), { message: `Manual healthcheck processed for ${healthcheck.tag} in env ${env.slug}`, data: { result }, status: types_1.LogEventStatus.SUCCESS }));
+}
+catch (e) {
+this.logService.add(Object.assign(Object.assign({}, this.baseLogs), { message: `Manual healthcheck failed for ${healthcheck.tag} in env ${env.slug}`, data: { error: e.toString() }, status: types_1.LogEventStatus.FAIL }));
+}
+}
+}
+}
 async generateSession(payload) {
 try {
 const { product: product_tag, env: slug, tag, data } = payload;
-const input = await this.inputService.parseJson({
+const input = (await this.inputService.parseJson({
 data,
 expected: types_1.ExpectedValues.PARSEINPUT,
-});
+}));
 await this.productBuilderService.initializeProductByTag(product_tag);
-
-const session = this.productBuilderService.fetchSession(tag);
+const privateKey = this.productBuilderService.fetchPrivateKey();
+const session = await this.productBuilderService.fetchSession(tag);
 if (!session) {
 throw new Error(`Session with tag ${tag} does not exist`);
 }
-const env = this.productBuilderService.fetchEnv(slug);
+const env = await this.productBuilderService.fetchEnv(slug);
 if (!env) {
 throw new Error(`Env with slug ${slug} does not exist`);
 }
 await this.inputService.validateInput(input, session.schema_data);
 const stages = (0, string_utils_1.extractStages)(session.selector);
-
+// Convert stages to string[] for functions expecting string[]
+const stringStages = stages.map((stage) => String(stage));
+const user = (0, processor_utils_1.extractSelectorValue)(data, stringStages, session.selector);
 const expiry = (0, processor_utils_1.calculateJWTExpiry)(session.expiry, session.period);
 const end_at = (0, processor_utils_1.calculateExpiry)(session.expiry, session.period);
 const JWT = await loadJWT();
 if (JWT) {
 const session_id = (0, uuid_1.v4)();
-const token = JWT.sign({ session: payload.tag, env: payload.env, session_id, data },
-
+const token = JWT.sign({ session: payload.tag, env: payload.env, session_id, data }, privateKey, {
+expiresIn: expiry,
+});
+const refreshToken = (0, processor_utils_1.encrypt)(JSON.stringify(data), privateKey);
 // WRITE REFRESH TOKEN TO DATABASE... TO INVALIDATE DELETE FROM DATABASE
 const details = {
 identifier: user,
 start_at: Date.now(),
 end_at,
 session_tag: tag,
-data: (0, processor_utils_1.encrypt)(JSON.stringify(JSON.stringify(data)),
-session_id
+data: (0, processor_utils_1.encrypt)(JSON.stringify(JSON.stringify(data)), privateKey),
+session_id,
 };
 await this.processorApiService.createSessionInfo(Object.assign({ product_tag, env: slug, refreshToken }, details), this.getUserAccess());
 return {
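`startHealthcheckWorkers` above schedules one `setInterval` loop per healthcheck environment (default 60s), decrypts the stored input, and runs the configured action. The standalone sketch below captures just that scheduling shape; the `Healthcheck` types and the `runCheck` callback are stand-ins for illustration, not SDK types:

```ts
// Minimal sketch of the per-environment polling loop set up by startHealthcheckWorkers.
interface HealthcheckEnv { slug: string; input: unknown }
interface Healthcheck { tag: string; app: string; event: string; interval?: number; retries?: number; envs: HealthcheckEnv[] }

function scheduleHealthchecks(
  checks: Healthcheck[],
  runCheck: (check: Healthcheck, env: HealthcheckEnv) => Promise<unknown>,
): NodeJS.Timeout[] {
  const timers: NodeJS.Timeout[] = [];
  for (const check of checks) {
    for (const env of check.envs) {
      const interval = check.interval ?? 60000; // same 60s default as the diff
      timers.push(setInterval(() => {
        // Each tick runs one check; failures are logged rather than crashing the worker.
        runCheck(check, env).catch((e) => console.error(`healthcheck ${check.tag}/${env.slug} failed`, e));
      }, interval));
    }
  }
  return timers; // callers can clearInterval() these on shutdown
}
```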
@@ -169,11 +273,12 @@ class ProcessorService {
 }
 async refreshSession(payload) {
 await this.productBuilderService.initializeProductByTag(payload.product);
+const privateKey = this.productBuilderService.fetchPrivateKey();
 // validate token validity
 const { refreshToken } = payload, payloadData = __rest(payload, ["refreshToken"]);
 const valid = await this.processorApiService.validateRefreshToken({ refreshToken, product: payload.product, env: payload.env }, this.getUserAccess());
 if (valid) {
-const data = JSON.parse((0, processor_utils_1.decrypt)(refreshToken,
+const data = JSON.parse((0, processor_utils_1.decrypt)(refreshToken, privateKey));
 return await this.generateSession(Object.assign(Object.assign({}, payloadData), { data }));
 }
 else {
@@ -182,12 +287,13 @@ class ProcessorService {
 }
 async decryptSession(data) {
 await this.productBuilderService.initializeProductByTag(data.product);
+const privateKey = this.productBuilderService.fetchPrivateKey();
 const JWT = await loadJWT();
 if (!JWT) {
 throw new Error(`Running in browser, token service not loaded.`);
 }
 try {
-const res = await JWT.verify(data.token,
+const res = (await JWT.verify(data.token, privateKey));
 if (res.session !== data.tag) {
 throw new Error(`Invalid token for session ${data.tag}`);
 }
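generateSession and decryptSession now sign and verify session tokens with the product private key fetched from the product builder. A small sketch of that token round-trip using the public jsonwebtoken API; the payload fields mirror the diff, while the function names and key handling are illustrative:

```ts
// Sign-and-verify round trip for session tokens, as generateSession/decryptSession do.
import jwt from 'jsonwebtoken';

interface SessionClaims { session: string; env: string; session_id: string; data: unknown }

function issueSessionToken(privateKey: string, claims: SessionClaims, expiresInSeconds: number): string {
  return jwt.sign(claims, privateKey, { expiresIn: expiresInSeconds });
}

function readSessionToken(privateKey: string, token: string): string | jwt.JwtPayload {
  return jwt.verify(token, privateKey); // throws on an invalid or expired token
}
```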
@@ -198,14 +304,13 @@ class ProcessorService {
 }
 catch (e) {
 console.log(e);
-throw new Error(
+throw new Error('Invalid/Expired token');
 }
 }
 async registerWebhook(data) {
 const { product: product_tag, access_tag, webhook_tag, envs } = data;
 await this.productBuilderService.initializeProductByTag(product_tag);
-const
-const { version, envs: appEnvs } = this.productBuilderService.fetchApp(access_tag);
+const { version, envs: appEnvs } = await this.productBuilderService.fetchApp(access_tag);
 const app = await this.productBuilderService.fetchThirdPartyAppByAccessTag(access_tag);
 const { webhooks } = app.versions.find((data) => data.tag === version);
 if (!webhooks) {
@@ -216,7 +321,7 @@ class ProcessorService {
 if (!webhook) {
 throw new Error(`Webhook tag ${webhook_tag} not found`);
 }
-const productEnvs = this.productBuilderService.fetchEnvs();
+const productEnvs = await this.productBuilderService.fetchEnvs();
 productEnvs.map((env) => {
 const exists = envs.findIndex((dbEnv) => dbEnv.slug === env.slug);
 if (exists === -1) {
@@ -260,7 +365,7 @@ class ProcessorService {
 if (replacedUrl && replacedUrl !== env.url && replacedUrl) {
 throw new Error(`Ductape expects the url ${replacedUrl} in request body to match inputted url ${env.url}`);
 }
-const exists = this.fetchEnv(env.slug, {});
+const exists = await this.fetchEnv(env.slug, {});
 if (!exists) {
 throw new Error(`Env ${env.slug} does not exist`);
 }
@@ -282,7 +387,7 @@ class ProcessorService {
 webhook_tag,
 version,
 sender_workspace_id: app.workspace_id,
-receiver_workspace_id:
+receiver_workspace_id: this.getUserAccess().workspace_id,
 app_tag: app.tag,
 product_tag,
 active,
@@ -293,8 +398,7 @@ class ProcessorService {
 async generateWebhookLink(data) {
 const { product: product_tag, access_tag, webhook_tag, env: product_env, url, method } = data;
 await this.productBuilderService.initializeProductByTag(product_tag);
-const
-const { version, envs: appEnvs } = this.productBuilderService.fetchApp(access_tag);
+const { version, envs: appEnvs } = await this.productBuilderService.fetchApp(access_tag);
 const app = await this.productBuilderService.fetchThirdPartyAppByAccessTag(access_tag);
 const { webhooks } = app.versions.find((data) => data.tag === version);
 if (!webhooks) {
@@ -315,7 +419,7 @@ class ProcessorService {
 webhook_tag,
 version,
 sender_workspace_id: app.workspace_id,
-receiver_workspace_id:
+receiver_workspace_id: this.getUserAccess().workspace_id,
 app_tag: app.tag,
 product_tag,
 active: false,
@@ -323,239 +427,6 @@ class ProcessorService {
|
|
|
323
427
|
return await this.webhookApi.generateLink(payload, this.getUserAccess());
|
|
324
428
|
//return res.link;
|
|
325
429
|
}
|
|
326
|
-
async processQuota(data) {
|
|
327
|
-
this.start = Date.now();
|
|
328
|
-
const { product: product_tag, env, input, tag: quota_tag } = data;
|
|
329
|
-
try {
|
|
330
|
-
this.productTag = product_tag;
|
|
331
|
-
const additional_logs = {
|
|
332
|
-
name: 'Process Quota',
|
|
333
|
-
type: types_1.LogEventTypes.QUOTA,
|
|
334
|
-
parent_tag: quota_tag,
|
|
335
|
-
};
|
|
336
|
-
await this.intializeProduct(additional_logs);
|
|
337
|
-
const process_id = this.process_id || (0, processor_utils_1.generateObjectId)();
|
|
338
|
-
this.process_id = process_id;
|
|
339
|
-
this.baseLogs = {
|
|
340
|
-
product_tag: this.productTag,
|
|
341
|
-
product_id: this.productId,
|
|
342
|
-
workspace_id: this.workspace_id,
|
|
343
|
-
env,
|
|
344
|
-
type: types_1.LogEventTypes.QUOTA,
|
|
345
|
-
process_id,
|
|
346
|
-
data: input,
|
|
347
|
-
};
|
|
348
|
-
this.quota = this.fetchQuota(quota_tag, additional_logs);
|
|
349
|
-
if (!this.quota) {
|
|
350
|
-
throw new Error(`Quota ${quota_tag} not found`);
|
|
351
|
-
}
|
|
352
|
-
this.logService.setFeatureId(this.quota._id);
|
|
353
|
-
this.processEnv = this.fetchEnv(env, additional_logs);
|
|
354
|
-
if (!this.processEnv.active) {
|
|
355
|
-
throw new Error(`Environment ${data.env} is not active`);
|
|
356
|
-
}
|
|
357
|
-
const { input: quotaInput, options } = this.quota;
|
|
358
|
-
// validate feature input and log failure
|
|
359
|
-
this.validateJSONFeatureInput(input, quotaInput, additional_logs);
|
|
360
|
-
// split processes
|
|
361
|
-
//this.sequenceLevels = this.splitSequenceIntoLevels(sequence, additional_logs);
|
|
362
|
-
await this.logService.publish();
|
|
363
|
-
return await this.runQuotaOptions(options, input, additional_logs);
|
|
364
|
-
}
|
|
365
|
-
catch (e) {
|
|
366
|
-
this.end = Date.now();
|
|
367
|
-
await this.logService.publish();
|
|
368
|
-
throw e;
|
|
369
|
-
}
|
|
370
|
-
}
|
|
371
|
-
async runQuotaOptions(options, input, additional_logs) {
|
|
372
|
-
try {
|
|
373
|
-
const quotaManager = new quota_service_1.default(options, this.redisClient);
|
|
374
|
-
const getNextProvider = quotaManager.getNextProvider();
|
|
375
|
-
const quotaInput = await this.mapQuotaFallbackInput(getNextProvider.input, input, getNextProvider.app);
|
|
376
|
-
const result = await this.processEvent({
|
|
377
|
-
app: getNextProvider.app,
|
|
378
|
-
type: getNextProvider.type,
|
|
379
|
-
event: getNextProvider.event,
|
|
380
|
-
input: quotaInput,
|
|
381
|
-
retries: getNextProvider.retries,
|
|
382
|
-
allow_fail: false
|
|
383
|
-
});
|
|
384
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Process quota successful', successful_execution: true, data: { result: (0, processor_utils_1.anonymizeObject)(result) }, status: types_1.LogEventStatus.PROCESSING }));
|
|
385
|
-
return result;
|
|
386
|
-
}
|
|
387
|
-
catch (e) {
|
|
388
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Process quota failed', failed_execution: true, data: { e }, status: types_1.LogEventStatus.PROCESSING }));
|
|
389
|
-
throw e;
|
|
390
|
-
}
|
|
391
|
-
}
|
|
392
|
-
async mapQuotaFallbackInput(providerInput, quotaInput, app) {
|
|
393
|
-
if (Array.isArray(providerInput)) {
|
|
394
|
-
return providerInput.map(async (item) => await this.mapQuotaFallbackInput(item, quotaInput, app));
|
|
395
|
-
}
|
|
396
|
-
else if (providerInput && typeof providerInput === 'object') {
|
|
397
|
-
const result = {};
|
|
398
|
-
for (const key in providerInput) {
|
|
399
|
-
result[key] = await this.mapQuotaFallbackInput(providerInput[key], quotaInput, app);
|
|
400
|
-
}
|
|
401
|
-
return result;
|
|
402
|
-
}
|
|
403
|
-
else {
|
|
404
|
-
// Base case: primitive value
|
|
405
|
-
return await this.overrideQuotaFallbackInput(providerInput, quotaInput, app);
|
|
406
|
-
}
|
|
407
|
-
}
|
|
408
|
-
async overrideQuotaFallbackInput(value, quotaInput, app) {
|
|
409
|
-
if (value.startsWith('$Input{')) {
|
|
410
|
-
return quotaInput[(0, string_utils_1.extractStages)(value)[0]];
|
|
411
|
-
}
|
|
412
|
-
else if (value.startsWith('$Auth')) {
|
|
413
|
-
return await this.generateAuthValue((0, string_utils_1.extractStages)(value), app, [], {});
|
|
414
|
-
}
|
|
415
|
-
else if (value.startsWith('$')) {
|
|
416
|
-
this.input = Object.assign(Object.assign({}, this.input), { input: quotaInput });
|
|
417
|
-
return await this.generateOperatorValues(value, '', {});
|
|
418
|
-
}
|
|
419
|
-
else {
|
|
420
|
-
return value;
|
|
421
|
-
}
|
|
422
|
-
}
|
|
423
|
-
async runFallbackOptions(options, input, additional_logs) {
|
|
424
|
-
return await this.executeActionWithFallback(options, input);
|
|
425
|
-
}
|
|
426
|
-
async executeActionWithFallback(providers, input) {
|
|
427
|
-
let index = 0;
|
|
428
|
-
for (const provider of providers) {
|
|
429
|
-
try {
|
|
430
|
-
const payload = {
|
|
431
|
-
app: provider.app,
|
|
432
|
-
type: provider.type,
|
|
433
|
-
event: provider.event,
|
|
434
|
-
input: await this.mapQuotaFallbackInput(provider.input, input, provider.app),
|
|
435
|
-
retries: provider.retries,
|
|
436
|
-
allow_fail: false
|
|
437
|
-
};
|
|
438
|
-
const result = await this.processEvent(payload);
|
|
439
|
-
if (result.status === types_1.LogEventStatus.FAIL) {
|
|
440
|
-
throw new Error(result.errors);
|
|
441
|
-
}
|
|
442
|
-
index++;
|
|
443
|
-
}
|
|
444
|
-
catch (error) {
|
|
445
|
-
if (index > 0) {
|
|
446
|
-
throw error;
|
|
447
|
-
}
|
|
448
|
-
console.warn(`${provider.app || provider.event} failed: ${error}`);
|
|
449
|
-
}
|
|
450
|
-
}
|
|
451
|
-
//throw new Error("All providers failed.");
|
|
452
|
-
}
|
|
453
|
-
async processFallback(data) {
|
|
454
|
-
this.start = Date.now();
|
|
455
|
-
const { product: product_tag, env, input, tag: fallback_tag } = data;
|
|
456
|
-
try {
|
|
457
|
-
this.productTag = product_tag;
|
|
458
|
-
const additional_logs = {
|
|
459
|
-
name: 'Process Fallback',
|
|
460
|
-
type: types_1.LogEventTypes.FALLBACK,
|
|
461
|
-
parent_tag: fallback_tag,
|
|
462
|
-
};
|
|
463
|
-
await this.intializeProduct(additional_logs);
|
|
464
|
-
const process_id = this.process_id || (0, processor_utils_1.generateObjectId)();
|
|
465
|
-
this.process_id = process_id;
|
|
466
|
-
this.baseLogs = {
|
|
467
|
-
product_tag: this.productTag,
|
|
468
|
-
product_id: this.productId,
|
|
469
|
-
workspace_id: this.workspace_id,
|
|
470
|
-
env,
|
|
471
|
-
type: types_1.LogEventTypes.FALLBACK,
|
|
472
|
-
process_id,
|
|
473
|
-
data: input,
|
|
474
|
-
};
|
|
475
|
-
this.fallback = this.fetchFallback(fallback_tag, additional_logs);
|
|
476
|
-
if (!this.fallback) {
|
|
477
|
-
throw new Error(`Fallback "${fallback_tag}" not found`);
|
|
478
|
-
}
|
|
479
|
-
this.logService.setFeatureId(this.fallback._id);
|
|
480
|
-
this.processEnv = this.fetchEnv(env, additional_logs);
|
|
481
|
-
if (!this.processEnv.active) {
|
|
482
|
-
throw new Error(`Environment ${data.env} is not active`);
|
|
483
|
-
}
|
|
484
|
-
const { input: fallbackInput, options } = this.fallback;
|
|
485
|
-
// validate feature input and log failure
|
|
486
|
-
this.validateJSONFeatureInput(input, fallbackInput, additional_logs);
|
|
487
|
-
// split processes
|
|
488
|
-
//this.sequenceLevels = this.splitSequenceIntoLevels(sequence, additional_logs);
|
|
489
|
-
return await this.runFallbackOptions(options, input, additional_logs);
|
|
490
|
-
}
|
|
491
|
-
catch (e) {
|
|
492
|
-
this.end = Date.now();
|
|
493
|
-
await this.logService.publish();
|
|
494
|
-
throw e;
|
|
495
|
-
}
|
|
496
|
-
}
|
|
497
|
-
async processFeature(data, awaitResolution = false) {
|
|
498
|
-
this.start = Date.now();
|
|
499
|
-
this.input = data;
|
|
500
|
-
const { product: product_tag, env, input, tag: feature_tag } = data;
|
|
501
|
-
let additional_logs;
|
|
502
|
-
let passedValidation;
|
|
503
|
-
try {
|
|
504
|
-
this.productTag = product_tag;
|
|
505
|
-
additional_logs = {
|
|
506
|
-
name: 'Process feature',
|
|
507
|
-
type: types_1.LogEventTypes.FEATURE,
|
|
508
|
-
};
|
|
509
|
-
await this.intializeProduct(additional_logs);
|
|
510
|
-
this.component = types_1.LogEventTypes.FEATURE;
|
|
511
|
-
const process_id = this.process_id || (0, processor_utils_1.generateObjectId)();
|
|
512
|
-
this.process_id = process_id;
|
|
513
|
-
this.feature = this.fetchFeature(feature_tag, additional_logs);
|
|
514
|
-
if (!this.feature) {
|
|
515
|
-
throw new Error(`Feature "${feature_tag}" not found`);
|
|
516
|
-
}
|
|
517
|
-
this.baseLogs = {
|
|
518
|
-
product_tag: this.productTag,
|
|
519
|
-
product_id: this.productId,
|
|
520
|
-
workspace_id: this.workspace_id,
|
|
521
|
-
env,
|
|
522
|
-
type: types_1.LogEventTypes.FEATURE,
|
|
523
|
-
process_id,
|
|
524
|
-
data: input,
|
|
525
|
-
feature_tag: feature_tag,
|
|
526
|
-
feature_id: this.feature._id,
|
|
527
|
-
};
|
|
528
|
-
this.logService.setFeatureId(this.feature._id);
|
|
529
|
-
this.processEnv = this.fetchEnv(env, additional_logs);
|
|
530
|
-
if (!this.processEnv.active) {
|
|
531
|
-
throw new Error(`Environment ${data.env} is not active`);
|
|
532
|
-
}
|
|
533
|
-
const { input: featureInput, sequence, output } = this.feature;
|
|
534
|
-
// validate feature input and log failure
|
|
535
|
-
this.validateJSONFeatureInput(input, featureInput, additional_logs);
|
|
536
|
-
// split processes
|
|
537
|
-
this.sequenceLevels = await this.splitSequenceIntoLevels(sequence, additional_logs);
|
|
538
|
-
await this.processSequenceLevels(additional_logs);
|
|
539
|
-
return { process_id };
|
|
540
|
-
//return this.generateOutput(output as unknown as Record<string, IFeatureOutput>);
|
|
541
|
-
}
|
|
542
|
-
catch (e) {
|
|
543
|
-
this.end = Date.now();
|
|
544
|
-
if (this.logService) {
|
|
545
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Process feature - failed', data: Object.assign(Object.assign({}, data), { input: (0, processor_utils_1.anonymizeObject)(data.input) }), status: types_1.LogEventStatus.PROCESSING }));
|
|
546
|
-
await this.logService.publish();
|
|
547
|
-
if (passedValidation) {
|
|
548
|
-
return { process_id: this.process_id };
|
|
549
|
-
}
|
|
550
|
-
else {
|
|
551
|
-
throw e;
|
|
552
|
-
}
|
|
553
|
-
}
|
|
554
|
-
else {
|
|
555
|
-
throw e;
|
|
556
|
-
}
|
|
557
|
-
}
|
|
558
|
-
}
|
|
559
430
|
async intializeProduct(additional_logs) {
|
|
560
431
|
if (!this.logService) {
|
|
561
432
|
this.logService = new logs_service_1.default({
|
|
@@ -574,8 +445,8 @@ class ProcessorService {
 else {
 await this.productBuilderService.initializeProduct(this.productId);
 }
-
-
+this.productId = this.productBuilderService.fetchProductId();
+const workspace_id = this.productBuilderService.fetchWorkspaceId();
 if (workspace_id !== this.workspace_id) {
 throw new Error('Access Denied');
 }
@@ -586,42 +457,35 @@ class ProcessorService {
|
|
|
586
457
|
throw e;
|
|
587
458
|
}
|
|
588
459
|
}
|
|
589
|
-
|
|
590
|
-
|
|
591
|
-
|
|
592
|
-
|
|
593
|
-
|
|
594
|
-
|
|
595
|
-
|
|
596
|
-
|
|
597
|
-
|
|
598
|
-
|
|
599
|
-
}
|
|
600
|
-
fetchQuota(tag, additional_logs) {
|
|
601
|
-
try {
|
|
602
|
-
const quota = this.productBuilderService.fetchQuota(tag); // validate feature exists
|
|
603
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch quota - success', data: { tag, quota }, status: types_1.LogEventStatus.SUCCESS }));
|
|
604
|
-
return quota;
|
|
605
|
-
}
|
|
606
|
-
catch (e) {
|
|
607
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch quota - failed', data: { tag, e }, status: types_1.LogEventStatus.FAIL }));
|
|
608
|
-
throw e;
|
|
460
|
+
async initializePricing(additional_logs, access_tag) {
|
|
461
|
+
if (!this.logService) {
|
|
462
|
+
this.logService = new logs_service_1.default({
|
|
463
|
+
product_id: this.productId,
|
|
464
|
+
workspace_id: this.workspace_id,
|
|
465
|
+
public_key: this.public_key,
|
|
466
|
+
user_id: this.user_id,
|
|
467
|
+
token: this.token,
|
|
468
|
+
env_type: this.environment,
|
|
469
|
+
});
|
|
609
470
|
}
|
|
610
|
-
}
|
|
611
|
-
fetchFallback(tag, additional_logs) {
|
|
612
471
|
try {
|
|
613
|
-
|
|
614
|
-
this.
|
|
615
|
-
|
|
472
|
+
console.log(`Initializing pricing for access tag: ${access_tag}`);
|
|
473
|
+
const product_app = await this.productBuilderService.fetchApp(access_tag); // validate app exists
|
|
474
|
+
console.log(`Found product app: ${JSON.stringify(product_app)}`);
|
|
475
|
+
const app = await this.productBuilderService.fetchThirdPartyAppByAccessTag(product_app.access_tag);
|
|
476
|
+
await this.pricingService.initializePricingByTag(product_app.pricing_tag, app._id);
|
|
477
|
+
const { pricing_tag } = this.pricingService.fetchPricing();
|
|
478
|
+
this.pricingTag = pricing_tag;
|
|
479
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Pricing initialize - success', data: { pricing_tag: this.pricingTag }, status: types_1.LogEventStatus.SUCCESS }));
|
|
616
480
|
}
|
|
617
481
|
catch (e) {
|
|
618
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: '
|
|
482
|
+
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Pricing initialize - failed', data: e, status: types_1.LogEventStatus.FAIL }));
|
|
619
483
|
throw e;
|
|
620
484
|
}
|
|
621
485
|
}
|
|
622
|
-
fetchEnv(env, additional_logs) {
|
|
486
|
+
async fetchEnv(env, additional_logs) {
|
|
623
487
|
try {
|
|
624
|
-
const product_env = this.productBuilderService.fetchEnv(env); // validate env exists
|
|
488
|
+
const product_env = await this.productBuilderService.fetchEnv(env); // validate env exists
|
|
625
489
|
if (!product_env) {
|
|
626
490
|
throw new Error(`Env ${env} not found`);
|
|
627
491
|
}
|
|
@@ -633,271 +497,6 @@ class ProcessorService {
|
|
|
633
497
|
throw e;
|
|
634
498
|
}
|
|
635
499
|
}
|
|
636
|
-
validateJSONFeatureInput(input, feature_input, additional_logs) {
|
|
637
|
-
try {
|
|
638
|
-
(0, processor_utils_1.validateFeatureJSONInput)(input, feature_input);
|
|
639
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Input validation - success', data: {}, status: types_1.LogEventStatus.SUCCESS }));
|
|
640
|
-
}
|
|
641
|
-
catch (e) {
|
|
642
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Input validation - failed', data: { e }, status: types_1.LogEventStatus.FAIL }));
|
|
643
|
-
throw e;
|
|
644
|
-
}
|
|
645
|
-
}
|
|
646
|
-
async splitSequenceIntoLevels(data, additional_logs) {
|
|
647
|
-
try {
|
|
648
|
-
const levels = {};
|
|
649
|
-
const tagMap = new Map(data.map((seq) => [seq.tag, seq]));
|
|
650
|
-
const assignedLevels = new Map();
|
|
651
|
-
let currentLevel = 1;
|
|
652
|
-
let remainingSequences = [...data];
|
|
653
|
-
while (remainingSequences.length > 0) {
|
|
654
|
-
const currentLevelSequences = [];
|
|
655
|
-
remainingSequences = remainingSequences.filter((seq) => {
|
|
656
|
-
var _a;
|
|
657
|
-
const parentLevels = ((_a = seq.parents) === null || _a === void 0 ? void 0 : _a.map((parent) => { var _a; return (_a = assignedLevels.get(parent)) !== null && _a !== void 0 ? _a : -1; })) || [];
|
|
658
|
-
const isCurrentLevel = parentLevels.length === 0 || Math.max(...parentLevels) === currentLevel - 1;
|
|
659
|
-
if (isCurrentLevel) {
|
|
660
|
-
currentLevelSequences.push(seq);
|
|
661
|
-
assignedLevels.set(seq.tag, currentLevel);
|
|
662
|
-
return false; // Remove from remainingSequences
|
|
663
|
-
}
|
|
664
|
-
return true;
|
|
665
|
-
});
|
|
666
|
-
if (currentLevelSequences.length > 0) {
|
|
667
|
-
levels[currentLevel] = currentLevelSequences;
|
|
668
|
-
currentLevel++;
|
|
669
|
-
}
|
|
670
|
-
else {
|
|
671
|
-
break; // Prevent infinite loop if there's a cycle
|
|
672
|
-
}
|
|
673
|
-
}
|
|
674
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Split sequence - success', data: { levels }, status: types_1.LogEventStatus.SUCCESS }));
|
|
675
|
-
return levels;
|
|
676
|
-
}
|
|
677
|
-
catch (e) {
|
|
678
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Split sequence - failed', data: { e }, status: types_1.LogEventStatus.FAIL }));
|
|
679
|
-
throw e;
|
|
680
|
-
}
|
|
681
|
-
}
|
|
682
|
-
async processSequenceLevels(additional_logs) {
|
|
683
|
-
try {
|
|
684
|
-
const levelEvents = {};
|
|
685
|
-
Object.entries(this.sequenceLevels).forEach(([level, sequences]) => {
|
|
686
|
-
levelEvents[parseInt(level)] = this.fetchLevelEvents(sequences, parseInt(level));
|
|
687
|
-
});
|
|
688
|
-
let previousLevelComplete = true;
|
|
689
|
-
for (const level of Object.keys(levelEvents)
|
|
690
|
-
.map(Number)
|
|
691
|
-
.sort((a, b) => a - b)) {
|
|
692
|
-
if (previousLevelComplete) {
|
|
693
|
-
previousLevelComplete = await this.processLevelEvents(levelEvents[level], additional_logs);
|
|
694
|
-
}
|
|
695
|
-
else {
|
|
696
|
-
break;
|
|
697
|
-
}
|
|
698
|
-
}
|
|
699
|
-
this.doneWithProcessing = true;
|
|
700
|
-
if (previousLevelComplete && !this.published) {
|
|
701
|
-
let message;
|
|
702
|
-
let status;
|
|
703
|
-
let successful_feature_execution;
|
|
704
|
-
let failed_feature_execution;
|
|
705
|
-
if (this.processingOutput.failure.length === 0) {
|
|
706
|
-
message = 'Process feature - success';
|
|
707
|
-
status = types_1.LogEventStatus.SUCCESS;
|
|
708
|
-
successful_feature_execution = true;
|
|
709
|
-
}
|
|
710
|
-
else if (this.processingFailure) {
|
|
711
|
-
message = 'Process feature - processing';
|
|
712
|
-
status = types_1.LogEventStatus.PROCESSING;
|
|
713
|
-
successful_feature_execution = false;
|
|
714
|
-
failed_feature_execution = false;
|
|
715
|
-
}
|
|
716
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { successful_feature_execution,
|
|
717
|
-
failed_feature_execution,
|
|
718
|
-
message, data: {}, status }));
|
|
719
|
-
await this.logService.publish();
|
|
720
|
-
this.end = Date.now();
|
|
721
|
-
await this.writeResult(status);
|
|
722
|
-
}
|
|
723
|
-
return true;
|
|
724
|
-
}
|
|
725
|
-
catch (e) {
|
|
726
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Process sequence levels - failed', data: { e }, status: types_1.LogEventStatus.FAIL }));
|
|
727
|
-
throw e;
|
|
728
|
-
}
|
|
729
|
-
}
|
|
730
|
-
async processLevelEvents(events, additional_logs) {
|
|
731
|
-
try {
|
|
732
|
-
const promises = events.map((event) => {
|
|
733
|
-
const dependants = this.fetchActionRequestDependents(event.input, additional_logs);
|
|
734
|
-
const passed = this.checkDependentsSuccess(dependants);
|
|
735
|
-
if (passed) {
|
|
736
|
-
// TODO: comparison to see if all depending events are in success || dependants is empty
|
|
737
|
-
return this.processEvent(event);
|
|
738
|
-
}
|
|
739
|
-
else {
|
|
740
|
-
this.addToWaitingOutput(event, dependants);
|
|
741
|
-
}
|
|
742
|
-
});
|
|
743
|
-
return Promise.all(promises);
|
|
744
|
-
}
|
|
745
|
-
catch (e) {
|
|
746
|
-
throw e;
|
|
747
|
-
}
|
|
748
|
-
}
|
|
749
|
-
async processFailedEvents(additional_logs) {
|
|
750
|
-
try {
|
|
751
|
-
const { failure } = this.processingOutput;
|
|
752
|
-
const promises = failure.map((failed) => {
|
|
753
|
-
if (failed.retries_left > 0 && new Date().getTime() > failed.retry_at) {
|
|
754
|
-
return this.processEvent(failed.event); // process events should also take care of this.processingOutput
|
|
755
|
-
}
|
|
756
|
-
if (failed.retries_left === 0 && !failed.allow_fail) {
|
|
757
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Reprocess failed events - failed', data: Object.assign(Object.assign({}, failed), { reason: 'Ran out of Retries' }), status: types_1.LogEventStatus.FAIL }));
|
|
758
|
-
throw new Error(`Event ${failed.event.event} failed in sequence ${failed.event.sequence_tag}, ran out of retries and the feature cannot run without it succeeding`);
|
|
759
|
-
}
|
|
760
|
-
});
|
|
761
|
-
Promise.all(promises);
|
|
762
|
-
}
|
|
763
|
-
catch (e) {
|
|
764
|
-
throw e;
|
|
765
|
-
}
|
|
766
|
-
}
|
|
767
|
-
async processWaitingEvents(additional_logs) {
|
|
768
|
-
try {
|
|
769
|
-
const { waiting } = this.processingOutput;
|
|
770
|
-
const promises = waiting.map((waiting) => {
|
|
771
|
-
const { dependants } = waiting;
|
|
772
|
-
if (this.checkDependentsSuccess(dependants)) {
|
|
773
|
-
// TODO: comparison to see if all depending events are in success || dependants is empty
|
|
774
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Reprocess waiting events - initiated', data: Object.assign({}, waiting), status: types_1.LogEventStatus.PROCESSING }));
|
|
775
|
-
return this.processEvent(waiting.event);
|
|
776
|
-
}
|
|
777
|
-
else {
|
|
778
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Reprocess waiting events - waiting', data: Object.assign({}, waiting), status: types_1.LogEventStatus.WAITING }));
|
|
779
|
-
}
|
|
780
|
-
});
|
|
781
|
-
return Promise.all(promises);
|
|
782
|
-
}
|
|
783
|
-
catch (e) {
|
|
784
|
-
throw e;
|
|
785
|
-
}
|
|
786
|
-
}
|
|
787
|
-
checkDependentsSuccess(dependants) {
|
|
788
|
-
let pass = true;
|
|
789
|
-
for (let i = 0; i < dependants.length; i++) {
|
|
790
|
-
if (!this.processingOutput.success.find((item) => item.event.sequence_tag === dependants[i].sequence_tag && item.event.event === dependants[i].event_tag)) {
|
|
791
|
-
pass = false;
|
|
792
|
-
}
|
|
793
|
-
}
|
|
794
|
-
return pass;
|
|
795
|
-
}
|
|
796
|
-
fetchActionRequestDependents(input, additional_logs) {
|
|
797
|
-
try {
|
|
798
|
-
const dependents = [];
|
|
799
|
-
if (input.query) {
|
|
800
|
-
dependents.push(...this.fetchDependents(input.query, additional_logs));
|
|
801
|
-
}
|
|
802
|
-
if (input.body) {
|
|
803
|
-
dependents.push(...this.fetchDependents(input.body, additional_logs));
|
|
804
|
-
}
|
|
805
|
-
if (input.headers) {
|
|
806
|
-
dependents.push(...this.fetchDependents(input.headers, additional_logs));
|
|
807
|
-
}
|
|
808
|
-
if (input.params) {
|
|
809
|
-
dependents.push(...this.fetchDependents(input.params, additional_logs));
|
|
810
|
-
}
|
|
811
|
-
if (input.data) {
|
|
812
|
-
dependents.push(...this.fetchDependents(input.data, additional_logs));
|
|
813
|
-
}
|
|
814
|
-
if (input.fileName) {
|
|
815
|
-
dependents.push(...this.valueStringDepsCheck(input.fileName));
|
|
816
|
-
}
|
|
817
|
-
if (input.buffer) {
|
|
818
|
-
dependents.push(...this.valueStringDepsCheck(input.buffer));
|
|
819
|
-
}
|
|
820
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch request dependencies - success', data: { input: (0, processor_utils_1.anonymizeObject)(input), dependents }, status: types_1.LogEventStatus.SUCCESS }));
|
|
821
|
-
return dependents;
|
|
822
|
-
}
|
|
823
|
-
catch (e) {
|
|
824
|
-
this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch request dependents - failed', data: { e }, status: types_1.LogEventStatus.FAIL }));
|
|
825
|
-
}
|
|
826
|
-
}
|
|
827
|
-
fetchDependents(obj, additional_logs) {
|
|
828
|
-
try {
|
|
829
|
-
const dependants = [];
|
|
830
|
-
for (const key in obj) {
|
|
831
|
-
const value = obj[key];
|
|
832
|
-
if (typeof value === 'object') {
|
|
833
|
-
if ('function' in value && 'values' in value) {
|
|
834
|
-
const { function: func, values } = value;
|
|
835
|
-
for (let i = 0; i < values.length; i++) {
|
|
836
|
-
if (values[i].startsWith('$Sequence')) {
|
|
837
|
-
const stages = this.productBuilderService.extractStages(values[i]);
|
|
838
|
-
dependants.push({
|
|
839
|
-
sequence_tag: stages[0],
|
|
840
|
-
event_tag: stages[1],
|
|
841
|
-
});
|
|
842
|
-
}
|
|
843
|
-
else if (values[i].startsWith('$')) {
|
|
844
|
-
const funcArgs = (0, functions_utils_1.extractFunctionAndArgs)(values[i]);
|
|
845
|
-
if (funcArgs.args.length) {
|
|
846
|
-
funcArgs.args.map((arg) => {
|
|
847
|
-
if (arg.startsWith('$Sequence')) {
|
|
848
|
-
const stages = this.productBuilderService.extractStages(arg);
|
|
849
|
-
dependants.push({
|
|
850
|
-
sequence_tag: stages[0],
|
|
851
|
-
event_tag: stages[1],
|
|
852
|
-
});
|
|
853
|
-
}
|
|
854
|
-
});
|
|
855
|
-
}
|
|
856
|
-
}
|
|
857
|
-
}
|
|
858
|
-
}
|
|
859
|
-
else {
|
|
860
|
-
dependants.push(...this.fetchDependents(value, additional_logs));
|
|
861
|
-
}
|
|
862
|
-
}
|
|
863
|
-
else if (typeof value === 'string') {
|
|
864
|
-
dependants.push(...this.valueStringDepsCheck(value.trim()));
|
|
865
|
-
}
|
|
866
|
-
}
|
|
867
|
-
return dependants;
|
|
868
|
-
}
|
|
869
|
-
catch (e) {
|
|
870
|
-
throw e;
|
|
871
|
-
}
|
|
872
|
-
}
|
|
873
|
-
valueStringDepsCheck(value) {
|
|
874
|
-
const dependants = [];
|
|
875
|
-
if (value.startsWith('$Sequence')) {
|
|
876
|
-
const stages = this.productBuilderService.extractStages(value);
|
|
877
|
-
dependants.push({ sequence_tag: stages[0], event_tag: stages[1] });
|
|
878
|
-
}
|
|
879
|
-
else if (value.startsWith('$')) {
|
|
880
|
-
const funcArgs = (0, functions_utils_1.extractFunctionAndArgs)(value);
|
|
881
|
-
if (funcArgs && funcArgs.args.length) {
|
|
882
|
-
funcArgs.args.map((arg) => {
|
|
883
|
-
if (arg.startsWith('$Sequence')) {
|
|
884
|
-
const stages = this.productBuilderService.extractStages(arg);
|
|
885
|
-
dependants.push({
|
|
886
|
-
sequence_tag: stages[0],
|
|
887
|
-
event_tag: stages[1],
|
|
888
|
-
});
|
|
889
|
-
}
|
|
890
|
-
else {
|
|
891
|
-
const args = arg.split(',');
|
|
892
|
-
args.map((arg) => {
|
|
893
|
-
dependants.push(...this.valueStringDepsCheck(arg.trim()));
|
|
894
|
-
});
|
|
895
|
-
}
|
|
896
|
-
});
|
|
897
|
-
}
|
|
898
|
-
}
|
|
899
|
-
return dependants;
|
|
900
|
-
}
|
|
901
500
|
async constructJSONDataPayloads(object, additional_logs, samples, event, loopIndex = 0) {
|
|
902
501
|
try {
|
|
903
502
|
const payload = {};
|
|
@@ -957,6 +556,7 @@ class ProcessorService {
 async generatePayload(obj, event, additional_logs, sample = [], index = {}, loopIndex = null) {
 try {
 const payload = {};
+console.log('Payload Construction', { obj, event, sample, index, loopIndex });
 const keys = Object.keys(obj);
 for (let i = 0; i < keys.length; i++) {
 const key = keys[i];
@@ -1002,22 +602,32 @@ class ProcessorService {
 const locatorFor$Index = (0, string_utils_1.validateAndLocateTag)(value);
 if (value.startsWith('$Auth{') && value.endsWith('}')) {
 // should only be allowed in apps
-
+// Convert stages to string[] for functions expecting string[]
+const stringStages = stages.map((stage) => String(stage));
+return await this.generateAuthValue(stringStages, app, sample, additional_logs);
 }
 else if (value.startsWith('$Sequence{') && value.endsWith('}')) {
-
+// Convert stages to string[] for functions expecting string[]
+const stringStages = stages.map((stage) => String(stage));
+return await this.generateSequenceValue(stringStages, locatorFor$Index, loopIndex); // pass
 }
 else if (value.startsWith('$Input{') && value.endsWith('}')) {
-
+// Convert stages to string[] for functions expecting string[]
+const stringStages = stages.map((stage) => String(stage));
+return await this.generateInputValue(this.input.input, stringStages);
 }
 else if (value === '$Default') {
 return await this.generateDefaultValue(sample, Object.assign(Object.assign({}, index), { key }));
 }
 else if (value.startsWith('$Variable{') && value.endsWith('}')) {
-
+// Convert stages to string[] for functions expecting string[]
+const stringStages = stages.map((stage) => String(stage));
+return await this.generateVariableValue(stringStages);
 }
 else if (value.startsWith('$Constant{') && value.endsWith('}')) {
-
+// Convert stages to string[] for functions expecting string[]
+const stringStages = stages.map((stage) => String(stage));
+return await this.generateConstantValue(stringStages);
 }
 else if (value.startsWith('$Size{') || value.startsWith('$Length{')) {
 const { matchLength, matchSize } = (0, string_utils_1.checkLengthAndSizeMatches)(value);
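The branches above normalize the extracted stages to `string[]` before resolving `$Auth{…}`, `$Sequence{…}`, `$Input{…}`, `$Variable{…}` and `$Constant{…}` placeholders. A simplified dispatcher for that placeholder syntax is sketched below; the resolver map and the `extractStages` helper are stand-ins, not the SDK's internals:

```ts
// Simplified resolver dispatch for the `$Xxx{...}` placeholder syntax used by generatePayload.
type Resolver = (stages: string[]) => Promise<unknown>;

function extractStages(value: string): string[] {
  // "$Input{user.email}" -> ["user", "email"] (simplified parsing for this sketch)
  return value.slice(value.indexOf('{') + 1, -1).split('.');
}

async function resolvePlaceholder(value: string, resolvers: Record<string, Resolver>): Promise<unknown> {
  const match = /^\$(\w+)\{.*\}$/.exec(value);
  if (!match) return value;                         // plain literal, passed through untouched
  const resolver = resolvers[match[1]];             // e.g. Auth, Sequence, Input, Variable, Constant
  if (!resolver) throw new Error(`Unknown placeholder ${match[1]}`);
  const stages = extractStages(value).map(String);  // mirrors the string[] conversion in the diff
  return resolver(stages);
}
```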
@@ -1142,6 +752,7 @@ class ProcessorService {
 const placeholdersStr = match[1];
 const separator = match[2];
 const placeHolders = placeholdersStr.split(',').map((data) => data.trim());
+console.log('placeHolders', { placeHolders, separator });
 const values = await Promise.all(placeHolders.map(async (holder) => {
 return await this.generateStringValues(holder, app, additional_logs, sample, index, key, loopIndex);
 }));
@@ -1414,7 +1025,7 @@ class ProcessorService {
 }
 async generateVariableValue(stages) {
 try {
-const app = this.productBuilderService.fetchApp(stages[0]);
+const app = await this.productBuilderService.fetchApp(stages[0]);
 const env = app.envs.find((items) => items.product_env_slug === this.processEnv.slug);
 if (!env) {
 throw new Error(`App ${stages[0]} variables needs to have a definition for env: ${this.processEnv.slug}`);
@@ -1460,6 +1071,7 @@ class ProcessorService {
 }
 async generateAuthValue(stages, app, sample, additional_logs) {
 try {
+console.log('Generate Auth Data', { stages, app, sample });
 let auth_data = await this.fetchAuthData(app, additional_logs); //TODO: should use stages[0]
 // take the app tag in index 0..
 if (!auth_data) {
@@ -1475,7 +1087,7 @@ class ProcessorService {
 }
 async fetchAuthData(app_tag, additional_logs) {
 try {
-const app = this.productBuilderService.fetchApp(app_tag);
+const app = await this.productBuilderService.fetchApp(app_tag);
 if (!app) {
 throw new Error(`App ${app_tag} not found in $Auth value`);
 }
@@ -1486,6 +1098,7 @@ class ProcessorService {
 if (!env.auth) {
 throw new Error(`App ${app_tag} in auth needs to have a definition for auth in env: ${this.processEnv.slug}`);
 }
+console.log('Envroment', env.auth);
 let values = env.auth.values;
 if (!values) {
 // no auth values
@@ -1493,9 +1106,10 @@ class ProcessorService {
 }
 if (!env.auth.expiry || (env.auth.expiry && Date.now() > new Date(env.auth.expiry).getTime())) {
 // refresh
+console.log('REFRESH DATA', env, app_tag);
 values = await this.getAndStoreAuth(env, app_tag);
 }
-const decrypted = (0, processor_utils_1.decrypt)(values, this.productBuilderService.
+const decrypted = (0, processor_utils_1.decrypt)(values, this.productBuilderService.fetchPrivateKey());
 this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch auth data - success', data: { auth: (0, processor_utils_1.anonymizeValue)(decrypted) }, status: types_1.LogEventStatus.SUCCESS }));
 return JSON.parse(decrypted);
 }
@@ -1547,16 +1161,6 @@ class ProcessorService {
 };
 return this.runAction(event, additional_logs);
 }
-if (event.type === types_1.FeatureEventTypes.DB_ACTION) {
-const [parent_tag, child_tag] = event.event.split(':');
-const additional_logs = {
-parent_tag,
-child_tag,
-type: types_1.LogEventTypes.DB_ACTION,
-name: 'Process feature database action',
-};
-return this.runDBAction(event, additional_logs);
-}
 if (event.type === types_1.FeatureEventTypes.STORAGE) {
 this.clone = (0, processor_utils_1.structuredClone)(event);
 (0, processor_utils_1.cleanBlob)(this.clone);
@@ -1567,14 +1171,6 @@ class ProcessorService {
 };
 return this.runStorage(event, additional_logs);
 }
-if (event.type === types_1.FeatureEventTypes.FEATURE) {
-const additional_logs = {
-parent_tag: event.event,
-type: types_1.LogEventTypes.FEATURE,
-name: 'Process sub-feature',
-};
-return this.runFeature(event, additional_logs);
-}
 if (event.type === types_1.FeatureEventTypes.NOTIFICATION) {
 const [parent_tag, child_tag] = event.event.split(':');
 const additional_logs = {
@@ -1596,8 +1192,7 @@ class ProcessorService {
 try {
 return this.runBrokerPublish(event, additional_logs);
 }
-catch (e) {
-}
+catch (e) { }
 }
 if (event.type === types_1.FeatureEventTypes.JOB) {
 const additional_logs = {
@@ -1613,7 +1208,7 @@ class ProcessorService {
 input: event.input,
 start_at: event.start_at || 0,
 cache: event.cache,
-session: this.input.session
+session: this.input.session,
 };
 // TODO: fix this
 return this.processJob(input, additional_logs);
@@ -1623,188 +1218,574 @@ class ProcessorService {
|
|
|
1623
1218
|
throw e;
|
|
1624
1219
|
}
|
|
1625
1220
|
}
|
|
1626
|
-
async runFeature(event, additional_logs) {
|
|
1627
|
-
const freshInstance = Object.assign(Object.create(Object.getPrototypeOf(this)), this);
|
|
1628
|
-
const result = await freshInstance.processFeature({
|
|
1629
|
-
product: this.productTag,
|
|
1630
|
-
tag: event.event,
|
|
1631
|
-
env: this.processEnv.slug,
|
|
1632
|
-
input: event.input,
|
|
1633
|
-
}, true);
|
|
1634
|
-
const output = await freshInstance.generateOutput(result.process_id);
|
|
1635
|
-
if (output.status === types_1.LogEventStatus.FAIL) {
|
|
1636
|
-
await this.addToFailureOutput(output.errors, event, { process_id: result.process_id }, Object.assign(Object.assign({}, additional_logs), { process_id: result.process_id }));
|
|
1637
|
-
}
|
|
1638
|
-
else if (output.status === types_1.LogEventStatus.SUCCESS) {
|
|
1639
|
-
await this.addToSuccessOutput(event, output.data, Object.assign(Object.assign({}, additional_logs), { process_id: result.process_id }));
|
|
1640
|
-
}
|
|
1641
|
-
return output;
|
|
1642
|
-
}
|
|
1643
1221
|
async runJobs(job, additional_logs = {}) {
|
|
1644
|
-
|
|
1645
|
-
|
|
1646
|
-
|
|
1647
|
-
|
|
1648
|
-
|
|
1649
|
-
|
|
1650
|
-
const result = await this.processorApiService.fetchResult(process_id, this.getUserAccess());
|
|
1651
|
-
if (!result) {
|
|
1652
|
-
throw new Error(`Invalid process id ${process_id}`);
|
|
1222
|
+
var _a;
|
|
1223
|
+
const jobId = (_a = job.data) === null || _a === void 0 ? void 0 : _a._job_id;
|
|
1224
|
+
const jobType = job.name;
|
|
1225
|
+
// Update job status to running
|
|
1226
|
+
if (jobId && this.redisClient) {
|
|
1227
|
+
await this.updateJobStatus(jobId, 'running', { started_at: Date.now() });
|
|
1653
1228
|
}
|
|
1654
|
-
|
|
1655
|
-
|
|
1656
|
-
|
|
1657
|
-
|
|
1658
|
-
}
|
|
1659
|
-
async generateOutput(process_id) {
|
|
1660
|
-
var _a, _b, _c, _d;
|
|
1661
|
-
const result = await this.fetchResult(process_id);
|
|
1662
|
-
if (result.component === types_1.LogEventTypes.FEATURE) {
|
|
1663
|
-
const additional_logs = {
|
|
1664
|
-
parent_tag: result.input.tag,
|
|
1665
|
-
type: types_1.LogEventTypes.FEATURE,
|
|
1666
|
-
name: 'Fetching Process Result',
|
|
1667
|
-
};
|
|
1668
|
-
if (result.status === types_1.LogEventStatus.PROCESSING) {
|
|
1669
|
-
return { process_id, status: result.status };
|
|
1229
|
+
try {
|
|
1230
|
+
let result;
|
|
1231
|
+
if (jobType === types_1.JobEventTypes.ACTION || jobType === types_1.FeatureEventTypes.ACTION) {
|
|
1232
|
+
result = await this.processAction(job.data);
|
|
1670
1233
|
}
|
|
1671
|
-
else if (
|
|
1672
|
-
|
|
1673
|
-
await this.intializeProduct(additional_logs);
|
|
1674
|
-
this.processingOutput = result.result;
|
|
1675
|
-
this.process_id = process_id;
|
|
1676
|
-
this.input = result.input;
|
|
1677
|
-
this.feature = await this.fetchFeature(result.input.tag, additional_logs);
|
|
1678
|
-
const { input: featureInput, sequence, output } = this.feature;
|
|
1679
|
-
const data = await this.generatePayload(output, null, additional_logs, []);
|
|
1680
|
-
return { process_id, status: result.status, data };
|
|
1234
|
+
else if (jobType === types_1.JobEventTypes.NOTIFICATION) {
|
|
1235
|
+
result = await this.processNotification(job.data);
|
|
1681
1236
|
}
|
|
1682
|
-
else if (
|
|
1683
|
-
|
|
1684
|
-
return { process_id, status: result.status, errors };
|
|
1237
|
+
else if (jobType === types_1.JobEventTypes.STORAGE) {
|
|
1238
|
+
result = await this.processStorage(job.data);
|
|
1685
1239
|
}
|
|
1686
|
-
|
|
1687
|
-
|
|
1688
|
-
|
|
1689
|
-
|
|
1240
|
+
else if (jobType === types_1.JobEventTypes.PUBLISH) {
|
|
1241
|
+
result = await this.processMessageBrokerPublish(job.data);
|
|
1242
|
+
}
|
|
1243
|
+
else if (jobType === types_1.JobEventTypes.DATABASE_ACTION) {
|
|
1244
|
+
// Database actions use processAction with database-specific input
|
|
1245
|
+
result = await this.processDatabaseAction(job.data);
|
|
1246
|
+
}
|
|
1247
|
+
else if (jobType === types_1.JobEventTypes.DATABASE_OPERATION) {
|
|
1248
|
+
// Handle database operations
|
|
1249
|
+
result = await this.processDatabaseOperation(job.data);
|
|
1690
1250
|
}
|
|
1691
|
-
else if (
|
|
1692
|
-
|
|
1251
|
+
else if (jobType === types_1.JobEventTypes.GRAPH_ACTION) {
|
|
1252
|
+
result = await this.processGraphAction(job.data);
|
|
1253
|
+
}
|
|
1254
|
+
else if (jobType === types_1.JobEventTypes.GRAPH_OPERATION) {
|
|
1255
|
+
result = await this.processGraphOperation(job.data);
|
|
1256
|
+
}
|
|
1257
|
+
else if (jobType === types_1.JobEventTypes.WORKFLOW) {
|
|
1258
|
+
result = await this.processWorkflow(job.data);
|
|
1693
1259
|
}
|
|
1694
1260
|
else {
|
|
1695
|
-
|
|
1261
|
+
throw new Error(`Unknown job type: ${jobType}`);
|
|
1696
1262
|
}
|
|
1697
|
-
|
|
1698
|
-
|
|
1699
|
-
|
|
1700
|
-
|
|
1701
|
-
|
|
1702
|
-
|
|
1703
|
-
}
|
|
1704
|
-
const additional_logs = {
|
|
1705
|
-
parent_tag: result.input.tag,
|
|
1706
|
-
type: result.component,
|
|
1707
|
-
name: 'Resume Process',
|
|
1708
|
-
};
|
|
1709
|
-
this.productTag = result.input.product;
|
|
1710
|
-
await this.intializeProduct(additional_logs);
|
|
1711
|
-
this.processingOutput = result.result;
|
|
1712
|
-
this.process_id = process_id;
|
|
1713
|
-
await this.processFailedEvents(additional_logs);
|
|
1714
|
-
await this.processWaitingEvents(additional_logs);
|
|
1715
|
-
this.input = result.input;
|
|
1716
|
-
this.start = Date.now();
|
|
1717
|
-
if (result.component === types_1.LogEventTypes.FEATURE) {
|
|
1718
|
-
this.feature = await this.fetchFeature(result.input.tag, additional_logs);
|
|
1719
|
-
const { input: featureInput, sequence, output } = this.feature;
|
|
1720
|
-
this.processEnv = this.fetchEnv(result.env, additional_logs);
|
|
1721
|
-
if (!this.processEnv.active) {
|
|
1722
|
-
throw new Error(`Environment ${result.env} is not active`);
|
|
1723
|
-
}
|
|
1724
|
-
// validate feature input and log failure
|
|
1725
|
-
this.validateJSONFeatureInput(result.input.input, featureInput, additional_logs);
|
|
1726
|
-
// split processes
|
|
1727
|
-
this.sequenceLevels = await this.splitSequenceIntoLevels(sequence, additional_logs);
|
|
1728
|
-
await this.processSequenceLevels(additional_logs);
|
|
1729
|
-
}
|
|
1730
|
-
else {
|
|
1731
|
-
this.end = Date.now();
|
|
1732
|
-
let status = types_1.LogEventStatus.SUCCESS;
|
|
1733
|
-
if (this.processingOutput.failure.length > 0) {
|
|
1734
|
-
status = types_1.LogEventStatus.FAIL;
|
|
1263
|
+
// Update job status to completed
|
|
1264
|
+
if (jobId && this.redisClient) {
|
|
1265
|
+
await this.updateJobStatus(jobId, 'completed', {
|
|
1266
|
+
completed_at: Date.now(),
|
|
1267
|
+
result,
|
|
1268
|
+
});
|
|
1735
1269
|
}
|
|
1736
|
-
|
|
1737
|
-
}
|
|
1738
|
-
return { process_id };
|
|
1739
|
-
}
|
|
1740
|
-
async replayProcess(process_id) {
|
|
1741
|
-
var _a, _b, _c;
|
|
1742
|
-
const result = await this.fetchResult(process_id);
|
|
1743
|
-
if (!result) {
|
|
1744
|
-
throw new Error(`Invalid process id ${process_id}`);
|
|
1745
|
-
}
|
|
1746
|
-
this.productTag = result.input.product;
|
|
1747
|
-
this.process_id = process_id;
|
|
1748
|
-
this.input = result.input;
|
|
1749
|
-
this.start = Date.now();
|
|
1750
|
-
this.component = result.component;
|
|
1751
|
-
const additional_logs = {
|
|
1752
|
-
parent_tag: result.input.tag,
|
|
1753
|
-
type: result.component,
|
|
1754
|
-
name: 'Replay Process',
|
|
1755
|
-
};
|
|
1756
|
-
await this.intializeProduct(additional_logs);
|
|
1757
|
-
//await this.processFailedEvents(additional_logs);
|
|
1758
|
-
if (result.component === types_1.LogEventTypes.FEATURE) {
|
|
1759
|
-
//await this.processWaitingEvents(additional_logs);
|
|
1760
|
-
this.feature = await this.fetchFeature(result.input.tag, additional_logs);
|
|
1761
|
-
const { input: featureInput, sequence, output } = this.feature;
|
|
1762
|
-
this.processEnv = this.fetchEnv(result.env, additional_logs);
|
|
1763
|
-
if (!this.processEnv.active) {
|
|
1764
|
-
throw new Error(`Environment ${result.env} is not active`);
|
|
1765
|
-
}
|
|
1766
|
-
// validate feature input and log failure
|
|
1767
|
-
this.validateJSONFeatureInput(result.input.input, featureInput, additional_logs);
|
|
1768
|
-
// split processes
|
|
1769
|
-
this.sequenceLevels = await this.splitSequenceIntoLevels(sequence, additional_logs);
|
|
1770
|
-
await this.processSequenceLevels(additional_logs);
|
|
1270
|
+
return result;
|
|
1771
1271
|
}
|
|
1772
|
-
|
|
1773
|
-
|
|
1774
|
-
|
|
1775
|
-
|
|
1776
|
-
|
|
1777
|
-
|
|
1778
|
-
|
|
1779
|
-
|
|
1780
|
-
|
|
1781
|
-
|
|
1782
|
-
|
|
1272
|
+
catch (error) {
|
|
1273
|
+
// Update job status to failed or schedule retry
|
|
1274
|
+
if (jobId && this.redisClient) {
|
|
1275
|
+
const jobData = await this.getJobData(jobId);
|
|
1276
|
+
if (jobData) {
|
|
1277
|
+
const { shouldRetry, delay } = this.calculateJobRetry(jobData, error.code);
|
|
1278
|
+
if (shouldRetry) {
|
|
1279
|
+
await this.updateJobStatus(jobId, 'scheduled', {
|
|
1280
|
+
retry_count: (jobData.retry_count || 0) + 1,
|
|
1281
|
+
last_error: error.message,
|
|
1282
|
+
last_error_code: error.code,
|
|
1283
|
+
scheduled_at: Date.now() + delay,
|
|
1284
|
+
});
|
|
1285
|
+
// Re-queue the job with delay
|
|
1286
|
+
await this.queues.jobs.add(jobType, job.data, {
|
|
1287
|
+
jobId: `${jobId}_retry_${jobData.retry_count + 1}`,
|
|
1288
|
+
delay,
|
|
1289
|
+
});
|
|
1290
|
+
}
|
|
1291
|
+
else {
|
|
1292
|
+
await this.updateJobStatus(jobId, 'failed', {
|
|
1293
|
+
completed_at: Date.now(),
|
|
1294
|
+
last_error: error.message,
|
|
1295
|
+
last_error_code: error.code,
|
|
1296
|
+
});
|
|
1297
|
+
}
|
|
1298
|
+
// Add to execution history
|
|
1299
|
+
await this.addJobExecution(jobId, {
|
|
1300
|
+
number: (jobData.execution_count || 0) + 1,
|
|
1301
|
+
started_at: jobData.started_at || Date.now(),
|
|
1302
|
+
completed_at: Date.now(),
|
|
1303
|
+
duration_ms: Date.now() - (jobData.started_at || Date.now()),
|
|
1304
|
+
status: 'failed',
|
|
1305
|
+
error: error.message,
|
|
1306
|
+
error_code: error.code,
|
|
1307
|
+
});
|
|
1308
|
+
}
|
|
1309
|
+
}
|
|
1310
|
+
throw error;
|
|
1783
1311
|
}
|
|
1784
|
-
return { process_id };
|
|
1785
1312
|
}
|
|
1786
|
-
|
|
1787
|
-
|
|
1788
|
-
|
|
1789
|
-
|
|
1790
|
-
|
|
1791
|
-
|
|
1313
|
+
/**
|
|
1314
|
+
* Get job data from Redis
|
|
1315
|
+
*/
|
|
1316
|
+
async getJobData(jobId) {
|
|
1317
|
+
if (!this.redisClient)
|
|
1318
|
+
return null;
|
|
1319
|
+
const redis = this.redisClient;
|
|
1320
|
+
const jobKey = `job:${this.workspace_id}:${jobId}`;
|
|
1321
|
+
const data = await redis.get(jobKey);
|
|
1322
|
+
if (!data)
|
|
1323
|
+
return null;
|
|
1324
|
+
return JSON.parse(data);
|
|
1325
|
+
}
|
|
1326
|
+
/**
|
|
1327
|
+
* Update job status in Redis
|
|
1328
|
+
*/
|
|
1329
|
+
async updateJobStatus(jobId, status, updates = {}) {
|
|
1330
|
+
if (!this.redisClient)
|
|
1331
|
+
return;
|
|
1332
|
+
const redis = this.redisClient;
|
|
1333
|
+
const jobKey = `job:${this.workspace_id}:${jobId}`;
|
|
1334
|
+
const data = await redis.get(jobKey);
|
|
1335
|
+
if (!data)
|
|
1336
|
+
return;
|
|
1337
|
+
const jobData = JSON.parse(data);
|
|
1338
|
+
const oldStatus = jobData.status;
|
|
1339
|
+
const updatedJob = Object.assign(Object.assign(Object.assign({}, jobData), updates), { status, updated_at: Date.now(), execution_count: status === 'completed' || status === 'failed'
|
|
1340
|
+
? (jobData.execution_count || 0) + 1
|
|
1341
|
+
: jobData.execution_count });
|
|
1342
|
+
// Update job data
|
|
1343
|
+
await redis.setex(jobKey, 90 * 24 * 60 * 60, JSON.stringify(updatedJob));
|
|
1344
|
+
// Update status indices
|
|
1345
|
+
if (oldStatus !== status) {
|
|
1346
|
+
const oldStatusKey = `job_status:${this.workspace_id}:${oldStatus}`;
|
|
1347
|
+
const newStatusKey = `job_status:${this.workspace_id}:${status}`;
|
|
1348
|
+
await redis.srem(oldStatusKey, jobId);
|
|
1349
|
+
await redis.sadd(newStatusKey, jobId);
|
|
1350
|
+
}
|
|
1351
|
+
}
|
|
1352
|
+
/**
|
|
1353
|
+
* Add job execution record to history
|
|
1354
|
+
*/
|
|
1355
|
+
async addJobExecution(jobId, execution) {
|
|
1356
|
+
if (!this.redisClient)
|
|
1357
|
+
return;
|
|
1358
|
+
const redis = this.redisClient;
|
|
1359
|
+
const historyKey = `job_history:${this.workspace_id}:${jobId}`;
|
|
1360
|
+
await redis.lpush(historyKey, JSON.stringify(execution));
|
|
1361
|
+
await redis.ltrim(historyKey, 0, 99); // Keep last 100 executions
|
|
1362
|
+
await redis.expire(historyKey, 30 * 24 * 60 * 60); // 30 days TTL
|
|
1363
|
+
}
|
|
1364
|
+
/**
|
|
1365
|
+
* Calculate retry delay based on job configuration
|
|
1366
|
+
*/
|
|
1367
|
+
calculateJobRetry(jobData, errorCode) {
|
|
1368
|
+
const retryConfig = jobData.retry_config || {};
|
|
1369
|
+
const { initialDelay = 1000, maxDelay = 300000, backoffMultiplier = 2, retryableErrors, nonRetryableErrors, jitter = false, jitterPercent = 0.3, } = retryConfig;
|
|
1370
|
+
// Check if we've exceeded max retries
|
|
1371
|
+
if ((jobData.retry_count || 0) >= (jobData.retries || 0)) {
|
|
1372
|
+
return { shouldRetry: false, delay: 0 };
|
|
1373
|
+
}
|
|
1374
|
+
// Check error-based retry rules
|
|
1375
|
+
if (errorCode) {
|
|
1376
|
+
if (nonRetryableErrors === null || nonRetryableErrors === void 0 ? void 0 : nonRetryableErrors.includes(errorCode)) {
|
|
1377
|
+
return { shouldRetry: false, delay: 0 };
|
|
1378
|
+
}
|
|
1379
|
+
if (retryableErrors && !retryableErrors.includes(errorCode)) {
|
|
1380
|
+
return { shouldRetry: false, delay: 0 };
|
|
1381
|
+
}
|
|
1382
|
+
}
|
|
1383
|
+
// Calculate exponential backoff delay
|
|
1384
|
+
let delay = initialDelay * Math.pow(backoffMultiplier, jobData.retry_count || 0);
|
|
1385
|
+
delay = Math.min(delay, maxDelay);
|
|
1386
|
+
// Add jitter if enabled
|
|
1387
|
+
if (jitter) {
|
|
1388
|
+
const jitterAmount = delay * jitterPercent * Math.random();
|
|
1389
|
+
delay = delay + jitterAmount;
|
|
1390
|
+
}
|
|
1391
|
+
return { shouldRetry: true, delay: Math.round(delay) };
|
|
1392
|
+
}
|
|
1393
|
+
/**
|
|
1394
|
+
* Process database action job (predefined database actions)
|
|
1395
|
+
* Integrates with the database action manager to execute predefined operations
|
|
1396
|
+
*/
|
|
1397
|
+
async processDatabaseAction(data) {
|
|
1398
|
+
var _a;
|
|
1399
|
+
const { env, product, database, event, input } = data;
|
|
1400
|
+
// Initialize product builder if not already done
|
|
1401
|
+
if (product) {
|
|
1402
|
+
await this.productBuilderService.initializeProductByTag(product);
|
|
1403
|
+
}
|
|
1404
|
+
// Fetch the database action configuration
|
|
1405
|
+
const databaseAction = await this.productBuilderService.fetchDatabaseAction(`${database}:${event}`);
|
|
1406
|
+
if (!databaseAction) {
|
|
1407
|
+
throw new Error(`Database action '${event}' not found on database '${database}'`);
|
|
1408
|
+
}
|
|
1409
|
+
// Get the database environment configuration
|
|
1410
|
+
const databaseConfig = await this.productBuilderService.fetchDatabase(database);
|
|
1411
|
+
if (!databaseConfig) {
|
|
1412
|
+
throw new Error(`Database '${database}' not found`);
|
|
1413
|
+
}
|
|
1414
|
+
const databaseEnv = (_a = databaseConfig.envs) === null || _a === void 0 ? void 0 : _a.find((e) => e.slug === env);
|
|
1415
|
+
if (!databaseEnv) {
|
|
1416
|
+
throw new Error(`Environment '${env}' not found for database '${database}'`);
|
|
1417
|
+
}
|
|
1418
|
+
// Execute the database action using the database service
|
|
1419
|
+
const { DatabaseService } = await Promise.resolve().then(() => __importStar(require('../../database/databases.service')));
|
|
1420
|
+
const dbService = new DatabaseService({
|
|
1421
|
+
workspace_id: this.workspace_id,
|
|
1422
|
+
public_key: this.public_key,
|
|
1423
|
+
user_id: this.user_id,
|
|
1424
|
+
token: this.token,
|
|
1425
|
+
env_type: this.environment,
|
|
1426
|
+
private_key: this._privateKey,
|
|
1427
|
+
access_key: this.accessKey,
|
|
1428
|
+
});
|
|
1429
|
+
const result = await dbService.execute({
|
|
1430
|
+
product,
|
|
1431
|
+
env,
|
|
1432
|
+
database,
|
|
1433
|
+
action: event,
|
|
1434
|
+
input: input || {},
|
|
1435
|
+
});
|
|
1436
|
+
return { success: true, data: result };
|
|
1792
1437
|
}
|
|
1793
|
-
|
|
1794
|
-
|
|
1795
|
-
|
|
1796
|
-
|
|
1797
|
-
|
|
1798
|
-
|
|
1799
|
-
}
|
|
1800
|
-
|
|
1438
|
+
/**
|
|
1439
|
+
* Process database operation job (direct CRUD operations)
|
|
1440
|
+
* Handles operations like insert, find, update, delete, aggregate
|
|
1441
|
+
*/
|
|
1442
|
+
async processDatabaseOperation(data) {
|
|
1443
|
+
var _a;
|
|
1444
|
+
const { env, product, database, operation, input } = data;
|
|
1445
|
+
// Initialize product builder if not already done
|
|
1446
|
+
if (product) {
|
|
1447
|
+
await this.productBuilderService.initializeProductByTag(product);
|
|
1448
|
+
}
|
|
1449
|
+
// Get the database configuration
|
|
1450
|
+
const databaseConfig = await this.productBuilderService.fetchDatabase(database);
|
|
1451
|
+
if (!databaseConfig) {
|
|
1452
|
+
throw new Error(`Database '${database}' not found`);
|
|
1453
|
+
}
|
|
1454
|
+
const databaseEnv = (_a = databaseConfig.envs) === null || _a === void 0 ? void 0 : _a.find((e) => e.slug === env);
|
|
1455
|
+
if (!databaseEnv) {
|
|
1456
|
+
throw new Error(`Environment '${env}' not found for database '${database}'`);
|
|
1457
|
+
}
|
|
1458
|
+
// Execute the database operation using the database service
|
|
1459
|
+
const { DatabaseService } = await Promise.resolve().then(() => __importStar(require('../../database/databases.service')));
|
|
1460
|
+
const dbService = new DatabaseService({
|
|
1461
|
+
workspace_id: this.workspace_id,
|
|
1462
|
+
public_key: this.public_key,
|
|
1463
|
+
user_id: this.user_id,
|
|
1464
|
+
token: this.token,
|
|
1465
|
+
env_type: this.environment,
|
|
1466
|
+
private_key: this._privateKey,
|
|
1467
|
+
access_key: this.accessKey,
|
|
1468
|
+
});
|
|
1469
|
+
// Connect to the database
|
|
1470
|
+
await dbService.connect({
|
|
1471
|
+
product,
|
|
1472
|
+
env,
|
|
1473
|
+
database,
|
|
1474
|
+
});
|
|
1475
|
+
let result;
|
|
1476
|
+
// Execute the appropriate operation based on the operation type
|
|
1477
|
+
switch (operation) {
|
|
1478
|
+
case 'insert':
|
|
1479
|
+
result = await dbService.insert({
|
|
1480
|
+
product,
|
|
1481
|
+
env,
|
|
1482
|
+
database,
|
|
1483
|
+
table: input.table,
|
|
1484
|
+
data: input.data,
|
|
1485
|
+
returning: input.returning,
|
|
1486
|
+
});
|
|
1487
|
+
break;
|
|
1488
|
+
case 'find':
|
|
1489
|
+
case 'query':
|
|
1490
|
+
result = await dbService.query({
|
|
1491
|
+
product,
|
|
1492
|
+
env,
|
|
1493
|
+
database,
|
|
1494
|
+
table: input.table,
|
|
1495
|
+
where: input.where,
|
|
1496
|
+
select: input.select,
|
|
1497
|
+
include: input.include,
|
|
1498
|
+
orderBy: input.orderBy,
|
|
1499
|
+
limit: input.limit,
|
|
1500
|
+
offset: input.offset,
|
|
1501
|
+
});
|
|
1502
|
+
break;
|
|
1503
|
+
case 'update':
|
|
1504
|
+
result = await dbService.update({
|
|
1505
|
+
product,
|
|
1506
|
+
env,
|
|
1507
|
+
database,
|
|
1508
|
+
table: input.table,
|
|
1509
|
+
data: input.data,
|
|
1510
|
+
where: input.where,
|
|
1511
|
+
returning: input.returning,
|
|
1512
|
+
});
|
|
1513
|
+
break;
|
|
1514
|
+
case 'delete':
|
|
1515
|
+
result = await dbService.delete({
|
|
1516
|
+
product,
|
|
1517
|
+
env,
|
|
1518
|
+
database,
|
|
1519
|
+
table: input.table,
|
|
1520
|
+
where: input.where,
|
|
1521
|
+
});
|
|
1522
|
+
break;
|
|
1523
|
+
case 'upsert':
|
|
1524
|
+
result = await dbService.upsert({
|
|
1525
|
+
product,
|
|
1526
|
+
env,
|
|
1527
|
+
database,
|
|
1528
|
+
table: input.table,
|
|
1529
|
+
data: input.data,
|
|
1530
|
+
conflictKeys: input.conflictKeys || input.conflictFields,
|
|
1531
|
+
updateColumns: input.updateColumns,
|
|
1532
|
+
returning: input.returning,
|
|
1533
|
+
});
|
|
1534
|
+
break;
|
|
1535
|
+
case 'aggregate':
|
|
1536
|
+
result = await dbService.aggregate({
|
|
1537
|
+
product,
|
|
1538
|
+
env,
|
|
1539
|
+
database,
|
|
1540
|
+
table: input.table,
|
|
1541
|
+
operations: input.operations,
|
|
1542
|
+
where: input.where,
|
|
1543
|
+
});
|
|
1544
|
+
break;
|
|
1545
|
+
case 'groupBy':
|
|
1546
|
+
result = await dbService.groupBy({
|
|
1547
|
+
product,
|
|
1548
|
+
env,
|
|
1549
|
+
database,
|
|
1550
|
+
table: input.table,
|
|
1551
|
+
groupBy: input.groupBy || input.by,
|
|
1552
|
+
operations: input.operations,
|
|
1553
|
+
where: input.where,
|
|
1554
|
+
having: input.having,
|
|
1555
|
+
orderBy: input.orderBy,
|
|
1556
|
+
limit: input.limit,
|
|
1557
|
+
offset: input.offset,
|
|
1558
|
+
});
|
|
1559
|
+
break;
|
|
1560
|
+
case 'count':
|
|
1561
|
+
result = await dbService.count({
|
|
1562
|
+
product,
|
|
1563
|
+
env,
|
|
1564
|
+
database,
|
|
1565
|
+
table: input.table,
|
|
1566
|
+
where: input.where,
|
|
1567
|
+
});
|
|
1568
|
+
break;
|
|
1569
|
+
default:
|
|
1570
|
+
throw new Error(`Unknown database operation: ${operation}`);
|
|
1571
|
+
}
|
|
1572
|
+
return { success: true, data: result };
|
|
1573
|
+
}
|
|
1574
|
+
/**
|
|
1575
|
+
* Process graph action job (predefined graph actions)
|
|
1576
|
+
* Integrates with the graph service to execute predefined graph operations
|
|
1577
|
+
*/
|
|
1578
|
+
async processGraphAction(data) {
|
|
1579
|
+
const { env, product, graph, event, input } = data;
|
|
1580
|
+
// Initialize product builder if not already done
|
|
1581
|
+
if (product) {
|
|
1582
|
+
await this.productBuilderService.initializeProductByTag(product);
|
|
1583
|
+
}
|
|
1584
|
+
// Execute the graph action using the graph service
|
|
1585
|
+
const { GraphService } = await Promise.resolve().then(() => __importStar(require('../../graph/graphs.service')));
|
|
1586
|
+
const graphService = new GraphService({
|
|
1587
|
+
workspace_id: this.workspace_id,
|
|
1588
|
+
public_key: this.public_key,
|
|
1589
|
+
user_id: this.user_id,
|
|
1590
|
+
token: this.token,
|
|
1591
|
+
env_type: this.environment,
|
|
1592
|
+
private_key: this._privateKey,
|
|
1593
|
+
access_key: this.accessKey,
|
|
1594
|
+
});
|
|
1595
|
+
// Connect to the graph database
|
|
1596
|
+
await graphService.connect({
|
|
1597
|
+
product,
|
|
1598
|
+
env,
|
|
1599
|
+
graph,
|
|
1600
|
+
});
|
|
1601
|
+
// Execute the graph action
|
|
1602
|
+
const result = await graphService.execute({
|
|
1603
|
+
product,
|
|
1604
|
+
env,
|
|
1605
|
+
graph,
|
|
1606
|
+
action: event,
|
|
1607
|
+
input: input || {},
|
|
1608
|
+
});
|
|
1609
|
+
return { success: result.success, data: result.data, error: result.error };
|
|
1610
|
+
}
|
|
1611
|
+
/**
|
|
1612
|
+
* Process graph operation job (direct graph operations)
|
|
1613
|
+
* Handles operations like createNode, findNodes, createRelationship, traverse, etc.
|
|
1614
|
+
*/
|
|
1615
|
+
async processGraphOperation(data) {
|
|
1616
|
+
const { env, product, graph, operation, input } = data;
|
|
1617
|
+
// Initialize product builder if not already done
|
|
1618
|
+
if (product) {
|
|
1619
|
+
await this.productBuilderService.initializeProductByTag(product);
|
|
1620
|
+
}
|
|
1621
|
+
// Execute the graph operation using the graph service
|
|
1622
|
+
const { GraphService } = await Promise.resolve().then(() => __importStar(require('../../graph/graphs.service')));
|
|
1623
|
+
const graphService = new GraphService({
|
|
1624
|
+
workspace_id: this.workspace_id,
|
|
1625
|
+
public_key: this.public_key,
|
|
1626
|
+
user_id: this.user_id,
|
|
1627
|
+
token: this.token,
|
|
1628
|
+
env_type: this.environment,
|
|
1629
|
+
private_key: this._privateKey,
|
|
1630
|
+
access_key: this.accessKey,
|
|
1631
|
+
});
|
|
1632
|
+
// Connect to the graph database
|
|
1633
|
+
await graphService.connect({
|
|
1634
|
+
product,
|
|
1635
|
+
env,
|
|
1636
|
+
graph,
|
|
1637
|
+
});
|
|
1638
|
+
let result;
|
|
1639
|
+
// Execute the appropriate operation based on the operation type
|
|
1640
|
+
switch (operation) {
|
|
1641
|
+
case 'createNode':
|
|
1642
|
+
result = await graphService.createNode({
|
|
1643
|
+
labels: input.labels,
|
|
1644
|
+
properties: input.properties,
|
|
1645
|
+
});
|
|
1646
|
+
break;
|
|
1647
|
+
case 'findNodes':
|
|
1648
|
+
result = await graphService.findNodes({
|
|
1649
|
+
labels: input.labels,
|
|
1650
|
+
where: input.where,
|
|
1651
|
+
limit: input.limit,
|
|
1652
|
+
offset: input.offset,
|
|
1653
|
+
orderBy: input.orderBy,
|
|
1654
|
+
});
|
|
1655
|
+
break;
|
|
1656
|
+
case 'findNodeById':
|
|
1657
|
+
result = await graphService.findNodeById(input.id);
|
|
1658
|
+
break;
|
|
1659
|
+
case 'updateNode':
|
|
1660
|
+
result = await graphService.updateNode({
|
|
1661
|
+
id: input.id,
|
|
1662
|
+
properties: input.properties,
|
|
1663
|
+
labels: input.labels,
|
|
1664
|
+
});
|
|
1665
|
+
break;
|
|
1666
|
+
case 'deleteNode':
|
|
1667
|
+
result = await graphService.deleteNode({
|
|
1668
|
+
id: input.id,
|
|
1669
|
+
detach: input.detach,
|
|
1670
|
+
});
|
|
1671
|
+
break;
|
|
1672
|
+
case 'mergeNode':
|
|
1673
|
+
result = await graphService.mergeNode({
|
|
1674
|
+
labels: input.labels,
|
|
1675
|
+
matchProperties: input.matchProperties,
|
|
1676
|
+
onCreate: input.onCreate || input.setProperties,
|
|
1677
|
+
onMatch: input.onMatch,
|
|
1678
|
+
});
|
|
1679
|
+
break;
|
|
1680
|
+
case 'createRelationship':
|
|
1681
|
+
result = await graphService.createRelationship({
|
|
1682
|
+
type: input.type,
|
|
1683
|
+
startNodeId: input.startNodeId || input.from,
|
|
1684
|
+
endNodeId: input.endNodeId || input.to,
|
|
1685
|
+
properties: input.properties,
|
|
1686
|
+
});
|
|
1687
|
+
break;
|
|
1688
|
+
case 'findRelationships':
|
|
1689
|
+
result = await graphService.findRelationships({
|
|
1690
|
+
types: input.types,
|
|
1691
|
+
startNodeId: input.startNodeId,
|
|
1692
|
+
endNodeId: input.endNodeId,
|
|
1693
|
+
where: input.where,
|
|
1694
|
+
limit: input.limit,
|
|
1695
|
+
});
|
|
1696
|
+
break;
|
|
1697
|
+
case 'updateRelationship':
|
|
1698
|
+
result = await graphService.updateRelationship({
|
|
1699
|
+
id: input.id,
|
|
1700
|
+
properties: input.properties,
|
|
1701
|
+
});
|
|
1702
|
+
break;
|
|
1703
|
+
case 'deleteRelationship':
|
|
1704
|
+
result = await graphService.deleteRelationship({
|
|
1705
|
+
id: input.id,
|
|
1706
|
+
});
|
|
1707
|
+
break;
|
|
1708
|
+
case 'traverse':
|
|
1709
|
+
result = await graphService.traverse({
|
|
1710
|
+
startNodeId: input.startNodeId,
|
|
1711
|
+
direction: input.direction,
|
|
1712
|
+
relationshipTypes: input.relationshipTypes,
|
|
1713
|
+
maxDepth: input.maxDepth,
|
|
1714
|
+
minDepth: input.minDepth,
|
|
1715
|
+
nodeFilter: input.nodeFilter || input.where,
|
|
1716
|
+
relationshipFilter: input.relationshipFilter,
|
|
1717
|
+
limit: input.limit,
|
|
1718
|
+
});
|
|
1719
|
+
break;
|
|
1720
|
+
case 'shortestPath':
|
|
1721
|
+
result = await graphService.shortestPath({
|
|
1722
|
+
startNodeId: input.startNodeId,
|
|
1723
|
+
endNodeId: input.endNodeId,
|
|
1724
|
+
relationshipTypes: input.relationshipTypes,
|
|
1725
|
+
maxDepth: input.maxDepth,
|
|
1726
|
+
});
|
|
1727
|
+
break;
|
|
1728
|
+
case 'query':
|
|
1729
|
+
result = await graphService.query(input.query, input.params);
|
|
1730
|
+
break;
|
|
1731
|
+
case 'countNodes':
|
|
1732
|
+
result = await graphService.countNodes(input.labels, input.where);
|
|
1733
|
+
break;
|
|
1734
|
+
case 'countRelationships':
|
|
1735
|
+
result = await graphService.countRelationships(input.types, input.where);
|
|
1736
|
+
break;
|
|
1737
|
+
case 'getStatistics':
|
|
1738
|
+
result = await graphService.getStatistics();
|
|
1739
|
+
break;
|
|
1740
|
+
default:
|
|
1741
|
+
throw new Error(`Unknown graph operation: ${operation}`);
|
|
1742
|
+
}
|
|
1743
|
+
return { success: true, data: result };
|
|
1744
|
+
}
|
|
1745
|
+
/**
|
|
1746
|
+
* Process workflow job
|
|
1747
|
+
* Executes a workflow using the workflow service
|
|
1748
|
+
*/
|
|
1749
|
+
async processWorkflow(data) {
|
|
1750
|
+
const { env, product, workflow, input, idempotency_key } = data;
|
|
1751
|
+
// Initialize product builder if not already done
|
|
1752
|
+
if (product) {
|
|
1753
|
+
await this.productBuilderService.initializeProductByTag(product);
|
|
1754
|
+
}
|
|
1755
|
+
// Execute the workflow using the workflow service
|
|
1756
|
+
const { WorkflowService } = await Promise.resolve().then(() => __importStar(require('../../workflows/workflows.service')));
|
|
1757
|
+
const workflowService = new WorkflowService({
|
|
1758
|
+
workspace_id: this.workspace_id,
|
|
1759
|
+
public_key: this.public_key,
|
|
1760
|
+
user_id: this.user_id,
|
|
1761
|
+
token: this.token,
|
|
1762
|
+
env_type: this.environment,
|
|
1763
|
+
private_key: this._privateKey,
|
|
1764
|
+
access_key: this.accessKey
|
|
1765
|
+
});
|
|
1766
|
+
// Execute the workflow
|
|
1767
|
+
const result = await workflowService.execute({
|
|
1768
|
+
product,
|
|
1769
|
+
env,
|
|
1770
|
+
tag: workflow,
|
|
1771
|
+
input: input || {},
|
|
1772
|
+
idempotency_key,
|
|
1773
|
+
});
|
|
1774
|
+
return {
|
|
1775
|
+
success: result.status === 'completed',
|
|
1776
|
+
data: result.output,
|
|
1777
|
+
workflow_id: result.workflow_id,
|
|
1778
|
+
status: result.status,
|
|
1779
|
+
error: result.error,
|
|
1780
|
+
};
|
|
1801
1781
|
}
|
|
1802
1782
|
async getAndStoreAuth(appEnv, access_tag) {
|
|
1803
1783
|
try {
|
|
1804
|
-
// const payload = JSON.parse(decrypt(env.auth.data, this.productBuilderService.
|
|
1805
|
-
const payload = appEnv.auth.data;
|
|
1784
|
+
// const payload = JSON.parse(decrypt(env.auth.data, this.productBuilderService.fetchPrivateKey()));
|
|
1785
|
+
const payload = JSON.parse((0, processor_utils_1.decrypt)(String(appEnv.auth.data), this.productBuilderService.fetchPrivateKey()));
|
|
1806
1786
|
let app = await this.fetchThirdPartyApp(access_tag);
|
|
1807
1787
|
const auth = app.auths.find((item) => item.tag === appEnv.auth.auth_tag);
|
|
1788
|
+
console.log('JAMESY', auth);
|
|
1808
1789
|
if (!auth) {
|
|
1809
1790
|
// throw an error
|
|
1810
1791
|
throw new Error(`Cannot find auth ${appEnv.auth.auth_tag} on environment ${appEnv.product_env_slug}`);
|
|
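The retry handling added to runJobs above delegates to the new calculateJobRetry helper, which applies capped exponential backoff with optional jitter. The following is a minimal standalone sketch of that calculation, not part of the published package; the retry_config field names mirror the diff, while the example values are made up.

// Sketch only: mirrors the backoff logic in calculateJobRetry above.
function previewRetryDelay(jobData) {
    const { initialDelay = 1000, maxDelay = 300000, backoffMultiplier = 2, jitter = false, jitterPercent = 0.3 } = jobData.retry_config || {};
    if ((jobData.retry_count || 0) >= (jobData.retries || 0)) return { shouldRetry: false, delay: 0 };
    let delay = Math.min(initialDelay * Math.pow(backoffMultiplier, jobData.retry_count || 0), maxDelay);
    if (jitter) delay += delay * jitterPercent * Math.random(); // spread retries to avoid thundering herds
    return { shouldRetry: true, delay: Math.round(delay) };
}
// e.g. previewRetryDelay({ retry_count: 2, retries: 5, retry_config: { initialDelay: 500 } }) -> roughly 2000 ms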
@@ -1819,9 +1800,16 @@ class ProcessorService {
  if (env.base_url) {
  request_base_url = env.base_url;
  }
+ if (action.envs && action.envs.length) {
+ const env = action.envs.find((item) => item.slug === appEnv.app_env_slug);
+ if (env && env.base_url) {
+ request_base_url = env.base_url;
+ }
+ }
+ console.log('payloadabi!!!!', payload);
  const results = await this.sendActionRequest(request_base_url, url, payload, method, appEnv.app_env_slug);
- const values = (0, processor_utils_1.encrypt)(JSON.stringify(results), this.productBuilderService.
- const productApp = this.productBuilderService.fetchApp(access_tag);
+ const values = (0, processor_utils_1.encrypt)(JSON.stringify(results), this.productBuilderService.fetchPrivateKey());
+ const productApp = await this.productBuilderService.fetchApp(access_tag);
  for (let i = 0; i < productApp.envs.length; i++) {
  if (productApp.envs[i].app_env_slug === env.slug) {
  productApp.envs[i].auth.values = values; // write new values
@@ -1917,7 +1905,7 @@ class ProcessorService {
  // generate indexes
  return (0, processor_utils_1.generateIndexes)(operator, iter, init, valueValue);
  }
- async runAction(event, additional_logs, returnValue = true) {
+ async runAction(event, additional_logs, returnValue = true, bootstrapData) {
  try {
  const { event: action_tag, app: access_tag, condition, cache: cache_tag } = event;
  let indexes = [];
@@ -1930,28 +1918,59 @@ class ProcessorService {
  if (condition && condition.type === types_1.Conditions.LOOP) {
  indexes = await this.extractLoopIndexes(event, additional_logs);
  }
- let
-
-
-
-
-
-
- if
-
+ let action;
+ let env;
+ let retries;
+ let recipient_workspace_id;
+ let app_active;
+ let app_id = '';
+ let app_env_slug = '';
+ // Use bootstrap data if provided, otherwise fetch via API
+ if (bootstrapData) {
+ action = bootstrapData.action;
+ env = bootstrapData.app_env;
+ retries = bootstrapData.retries;
+ recipient_workspace_id = bootstrapData.recipient_workspace_id;
+ app_active = bootstrapData.app_active;
+ app_env_slug = env.slug;
+ additional_logs.app_env = app_env_slug;
  }
-
-
+ else {
+ // Fallback to original API-based fetching (for features/workflows that don't use bootstrap)
+ const appData = await this.fetchThirdPartyApp(access_tag);
+ const { actions, envs: appEnvs, retries: appRetries, workspace_id: appWorkspaceId, active } = appData;
+ const productApp = await this.productBuilderService.fetchApp(access_tag);
+ const { envs: productEnvs } = productApp;
+ const envMapping = productEnvs.find((item) => item.product_env_slug === this.processEnv.slug);
+ app_env_slug = (envMapping === null || envMapping === void 0 ? void 0 : envMapping.app_env_slug) || '';
+ additional_logs.app_env = app_env_slug;
+ env = appEnvs.find((item) => item.slug === app_env_slug);
+ action = actions.find((item) => item.tag === action_tag);
+ retries = appRetries;
+ recipient_workspace_id = appWorkspaceId;
+ app_active = active;
+ app_id = appData._id;
+ if (!action) {
+ throw new Error(`Action ${action_tag} not found in ${access_tag}`);
+ }
+ }
+ if (!app_active && recipient_workspace_id !== this.baseLogs.workspace_id) {
+ throw new Error(`App ${event.app} is not active`);
  }
-
-
- throw new Error(`Action ${action_tag} not found in ${access_tag}`);
+ if (!env.active) {
+ throw new Error(`Action environment ${env.slug} is not active`);
  }
  const { query, headers, body, params, request_type, method, base_url, resource } = action;
  let request_base_url = base_url;
  if (env.base_url) {
  request_base_url = env.base_url;
  }
+ if (action.envs && action.envs.length) {
+ const actionEnv = action.envs.find((item) => item.slug === app_env_slug);
+ if (actionEnv && actionEnv.base_url) {
+ request_base_url = actionEnv.base_url;
+ }
+ }
  const samples = {
  query: (query === null || query === void 0 ? void 0 : query.data) || [],
  headers: (headers === null || headers === void 0 ? void 0 : headers.data) || [],
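The runAction changes above resolve the request base URL in layers: the action's own base_url, then the app environment's base_url, then an action-level env entry matching the resolved app_env_slug. A small illustrative sketch of that precedence follows; it is not part of the package, and the plain objects below stand in for the SDK's action and environment records.

// Sketch of the base-URL precedence implied by the diff above; shapes are illustrative only.
function resolveBaseUrl(action, appEnv, appEnvSlug) {
    let baseUrl = action.base_url;                                   // default from the action definition
    if (appEnv && appEnv.base_url) baseUrl = appEnv.base_url;        // app environment override
    const actionEnv = (action.envs || []).find((e) => e.slug === appEnvSlug);
    if (actionEnv && actionEnv.base_url) baseUrl = actionEnv.base_url; // action-level env override wins last
    return baseUrl;
}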
@@ -1960,9 +1979,9 @@ class ProcessorService {
  };
  let payloads;
  let result;
- const product = this.productBuilderService.fetchProduct();
+ //const product = this.productBuilderService.fetchProduct();
  if (cache_tag && this.redisClient) {
- const productCache = this.productBuilderService.fetchCache(cache_tag);
+ const productCache = await this.productBuilderService.fetchCache(cache_tag);
  if (!productCache) {
  throw new Error('Invalid cache tag ');
  }
@@ -1970,7 +1989,7 @@ class ProcessorService {
  const check = await this.fetchFromCache({
  cache_tag,
  input: inputString,
- privateKey:
+ privateKey: this.productBuilderService.fetchPrivateKey(),
  expiry: productCache.expiry,
  }, additional_logs);
  if (check) {
@@ -1983,31 +2002,31 @@ class ProcessorService {
  }
  if (request_type === types_1.DataFormats.JSON || request_type === types_1.DataFormats.URLENCODED || !request_type) {
  if (indexes.length == 0) {
- payloads = await this.constructJSONDataPayloads(event.input, additional_logs, samples, event);
+ payloads = (await this.constructJSONDataPayloads(event.input, additional_logs, samples, event));
  if (request_type === types_1.DataFormats.URLENCODED) {
  payloads.body = (0, processor_utils_1.toFormUrlEncoded)(payloads.body);
  }
  additional_logs.recipient_workspace_id = recipient_workspace_id;
- result = await this.processRequest({ request_base_url, resource, method, env, payloads, app_id
+ result = await this.processRequest({ request_base_url, resource, method, env, payloads, app_id }, event, { retries }, additional_logs, returnValue);
  }
  else {
  const promises = indexes.map(async (index) => {
  payloads = await this.constructJSONDataPayloads(event.input, additional_logs, samples, event, index);
  additional_logs.recipient_workspace_id = recipient_workspace_id;
- await this.processRequest({ request_base_url, resource, method, env, payloads, app_id
+ await this.processRequest({ request_base_url, resource, method, env, payloads, app_id }, event, { retries }, additional_logs, returnValue);
  });
  result = await Promise.all(promises);
  }
  }
  if (cache_tag && this.redisClient && result) {
- const productCache = this.productBuilderService.fetchCache(cache_tag);
+ const productCache = await this.productBuilderService.fetchCache(cache_tag);
  if (!productCache) {
  throw new Error('Invalid cache tag ');
  }
  const inputString = JSON.stringify(event.input);
  await this.addToCache({
  input: inputString,
- privateKey:
+ privateKey: this.productBuilderService.fetchPrivateKey(),
  data: JSON.stringify(result),
  cache_tag,
  timestamp: Date.now(),
@@ -2024,6 +2043,7 @@ class ProcessorService {
  }
  }
  async processRequest(payload, event, retries, additional_logs, returnValue = false) {
+ var _a;
  const { request_base_url, resource, payloads, method, env, app_id } = payload;
  const start = Date.now();
  try {
@@ -2034,7 +2054,11 @@ class ProcessorService {
  const end = Date.now();
  this.requestTime += end - start;
  this.totalRequests += 1;
- this.
+ const { pricing_tag, pricing_cost, is_overage, currency } = await this.processPricingCost(Object.assign(Object.assign({}, additional_logs), { app_id, workspace_id: this.workspace_id }));
+ this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { pricing_tag,
+ pricing_cost,
+ currency,
+ is_overage, message: 'Process http request - success', successful_execution: true, data: { response: (0, processor_utils_1.anonymizeObject)(results) }, status: types_1.LogEventStatus.SUCCESS, app_id, action: event.event, start,
  end }));
  await this.addToSuccessOutput(event, results, additional_logs);
  if (returnValue) {
@@ -2047,7 +2071,7 @@ class ProcessorService {
  catch (e) {
  const end = Date.now();
  let error = e;
- if (e.response.data) {
+ if ((_a = e === null || e === void 0 ? void 0 : e.response) === null || _a === void 0 ? void 0 : _a.data) {
  error = e.response.data;
  }
  this.requestTime += end - start;
@@ -2080,6 +2104,67 @@ class ProcessorService {
  }
  }
  }
+ async processPricingCost(additional_logs) {
+ try {
+ const { app_id, workspace_id } = additional_logs;
+ if (!app_id || !workspace_id) {
+ throw new Error('app_id and workspace_id are required in additional_logs');
+ }
+ const primaryPricing = this.pricingService.fetchPricing();
+ const overagePricing = this.pricingService.fetchOveragePricing();
+ const requests = await this.requestTrackerService.incrementRequest(app_id, workspace_id);
+ const limitCheck = (0, request_utils_1.checkLimitExceeded)(requests, primaryPricing.limits);
+ let finalCost = 0;
+ let usedPricing = primaryPricing;
+ let isOverage = false;
+ let finalCurrency = primaryPricing.currency;
+ if ((0, request_utils_1.isFreeTag)(primaryPricing.pricing_tag)) {
+ if (limitCheck.exceeded) {
+ const overageRequests = (0, request_utils_1.calculateOverageRequests)(requests, primaryPricing.limits);
+ finalCost = (0, request_utils_1.calculateCost)(overagePricing.pricing_mode, overagePricing.unit_price, overageRequests);
+ usedPricing = overagePricing;
+ isOverage = true;
+ finalCurrency = overagePricing.currency;
+ }
+ else {
+ finalCost = 0;
+ }
+ }
+ else {
+ if (limitCheck.exceeded) {
+ const overageRequests = (0, request_utils_1.calculateOverageRequests)(requests, primaryPricing.limits);
+ const overageCost = (0, request_utils_1.calculateCost)(overagePricing.pricing_mode, overagePricing.unit_price, overageRequests);
+ finalCost = overageCost;
+ isOverage = true;
+ }
+ else {
+ const pricingRequests = (0, request_utils_1.calculateRequests)(requests);
+ finalCost = (0, request_utils_1.calculateCost)(primaryPricing.pricing_mode, primaryPricing.unit_price, pricingRequests);
+ }
+ }
+ this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Process Pricing Cost - success', data: {
+ pricing_tag: usedPricing.pricing_tag,
+ cost: finalCost,
+ currency: finalCurrency,
+ is_overage: isOverage,
+ requests: requests,
+ limit_exceeded: limitCheck.exceeded,
+ }, status: types_1.LogEventStatus.SUCCESS }));
+ return {
+ pricing_tag: usedPricing.pricing_tag,
+ pricing_cost: Math.round(finalCost * 100) / 100,
+ currency: finalCurrency,
+ is_overage: isOverage,
+ requests_made: requests,
+ limit_exceeded: limitCheck.exceeded,
+ exceeded_limits: limitCheck.exceededLimits,
+ };
+ }
+ catch (e) {
+ this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Process Pricing Cost - failed', data: { error: e }, status: types_1.LogEventStatus.FAIL }));
+ throw e;
+ }
+ }
  async addToSuccessOutput(event, output, additional_logs) {
  // Remove event from failed, skipped, and waiting arrays
  this.processingOutput.failure = this.processingOutput.failure.filter((data) => !(data.event.sequence_tag === event.sequence_tag && data.event.event === event.event));
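processPricingCost, added in the hunk above, charges nothing on a free pricing tag until the request limits are exceeded and then prices only the overage; paid tags are priced per the primary unit price until the limit and switch to the overage pricing beyond it. The sketch below restates that branching only; calcCost stands in for the SDK's request_utils calculateCost helper, whose exact behaviour is not shown in the diff, and the input shape here is an assumption.

// Illustrative only: mirrors the branching in processPricingCost above.
function estimateCost({ isFree, exceeded, requests, overageRequests, primary, overage }, calcCost) {
    if (isFree) {
        return exceeded
            ? { cost: calcCost(overage.pricing_mode, overage.unit_price, overageRequests), isOverage: true }
            : { cost: 0, isOverage: false }; // free tier within limits costs nothing
    }
    return exceeded
        ? { cost: calcCost(overage.pricing_mode, overage.unit_price, overageRequests), isOverage: true }
        : { cost: calcCost(primary.pricing_mode, primary.unit_price, requests), isOverage: false };
}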
@@ -2089,7 +2174,6 @@ class ProcessorService {
  event = (0, processor_utils_1.cleanBlob)(event);
  }
  this.processingOutput.success.push({ event, output });
- await this.processWaitingEvents(additional_logs);
  if (this.checkIsSuccessful() && this.doneWithProcessing) {
  this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { successful_feature_execution: true, message: 'Process feature - success', data: {}, status: types_1.LogEventStatus.SUCCESS }));
  await this.logService.publish();
@@ -2109,18 +2193,6 @@ class ProcessorService {
  }
  return success;
  }
- addToWaitingOutput(event, dependants) {
- const exists = this.processingOutput.waiting.findIndex((item) => {
- return item.event.event === event.event && item.event.sequence_tag === event.sequence_tag;
- });
- const skipped = this.processingOutput.skipped.findIndex((item) => {
- return item.event.event === event.event && item.event.sequence_tag === event.sequence_tag;
- });
- if (!exists && !skipped) {
- this.processingOutput.waiting.push({ event, dependants });
- }
- // addToSkippedOutput()
- }
  async addToFailureOutput(e, event, payload, additional_logs, policy = {}) {
  try {
  this.processingFailure = true;
@@ -2145,7 +2217,7 @@ class ProcessorService {
  let retryable = true;
  if (event.type === types_1.FeatureEventTypes.STORAGE) {
  event = (0, processor_utils_1.cleanBlob)(event);
- if (!
+ if (!event.allow_fail)
  retryable = false;
  }
  let retries_left = retries || max;
@@ -2178,9 +2250,6 @@ class ProcessorService {
  if (event.type === types_1.FeatureEventTypes.ACTION) {
  this.processRequest(payload, event, policy, additional_logs);
  }
- if (event.type === types_1.FeatureEventTypes.DB_ACTION) {
- this.processDBRequest(event, event.input, payload.database_tag, payload.databaseEnv, payload.action_tag, additional_logs);
- }
  if (event.type === types_1.FeatureEventTypes.STORAGE) {
  this.processStorageRequest(event, event.input, payload.storageEnv, additional_logs);
  }
@@ -2188,9 +2257,6 @@ class ProcessorService {
  }
  if (allow_fail === false && retries_left === 0) {
  this.published = true;
- if (this.feature) {
- additional_logs.failed_feature_execution = true;
- }
  this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Ran out of retries - failed', data: Object.assign(Object.assign({}, output), { payload: (0, processor_utils_1.anonymizeObject)(output.payload) }), status: types_1.LogEventStatus.FAIL }));
  //throw new Error("Run out of retries")
  this.end = Date.now();
@@ -2237,8 +2303,9 @@ class ProcessorService {
  params: query,
  headers: authHeaders,
  timeout: 15000,
- withCredentials: false
+ withCredentials: false,
  };
+ console.log('REQUEST!!!!', request);
  const response = await axios_1.default.request(request);
  return response.data;
  }
@@ -2249,12 +2316,15 @@ class ProcessorService {
  }
  async processStorage(action) {
  //TODO: schema validation
- const { env, input, retries, event, product: product_tag } = action;
+ const { env, input, retries, event, product: product_tag, session, cache } = action;
  const additional_logs = {
  parent_tag: event,
  type: types_1.LogEventTypes.STORAGE,
  name: 'Process Storage',
  };
+ // Session log fields (will be populated if session is provided)
+ let sessionLogFields = {};
+ let resolvedInput = input;
  try {
  await this.validateActionDataMappingInput(input, types_1.FeatureEventTypes.STORAGE);
  this.input = input;
@@ -2267,23 +2337,57 @@ class ProcessorService {
  const process_id = (0, processor_utils_1.generateObjectId)();
  this.baseLogs = Object.assign({ product_tag: this.productTag, product_id: this.productId, workspace_id: this.workspace_id, env,
  process_id, data: this.clone }, additional_logs);
-
+ // Single bootstrap call to fetch all storage data
+ const bootstrapData = await this.productBuilderService.bootstrapStorage({
+ product_tag,
+ env_slug: env,
+ storage_tag: event,
+ });
+ // Initialize from bootstrap data
+ this.productId = bootstrapData.product_id;
+ this.processEnv = bootstrapData.env;
+ // Process session if provided - verify and resolve $Session{} references
+ if (session && bootstrapData.private_key) {
+ const { processSessionForExecution } = await Promise.resolve().then(() => __importStar(require('../../sessions')));
+ const sessionResult = await processSessionForExecution(session, bootstrapData.private_key, input, env);
+ if (sessionResult.error) {
+ throw new Error(`Session validation failed: ${sessionResult.error}`);
+ }
+ resolvedInput = sessionResult.input;
+ sessionLogFields = sessionResult.logFields;
+ }
+ // Initialize log service if needed
+ if (!this.logService) {
+ this.logService = new logs_service_1.default({
+ product_id: this.productId,
+ workspace_id: this.workspace_id,
+ public_key: this.public_key,
+ user_id: this.user_id,
+ token: this.token,
+ env_type: this.environment,
+ });
+ }
  this.process_id = process_id;
-
-
+ this.baseLogs.product_id = this.productId;
+ // Add session fields to base logs
+ this.baseLogs = Object.assign(Object.assign({}, this.baseLogs), sessionLogFields);
+ const productEnv = bootstrapData.env;
  if (!productEnv.active) {
  throw new Error(`Environment ${env} is not active`);
  }
  const payload = {
  type: types_1.FeatureEventTypes.STORAGE,
  event,
- cache:
- input,
+ cache: cache,
+ input: resolvedInput,
  env: productEnv,
  retries: retries || 0,
  allow_fail: false,
  };
- const result = await this.runStorage(payload
+ const result = await this.runStorage(payload, additional_logs, {
+ storage: bootstrapData.storage,
+ storage_env: bootstrapData.storage_env,
+ });
  this.end = Date.now();
  this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Storing file - success', data: { input: this.clone, result }, status: types_1.LogEventStatus.SUCCESS }));
  await this.writeResult(types_1.LogEventStatus.SUCCESS);
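The processStorage changes above resolve an optional session before running the event: processSessionForExecution verifies the session against the product's private key and substitutes $Session{} references in the input, returning the resolved input plus log fields. The payload below is a hedged, caller-side sketch only; apart from env/product/event/input/session/retries/cache, which appear in the diff, the concrete values and the specific $Session{user_id} key are assumptions.

// Hypothetical payload for processStorage; values and the '$Session{...}' key are illustrative.
const sessionToken = process.env.DUCTAPE_SESSION_TOKEN; // assumed to be issued elsewhere in the SDK
const storageJob = {
    product: 'my-product',             // product tag (assumed)
    env: 'prd',                        // environment slug (assumed)
    event: 'upload-avatar',            // storage tag (assumed)
    retries: 2,
    session: sessionToken,
    input: {
        owner_id: '$Session{user_id}', // resolved by processSessionForExecution per the diff comment
        path: '/tmp/avatar.png',
    },
};
// A ProcessorService instance would then run: await processor.processStorage(storageJob);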
@@ -2318,7 +2422,7 @@ class ProcessorService {
|
|
|
2318
2422
|
await this.intializeProduct(additional_logs);
|
|
2319
2423
|
this.baseLogs.product_id = this.productId;
|
|
2320
2424
|
this.process_id = process_id;
|
|
2321
|
-
const productEnv = this.fetchEnv(data.env, additional_logs);
|
|
2425
|
+
const productEnv = await this.fetchEnv(data.env, additional_logs);
|
|
2322
2426
|
this.processEnv = productEnv;
|
|
2323
2427
|
if (!productEnv.active) {
|
|
2324
2428
|
throw new Error(`Environment ${data.env} is not active`);
|
|
@@ -2357,6 +2461,9 @@ class ProcessorService {
|
|
|
2357
2461
|
type: types_1.LogEventTypes.MESSAGEBROKER,
|
|
2358
2462
|
name: 'Publish to broker topic',
|
|
2359
2463
|
};
|
|
2464
|
+
// Session log fields (will be populated if session is provided)
|
|
2465
|
+
let sessionLogFields = {};
|
|
2466
|
+
let resolvedInput = data.input;
|
|
2360
2467
|
try {
|
|
2361
2468
|
await this.validateActionDataMappingInput(data.input, types_1.FeatureEventTypes.PUBLISH);
|
|
2362
2469
|
this.start = Date.now();
|
|
@@ -2370,7 +2477,20 @@ class ProcessorService {
|
|
|
2370
2477
|
await this.intializeProduct(additional_logs);
|
|
2371
2478
|
this.baseLogs.product_id = this.productId;
|
|
2372
2479
|
this.process_id = process_id;
|
|
2373
|
-
|
|
2480
|
+
// Process session if provided - verify and resolve $Session{} references
|
|
2481
|
+
const privateKey = this.productBuilderService.fetchPrivateKey();
|
|
2482
|
+
if (data.session && privateKey) {
|
|
2483
|
+
const { processSessionForExecution } = await Promise.resolve().then(() => __importStar(require('../../sessions')));
|
|
2484
|
+
const sessionResult = await processSessionForExecution(data.session, privateKey, data.input, data.env);
|
|
2485
|
+
if (sessionResult.error) {
|
|
2486
|
+
throw new Error(`Session validation failed: ${sessionResult.error}`);
|
|
2487
|
+
}
|
|
2488
|
+
resolvedInput = sessionResult.input;
|
|
2489
|
+
sessionLogFields = sessionResult.logFields;
|
|
2490
|
+
}
|
|
2491
|
+
// Add session fields to base logs
|
|
2492
|
+
this.baseLogs = Object.assign(Object.assign({}, this.baseLogs), sessionLogFields);
|
|
2493
|
+
const productEnv = await this.fetchEnv(data.env, additional_logs);
|
|
2374
2494
|
this.processEnv = productEnv;
|
|
2375
2495
|
if (!productEnv.active) {
|
|
2376
2496
|
throw new Error(`Environment ${data.env} is not active`);
|
|
@@ -2379,7 +2499,7 @@ class ProcessorService {
|
|
|
2379
2499
|
type: types_1.FeatureEventTypes.PUBLISH,
|
|
2380
2500
|
event: data.event,
|
|
2381
2501
|
cache: data.cache,
|
|
2382
|
-
input:
|
|
2502
|
+
input: resolvedInput,
|
|
2383
2503
|
env: productEnv,
|
|
2384
2504
|
retries: 0,
|
|
2385
2505
|
allow_fail: false,
|
|
@@ -2399,8 +2519,8 @@ class ProcessorService {
|
|
|
2399
2519
|
}
|
|
2400
2520
|
}
|
|
2401
2521
|
async processJob(job, additional_logs = {}) {
|
|
2402
|
-
var _a;
|
|
2403
|
-
const productJob = this.productBuilderService.fetchJob(job.event);
|
|
2522
|
+
var _a, _b, _c, _d, _e, _f, _g, _h;
|
|
2523
|
+
const productJob = await this.productBuilderService.fetchJob(job.event);
|
|
2404
2524
|
if (!productJob) {
|
|
2405
2525
|
throw new Error(`Job ${job.event} not found`);
|
|
2406
2526
|
}
|
|
@@ -2408,8 +2528,10 @@ class ProcessorService {
|
|
|
2408
2528
|
const NOW = Date.now();
|
|
2409
2529
|
// Treat anything above Jan 1, 2023 as a timestamp (to be safe and future-proof)
|
|
2410
2530
|
const IS_PROBABLY_TIMESTAMP = job.start_at > 1672531200000;
|
|
2411
|
-
const
|
|
2531
|
+
const scheduled_at = IS_PROBABLY_TIMESTAMP ? job.start_at : NOW + job.start_at;
|
|
2532
|
+
const delay = Math.max(0, scheduled_at - NOW);
|
|
2412
2533
|
let jobInput;
|
|
2534
|
+
let namespace = productJob.type;
|
|
2413
2535
|
if (productJob.type === types_1.JobEventTypes.ACTION) {
|
|
2414
2536
|
const input = {
|
|
2415
2537
|
env: job.env,
|
|
@@ -2417,93 +2539,220 @@ class ProcessorService {
  app: productJob.app,
  cache: job.cache,
  input: job.input,
+ action: job.event,
+ session: job.session,
+ };
+ jobInput = input;
+ namespace = 'actions';
+ }
+ else if (productJob.type === types_1.JobEventTypes.NOTIFICATION) {
+ const input = {
+ env: job.env,
+ product: job.product,
  event: job.event,
-
+ input: job.input,
+ session: job.session,
+ cache: job.cache,
  };
  jobInput = input;
+ namespace = 'notifications';
  }
- else if (productJob.type === types_1.JobEventTypes.
+ else if (productJob.type === types_1.JobEventTypes.PUBLISH) {
  const input = {
  env: job.env,
  product: job.product,
+ event: job.event,
  cache: job.cache,
+ session: job.session,
  input: job.input,
+ };
+ jobInput = input;
+ namespace = 'events';
+ }
+ else if (productJob.type === types_1.JobEventTypes.STORAGE) {
+ const input = {
+ env: job.env,
+ product: job.product,
  event: job.event,
+ cache: job.cache,
  session: job.session,
+ input: job.input,
  };
  jobInput = input;
+ namespace = 'storage';
  }
- else if (productJob.type === types_1.JobEventTypes.
+ else if (productJob.type === types_1.JobEventTypes.DATABASE_ACTION) {
+ // Database action job - predefined database actions
  const input = {
- input: job.input,
- product: job.product,
  env: job.env,
-
-
+ product: job.product,
+ database: productJob.app,
+ event: productJob.event,
  cache: job.cache,
+ session: job.session,
+ input: job.input,
  };
  jobInput = input;
+ namespace = 'database';
  }
- else if (productJob.type === types_1.JobEventTypes.
+ else if (productJob.type === types_1.JobEventTypes.DATABASE_OPERATION) {
+ // Database operation job - direct database operations like createOne, findMany, etc.
  const input = {
  env: job.env,
  product: job.product,
-
+ database: productJob.app,
+ operation: productJob.event,
+ cache: job.cache,
+ session: job.session,
  input: job.input,
+ };
+ jobInput = input;
+ namespace = 'database';
+ }
+ else if (productJob.type === types_1.JobEventTypes.GRAPH_ACTION) {
+ // Graph action job - predefined graph actions
+ const input = {
+ env: job.env,
+ product: job.product,
+ graph: productJob.app,
+ event: productJob.event,
+ cache: job.cache,
  session: job.session,
-
+ input: job.input,
  };
  jobInput = input;
+ namespace = 'graphs';
  }
- else if (productJob.type === types_1.JobEventTypes.
+ else if (productJob.type === types_1.JobEventTypes.GRAPH_OPERATION) {
+ // Graph operation job - direct graph operations like createNode, traverse, etc.
  const input = {
  env: job.env,
  product: job.product,
-
+ graph: productJob.app,
+ operation: productJob.event,
  cache: job.cache,
  session: job.session,
- input: job.input
+ input: job.input,
  };
  jobInput = input;
+ namespace = 'graphs';
  }
- else if (productJob.type === types_1.JobEventTypes.
+ else if (productJob.type === types_1.JobEventTypes.WORKFLOW) {
+ // Workflow job - execute a workflow
  const input = {
  env: job.env,
  product: job.product,
-
+ workflow: productJob.app,
  cache: job.cache,
  session: job.session,
- input: job.input
+ input: job.input,
  };
  jobInput = input;
+ namespace = 'workflows';
  }
  else {
  throw new Error(`Job type ${productJob.type} not supported`);
  }
-
+ // Determine if this is a recurring job
+ const isRecurring = !!(((_a = job.repeat) === null || _a === void 0 ? void 0 : _a.cron) || ((_b = job.repeat) === null || _b === void 0 ? void 0 : _b.every));
+ // Generate a unique job ID
+ const jobId = `job_${(0, uuid_1.v4)().replace(/-/g, '').substring(0, 16)}`;
+ // Build queue options
+ const options = {
+ jobId,
+ };
  // Handle `delay` only if repeat.every is not defined
- if (!((
+ if (!((_c = job.repeat) === null || _c === void 0 ? void 0 : _c.every) && delay > 0) {
  options.delay = delay;
  }
  // Add repeat config if defined
+ let next_run_at;
  if (job.repeat) {
  const { every, cron, tz, limit, endDate } = job.repeat;
-
-
+ if (cron) {
+ options.repeat = {
  cron,
  tz,
  limit,
  endDate,
+ };
+ // Calculate next run time from cron
+ try {
+ const cronParser = require('cron-parser');
+ const cronOptions = {};
+ if (tz)
+ cronOptions.tz = tz;
+ const interval = cronParser.parseExpression(cron, cronOptions);
+ next_run_at = interval.next().getTime();
  }
-
-
-
-
-
-
-
+ catch (err) {
+ next_run_at = NOW;
+ }
+ }
+ else if (every) {
+ options.repeat = {
+ every,
+ limit,
+ endDate,
+ };
+ next_run_at = NOW + every;
+ }
  }
+ // Store job metadata in Redis if redisClient is available
+ if (this.redisClient) {
+ const jobData = {
+ id: jobId,
+ status: delay > 0 ? 'scheduled' : 'queued',
+ type: productJob.type,
+ namespace,
+ product: job.product,
+ env: job.env,
+ event: job.event,
+ app: productJob.app,
+ scheduled_at,
+ recurring: isRecurring,
+ cron: (_d = job.repeat) === null || _d === void 0 ? void 0 : _d.cron,
+ every: (_e = job.repeat) === null || _e === void 0 ? void 0 : _e.every,
+ next_run_at,
+ execution_count: 0,
+ limit: (_f = job.repeat) === null || _f === void 0 ? void 0 : _f.limit,
+ end_date: ((_g = job.repeat) === null || _g === void 0 ? void 0 : _g.endDate)
+ ? typeof job.repeat.endDate === 'string'
+ ? new Date(job.repeat.endDate).getTime()
+ : job.repeat.endDate
+ : undefined,
+ tz: (_h = job.repeat) === null || _h === void 0 ? void 0 : _h.tz,
+ retries: job.retries || 0,
+ retry_count: 0,
+ input: jobInput,
+ session: job.session,
+ cache: job.cache,
+ workspace_id: this.workspace_id,
+ created_at: NOW,
+ updated_at: NOW,
+ };
+ // Store in Redis with TTL of 90 days
+ const jobKey = `job:${this.workspace_id}:${jobId}`;
+ const redis = this.redisClient;
+ await redis.setex(jobKey, 90 * 24 * 60 * 60, JSON.stringify(jobData));
+ // Add to status index
+ const statusKey = `job_status:${this.workspace_id}:${jobData.status}`;
+ await redis.sadd(statusKey, jobId);
+ // Add to product index
+ const productKey = `job_product:${this.workspace_id}:${job.product}`;
+ await redis.sadd(productKey, jobId);
+ }
+ // Add job input with the job ID for tracking
+ jobInput._job_id = jobId;
+ // Add job to queue
  await this.queues.jobs.add(productJob.type, jobInput, options);
+ return {
+ job_id: jobId,
+ status: delay > 0 ? 'scheduled' : 'queued',
+ scheduled_at,
+ recurring: isRecurring,
+ next_run_at,
+ };
  }
  async sendExpoNotification(payload, device_tokens) {
  const message = {
@@ -2558,19 +2807,53 @@ class ProcessorService {
  await this.sendFirebaseNotification({ title, body, data }, payload.device_tokens, notification.credentials);
  this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Send Firebase notification - success', data: { title, body: (0, processor_utils_1.anonymizeObject)(data), data: (0, processor_utils_1.anonymizeObject)(data) }, status: types_1.LogEventStatus.SUCCESS }));
  }
- async runNotification(notification, additional_logs) {
- var _a, _b, _c, _d, _e, _f, _g, _h;
+ async runNotification(notification, additional_logs, bootstrapData) {
+ var _a, _b, _c, _d, _e, _f, _g, _h, _j;
  const { event } = notification;
  const input = notification.input;
  try {
-
-
-
- if
-
-
-
-
+ let notificationEvent;
+ let message;
+ let envConfig;
+ // Use bootstrap data if provided, otherwise fetch via API
+ if (bootstrapData) {
+ notificationEvent = bootstrapData.notification;
+ message = bootstrapData.message;
+ envConfig = bootstrapData.env_config;
+ }
+ else {
+ // Fallback to original API-based fetching
+ notificationEvent = (await this.productBuilderService.fetchNotification(event.split(':')[0]));
+ message = await this.productBuilderService.fetchNotificationMessage(event);
+ if (!message) {
+ throw new Error(`Message ${event} not found`);
+ }
+ envConfig = (_a = notificationEvent.envs) === null || _a === void 0 ? void 0 : _a.find((data) => data.slug === notification.env.slug);
+ }
+ if (!envConfig) {
+ throw new Error(`Notification env config for ${notification.env.slug} not found`);
+ }
+ let { push_notifications: notifications, emails, callbacks, sms: smses, } = envConfig;
+ // Resolve any $Secret{} references in notification configs
+ const secretsService = (0, secrets_1.getSecretsService)();
+ if (secretsService) {
+ if (notifications && (0, secrets_1.mightContainSecrets)(notifications)) {
+ const resolved = await secretsService.resolve(notifications, { env: notification.env.slug });
+ notifications = resolved.value;
+ }
+ if (emails && (0, secrets_1.mightContainSecrets)(emails)) {
+ const resolved = await secretsService.resolve(emails, { env: notification.env.slug });
+ emails = resolved.value;
+ }
+ if (callbacks && (0, secrets_1.mightContainSecrets)(callbacks)) {
+ const resolved = await secretsService.resolve(callbacks, { env: notification.env.slug });
+ callbacks = resolved.value;
+ }
+ if (smses && (0, secrets_1.mightContainSecrets)(smses)) {
+ const resolved = await secretsService.resolve(smses, { env: notification.env.slug });
+ smses = resolved.value;
+ }
+ }
  const { push_notification: push, email, callback, sms } = message;
  this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetching notification', data: notificationEvent, status: types_1.LogEventStatus.SUCCESS }));
  if (push) {
@@ -2616,7 +2899,11 @@ class ProcessorService {
  const templateMaker = (0, handlebars_1.compile)(email.template);
  const template = templateMaker(input.email.template);
  const subject = (0, processor_utils_1.replacePlaceholderString)(email.subject, input.email.subject || {});
- this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Generate email template - success', data: {
+ this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Generate email template - success', data: {
+ template: (0, processor_utils_1.anonymizeValue)(template),
+ subject: (0, processor_utils_1.anonymizeValue)(subject),
+ input: (0, processor_utils_1.anonymizeObject)(input.email),
+ }, status: types_1.LogEventStatus.SUCCESS }));
  const mailOptions = {
  from,
  to: input.email.recipients,
@@ -2634,10 +2921,10 @@ class ProcessorService {
  }
  if (callback && callbacks) {
  const payload = {
- query: Object.assign(Object.assign({}, (
- headers: Object.assign(Object.assign({}, (
- params: Object.assign(Object.assign({}, (
- body: Object.assign(Object.assign({}, (
+ query: Object.assign(Object.assign({}, (_b = input.callback) === null || _b === void 0 ? void 0 : _b.query), (_c = callbacks.auth) === null || _c === void 0 ? void 0 : _c.query),
+ headers: Object.assign(Object.assign({}, (_d = input.callback) === null || _d === void 0 ? void 0 : _d.headers), (_e = callbacks.auth) === null || _e === void 0 ? void 0 : _e.headers),
+ params: Object.assign(Object.assign({}, (_f = input.callback) === null || _f === void 0 ? void 0 : _f.params), (_g = callbacks.auth) === null || _g === void 0 ? void 0 : _g.params),
+ body: Object.assign(Object.assign({}, (_h = input.callback) === null || _h === void 0 ? void 0 : _h.body), (_j = callbacks.auth) === null || _j === void 0 ? void 0 : _j.body),
  };
  input.callback.body = await this.generatePayload(payload.body, notification, additional_logs, message.callback_data.filter((data) => data.parent_key === 'body'));
  input.callback.query = await this.generatePayload(payload.query, notification, additional_logs, message.callback_data.filter((data) => data.parent_key === 'query'));
@@ -2659,7 +2946,7 @@ class ProcessorService {
  }
  if (sms && smses) {
  try {
- input.sms.body = await (0, processor_utils_1.replacePlaceholderString)(sms, input.sms.body);
+ input.sms.body = (await (0, processor_utils_1.replacePlaceholderString)(sms, input.sms.body));
  const SmsClient = await (0, sms_repo_1.loadSMSClient)();
  const smsClient = new SmsClient(smses);
  this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { name: 'Send sms - initiated', data: { message: input.sms.body, config: (0, processor_utils_1.anonymizeObject)(smses) }, status: types_1.LogEventStatus.SUCCESS }));
@@ -2678,193 +2965,123 @@ class ProcessorService {
  throw e;
  }
  }
- async runMigration(product_tag, tag, env, type) {
- try {
- this.productTag = product_tag;
- const [dbTag, migrationTag] = tag.split(':');
- if (!dbTag || !migrationTag) {
- throw new Error('tag should be in the format database_tag:migration_tag');
- }
- this.start = Date.now();
- const additional_logs = {
- parent_tag: dbTag,
- child_tag: migrationTag,
- type: types_1.LogEventTypes.DB_MIGRATION,
- name: 'Run Migration',
- };
- await this.intializeProduct(additional_logs);
- const db = this.productBuilderService.fetchDatabase(dbTag);
- if (!db) {
- throw new Error('Database not found');
- }
- if (db.type === types_1.DatabaseTypes.MONGODB) {
- throw new Error(`${db.type} does not support migrations`);
- }
- const migration = this.productBuilderService.fetchDatabaseMigration(tag);
- if (!migration) {
- throw new Error('Database migration not found');
- }
- const dbEnv = db.envs.find((el) => el.slug === env);
- if (!dbEnv) {
- throw new Error(`Environment ${env} not found`);
- }
- const productEnv = this.fetchEnv(env, additional_logs);
- if (!productEnv.active) {
- throw new Error(`Environment ${env} is not active`);
- }
- const product = this.productBuilderService.fetchProduct();
- const migrations = this.productBuilderService.fetchDatabaseMigrations(dbTag);
- //this.processEnv = productEnv;
- /* const check = migration.envs.find((migrationEnv) => migrationEnv.slug === env);
- if (!check) {
- throw new Error(`Migration does not exist for environment ${env}`);
- }*/
- const process_id = (0, processor_utils_1.generateObjectId)();
- this.baseLogs = Object.assign({ product_tag: this.productTag, product_id: this.productId, workspace_id: this.workspace_id, env,
- process_id, data: { tag, env } }, additional_logs);
- const migrationsToRun = (0, processor_utils_1.getMigrationsToRun)((0, processor_utils_1.structuredClone)(migrations), type, migrationTag, env);
- if (db.type === types_1.DatabaseTypes.POSTGRES) {
- const PostgresDBHandler = await (0, postgres_repo_1.loadPostgresHandler)();
- const pgHandler = new PostgresDBHandler((0, processor_utils_1.decrypt)(dbEnv.connection_url, product.private_key));
- for (const migrationToRun of migrationsToRun) {
- const envS = migration.envs.find((envT) => envT.slug === env && type === envT.type);
- if (envS && envS.status === types_1.MigrationStatus.PROCESSED) {
- continue;
- }
- this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: `Starting migration ${type} name: ${migrationToRun.name}`, data: { migration: migrationToRun, type }, status: types_1.LogEventStatus.PROCESSING }));
- const { type: migType, result, status, processed_at, } = await pgHandler.runMigration(migrationToRun.value[type], type, envS);
- if (!envS) {
- migrationToRun.envs.push({ slug: env, results: result, status, type, processed_at });
- }
- else {
- migrationToRun.envs.map((envT) => {
- if (envT.slug === env && type === envT.type) {
- envT.results = [...result];
- envT.processed_at = processed_at;
- return envT;
- }
- return envT;
- });
- }
- this.productBuilderService.updateDatabaseMigration(Object.assign(Object.assign({}, migrationToRun), { tag }));
- this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { successful_execution: true, message: `Starting migration ${type} name: ${migrationToRun.name}`, data: { migration: migrationToRun, type }, status: types_1.LogEventStatus.SUCCESS }));
- }
- return { process_id };
- }
- }
- catch (e) {
- this.logService.add(Object.assign(Object.assign({}, this.baseLogs), { failed_execution: true, message: 'Attempt migration - failed', data: e, status: types_1.LogEventStatus.FAIL }));
- await this.logService.publish();
- }
- }
  /*async runFunction(data: IFeatureEvent, additional_logs: Partial<ILogData>): Promise<any> {
-
-
-
-
-
-
-
-
-
-
-
-
- });
-
- const process_id = generateObjectId();
- this.process_id = process_id;
-
-
- try {
- await this.intializeProduct(additional_logs);
- this.logService.add({
- ...this.baseLogs,
- ...additional_logs,
- message: 'Attempt function - initiated',
- data,
- status: LogEventStatus.PROCESSING,
- });
-
- this.logService.add({
- ...this.baseLogs,
- ...additional_logs,
- message: 'Fetch function - initiated',
- data: data,
- status: LogEventStatus.PROCESSING,
- });
-
- const cloudFunction = this.productBuilderService.fetchFunction(data.event);
-
- this.logService.add({
- ...this.baseLogs,
- ...additional_logs,
- message: 'Fetch function - success',
- data: data,
- status: LogEventStatus.SUCCESS,
- });
-
- this.logService.add({
- ...this.baseLogs,
- ...additional_logs,
- message: 'Validate function payload - initiated',
- data: { data, payload: input.payload },
- status: LogEventStatus.PROCESSING,
- });
-
- validateFunctionInputKeys(cloudFunction.inputs, input.payload);
-
- this.logService.add({
- ...this.baseLogs,
- ...additional_logs,
- message: 'Validate function payload - success',
- data: { data, payload: input.payload },
- status: LogEventStatus.SUCCESS,
- });
-
- this.logService.add({
- ...this.baseLogs,
- ...additional_logs,
- message: 'Run function - initiated',
- data: { data, payload: input.payload },
- status: LogEventStatus.PROCESSING,
+ const { product_id, env } = data;
+ const input = data.input as IFunctionRequest;
+
+ this.productId = product_id;
+
+ this.logService = new LogsService({
+ product_id,
+ workspace_id: this.workspace_id,
+ public_key: this.public_key,
+ user_id: this.user_id,
+ token: this.token,
+ env_type: this.environment,
  });
-
- const
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+ const process_id = generateObjectId();
+ this.process_id = process_id;
+
+
+ try {
+ await this.intializeProduct(additional_logs);
+ this.logService.add({
+ ...this.baseLogs,
+ ...additional_logs,
+ message: 'Attempt function - initiated',
+ data,
+ status: LogEventStatus.PROCESSING,
+ });
+
+ this.logService.add({
+ ...this.baseLogs,
+ ...additional_logs,
+ message: 'Fetch function - initiated',
+ data: data,
+ status: LogEventStatus.PROCESSING,
+ });
+
+ const cloudFunction = await this.productBuilderService.fetchFunction(data.event);
+
+ this.logService.add({
+ ...this.baseLogs,
+ ...additional_logs,
+ message: 'Fetch function - success',
+ data: data,
+ status: LogEventStatus.SUCCESS,
+ });
+
+ this.logService.add({
+ ...this.baseLogs,
+ ...additional_logs,
+ message: 'Validate function payload - initiated',
+ data: { data, payload: input.payload },
+ status: LogEventStatus.PROCESSING,
+ });
+
+ validateFunctionInputKeys(cloudFunction.inputs, input.payload);
+
+ this.logService.add({
+ ...this.baseLogs,
+ ...additional_logs,
+ message: 'Validate function payload - success',
+ data: { data, payload: input.payload },
+ status: LogEventStatus.SUCCESS,
+ });
+
+ this.logService.add({
+ ...this.baseLogs,
+ ...additional_logs,
+ message: 'Run function - initiated',
+ data: { data, payload: input.payload },
+ status: LogEventStatus.PROCESSING,
+ });
+
+ const response = await makeFunctionsRequest(cloudFunction, input.payload);
+
+ this.logService.add({
+ ...this.baseLogs,
+ ...additional_logs,
+ message: 'Run function - success',
+ data: { data, payload: input.payload },
+ status: LogEventStatus.SUCCESS,
+ });
+ } catch (e) {
+ this.logService.add({
+ ...this.baseLogs,
+ ...additional_logs,
+ message: 'Run function - failed',
+ data: e,
+ status: LogEventStatus.FAIL,
+ });
+ await this.logService.publish();
+ }
+ }*/
+ async runStorage(data, additional_logs = {}, bootstrapData) {
  const { product_id, env, event, cache: cache_tag } = data;
  const input = data.input;
  try {
-
-
-
+ let storage;
+ let storageEnv;
+ // Use bootstrap data if provided, otherwise fetch via API
+ if (bootstrapData) {
+ storage = bootstrapData.storage;
+ storageEnv = bootstrapData.storage_env;
+ }
+ else {
+ // Fallback to original API-based fetching
+ await this.intializeProduct(additional_logs);
+ storage = await this.productBuilderService.fetchStorage(event);
+ storageEnv = storage.envs.find((el) => el.slug === env.slug);
+ }
  if (!storageEnv) {
  throw new Error(`Storage env for ${env.slug} not found`);
  }
  this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch storage details - success', data: { storage }, status: types_1.LogEventStatus.SUCCESS }));
- const product = this.productBuilderService.fetchProduct();
  let result;
  if (cache_tag && this.redisClient) {
- const productCache = this.productBuilderService.fetchCache(cache_tag);
+ const productCache = await this.productBuilderService.fetchCache(cache_tag);
  if (!productCache) {
  throw new Error('Invalid cache tag ');
  }
@@ -2872,7 +3089,7 @@ class ProcessorService {
  const check = await this.fetchFromCache({
  cache_tag,
  input: inputString,
- privateKey:
+ privateKey: this.productBuilderService.fetchPrivateKey(),
  expiry: productCache.expiry,
  }, additional_logs);
  if (check) {
@@ -2882,18 +3099,22 @@ class ProcessorService {
  }
  }
  input.buffer = input.buffer ? await this.generateStringValues(input.buffer, '', additional_logs, []) : undefined;
- input.fileName = input.fileName
-
+ input.fileName = input.fileName
+ ? await this.generateStringValues(input.fileName, '', additional_logs, [])
+ : undefined;
+ input.mimeType = input.mimeType
+ ? await this.generateStringValues(input.mimeType, '', additional_logs, [])
+ : undefined;
  result = await this.processStorageRequest(data, input, storageEnv, additional_logs);
  if (cache_tag && this.redisClient) {
- const productCache = this.productBuilderService.fetchCache(cache_tag);
+ const productCache = await this.productBuilderService.fetchCache(cache_tag);
  if (!productCache) {
  throw new Error('Invalid cache tag ');
  }
  const inputString = JSON.stringify(input);
  await this.addToCache({
  input: inputString,
- privateKey:
+ privateKey: this.productBuilderService.fetchPrivateKey(),
  data: JSON.stringify(result),
  cache_tag,
  timestamp: Date.now(),
@@ -2908,131 +3129,13 @@ class ProcessorService {
  throw e;
  }
  }
- async runDBAction(db_action, additional_logs = {}) {
- const { product_id, env, event, cache: cache_tag } = db_action;
- const input = db_action.input;
- try {
- //await this.intializeProduct(additional_logs);
- const [database_tag, action_tag] = event.split(':');
- const product = this.productBuilderService.fetchProduct();
- const database = await this.productBuilderService.fetchDatabase(database_tag);
- const databaseAction = await this.productBuilderService.fetchDatabaseAction(event);
- const databaseEnv = database.envs.find((el) => el.slug === env.slug);
- if (!databaseEnv) {
- throw new Error(`Database env for ${env.slug} not found`);
- }
- this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch database action - success', data: databaseAction, status: types_1.LogEventStatus.SUCCESS }));
- input.data = await this.generatePayload(input.data, db_action, additional_logs, databaseAction.data);
- if (Array.isArray(input.data)) {
- await Promise.all(input.data.map(async (d) => {
- const dataValidationPayload = (await this.inputService.parseJson({
- data: d,
- expected: types_1.ExpectedValues.PARSEINPUT,
- }));
- this.inputService.validateInput(dataValidationPayload, databaseAction.data);
- }));
- }
- else {
- const dataValidationPayload = (await this.inputService.parseJson({
- data: input.data,
- expected: types_1.ExpectedValues.PARSEINPUT,
- }));
- this.inputService.validateInput(dataValidationPayload, databaseAction.data);
- if (input.filter) {
- const filterValidationPayload = (await this.inputService.parseJson({
- data: input.filter,
- expected: types_1.ExpectedValues.PARSEINPUT,
- }));
- this.inputService.validateInput(filterValidationPayload, databaseAction.filterData);
- }
- }
- this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Validate database action payload - success', data: { db_action, payload: (0, processor_utils_1.anonymizeObject)(input.data) }, status: types_1.LogEventStatus.SUCCESS }));
- if (database.type === types_1.DatabaseTypes.MONGODB) {
- if (databaseAction.type === types_1.DatabaseActionTypes.UPDATE) {
- this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Validate database update action filter - success', data: {}, status: types_1.LogEventStatus.SUCCESS }));
- }
- }
- let result;
- if (cache_tag && this.redisClient) {
- const productCache = this.productBuilderService.fetchCache(cache_tag);
- if (!productCache) {
- throw new Error('Invalid cache tag ');
- }
- const inputString = JSON.stringify(input);
- const check = await this.fetchFromCache({
- cache_tag,
- input: inputString,
- privateKey: product.private_key,
- expiry: productCache.expiry,
- }, additional_logs);
- if (check) {
- result = JSON.parse(check);
- this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Run database action query - return from cache', data: { result: (0, processor_utils_1.anonymizeObject)(result) }, status: types_1.LogEventStatus.SUCCESS }));
- return result;
- }
- }
- const template = typeof databaseAction.template === 'string' ? databaseAction.template : JSON.stringify(databaseAction.template);
- if (database.type === types_1.DatabaseTypes.MONGODB) {
- const MongoDBHandler = await (0, mongo_repo_1.loadMongoDbHandler)();
- if (!MongoDBHandler) {
- throw new Error(`Running in browser, mongo handler not loaded.`);
- }
- const mongoHandler = new MongoDBHandler(databaseEnv.connection_url);
- if (databaseAction.type === types_1.DatabaseActionTypes.UPDATE) {
- const filterTemplate = typeof databaseAction.filterTemplate === 'string'
- ? databaseAction.filterTemplate
- : JSON.stringify(databaseAction.filterTemplate);
- result = await mongoHandler[databaseAction.type](databaseAction.data, template, input.data, databaseAction.tableName, databaseAction.filterData, filterTemplate, input.filter);
- }
- else {
- result = await mongoHandler[databaseAction.type](databaseAction.data, template, input.data, databaseAction.tableName);
- }
- }
- else if (database.type === types_1.DatabaseTypes.POSTGRES) {
- const PostgresDBHandler = await (0, postgres_repo_1.loadPostgresHandler)();
- if (!PostgresDBHandler) {
- throw new Error(`Running in browser, postgres handler not loaded.`);
- }
- const pgHandler = new PostgresDBHandler((0, processor_utils_1.decrypt)(databaseEnv.connection_url, this.productBuilderService.fetchProduct().private_key));
- if (databaseAction.type !== types_1.DatabaseActionTypes.AGGREGATE) {
- result = await pgHandler[databaseAction.type](databaseAction.data, template, input.data);
- }
- }
- //await this.logService.publish();
- // const result = this.processDBRequest(db_action, input, database_tag, databaseEnv, action_tag, additional_logs);
- await this.addToSuccessOutput(db_action, result, additional_logs);
- if (cache_tag && this.redisClient) {
- const productCache = this.productBuilderService.fetchCache(cache_tag);
- if (!productCache) {
- throw new Error('Invalid cache tag ');
- }
- const inputString = JSON.stringify(input);
- await this.addToCache({
- input: inputString,
- privateKey: product.private_key,
- data: JSON.stringify(result),
- cache_tag,
- timestamp: Date.now(),
- product_tag: this.productTag,
- component_tag: database_tag,
- component_type: types_1.ProductComponents.DATABASE_ACTION,
- }, additional_logs);
- }
- this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { successful_execution: true, message: 'Attempt database action - successful', data: { result: (0, processor_utils_1.anonymizeObject)(result) }, status: types_1.LogEventStatus.SUCCESS }));
- return result;
- }
- catch (e) {
- this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { failed_execution: true, message: 'Attempt database action - failed', data: { e: e.toString() }, status: types_1.LogEventStatus.FAIL }));
- throw e;
- }
- }
  async runBrokerSubscribe(data, additional_logs = {}) {
  const { env, event } = data;
  const input = data.input;
  try {
  await this.intializeProduct(additional_logs);
  const [brokerTag, topicTag] = event.split(':');
- const broker = this.productBuilderService.fetchMessageBroker(brokerTag);
+ const broker = await this.productBuilderService.fetchMessageBroker(brokerTag);
  if (!broker) {
  throw new Error(`Message Broker ${brokerTag} not found`);
  }
@@ -3040,7 +3143,7 @@ class ProcessorService {
  if (!brokerEnv) {
  throw new Error(`Broker env for ${env.slug} not found`);
  }
- const topic = this.productBuilderService.fetchMessageBrokerTopic(event);
+ const topic = await this.productBuilderService.fetchMessageBrokerTopic(event);
  if (!topic) {
  throw new Error(`Topic ${topicTag} not found in broker ${brokerTag}`);
  }
@@ -3060,9 +3163,7 @@ class ProcessorService {
  if (createBrokerService) {
  const brokerService = createBrokerService(brokerEnv.type, brokerEnv.config);
  await brokerService.subscribe(url, input.callback);
-
- this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { successful_execution: true, message: 'Subscribe to broker topic - success', data: { event }, status: types_1.LogEventStatus.SUCCESS }));
- }
+ this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { successful_execution: true, message: 'Subscribe to broker topic - success', data: { event }, status: types_1.LogEventStatus.SUCCESS }));
  return;
  }
  else {
@@ -3081,7 +3182,7 @@ class ProcessorService {
  try {
  await this.intializeProduct(additional_logs);
  const [brokerTag, topicTag] = event.split(':');
- const broker = this.productBuilderService.fetchMessageBroker(brokerTag);
+ const broker = await this.productBuilderService.fetchMessageBroker(brokerTag);
  if (!broker) {
  throw new Error(`Message Broker ${brokerTag} not found`);
  }
@@ -3089,7 +3190,7 @@ class ProcessorService {
  if (!brokerEnv) {
  throw new Error(`Broker env for ${env.slug} not found`);
  }
- const topic = this.productBuilderService.fetchMessageBrokerTopic(event);
+ const topic = await this.productBuilderService.fetchMessageBrokerTopic(event);
  if (!topic) {
  throw new Error(`Topic ${topicTag} not found in broker ${brokerTag}`);
  }
@@ -3109,9 +3210,7 @@ class ProcessorService {
  if (createBrokerService) {
  const brokerService = createBrokerService(brokerEnv.type, brokerEnv.config);
  await brokerService.publish(url, input.message);
-
- this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Publish to broker topic - success', successful_execution: true, data: { event }, status: types_1.LogEventStatus.SUCCESS }));
- }
+ this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Publish to broker topic - success', successful_execution: true, data: { event }, status: types_1.LogEventStatus.SUCCESS }));
  return;
  }
  else {
@@ -3153,7 +3252,7 @@ class ProcessorService {
  const result = { url: await (0, storage_util_1.uploadBlobToCloud)({ data: input.buffer, destinationPath: input.fileName, config }) };
  try {
  await this.processorApiService.saveFileURL({
- url: (0, processor_utils_1.encrypt)(result.url, this.productBuilderService.
+ url: (0, processor_utils_1.encrypt)(result.url, this.productBuilderService.fetchPrivateKey()),
  provider: storageEnv.type,
  product: this.productTag,
  process_id: this.process_id,
@@ -3161,7 +3260,7 @@ class ProcessorService {
  type: input.mimeType,
  event: data.event,
  env: data.env.slug,
- size: Buffer.from(input.buffer).length
+ size: Buffer.from(input.buffer).length,
  }, this.getUserAccess());
  }
  catch (e) { }
@@ -3178,35 +3277,6 @@ class ProcessorService {
  throw e;
  }
  }
- async processDBRequest(db_action, input, database_tag, databaseEnv, action_tag, additional_logs, returnValue = false) {
- try {
- const result = await this.processorApiService.processProduct(this.productId, {
- input: input,
- database_tag: database_tag,
- database_env_slug: databaseEnv.slug,
- tag: action_tag,
- component: types_1.ProductComponents.DATABASE_ACTION,
- }, {
- user_id: this.user_id,
- token: this.token,
- workspace_id: this.workspace_id,
- public_key: this.public_key,
- });
- this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { successful_execution: true, message: 'Run database query - success', data: { payload: (0, processor_utils_1.anonymizeObject)(input) }, status: types_1.LogEventStatus.SUCCESS }));
- return result;
- }
- catch (e) {
- this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { failed_execution: true, message: 'Running database query - failed', data: { payload: (0, processor_utils_1.anonymizeObject)(input), e }, status: types_1.LogEventStatus.FAIL }));
- const value = await this.addToFailureOutput(e, db_action, {
- db_action,
- input,
- database_tag,
- databaseEnv,
- action_tag,
- }, additional_logs);
- return value;
- }
- }
  async writeResult(status, retryable = true) {
  this.processorApiService.saveResult({
  status,
@@ -3214,14 +3284,108 @@ class ProcessorService {
  start: this.start,
  end: this.end,
  retryable,
- result: (0, processor_utils_1.encrypt)(JSON.stringify(this.processingOutput), this.productBuilderService.
+ result: (0, processor_utils_1.encrypt)(JSON.stringify(this.processingOutput), this.productBuilderService.fetchPrivateKey()),
  process_id: this.process_id,
- feature_id:
+ feature_id: null,
  product_id: this.productId,
  env: this.processEnv.slug,
- input: (0, processor_utils_1.encrypt)(JSON.stringify(this.input), this.productBuilderService.
+ input: (0, processor_utils_1.encrypt)(JSON.stringify(this.input), this.productBuilderService.fetchPrivateKey()),
  }, this.getUserAccess());
  }
+ /**
+ * Separate credentials into prefixed (e.g., 'headers:Authorization') and non-prefixed (e.g., 'api_key').
+ * Prefixed credentials are applied directly to the correct section after resolution.
+ * Non-prefixed credentials go through InputResolver to determine their placement.
+ */
+ separateCredentials(credentials) {
+ const prefixed = {};
+ const nonPrefixed = {};
+ for (const [key, value] of Object.entries(credentials)) {
+ if (key.startsWith('headers:') || key.startsWith('body:') ||
+ key.startsWith('params:') || key.startsWith('query:')) {
+ prefixed[key] = value;
+ }
+ else {
+ nonPrefixed[key] = value;
+ }
+ }
+ return { prefixed, nonPrefixed };
+ }
+ /**
+ * Check if a key exists in the action schema for a given section (headers, body, params, query).
+ * Returns true if the key is defined in the schema, false otherwise.
+ */
+ isKeyInActionSchema(action, section, key) {
+ if (!action)
+ return false;
+ const sectionSchema = action[section];
+ if (!sectionSchema || !sectionSchema.data)
+ return false;
+ // Check if the key exists in the schema's data array
+ return sectionSchema.data.some(item => item.key === key);
+ }
+ /**
+ * Apply prefixed credentials (e.g., 'headers:Authorization') to resolved input.
+ * Credentials are applied with lower priority - existing values in resolvedInput take precedence.
+ * Only applies credentials if the action schema defines the corresponding field.
+ */
+ applyPrefixedCredentials(credentials, resolvedInput, action) {
+ const result = {
+ body: Object.assign({}, (resolvedInput.body || {})),
+ params: Object.assign({}, (resolvedInput.params || {})),
+ query: Object.assign({}, (resolvedInput.query || {})),
+ headers: Object.assign({}, (resolvedInput.headers || {})),
+ };
+ for (const [key, value] of Object.entries(credentials)) {
+ if (key.startsWith('headers:')) {
+ const headerKey = key.substring(8); // Remove 'headers:' prefix
+ // Only apply if the action schema expects this header field
+ if (!this.isKeyInActionSchema(action, 'headers', headerKey)) {
+ continue; // Skip - action doesn't expect this credential
+ }
+ // Only set if not already present (user input takes precedence)
+ if (!(headerKey in (result.headers || {}))) {
+ result.headers = result.headers || {};
+ result.headers[headerKey] = value;
+ }
+ }
+ else if (key.startsWith('body:')) {
+ const bodyKey = key.substring(5); // Remove 'body:' prefix
+ // Only apply if the action schema expects this body field
+ if (!this.isKeyInActionSchema(action, 'body', bodyKey)) {
+ continue; // Skip - action doesn't expect this credential
+ }
+ if (!(bodyKey in (result.body || {}))) {
+ result.body = result.body || {};
+ result.body[bodyKey] = value;
+ }
+ }
+ else if (key.startsWith('params:')) {
+ const paramsKey = key.substring(7); // Remove 'params:' prefix
+ // Only apply if the action schema expects this params field
+ if (!this.isKeyInActionSchema(action, 'params', paramsKey)) {
+ continue; // Skip - action doesn't expect this credential
+ }
+ if (!(paramsKey in (result.params || {}))) {
+ result.params = result.params || {};
+ result.params[paramsKey] = value;
+ }
+ }
+ else if (key.startsWith('query:')) {
+ const queryKey = key.substring(6); // Remove 'query:' prefix
+ // Only apply if the action schema expects this query field
+ if (!this.isKeyInActionSchema(action, 'query', queryKey)) {
+ continue; // Skip - action doesn't expect this credential
+ }
+ if (!(queryKey in (result.query || {}))) {
+ result.query = result.query || {};
+ result.query[queryKey] = value;
+ }
+ }
+ // Ignore keys without recognized prefixes - credentials should always be prefixed
+ }
+ return result;
+ }
  async validateActionDataMappingInput(input, type) {
  try {
  if (type === types_1.FeatureEventTypes.ACTION || type === types_1.WebhookEventTypes.WEBHOOK_REGISTER) {
@@ -3241,7 +3405,7 @@ class ProcessorService {
  }
  async processAction(action) {
  //TODO: schema validation
- const { env, input, retries, event, app, product: product_tag } = action;
+ const { env, input, retries, action: event, app, product: product_tag, session, cache } = action;
  const additional_logs = {
  parent_tag: (0, string_utils_1.extractOriginAndTag)(app),
  child_tag: event,
@@ -3249,39 +3413,141 @@ class ProcessorService {
  name: 'Process Action',
  };
  this.component = types_1.LogEventTypes.ACTION;
+ // Session log fields (will be populated if session is provided)
+ let sessionLogFields = {};
+ let resolvedInput = input;
  try {
- // validate input do not allow $Sequence or $Length and $Size of $Input
- await this.validateActionDataMappingInput(input, types_1.FeatureEventTypes.ACTION);
  this.input = action;
  this.start = Date.now();
- this.productTag = product_tag;
  const process_id = (0, processor_utils_1.generateObjectId)();
+ if (product_tag) {
+ this.productTag = product_tag;
+ }
  this.baseLogs = {
- product_tag: this.productTag,
+ product_tag: this.productTag || '',
  workspace_id: this.workspace_id,
  env,
  type: types_1.LogEventTypes.ACTION,
  process_id,
  data: input,
  };
-
+ // Single ultra-lightweight bootstrap call - returns action data directly
+ const bootstrapData = await this.productBuilderService.bootstrapAction({
+ product_tag,
+ env_slug: env,
+ access_tag: app,
+ action_tag: event,
+ });
+ // Initialize from bootstrap data
+ if (bootstrapData.product_id) {
+ this.productId = bootstrapData.product_id;
+ }
+ // Resolve flat input to structured format using action schema
+ // This must happen AFTER bootstrap when we have the action schema
+ if (bootstrapData.action && input && typeof input === 'object') {
+ if (!(0, utils_1.isStructuredInput)(input)) {
+ // Flat input detected - resolve using action schema
+ let flatInput = Object.assign({}, input);
+ // Gather all credentials (static + OAuth)
+ // Priority: user input > OAuth > static config
+ const allCredentials = {};
+ // Static credentials (lowest priority)
+ const sharedCredentials = credential_manager_1.credentialManager.get({ product: product_tag, app, env });
+ if (sharedCredentials) {
+ Object.assign(allCredentials, sharedCredentials);
+ }
+ // OAuth credentials (higher priority, overwrites static)
+ if (product_tag && oauth_manager_1.oauthManager.has(product_tag, app, env)) {
+ const oauthCredentials = await oauth_manager_1.oauthManager.getCredentials(product_tag, app, env);
+ if (oauthCredentials) {
+ Object.assign(allCredentials, oauthCredentials);
+ }
+ }
+ // Separate credentials into prefixed and non-prefixed
+ const { prefixed, nonPrefixed } = this.separateCredentials(allCredentials);
+ const inputResolver = new utils_1.InputResolver();
+ // Build location map to check which keys exist in the schema
+ const locationMap = inputResolver.buildLocationMap(bootstrapData.action);
+ // Merge non-prefixed credentials into flatInput BEFORE resolution
+ // Only include credentials that exist in the action schema
+ // User input takes precedence over credentials
+ for (const [key, value] of Object.entries(nonPrefixed)) {
+ // Skip if user already provided this key
+ if (key in flatInput) {
+ continue;
+ }
+ // Only include if the key exists in the action schema
+ if (locationMap.allValidKeys.has(key)) {
+ flatInput[key] = value;
+ }
+ }
+ try {
+ resolvedInput = inputResolver.resolve(flatInput, bootstrapData.action, { strict: true });
+ }
+ catch (e) {
+ if (e instanceof utils_1.InputResolutionError) {
+ throw new Error(`Input resolution failed for action '${event}': ${e.message}`);
+ }
+ throw e;
+ }
+ // Apply prefixed credentials AFTER resolution (directly to correct section)
+ // Only credentials that match fields in the action schema are applied
+ if (Object.keys(prefixed).length > 0) {
+ resolvedInput = this.applyPrefixedCredentials(prefixed, resolvedInput, bootstrapData.action);
+ }
+ }
+ else {
+ // Already structured - validate with existing schema
+ await this.validateActionDataMappingInput(input, types_1.FeatureEventTypes.ACTION);
+ }
+ }
+ else {
+ // Fallback: validate with existing schema if no action bootstrap data
+ await this.validateActionDataMappingInput(input, types_1.FeatureEventTypes.ACTION);
+ }
+ // Process session if provided - verify and resolve $Session{} references
+ if (session && bootstrapData.private_key) {
+ const { processSessionForExecution } = await Promise.resolve().then(() => __importStar(require('../../sessions')));
+ const sessionResult = await processSessionForExecution(session, bootstrapData.private_key, resolvedInput, env);
+ if (sessionResult.error) {
+ throw new Error(`Session validation failed: ${sessionResult.error}`);
+ }
+ resolvedInput = sessionResult.input;
+ sessionLogFields = sessionResult.logFields;
+ }
+ // Initialize log service if needed
+ if (!this.logService) {
+ this.logService = new logs_service_1.default({
+ product_id: this.productId,
+ workspace_id: this.workspace_id,
+ public_key: this.public_key,
+ user_id: this.user_id,
+ token: this.token,
+ env_type: this.environment,
+ });
+ }
  this.process_id = process_id;
  this.baseLogs.product_id = this.productId;
-
- this.
-
- throw new Error(`Environment ${env} is not active`);
- }
+ // Add session fields to base logs
+ this.baseLogs = Object.assign(Object.assign({}, this.baseLogs), sessionLogFields);
+ this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Bootstrap action - success', data: { product_id: this.productId, env: env }, status: types_1.LogEventStatus.SUCCESS }));
  const result = await this.runAction({
  type: types_1.FeatureEventTypes.ACTION,
  event,
- cache:
+ cache: cache,
  app,
- input,
- env:
+ input: resolvedInput,
+ env: this.processEnv,
  retries: retries || 0,
  allow_fail: false,
-
+ hasProduct: !!product_tag,
+ }, additional_logs, true, {
+ action: bootstrapData.action,
+ app_env: bootstrapData.app_env,
+ retries: bootstrapData.retries,
+ app_active: bootstrapData.app_active,
+ recipient_workspace_id: bootstrapData.recipient_workspace_id,
+ });
  this.end = Date.now();
  this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Execute action - success', data: { input: (0, processor_utils_1.anonymizeObject)(input), result: (0, processor_utils_1.anonymizeObject)(result) }, status: types_1.LogEventStatus.SUCCESS }));
  await this.writeResult(types_1.LogEventStatus.SUCCESS);
@@ -3289,70 +3555,18 @@ class ProcessorService {
  return result;
  }
  catch (e) {
-
+ console.log('ERRRRROORRRRR!!!!!!', e);
+ this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Execute action - failed', data: { e: e.toString() }, status: types_1.LogEventStatus.FAIL }));
  this.end = Date.now();
  await this.writeResult(types_1.LogEventStatus.FAIL);
  await this.logService.publish();
  return { process_id: this.process_id };
  }
  }
- async processDBAction(action) {
- //TODO: schema validation
- const { env, input, retries, event, product: product_tag } = action;
- const [parent_tag, child_tag] = event.split(':');
- this.component = types_1.LogEventTypes.DB_ACTION;
- if (!parent_tag || !child_tag) {
- throw new Error(`database action events should be in the format notification_tag:message_tag`);
- }
- const additional_logs = {
- parent_tag,
- child_tag,
- type: types_1.LogEventTypes.DB_ACTION,
- name: 'Process database action',
- };
- try {
- await this.validateActionDataMappingInput(input, types_1.FeatureEventTypes.DB_ACTION);
- this.input = action;
- this.start = Date.now();
- this.productTag = product_tag;
- const process_id = (0, processor_utils_1.generateObjectId)();
- this.baseLogs = Object.assign({ product_tag: this.productTag, workspace_id: this.workspace_id, env,
- process_id, data: input }, additional_logs);
- await this.intializeProduct(additional_logs);
- this.baseLogs.product_id = this.productId;
- this.process_id = process_id;
- const productEnv = this.fetchEnv(env, additional_logs);
- this.processEnv = productEnv;
- if (!productEnv.active) {
- throw new Error(`Environment ${env} is not active`);
- }
- const payload = {
- type: types_1.FeatureEventTypes.DB_ACTION,
- event,
- input,
- cache: action.cache,
- env: productEnv,
- retries: retries || 0,
- allow_fail: false,
- };
- const result = await this.runDBAction(payload, additional_logs);
- this.end = Date.now();
- this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Execute database action', data: { input: (0, processor_utils_1.anonymizeObject)(input), result: (0, processor_utils_1.anonymizeObject)(result) }, status: types_1.LogEventStatus.SUCCESS }));
- await this.writeResult(types_1.LogEventStatus.SUCCESS);
- await this.logService.publish();
- return result;
- }
- catch (e) {
- this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Execute database action', data: { e }, status: types_1.LogEventStatus.FAIL }));
- this.end = Date.now();
- await this.logService.publish();
- return { process_id: this.process_id };
- }
- }
- async processFunction(data) { }
  async processNotification(action) {
  //TODO: schema validation
-
+ var _a;
+ const { env, input, retries, event, product: product_tag, session, cache } = action;
  const [parent_tag, child_tag] = event.split(':');
  if (!parent_tag || !child_tag) {
  throw new Error(`database action events should be in the format notification_tag:message_tag`);
@@ -3364,6 +3578,9 @@ class ProcessorService {
             type: types_1.LogEventTypes.NOTIFICATIONS,
             name: 'Process Notification',
         };
+        // Session log fields (will be populated if session is provided)
+        let sessionLogFields = {};
+        let resolvedInput = input;
         try {
             await this.validateActionDataMappingInput(input, types_1.FeatureEventTypes.NOTIFICATION);
             this.input = action;
@@ -3372,24 +3589,61 @@ class ProcessorService {
             const process_id = (0, processor_utils_1.generateObjectId)();
             this.baseLogs = Object.assign({ product_tag: this.productTag, workspace_id: this.workspace_id, env,
                 process_id, data: input }, additional_logs);
-
+            // Single bootstrap call to fetch all notification data
+            const bootstrapData = await this.productBuilderService.bootstrapNotification({
+                product_tag,
+                env_slug: env,
+                notification_tag: parent_tag,
+                message_tag: child_tag,
+            });
+            // Initialize from bootstrap data
+            this.productId = bootstrapData.product_id;
+            this.processEnv = bootstrapData.env;
+            // Process session if provided - verify and resolve $Session{} references
+            if (session && bootstrapData.private_key) {
+                const { processSessionForExecution } = await Promise.resolve().then(() => __importStar(require('../../sessions')));
+                const sessionResult = await processSessionForExecution(session, bootstrapData.private_key, input, env);
+                if (sessionResult.error) {
+                    throw new Error(`Session validation failed: ${sessionResult.error}`);
+                }
+                resolvedInput = sessionResult.input;
+                sessionLogFields = sessionResult.logFields;
+            }
+            // Initialize log service if needed
+            if (!this.logService) {
+                this.logService = new logs_service_1.default({
+                    product_id: this.productId,
+                    workspace_id: this.workspace_id,
+                    public_key: this.public_key,
+                    user_id: this.user_id,
+                    token: this.token,
+                    env_type: this.environment,
+                });
+            }
             this.process_id = process_id;
             this.baseLogs.product_id = this.productId;
-
-            this.
+            // Add session fields to base logs
+            this.baseLogs = Object.assign(Object.assign({}, this.baseLogs), sessionLogFields);
+            const productEnv = bootstrapData.env;
             if (!productEnv.active) {
                 throw new Error(`Environment ${env} is not active`);
             }
             const payload = {
                 type: types_1.FeatureEventTypes.NOTIFICATION,
                 event,
-                input,
-                cache:
+                input: resolvedInput,
+                cache: cache,
                 env: productEnv,
                 retries: retries || 0,
                 allow_fail: false,
             };
-
+            // Find the env config for the notification
+            const envConfig = (_a = bootstrapData.notification.envs) === null || _a === void 0 ? void 0 : _a.find((data) => data.slug === env);
+            const result = await this.runNotification(payload, additional_logs, {
+                notification: bootstrapData.notification,
+                message: bootstrapData.message,
+                env_config: envConfig,
+            });
             this.end = Date.now();
             this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Send notification - success', data: { input: (0, processor_utils_1.anonymizeObject)(input), result: (0, processor_utils_1.anonymizeObject)(result) }, status: types_1.LogEventStatus.SUCCESS }));
             await this.writeResult(types_1.LogEventStatus.SUCCESS);
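
The hunk above rebuilds `processNotification` around a single `bootstrapNotification` call and an optional session step that verifies the session against the product's private key and resolves `$Session{}` references in the input before dispatch. A hedged caller-side sketch follows; the action fields are inferred from the destructuring in the compiled code (`env`, `input`, `retries`, `event`, `product`, `session`, `cache`), while the processor instance, event tags, and values are placeholders, not taken from the SDK's documentation:

    // Hypothetical caller-side sketch; field names come from the compiled
    // destructuring above, everything else is a placeholder.
    async function sendShippedNotification(processor: any, sessionToken: string) {
        return processor.processNotification({
            product: 'demo-product',               // product_tag passed to bootstrapNotification
            env: 'prd',                            // env_slug; must resolve to an active environment
            event: 'order-alerts:order-shipped',   // notification_tag:message_tag format
            input: { email: { to: '$Session{user.email}' } }, // $Session{} refs resolved when a session is supplied
            session: sessionToken,                 // optional; verified against the product private key
            retries: 1,
        });
    }
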
@@ -3407,7 +3661,7 @@ class ProcessorService {
         try {
             const data = await this.processorApiService.fetchRemoteCaches(payload, this.getUserAccess());
             return data.map((data) => {
-                data.value = (0, processor_utils_1.decrypt)(data.value, this.productBuilderService.
+                data.value = (0, processor_utils_1.decrypt)(data.value, this.productBuilderService.fetchPrivateKey());
                 return data;
             });
         }
@@ -3475,6 +3729,33 @@ class ProcessorService {
         this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Cache Found', data: { key }, successful_execution: true, status: types_1.LogEventStatus.SUCCESS, cache_tag }));
         return (0, processor_utils_1.decrypt)(record.data, privateKey);
     }
+    /**
+     * Writes the healthcheck result to Redis cache for fast status retrieval.
+     */
+    async writeHealthcheckResultToCache(data, result) {
+        if (!this.redisClient)
+            return;
+        const key = `healthcheck:${data.product}:${data.healthcheck}:${data.env}`;
+        console.log('LOG TO CACHE', key, JSON.stringify(result));
+        await this.redisClient.set(key, JSON.stringify(result));
+        console.log();
+    }
+    /**
+     * Fetches the latest healthcheck status for a product/env from Redis cache.
+     */
+    async getHealthcheckStatusFromCache(productTag, envSlug) {
+        if (!this.redisClient)
+            return null;
+        const key = `healthcheck:${productTag}:${envSlug}`;
+        const cached = await this.redisClient.get(key);
+        return cached ? JSON.parse(cached) : null;
+    }
+    /**
+     * Updates the healthcheck in the remote DB for a product with all envs' results.
+     */
+    async updateHealthcheckOnProcessor(productTag, envs) {
+        return this.productBuilderService.updateHealthcheck(productTag, { envs });
+    }
 }
 exports.default = ProcessorService;
 //# sourceMappingURL=processor.service.js.map
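
The healthcheck helpers added in the last hunk store results in Redis as JSON under `healthcheck:`-prefixed keys and quietly no-op when no Redis client is configured; as shown in the compiled code, the write path keys on product, healthcheck tag, and env, while the read path keys on product and env only. Below is a standalone TypeScript sketch of the same pattern against an assumed minimal Redis-like interface, not the SDK's actual `redisClient` type, which is not visible in this diff:

    // Standalone sketch of the caching pattern above; RedisLike is an assumed
    // minimal interface with get/set, not the SDK's actual client type.
    interface RedisLike {
        set(key: string, value: string): Promise<unknown>;
        get(key: string): Promise<string | null>;
    }

    async function cacheHealthcheckResult(
        redis: RedisLike | undefined,
        data: { product: string; healthcheck: string; env: string },
        result: unknown,
    ): Promise<void> {
        if (!redis) return; // mirrors the no-op when redisClient is not configured
        const key = `healthcheck:${data.product}:${data.healthcheck}:${data.env}`;
        await redis.set(key, JSON.stringify(result));
    }

    async function readHealthcheckStatus(
        redis: RedisLike | undefined,
        productTag: string,
        envSlug: string,
    ): Promise<unknown | null> {
        if (!redis) return null;
        const cached = await redis.get(`healthcheck:${productTag}:${envSlug}`);
        return cached ? JSON.parse(cached) : null;
    }
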