@ductape/sdk 0.0.3-beta10 → 0.0.3-beta12
- package/dist/apps/services/app.service.js +4 -1
- package/dist/apps/services/app.service.js.map +1 -1
- package/dist/apps/utils/objects.utils.d.ts +1 -1
- package/dist/apps/utils/objects.utils.js +5 -3
- package/dist/apps/utils/objects.utils.js.map +1 -1
- package/dist/index.d.ts +10 -4
- package/dist/index.js +28 -3
- package/dist/index.js.map +1 -1
- package/dist/logs/logs.service.js +0 -1
- package/dist/logs/logs.service.js.map +1 -1
- package/dist/logs/logs.types.d.ts +2 -0
- package/dist/logs/logs.types.js.map +1 -1
- package/dist/processor/services/processor.service.d.ts +7 -3
- package/dist/processor/services/processor.service.js +153 -86
- package/dist/processor/services/processor.service.js.map +1 -1
- package/dist/processor/utils/storage.util.js +0 -1
- package/dist/processor/utils/storage.util.js.map +1 -1
- package/dist/products/services/products.service.d.ts +11 -4
- package/dist/products/services/products.service.js +169 -45
- package/dist/products/services/products.service.js.map +1 -1
- package/dist/products/utils/functions.utils.d.ts +1 -0
- package/dist/products/utils/functions.utils.js +11 -0
- package/dist/products/utils/functions.utils.js.map +1 -1
- package/dist/products/validators/joi-validators/create.productFallback.validator.js +7 -1
- package/dist/products/validators/joi-validators/create.productFallback.validator.js.map +1 -1
- package/dist/products/validators/joi-validators/create.productQuota.validator.js +8 -2
- package/dist/products/validators/joi-validators/create.productQuota.validator.js.map +1 -1
- package/dist/products/validators/joi-validators/update.productFallback.validator.js +7 -2
- package/dist/products/validators/joi-validators/update.productFallback.validator.js.map +1 -1
- package/dist/products/validators/joi-validators/update.productQuota.validator.js +5 -1
- package/dist/products/validators/joi-validators/update.productQuota.validator.js.map +1 -1
- package/dist/types/productsBuilder.types.d.ts +2 -2
- package/package.json +1 -1
@@ -48,7 +48,6 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 const products_service_1 = __importDefault(require("../../products/services/products.service"));
-const JWT = __importStar(require("jsonwebtoken"));
 const types_1 = require("../../types");
 const logs_service_1 = __importDefault(require("../../logs/logs.service"));
 const inputs_service_1 = __importDefault(require("../../inputs/inputs.service"));
@@ -57,6 +56,7 @@ const http_client_1 = __importDefault(require("../../clients/http.client"));
 const processorApi_service_1 = require("../../api/services/processorApi.service");
 const expo_client_1 = __importDefault(require("../../clients/expo.client"));
 const handlebars_1 = require("handlebars");
+const functions_utils_1 = require("../../products/utils/functions.utils");
 const string_utils_1 = require("../../products/utils/string.utils");
 const create_productFeature_validator_1 = require("../../products/validators/joi-validators/create.productFeature.validator");
 const validators_1 = require("../../products/validators");
@@ -78,12 +78,20 @@ async function loadBrokerService() {
 }
 return null;
 }
+async function loadJWT() {
+if (typeof window === undefined) {
+const JWT = await Promise.resolve().then(() => __importStar(require("jsonwebtoken")));
+return JWT;
+}
+return null;
+}
 class ProcessorService {
 constructor({ workspace_id, public_key, user_id, token, env_type, redis_client }) {
 this.workspace_id = workspace_id;
 this.public_key = public_key;
 this.user_id = user_id;
 this.token = token;
+this.published = false;
 this.productBuilderService = new products_service_1.default({
 workspace_id,
 public_key,
@@ -92,6 +100,8 @@ class ProcessorService {
 env_type,
 });
 this.inputService = new inputs_service_1.default();
+this.requestTime = 0;
+this.totalRequests = 0;
 this.processingOutput = {
 success: [],
 failure: [],
@@ -125,14 +135,20 @@ class ProcessorService {
 }
 await this.inputService.validateInput(input, session.schema_data);
 const expiry = (0, processor_utils_1.calculateExpiry)(session.expiry, session.period);
-const
-
-
-
-
-
-
-
+const JWT = await loadJWT();
+if (JWT) {
+const token = JWT.sign(JSON.stringify(Object.assign({ type: "token" }, data)), this.productBuilderService.fetchProduct().private_key, { expiresIn: expiry });
+const refreshToken = (0, processor_utils_1.encrypt)(JSON.stringify(data), this.productBuilderService.fetchProduct().private_key);
+// WRITE REFRESH TOKEN TO DATABASE... TO INVALIDATE DELETE FROM DATABASE
+await this.processorApiService.createRefreshToken({ product_tag, env: slug, refreshToken }, this.getUserAccess());
+return {
+token,
+refreshToken,
+};
+}
+else {
+throw new Error(`Running in browser, token service not loaded.`);
+}
 }
 catch (e) {
 throw e;
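
The hunks above replace the top-level `jsonwebtoken` import in the processor service with a lazy `loadJWT()` helper, so the module can be bundled for the browser and only resolves the Node-only dependency when a token is actually signed; in a browser, signing now fails with an explicit error instead of breaking at import time. A minimal sketch of the same guard-and-lazy-load pattern (helper names are illustrative, not part of the published SDK, and the sketch compares `typeof window` against the string 'undefined'):

    // Sketch: lazily require a Node-only dependency so the same bundle loads in a browser.
    async function loadJwt() {
        if (typeof window !== 'undefined') {
            return null; // running in a browser: signing is unavailable
        }
        return require('jsonwebtoken'); // running in Node: resolve on demand
    }

    async function signToken(payload, privateKey, expiresIn) {
        const JWT = await loadJwt();
        if (!JWT) {
            throw new Error('Running in browser, token service not loaded.');
        }
        return JWT.sign(payload, privateKey, { expiresIn });
    }
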
@@ -373,7 +389,6 @@ class ProcessorService {
 name: 'Process feature',
 type: types_1.LogEventTypes.FEATURE,
 };
-console.log(additional_logs);
 await this.intializeProduct(additional_logs);
 this.component = types_1.LogEventTypes.FEATURE;
 const process_id = this.process_id || (0, processor_utils_1.generateObjectId)();
@@ -400,7 +415,7 @@ class ProcessorService {
 // validate feature input and log failure
 this.validateJSONFeatureInput(input, featureInput, additional_logs);
 // split processes
-this.sequenceLevels = this.splitSequenceIntoLevels(sequence, additional_logs);
+this.sequenceLevels = await this.splitSequenceIntoLevels(sequence, additional_logs);
 await this.processSequenceLevels(additional_logs);
 return { process_id };
 //return this.generateOutput(output as unknown as Record<string, IFeatureOutput>);
@@ -418,6 +433,7 @@ class ProcessorService {
 }
 }
 else {
+console.log("FAILING HERE TOOO");
 throw e;
 }
 }
@@ -513,7 +529,7 @@ class ProcessorService {
 throw e;
 }
 }
-splitSequenceIntoLevels(data, additional_logs) {
+async splitSequenceIntoLevels(data, additional_logs) {
 try {
 this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Split sequence - initiated', data: {}, status: types_1.LogEventStatus.PROCESSING }));
 const levels = {};
@@ -555,7 +571,7 @@ class ProcessorService {
 this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Process levels - initiated', data: { levels: this.sequenceLevels }, status: types_1.LogEventStatus.PROCESSING }));
 const levelEvents = {};
 Object.entries(this.sequenceLevels).forEach(([level, sequences]) => {
-levelEvents[parseInt(level)] = this.fetchLevelEvents(sequences);
+levelEvents[parseInt(level)] = this.fetchLevelEvents(sequences, parseInt(level));
 });
 let previousLevelComplete = true;
 for (const level of Object.keys(levelEvents)
@@ -568,7 +584,8 @@ class ProcessorService {
 break;
 }
 }
-if (previousLevelComplete) {
+if (previousLevelComplete && !this.published) {
+this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { successful_feature_execution: true, message: 'Process feature - success', data: {}, status: types_1.LogEventStatus.SUCCESS }));
 this.logService.publish();
 this.end = Date.now();
 this.writeResult(types_1.LogEventStatus.SUCCESS);
@@ -576,50 +593,67 @@ class ProcessorService {
 }
 catch (e) {
 this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Process levels - failed', data: { e }, status: types_1.LogEventStatus.FAIL }));
+console.log("SEQUENCE PROCESSING FAILED!!!");
 throw e;
 }
 }
 async processLevelEvents(events, additional_logs) {
-
-const
-
-
-
-
-
-
-
-
+try {
+const promises = events.map((event) => {
+const dependants = this.fetchActionRequestDependents(event.input, additional_logs);
+const passed = this.checkDependentsSuccess(dependants);
+if (passed) {
+// TODO: comparison to see if all depending events are in success || dependants is empty
+return this.processEvent(event);
+}
+else {
+this.addToWaitingOutput(event, dependants);
+}
+});
+return Promise.all(promises);
+}
+catch (e) {
+throw e;
+}
 }
 async processFailedEvents(additional_logs) {
-
-
-
-
-
-
-
-
-
-
-
-
+try {
+const { failure } = this.processingOutput;
+const promises = failure.map((failed) => {
+if (failed.retries_left > 0 && new Date().getTime() > failed.retry_at) {
+this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Reprocess failed events - initiated', data: Object.assign({}, failed), status: types_1.LogEventStatus.PROCESSING }));
+return this.processEvent(failed.event); // process events should also take care of this.processingOutput
+}
+if (failed.retries_left === 0 && !failed.allow_fail) {
+this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Reprocess failed events - failed', data: Object.assign(Object.assign({}, failed), { reason: 'Ran out of Retries' }), status: types_1.LogEventStatus.FAIL }));
+throw new Error(`Event ${failed.event.event} failed in sequence ${failed.event.sequence_tag}, ran out of retries and the feature cannot run without it succeeding`);
+}
+});
+Promise.all(promises);
+}
+catch (e) {
+throw e;
+}
 }
 async processWaitingEvents(additional_logs) {
-
-
-const
-
-
-
-
-
-
-
-
-
+try {
+const { waiting } = this.processingOutput;
+const promises = waiting.map((waiting) => {
+const { dependants } = waiting;
+if (this.checkDependentsSuccess(dependants)) {
+// TODO: comparison to see if all depending events are in success || dependants is empty
+this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Reprocess waiting events - initiated', data: Object.assign({}, waiting), status: types_1.LogEventStatus.PROCESSING }));
+return this.processEvent(waiting.event);
+}
+else {
+this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Reprocess waiting events - waiting', data: Object.assign({}, waiting), status: types_1.LogEventStatus.WAITING }));
+}
+});
+return Promise.all(promises);
+}
+catch (e) {
+throw e;
+}
 }
 checkDependentsSuccess(dependants) {
 let pass = true;
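
The hunks above fill in the level-based scheduler: `processLevelEvents` runs a level's events in parallel, an event only executes once every event it depends on has succeeded, blocked events go to a waiting queue, and failed events are retried while retries remain. A condensed sketch of that scheduling model, assuming dependencies are keyed by sequence and event tag (the class and field names below are illustrative, not SDK API):

    // Sketch: run events level by level; within a level run in parallel; gate each
    // event on its dependencies and park it in a waiting queue when they are not met.
    class LevelRunner {
        constructor(processEvent) {
            this.processEvent = processEvent; // async (event) => result
            this.succeeded = new Set();       // "sequence_tag:event_tag" keys
            this.waiting = [];
        }
        dependenciesMet(dependants) {
            return dependants.every(({ sequence_tag, event_tag }) =>
                this.succeeded.has(`${sequence_tag}:${event_tag}`));
        }
        async runLevel(events) {
            await Promise.all(events.map(async (event) => {
                if (!this.dependenciesMet(event.dependants || [])) {
                    this.waiting.push(event); // re-checked after later levels complete
                    return;
                }
                await this.processEvent(event);
                this.succeeded.add(`${event.sequence_tag}:${event.event}`);
            }));
        }
        async runAll(levels) {
            for (const level of Object.keys(levels).map(Number).sort((a, b) => a - b)) {
                await this.runLevel(levels[level]); // levels execute sequentially
            }
        }
    }
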
@@ -649,6 +683,12 @@ class ProcessorService {
 if (input.data) {
 dependents.push(...this.fetchDependents(input.data, additional_logs));
 }
+if (input.fileName) {
+dependents.push(...this.valueStringDepsCheck(input.fileName));
+}
+if (input.buffer) {
+dependents.push(...this.valueStringDepsCheck(input.buffer));
+}
 this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Fetch request dependencies - success', data: { input: (0, processor_utils_1.anonymizeObject)(input), dependents }, status: types_1.LogEventStatus.SUCCESS }));
 return dependents;
 }
@@ -673,6 +713,20 @@ class ProcessorService {
 event_tag: stages[1],
 });
 }
+else if (values[i].startsWith('$')) {
+const funcArgs = (0, functions_utils_1.extractFunctionAndArgs)(values[i]);
+if (funcArgs.args.length) {
+funcArgs.args.map((arg) => {
+if (arg.startsWith('$Sequence')) {
+const stages = this.productBuilderService.extractStages(arg);
+dependants.push({
+sequence_tag: stages[0],
+event_tag: stages[1],
+});
+}
+});
+}
+}
 }
 }
 else {
@@ -680,10 +734,7 @@ class ProcessorService {
 }
 }
 else if (typeof value === 'string') {
-
-const stages = this.productBuilderService.extractStages(value);
-dependants.push({ sequence_tag: stages[0], event_tag: stages[1] });
-}
+dependants.push(...this.valueStringDepsCheck(value.trim()));
 }
 }
 return dependants;
@@ -693,6 +744,34 @@ class ProcessorService {
 throw e;
 }
 }
+valueStringDepsCheck(value) {
+const dependants = [];
+if (value.startsWith('$Sequence')) {
+const stages = this.productBuilderService.extractStages(value);
+dependants.push({ sequence_tag: stages[0], event_tag: stages[1] });
+}
+else if (value.startsWith('$')) {
+const funcArgs = (0, functions_utils_1.extractFunctionAndArgs)(value);
+if (funcArgs && funcArgs.args.length) {
+funcArgs.args.map((arg) => {
+if (arg.startsWith('$Sequence')) {
+const stages = this.productBuilderService.extractStages(arg);
+dependants.push({
+sequence_tag: stages[0],
+event_tag: stages[1],
+});
+}
+else {
+const args = arg.split(',');
+args.map((arg) => {
+dependants.push(...this.valueStringDepsCheck(arg.trim()));
+});
+}
+});
+}
+}
+return dependants;
+}
 async constructJSONDataPayloads(object, additional_logs, samples, event, loopIndex = 0) {
 try {
 this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Construct JSON payloads - initiated', data: { object, samples }, status: types_1.LogEventStatus.PROCESSING }));
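
Dependency detection is also extended to function-style templates: the new `valueStringDepsCheck` walks `$Function(...)` arguments recursively and treats `$Sequence...` references (and the `fileName`/`buffer` fields on storage inputs) as dependencies. A stripped-down sketch of that recursion, assuming `extractStages` returns `[sequence_tag, event_tag]` and `extractFunctionAndArgs` returns `{ func, args }` (both assumptions, mirroring how the diff calls them):

    // Sketch: recursively collect "$Sequence..." references from a templated string value.
    // extractStages and extractFunctionAndArgs are passed in; their parsing rules are assumed.
    function collectDeps(value, extractStages, extractFunctionAndArgs) {
        const dependants = [];
        if (value.startsWith('$Sequence')) {
            const stages = extractStages(value);            // [sequence_tag, event_tag]
            dependants.push({ sequence_tag: stages[0], event_tag: stages[1] });
        }
        else if (value.startsWith('$')) {
            const funcArgs = extractFunctionAndArgs(value); // { func, args: string[] }
            if (funcArgs && funcArgs.args.length) {
                for (const arg of funcArgs.args) {
                    // arguments may themselves be sequence refs, nested calls, or comma lists
                    for (const part of arg.split(',')) {
                        dependants.push(...collectDeps(part.trim(), extractStages, extractFunctionAndArgs));
                    }
                }
            }
        }
        return dependants;
    }
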
@@ -1388,11 +1467,9 @@ class ProcessorService {
 name: 'Process feature broker event',
 };
 try {
-console.log("GRENADYE ALASOOOO!");
 return this.runBrokerPublish(event, additional_logs);
 }
 catch (e) {
-console.log("GRENADYE ALASOOOO!!!!");
 }
 }
 if (event.type === types_1.FeatureEventTypes.JOB) {
@@ -1414,7 +1491,7 @@ class ProcessorService {
 async processFailedAndWaiting() { }
 async generateOutput(process_id) {
 var _a, _b, _c, _d;
-const result =
+const result = await this.processorApiService.fetchResult(process_id, this.getUserAccess());
 if (!result) {
 throw new Error(`Invalid process id ${process_id}`);
 }
@@ -1440,7 +1517,7 @@ class ProcessorService {
 return { process_id, status: result.status, errors };
 }
 }
-else {
+else if (result) {
 if ((_a = result.result.success[0]) === null || _a === void 0 ? void 0 : _a.output) {
 return { process_id, status: result.status, data: (_b = result.result.success[0]) === null || _b === void 0 ? void 0 : _b.output };
 }
@@ -1480,7 +1557,7 @@ class ProcessorService {
 // validate feature input and log failure
 this.validateJSONFeatureInput(result.input.input, featureInput, additional_logs);
 // split processes
-this.sequenceLevels = this.splitSequenceIntoLevels(sequence, additional_logs);
+this.sequenceLevels = await this.splitSequenceIntoLevels(sequence, additional_logs);
 await this.processSequenceLevels(additional_logs);
 }
 else {
@@ -1522,7 +1599,7 @@ class ProcessorService {
 // validate feature input and log failure
 this.validateJSONFeatureInput(result.input.input, featureInput, additional_logs);
 // split processes
-this.sequenceLevels = this.splitSequenceIntoLevels(sequence, additional_logs);
+this.sequenceLevels = await this.splitSequenceIntoLevels(sequence, additional_logs);
 await this.processSequenceLevels(additional_logs);
 }
 else {
@@ -1539,15 +1616,15 @@ class ProcessorService {
 }
 return { process_id };
 }
-fetchLevelEvents(level) {
+fetchLevelEvents(sequence, level) {
 const events = [];
-for (let i = 0; i <
-events.push(...this.appendSequenceDataToLevelEvents(
+for (let i = 0; i < sequence.length; i++) {
+events.push(...this.appendSequenceDataToLevelEvents(sequence[i], level));
 }
 return events;
 }
-appendSequenceDataToLevelEvents(sequence) {
-const { events,
+appendSequenceDataToLevelEvents(sequence, level) {
+const { events, tag } = sequence;
 for (let i = 0; i < events.length; i++) {
 events[i].sequence_level = level;
 events[i].sequence_tag = tag;
@@ -1678,13 +1755,11 @@ class ProcessorService {
 }
 async runAction(event, additional_logs, returnValue = false) {
 try {
-console.log("RUNNING ACTION!!!!!");
 const { event: action_tag, app: access_tag, condition, cache: cache_tag } = event;
 let indexes = [];
 if (condition &&
 condition.type === types_1.Conditions.CHECK &&
 (await this.processConditionalCheck(event, additional_logs))) {
-console.log("RUNNING ACTION SKIPPED!!!!!");
 // if it fails, it would add to skipped queue
 return;
 }
@@ -1790,6 +1865,8 @@ class ProcessorService {
 try {
 const results = await this.sendActionRequest(request_base_url, resource, payloads, method, env.slug);
 const end = Date.now();
+this.requestTime += end - start;
+this.totalRequests += 1;
 this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Process http request - success', successful_execution: true, data: { response: (0, processor_utils_1.anonymizeObject)(results) }, status: types_1.LogEventStatus.SUCCESS, app_id, action: event.event, start,
 end }));
 await this.addToSuccessOutput(event, results, additional_logs);
@@ -1802,6 +1879,8 @@ class ProcessorService {
 }
 catch (e) {
 const end = Date.now();
+this.requestTime += end - start;
+this.totalRequests += 1;
 this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Process http request - failed', failed_execution: true, data: { e }, status: types_1.LogEventStatus.FAIL, app_id, action: event.event, start,
 end }));
 try {
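
The HTTP path now accumulates per-run timing (`requestTime`, `totalRequests`) on both the success and failure branches, which the retry handler later reports. A small sketch of recording request latency regardless of outcome (the wrapper name is illustrative, not part of the SDK):

    // Sketch: accumulate request latency on success and failure alike, so an average
    // time per request can be derived at the end of a run.
    async function timedRequest(stats, send) {
        const start = Date.now();
        try {
            return await send();
        }
        finally {
            stats.requestTime += Date.now() - start; // counted even when send() throws
            stats.totalRequests += 1;
        }
    }
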
@@ -1891,6 +1970,7 @@ class ProcessorService {
 this.processingOutput.failure.push(output);
 }
 if (retries_left > 0) {
+//console.log("RETRY AT", retry_at)
 setTimeout(() => {
 this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Retrying Request', data: Object.assign(Object.assign({}, output), { payload: (0, processor_utils_1.anonymizeObject)(output.payload) }), status: types_1.LogEventStatus.PROCESSING }));
 if (event.type === types_1.FeatureEventTypes.ACTION) {
@@ -1905,11 +1985,17 @@ class ProcessorService {
 }, retry_at);
 }
 if (allow_fail === false && retries_left === 0) {
+if (this.feature) {
+additional_logs.failed_feature_execution = true;
+}
 this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Ran out of retries - failed', data: Object.assign(Object.assign({}, output), { payload: (0, processor_utils_1.anonymizeObject)(output.payload) }), status: types_1.LogEventStatus.FAIL }));
 //throw new Error("Run out of retries")
+console.log("REQUEST TIME", this.requestTime, this.totalRequests, this.end - this.start);
 this.end = Date.now();
 this.writeResult(types_1.LogEventStatus.FAIL);
 this.logService.publish();
+this.published = true;
+//throw new Error("Terminate Process")
 }
 return output;
 }
@@ -2106,7 +2192,6 @@ class ProcessorService {
 };
 try {
 this.validateActionDataMappingInput(data.input, types_1.FeatureEventTypes.PUBLISH);
-console.log("JAPANESE MIRRORS", data.input);
 this.start = Date.now();
 // clone
 this.clone = (0, processor_utils_1.structuredClone)(data.input);
@@ -2120,7 +2205,6 @@ class ProcessorService {
 this.process_id = process_id;
 const productEnv = this.fetchEnv(data.env, additional_logs);
 this.processEnv = productEnv;
-console.log("JAPANESE MIRRORS 2", productEnv);
 if (!productEnv.active) {
 throw new Error(`Environment ${data.env} is not active`);
 }
@@ -2142,7 +2226,6 @@ class ProcessorService {
 return result;
 }
 catch (e) {
-console.log(e);
 this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Publishing to topic - failed', data: { e }, status: types_1.LogEventStatus.FAIL }));
 this.end = Date.now();
 this.logService.publish();
@@ -2158,7 +2241,6 @@ class ProcessorService {
 body: payload.body,
 data: (0, processor_utils_1.convertStringToObject)(payload.data),
 };
-console.log("MESSAGE!!!", message);
 try {
 await (0, expo_client_1.default)().post('', message, (0, processor_utils_1.generateAxiosConfig)());
 }
@@ -2177,7 +2259,6 @@ class ProcessorService {
 try {
 const admin = require('firebase-admin');
 const serviceAccount = credentials;
-console.log("MESSAGE", message);
 admin.initializeApp({
 credential: admin.credential.cert(serviceAccount),
 });
@@ -2308,11 +2389,6 @@ class ProcessorService {
 const url = new URL(callbacks.url);
 try {
 this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Send callback - initiated', data: {}, status: types_1.LogEventStatus.PROCESSING }));
-console.log("CALLBACK!!!!", {
-url,
-payload,
-method: callbacks.method
-});
 await this.sendActionRequest(url.origin, url.pathname, payload, callbacks.method, '');
 this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { name: 'Send callback - success', data: {}, status: types_1.LogEventStatus.SUCCESS }));
 }
@@ -2327,7 +2403,6 @@ class ProcessorService {
 const SmsClient = await (0, sms_repo_1.loadSMSClient)();
 const smsClient = new SmsClient(smses);
 this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { name: 'Send sms - initiated', data: { message: input.sms.body, config: (0, processor_utils_1.anonymizeObject)(smses) }, status: types_1.LogEventStatus.SUCCESS }));
-console.log("SMS!!!!", input.sms, smses);
 const res = await smsClient.sendMessage(input.sms.body, input.sms.recipients);
 this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { name: 'Send sms - success', data: res, status: types_1.LogEventStatus.SUCCESS }));
 }
@@ -2338,7 +2413,6 @@ class ProcessorService {
 this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { successful_execution: true, message: 'Attempt notification - success', data: notification, status: types_1.LogEventStatus.SUCCESS }));
 }
 catch (e) {
-console.log(e);
 this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { failed_execution: true, message: 'Attempt notification - failed', data: { e: e.toString() }, status: types_1.LogEventStatus.FAIL }));
 //this.logService.publish();
 throw e;
@@ -2419,7 +2493,6 @@ class ProcessorService {
 }
 }
 catch (e) {
-console.log(e);
 this.logService.add(Object.assign(Object.assign({}, this.baseLogs), { failed_execution: true, message: 'Attempt migration - failed', data: e, status: types_1.LogEventStatus.FAIL }));
 this.logService.publish();
 }
@@ -2554,7 +2627,6 @@ class ProcessorService {
 input.buffer = input.buffer ? await this.generateStringValues(input.buffer, '', additional_logs, []) : undefined;
 input.fileName = input.fileName ? await this.generateStringValues(input.fileName, '', additional_logs, []) : undefined;
 input.mimeType = input.mimeType ? await this.generateStringValues(input.mimeType, '', additional_logs, []) : undefined;
-console.log("CHINYERE", input.buffer, input.fileName);
 result = await this.processStorageRequest(data, input, storageEnv, additional_logs);
 this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { successful_execution: true, message: 'Store file - success', data: { result }, status: types_1.LogEventStatus.PROCESSING }));
 if (cache_tag && this.redisClient) {
@@ -2577,7 +2649,6 @@ class ProcessorService {
 return result;
 }
 catch (e) {
-console.log(e);
 this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { failed_execution: true, message: 'Attempt storage - failed', data: { e }, status: types_1.LogEventStatus.FAIL }));
 throw e;
 }
@@ -2656,8 +2727,6 @@ class ProcessorService {
 if (!MongoDBHandler) {
 throw new Error(`Running in browser, mongo handler not loaded.`);
 }
-console.log("PRIVATE KEY!!!", this.productBuilderService.fetchProduct().private_key);
-console.log("AMAZONS", (0, processor_utils_1.decrypt)(databaseEnv.connection_url, this.productBuilderService.fetchProduct().private_key));
 const mongoHandler = new MongoDBHandler((0, processor_utils_1.decrypt)(databaseEnv.connection_url, this.productBuilderService.fetchProduct().private_key));
 if (databaseAction.type === types_1.DatabaseActionTypes.UPDATE) {
 const filterTemplate = typeof databaseAction.filterTemplate === 'string'
@@ -2805,7 +2874,6 @@ class ProcessorService {
 }
 catch (e) {
 this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { failed_execution: true, message: 'Attempt publish to broker topic - failed', data: { e }, status: types_1.LogEventStatus.FAIL }));
-console.log("JERMOOOOO!!!");
 throw e;
 }
 }
@@ -2950,7 +3018,6 @@ class ProcessorService {
 retries: retries || 0,
 allow_fail: false,
 }, additional_logs, true);
-console.log("RESULT ===>>>>", result);
 this.end = Date.now();
 this.logService.add(Object.assign(Object.assign(Object.assign({}, this.baseLogs), additional_logs), { message: 'Execute action - success', data: { input: (0, processor_utils_1.anonymizeObject)(input), result: (0, processor_utils_1.anonymizeObject)(result) }, status: types_1.LogEventStatus.SUCCESS }));
 this.writeResult(types_1.LogEventStatus.SUCCESS);