@rabbitio/ui-kit 1.0.0-beta.37 → 1.0.0-beta.39
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +2042 -4
- package/dist/index.cjs.map +1 -1
- package/dist/index.modern.js +1193 -1
- package/dist/index.modern.js.map +1 -1
- package/dist/index.module.js +2029 -5
- package/dist/index.module.js.map +1 -1
- package/dist/index.umd.js +2044 -8
- package/dist/index.umd.js.map +1 -1
- package/package.json +4 -2
- package/src/common/adapters/axiosAdapter.js +35 -0
- package/src/common/errorUtils.js +15 -0
- package/src/common/utils/postponeExecution.js +11 -0
- package/src/components/utils/uiUtils.js +14 -0
- package/src/components/utils/urlQueryUtils.js +87 -0
- package/src/index.js +16 -0
- package/src/robustExteranlApiCallerService/cacheAndConcurrentRequestsResolver.js +559 -0
- package/src/robustExteranlApiCallerService/cachedRobustExternalApiCallerService.js +188 -0
- package/src/robustExteranlApiCallerService/cancelProcessing.js +29 -0
- package/src/robustExteranlApiCallerService/concurrentCalculationsMetadataHolder.js +103 -0
- package/src/robustExteranlApiCallerService/externalApiProvider.js +156 -0
- package/src/robustExteranlApiCallerService/externalServicesStatsCollector.js +82 -0
- package/src/robustExteranlApiCallerService/robustExternalAPICallerService.js +386 -0
package/dist/index.modern.js
CHANGED
|
@@ -1,6 +1,8 @@
|
|
|
1
1
|
import React, { useState, useRef, useEffect, useCallback } from 'react';
|
|
2
2
|
import { BigNumber } from 'bignumber.js';
|
|
3
3
|
import axios from 'axios';
|
|
4
|
+
import { v4 } from 'uuid';
|
|
5
|
+
import Hashes from 'jshashes';
|
|
4
6
|
import EventBusInstance from 'eventbusjs';
|
|
5
7
|
|
|
6
8
|
function createCommonjsModule(fn) {
|
|
@@ -1599,6 +1601,74 @@ function useReferredState(initialValue) {
|
|
|
1599
1601
|
return [reference, setReferredState];
|
|
1600
1602
|
}
|
|
1601
1603
|
|
|
1604
|
+
const handleClickOutside = (exceptionsRefs, callback) => {
|
|
1605
|
+
function handleClick(event) {
|
|
1606
|
+
const isExceptionClicked = exceptionsRefs.find(ref => (ref == null ? void 0 : ref.current) && ref.current.contains(event.target));
|
|
1607
|
+
if (!isExceptionClicked) {
|
|
1608
|
+
callback();
|
|
1609
|
+
}
|
|
1610
|
+
}
|
|
1611
|
+
document.addEventListener("click", handleClick);
|
|
1612
|
+
return () => document.removeEventListener("click", handleClick);
|
|
1613
|
+
};
|
|
1614
|
+
|
|
1615
|
+
const PARAMETER_VALUES_SEPARATOR = "|*|"; // Sting that with high probability will not be in the user's data
|
|
1616
|
+
|
|
1617
|
+
/**
|
|
1618
|
+
* Adds specified parameter with values to the URL query string
|
|
1619
|
+
*
|
|
1620
|
+
* @param parameterName - String - name of the parameter
|
|
1621
|
+
* @param values - Array of String values
|
|
1622
|
+
* @param updateURLCallback - callback that will be called with the updated query string. Can be used to save it to URL
|
|
1623
|
+
*/
|
|
1624
|
+
function saveQueryParameterAndValues(parameterName, values, updateURLCallback = newQueryString => {}) {
|
|
1625
|
+
let parametersAndValues = parseSearchString();
|
|
1626
|
+
parametersAndValues = parametersAndValues.filter(parameterAndValues => parameterAndValues[0] !== parameterName);
|
|
1627
|
+
const parameterValuesForURL = encodeURIComponent(values.join(PARAMETER_VALUES_SEPARATOR));
|
|
1628
|
+
parametersAndValues.push([parameterName, parameterValuesForURL]);
|
|
1629
|
+
const newQueryString = `?${parametersAndValues.map(parameterAndValues => parameterAndValues.join("=")).join("&")}`;
|
|
1630
|
+
updateURLCallback(newQueryString);
|
|
1631
|
+
return newQueryString;
|
|
1632
|
+
}
|
|
1633
|
+
|
|
1634
|
+
/**
|
|
1635
|
+
* Removes specified parameter with values from the URL query string
|
|
1636
|
+
*
|
|
1637
|
+
* @param parameterName - String - name of the parameter
|
|
1638
|
+
* @param updateURLCallback - callback that will be called with the updated query string. Can be used to save it to URL
|
|
1639
|
+
*/
|
|
1640
|
+
// TODO: [tests, moderate] units required the same as or other functions in this module
|
|
1641
|
+
function removeQueryParameterAndValues(parameterName, updateURLCallback = newQueryString => {}) {
|
|
1642
|
+
let parametersAndValues = parseSearchString();
|
|
1643
|
+
parametersAndValues = parametersAndValues.filter(parameterAndValues => parameterAndValues[0] !== parameterName);
|
|
1644
|
+
const newQueryString = `?${parametersAndValues.map(parameterAndValues => parameterAndValues.join("=")).join("&")}`;
|
|
1645
|
+
updateURLCallback(newQueryString);
|
|
1646
|
+
return newQueryString;
|
|
1647
|
+
}
|
|
1648
|
+
|
|
1649
|
+
/**
|
|
1650
|
+
* Retrieves parameter values from the URL query string.
|
|
1651
|
+
*
|
|
1652
|
+
* If there are several parameters with the same name in the URL then all their values are returned
|
|
1653
|
+
*
|
|
1654
|
+
* @param name {string} - parameter name
|
|
1655
|
+
* @return {string[]} [] - if the parameter is not present in URL. [""] - if parameter present but has empty value
|
|
1656
|
+
*/
|
|
1657
|
+
function getQueryParameterValues(name) {
|
|
1658
|
+
return parseSearchString().filter(parameterAndValue => parameterAndValue[0] === name).reduce((allValues, parameterAndValue) => {
|
|
1659
|
+
const values = decodeURIComponent(parameterAndValue[1] || "").split(PARAMETER_VALUES_SEPARATOR);
|
|
1660
|
+
return [...allValues, ...values];
|
|
1661
|
+
}, []);
|
|
1662
|
+
}
|
|
1663
|
+
function parseSearchString() {
|
|
1664
|
+
var _window$location$sear;
|
|
1665
|
+
const trimmed = (((_window$location$sear = window.location.search) == null ? void 0 : _window$location$sear.slice(1)) || "").trim();
|
|
1666
|
+
return trimmed && trimmed.split("&").map(parameterAndValue => parameterAndValue.split("=")) || [];
|
|
1667
|
+
}
|
|
1668
|
+
function getQueryParameterSingleValue(name) {
|
|
1669
|
+
return (getQueryParameterValues(name) || [])[0];
|
|
1670
|
+
}
|
|
1671
|
+
|
|
1602
1672
|
/**
|
|
1603
1673
|
* This function improves the passed error object (its message) by adding the passed function name
|
|
1604
1674
|
* and additional message to it.
|
|
@@ -1624,6 +1694,17 @@ function improvedErrorMessage(e, settingFunction, additionalMessage) {
|
|
|
1624
1694
|
additionalMessage && (message += `${additionalMessage} `);
|
|
1625
1695
|
return message;
|
|
1626
1696
|
}
|
|
1697
|
+
function logErrorOrOutputToConsole(e) {
|
|
1698
|
+
try {
|
|
1699
|
+
// TODO: [dev] remove this after few weeks of testing output in real life
|
|
1700
|
+
// eslint-disable-next-line no-console
|
|
1701
|
+
console.log("BEFORE SAFE", e);
|
|
1702
|
+
Logger.log("logErrorOrOutputToConsole", safeStringify(e));
|
|
1703
|
+
} catch (e) {
|
|
1704
|
+
// eslint-disable-next-line no-console
|
|
1705
|
+
console.log("logErrorOrOutputToConsole", e);
|
|
1706
|
+
}
|
|
1707
|
+
}
|
|
1627
1708
|
|
|
1628
1709
|
class FiatCurrenciesService {
|
|
1629
1710
|
static getFullCurrencyNameByCode(code = "") {
|
|
@@ -2366,6 +2447,45 @@ class Cache {
|
|
|
2366
2447
|
}
|
|
2367
2448
|
}
|
|
2368
2449
|
|
|
2450
|
+
function postponeExecution(execution, timeoutMS = 1000) {
|
|
2451
|
+
return new Promise((resolve, reject) => {
|
|
2452
|
+
setTimeout(async () => {
|
|
2453
|
+
try {
|
|
2454
|
+
resolve(await execution());
|
|
2455
|
+
} catch (e) {
|
|
2456
|
+
reject(e);
|
|
2457
|
+
}
|
|
2458
|
+
}, timeoutMS);
|
|
2459
|
+
});
|
|
2460
|
+
}
|
|
2461
|
+
|
|
2462
|
+
class AxiosAdapter {
|
|
2463
|
+
static async call(method, ...args) {
|
|
2464
|
+
return await axios[method](...args);
|
|
2465
|
+
}
|
|
2466
|
+
static async get(...args) {
|
|
2467
|
+
return await axios.get(...args);
|
|
2468
|
+
}
|
|
2469
|
+
static async post(...args) {
|
|
2470
|
+
return await axios.post(...args);
|
|
2471
|
+
}
|
|
2472
|
+
static async put(...args) {
|
|
2473
|
+
return await axios.put(...args);
|
|
2474
|
+
}
|
|
2475
|
+
static async delete(...args) {
|
|
2476
|
+
return await axios.delete(...args);
|
|
2477
|
+
}
|
|
2478
|
+
static async patch(...args) {
|
|
2479
|
+
return await axios.patch(...args);
|
|
2480
|
+
}
|
|
2481
|
+
static async options(...args) {
|
|
2482
|
+
return await axios.options(...args);
|
|
2483
|
+
}
|
|
2484
|
+
static async head(...args) {
|
|
2485
|
+
return await axios.head(...args);
|
|
2486
|
+
}
|
|
2487
|
+
}
|
|
2488
|
+
|
|
2369
2489
|
class EmailsApi {
|
|
2370
2490
|
static async sendEmail(subject, body) {
|
|
2371
2491
|
try {
|
|
@@ -2381,6 +2501,1078 @@ class EmailsApi {
|
|
|
2381
2501
|
}
|
|
2382
2502
|
EmailsApi.serverEndpointEntity = "emails";
|
|
2383
2503
|
|
|
2504
|
+
/**
|
|
2505
|
+
* This util helps to avoid duplicated calls to a shared resource.
|
|
2506
|
+
* It tracks is there currently active calculation for the specific cache id and make all other requests
|
|
2507
|
+
* with the same cache id waiting for this active calculation to be finished. When the calculation ends
|
|
2508
|
+
* the resolver allows all the waiting requesters to get the data from cache and start their own calculations.
|
|
2509
|
+
*
|
|
2510
|
+
* This class should be instantiated inside some other service where you need to request some resource concurrently.
|
|
2511
|
+
* Rules:
|
|
2512
|
+
* 1. When you need to make a request inside your main service call 'getCachedOrWaitForCachedOrAcquireLock'
|
|
2513
|
+
* on the instance of this class and await for the result. If the flag allowing to start calculation is true
|
|
2514
|
+
* then you can request data inside your main service. Otherwise you should use the cached data as an another
|
|
2515
|
+
* requester just finished the most resent requesting and there is actual data in the cache that
|
|
2516
|
+
* is returned to you here.
|
|
2517
|
+
* 1.1 Also you can acquire a lock directly if you don't want to get cached data. Use the corresponding method 'acquireLock'.
|
|
2518
|
+
*
|
|
2519
|
+
* 2. If you start requesting (when you successfully acquired the lock) then after receiving the result of your
|
|
2520
|
+
* requesting you should call the 'saveCachedData' so the retrieved data will appear in the cache.
|
|
2521
|
+
*
|
|
2522
|
+
* 3. If you successfully acquired the lock then you should after calling the 'saveCachedData' call
|
|
2523
|
+
* the 'releaseLock' - this is mandatory to release the lock and allow other requesters to perform their requests.
|
|
2524
|
+
* WARNING: If for any reason you forget to call this method then this class instance will wait perpetually for
|
|
2525
|
+
* the lock releasing and all your attempts to request the data will constantly fail. So usually call it
|
|
2526
|
+
* inside the 'finally' block.
|
|
2527
|
+
*
|
|
2528
|
+
* TODO: [tests, critical++] add unit tests - massively used logic and can produce sophisticated concurrency bugs
|
|
2529
|
+
*/
|
|
2530
|
+
class CacheAndConcurrentRequestsResolver {
|
|
2531
|
+
/**
|
|
2532
|
+
* @param bio {string} unique identifier for the exact service
|
|
2533
|
+
* @param cache {Cache} cache
|
|
2534
|
+
* @param cacheTtl {number|null} time to live for cache ms. 0 or null means the cache cannot expire
|
|
2535
|
+
* @param [maxCallAttemptsToWaitForAlreadyRunningRequest=100] {number} number of request allowed to do waiting for
|
|
2536
|
+
* result before we fail the original request. Use custom value only if you need to make the attempts count
|
|
2537
|
+
* and polling interval changes.
|
|
2538
|
+
* @param [timeoutBetweenAttemptsToCheckWhetherAlreadyRunningRequestFinished=1000] {number}
|
|
2539
|
+
* timeout ms for polling for a result. if you change maxCallAttemptsToWaitForAlreadyRunningRequest
|
|
2540
|
+
* then this parameter maybe also require the custom value.
|
|
2541
|
+
* @param [removeExpiredCacheAutomatically=true] {boolean}
|
|
2542
|
+
*/
|
|
2543
|
+
constructor(bio, cache, cacheTtl, removeExpiredCacheAutomatically = true, maxCallAttemptsToWaitForAlreadyRunningRequest = 100, timeoutBetweenAttemptsToCheckWhetherAlreadyRunningRequestFinished = 1000) {
|
|
2544
|
+
if (cacheTtl != null && cacheTtl < timeoutBetweenAttemptsToCheckWhetherAlreadyRunningRequestFinished * 2) {
|
|
2545
|
+
/*
|
|
2546
|
+
* During the lifetime of this service e.g. if the data is being retrieved slowly we can get
|
|
2547
|
+
* RACE CONDITION when we constantly retrieve data and during retrieval it is expired, so we are trying
|
|
2548
|
+
* to retrieve it again and again.
|
|
2549
|
+
* We have a protection mechanism that we will wait no more than
|
|
2550
|
+
* maxCallAttemptsToWaitForAlreadyRunningRequest * timeoutBetweenAttemptsToCheckWhetherAlreadyRunningRequestFinished
|
|
2551
|
+
* but this additional check is aimed to reduce potential loading time for some requests.
|
|
2552
|
+
*/
|
|
2553
|
+
throw new Error(`DEV: Wrong parameters passed to construct ${bio} - TTL ${cacheTtl} should be 2 times greater than ${timeoutBetweenAttemptsToCheckWhetherAlreadyRunningRequestFinished}`);
|
|
2554
|
+
}
|
|
2555
|
+
this._bio = bio;
|
|
2556
|
+
this._cache = cache;
|
|
2557
|
+
this._cacheTtlMs = cacheTtl != null ? cacheTtl : null;
|
|
2558
|
+
this._maxExecutionTimeMs = maxCallAttemptsToWaitForAlreadyRunningRequest * timeoutBetweenAttemptsToCheckWhetherAlreadyRunningRequestFinished;
|
|
2559
|
+
this._removeExpiredCacheAutomatically = removeExpiredCacheAutomatically;
|
|
2560
|
+
this._requestsManager = new ManagerOfRequestsToTheSameResource(bio, maxCallAttemptsToWaitForAlreadyRunningRequest, timeoutBetweenAttemptsToCheckWhetherAlreadyRunningRequestFinished);
|
|
2561
|
+
}
|
|
2562
|
+
|
|
2563
|
+
/**
|
|
2564
|
+
* When using this service this is the major method you should call to get data by cache id.
|
|
2565
|
+
* This method checks is there cached data and ether
|
|
2566
|
+
* - returns you flag that you can start requesting data from the shared resource
|
|
2567
|
+
* - or if there is already started calculation waits until it is finished (removed from this service)
|
|
2568
|
+
* and returns you the retrieved data
|
|
2569
|
+
* - or just returns you the cached data
|
|
2570
|
+
*
|
|
2571
|
+
* 'canStartDataRetrieval' equal true means that the lock was acquired, and you should manually call 'saveCachedData'
|
|
2572
|
+
* if needed and then 'releaseLock' to mark this calculation as finished so other
|
|
2573
|
+
* requesters can take their share of the resource.
|
|
2574
|
+
*
|
|
2575
|
+
* @param cacheId {string}
|
|
2576
|
+
* @return {Promise<({
|
|
2577
|
+
* canStartDataRetrieval: true,
|
|
2578
|
+
* cachedData: any,
|
|
2579
|
+
* lockId: string
|
|
2580
|
+
* }|{
|
|
2581
|
+
* canStartDataRetrieval: false,
|
|
2582
|
+
* cachedData: any
|
|
2583
|
+
* })>}
|
|
2584
|
+
*/
|
|
2585
|
+
async getCachedOrWaitForCachedOrAcquireLock(cacheId) {
|
|
2586
|
+
try {
|
|
2587
|
+
var _cached2;
|
|
2588
|
+
const startedAtTimestamp = Date.now();
|
|
2589
|
+
let cached = this._cache.get(cacheId);
|
|
2590
|
+
let cachedDataBackupIsPresentButExpired = null;
|
|
2591
|
+
if (cached != null && !this._removeExpiredCacheAutomatically) {
|
|
2592
|
+
const lastUpdateTimestamp = this._cache.getLastUpdateTimestamp(cacheId);
|
|
2593
|
+
if ((lastUpdateTimestamp != null ? lastUpdateTimestamp : 0) + this._cacheTtlMs < Date.now()) {
|
|
2594
|
+
/*
|
|
2595
|
+
* Here we are manually clearing 'cached' value retrieved from cache to force the data loading.
|
|
2596
|
+
* But we save its value first to the backup variable to be able to return this value if ongoing
|
|
2597
|
+
* requesting fails.
|
|
2598
|
+
*/
|
|
2599
|
+
cachedDataBackupIsPresentButExpired = cached;
|
|
2600
|
+
cached = null;
|
|
2601
|
+
}
|
|
2602
|
+
}
|
|
2603
|
+
let calculationId = null;
|
|
2604
|
+
let isRetrievedCacheExpired = true;
|
|
2605
|
+
let isWaitingForActiveCalculationSucceeded;
|
|
2606
|
+
let weStillHaveSomeTimeToProceedExecution = true;
|
|
2607
|
+
while (calculationId == null && cached == null && isRetrievedCacheExpired && weStillHaveSomeTimeToProceedExecution) {
|
|
2608
|
+
const result = await this._requestsManager.startCalculationOrWaitForActiveToFinish(cacheId);
|
|
2609
|
+
calculationId = typeof result === "string" ? result : null;
|
|
2610
|
+
isWaitingForActiveCalculationSucceeded = typeof result === "boolean" ? result : null;
|
|
2611
|
+
cached = this._cache.get(cacheId);
|
|
2612
|
+
isRetrievedCacheExpired = isWaitingForActiveCalculationSucceeded && cached == null;
|
|
2613
|
+
weStillHaveSomeTimeToProceedExecution = Date.now() - startedAtTimestamp < this._maxExecutionTimeMs;
|
|
2614
|
+
}
|
|
2615
|
+
if (calculationId) {
|
|
2616
|
+
var _cached;
|
|
2617
|
+
return {
|
|
2618
|
+
canStartDataRetrieval: true,
|
|
2619
|
+
cachedData: (_cached = cached) != null ? _cached : cachedDataBackupIsPresentButExpired,
|
|
2620
|
+
lockId: calculationId
|
|
2621
|
+
};
|
|
2622
|
+
}
|
|
2623
|
+
return {
|
|
2624
|
+
canStartDataRetrieval: false,
|
|
2625
|
+
cachedData: (_cached2 = cached) != null ? _cached2 : cachedDataBackupIsPresentButExpired
|
|
2626
|
+
};
|
|
2627
|
+
} catch (e) {
|
|
2628
|
+
improveAndRethrow(e, `${this._bio}.getCachedOrWaitForCachedOrAcquireLock`);
|
|
2629
|
+
}
|
|
2630
|
+
}
|
|
2631
|
+
|
|
2632
|
+
/**
|
|
2633
|
+
* Returns just the current cache value for the given id.
|
|
2634
|
+
* Doesn't wait for the active calculation, doesn't acquire lock, just retrieves the current cache as it is.
|
|
2635
|
+
*
|
|
2636
|
+
* @param cacheId {string}
|
|
2637
|
+
* @return {any}
|
|
2638
|
+
*/
|
|
2639
|
+
getCached(cacheId) {
|
|
2640
|
+
try {
|
|
2641
|
+
return this._cache.get(cacheId);
|
|
2642
|
+
} catch (e) {
|
|
2643
|
+
improveAndRethrow(e, "getCached");
|
|
2644
|
+
}
|
|
2645
|
+
}
|
|
2646
|
+
_getTtl() {
|
|
2647
|
+
return this._removeExpiredCacheAutomatically ? this._cacheTtlMs : null;
|
|
2648
|
+
}
|
|
2649
|
+
|
|
2650
|
+
/**
|
|
2651
|
+
* Directly acquires the lock despite on cached data availability.
|
|
2652
|
+
* So if this method returns result === true you can start the data retrieval.
|
|
2653
|
+
*
|
|
2654
|
+
* @param cacheId {string}
|
|
2655
|
+
* @return {Promise<{ result: true, lockId: string }|{ result: false }>}
|
|
2656
|
+
*/
|
|
2657
|
+
async acquireLock(cacheId) {
|
|
2658
|
+
try {
|
|
2659
|
+
return await this._requestsManager.acquireLock(cacheId);
|
|
2660
|
+
} catch (e) {
|
|
2661
|
+
improveAndRethrow(e, "acquireLock");
|
|
2662
|
+
}
|
|
2663
|
+
}
|
|
2664
|
+
|
|
2665
|
+
/**
|
|
2666
|
+
* This method should be called only if you acquired a lock successfully.
|
|
2667
|
+
*
|
|
2668
|
+
* If the current lock id is not equal to the passed one the passed data will be ignored.
|
|
2669
|
+
* Or you can do the synchronous data merging on your side and pass the
|
|
2670
|
+
* wasDataMergedSynchronouslyWithMostRecentCacheState=true so your data will be stored
|
|
2671
|
+
* despite on the lockId.
|
|
2672
|
+
* WARNING: you should do this only if you are sure you perform the synchronous update.
|
|
2673
|
+
*
|
|
2674
|
+
* @param cacheId {string}
|
|
2675
|
+
* @param lockId {string}
|
|
2676
|
+
* @param data {any}
|
|
2677
|
+
* @param [sessionDependentData=true] {boolean}
|
|
2678
|
+
* @param [wasDataMergedSynchronouslyWithMostRecentCacheState=false]
|
|
2679
|
+
*/
|
|
2680
|
+
saveCachedData(cacheId, lockId, data, sessionDependentData = true, wasDataMergedSynchronouslyWithMostRecentCacheState = false) {
|
|
2681
|
+
try {
|
|
2682
|
+
if (wasDataMergedSynchronouslyWithMostRecentCacheState || this._requestsManager.isTheLockActiveOne(cacheId, lockId)) {
|
|
2683
|
+
/* We save passed data only if the <caller> has the currently acquired lockId.
|
|
2684
|
+
* If the passed lockId is not the active one it means that other code cleared/stopped the lock
|
|
2685
|
+
* acquired by the <caller> recently due to some urgent/more prior changes.
|
|
2686
|
+
*
|
|
2687
|
+
* But we allow user to pass the 'wasDataMergedSynchronouslyWithMostRecentCacheState' flag
|
|
2688
|
+
* that tells us that the user had taken the most recent cache value and merged his new data
|
|
2689
|
+
* with that cached value (AFTER possibly performing async data retrieval). This means that we
|
|
2690
|
+
* can ignore the fact that his lockId is no more relevant and save the passed data
|
|
2691
|
+
* as it is synchronously merged with the most recent cached data. (Synchronously merged means that
|
|
2692
|
+
* the lost update cannot occur during the merge time as JS execute the synchronous functions\
|
|
2693
|
+
* till the end).
|
|
2694
|
+
*/
|
|
2695
|
+
if (sessionDependentData) {
|
|
2696
|
+
this._cache.putSessionDependentData(cacheId, data, this._getTtl());
|
|
2697
|
+
} else {
|
|
2698
|
+
this._cache.put(cacheId, data, this._getTtl());
|
|
2699
|
+
}
|
|
2700
|
+
}
|
|
2701
|
+
} catch (e) {
|
|
2702
|
+
improveAndRethrow(e, `${this._bio}.saveCachedData`);
|
|
2703
|
+
}
|
|
2704
|
+
}
|
|
2705
|
+
|
|
2706
|
+
/**
|
|
2707
|
+
* Should be called then and only then if you successfully acquired a lock with the lock id.
|
|
2708
|
+
*
|
|
2709
|
+
* @param cacheId {string}
|
|
2710
|
+
* @param lockId {string}
|
|
2711
|
+
*/
|
|
2712
|
+
releaseLock(cacheId, lockId) {
|
|
2713
|
+
try {
|
|
2714
|
+
if (this._requestsManager.isTheLockActiveOne(cacheId, lockId)) {
|
|
2715
|
+
this._requestsManager.finishActiveCalculation(cacheId);
|
|
2716
|
+
}
|
|
2717
|
+
} catch (e) {
|
|
2718
|
+
improveAndRethrow(e, `${this._bio}.releaseLock`);
|
|
2719
|
+
}
|
|
2720
|
+
}
|
|
2721
|
+
|
|
2722
|
+
/**
|
|
2723
|
+
* Actualized currently present cached data by key. Applies the provided function to the cached data.
|
|
2724
|
+
*
|
|
2725
|
+
* @param cacheId {string} id of cache entry
|
|
2726
|
+
* @param synchronousCurrentCacheProcessor (function|null} synchronous function accepting cache entry. Should return
|
|
2727
|
+
* an object in following format:
|
|
2728
|
+
* {
|
|
2729
|
+
* isModified: boolean,
|
|
2730
|
+
* data: any
|
|
2731
|
+
* }
|
|
2732
|
+
* the flag signals whether data was changed during the processing or not
|
|
2733
|
+
* @param [sessionDependent=true] {boolean} whether to mark the cache entry as session-dependent
|
|
2734
|
+
*/
|
|
2735
|
+
actualizeCachedData(cacheId, synchronousCurrentCacheProcessor, sessionDependent = true) {
|
|
2736
|
+
try {
|
|
2737
|
+
const cached = this._cache.get(cacheId);
|
|
2738
|
+
const result = synchronousCurrentCacheProcessor(cached);
|
|
2739
|
+
if (result != null && result.isModified && (result == null ? void 0 : result.data) != null) {
|
|
2740
|
+
if (sessionDependent) {
|
|
2741
|
+
this._cache.putSessionDependentData(cacheId, result == null ? void 0 : result.data, this._getTtl());
|
|
2742
|
+
} else {
|
|
2743
|
+
this._cache.put(cacheId, result == null ? void 0 : result.data, this._getTtl());
|
|
2744
|
+
}
|
|
2745
|
+
|
|
2746
|
+
/* Here we call the lock releasing to ensure the currently active calculation will be ignored.
|
|
2747
|
+
* This is needed to ensure no 'lost update'.
|
|
2748
|
+
* Lost update can occur if we change data in this method and after that some calculation finishes
|
|
2749
|
+
* having the earlier data as its base to calculate its data set result. And the earlier data
|
|
2750
|
+
* has no changes applied inside this method, so we will lose them.
|
|
2751
|
+
*
|
|
2752
|
+
* This is not so good solution: ideally, we should acquire lock before performing any data updating.
|
|
2753
|
+
* But the goal of this method is to provide an instant ability to update the cached data.
|
|
2754
|
+
* And if we start acquiring the lock here the data update can be postponed significantly.
|
|
2755
|
+
* And this kills the desired nature of this method.
|
|
2756
|
+
* So we better lose some data retrieval (means abusing the resource a bit) than lose
|
|
2757
|
+
* the instant update expected after this method execution.
|
|
2758
|
+
*/
|
|
2759
|
+
this._requestsManager.finishActiveCalculation(cacheId);
|
|
2760
|
+
}
|
|
2761
|
+
} catch (e) {
|
|
2762
|
+
improveAndRethrow(e, `${this._bio}.actualizeCachedData`);
|
|
2763
|
+
}
|
|
2764
|
+
}
|
|
2765
|
+
invalidate(key) {
|
|
2766
|
+
this._cache.invalidate(key);
|
|
2767
|
+
this._requestsManager.finishActiveCalculation(key);
|
|
2768
|
+
}
|
|
2769
|
+
invalidateContaining(keyPart) {
|
|
2770
|
+
this._cache.invalidateContaining(keyPart);
|
|
2771
|
+
this._requestsManager.finishAllActiveCalculations(keyPart);
|
|
2772
|
+
}
|
|
2773
|
+
markAsExpiredButDontRemove(key) {
|
|
2774
|
+
if (this._removeExpiredCacheAutomatically) {
|
|
2775
|
+
this._cache.markCacheItemAsExpiredButDontRemove(key, this._cacheTtlMs);
|
|
2776
|
+
} else {
|
|
2777
|
+
this._cache.setLastUpdateTimestamp(key, Date.now() - this._cacheTtlMs - 1);
|
|
2778
|
+
}
|
|
2779
|
+
this._requestsManager.finishAllActiveCalculations(key);
|
|
2780
|
+
}
|
|
2781
|
+
}
|
|
2782
|
+
|
|
2783
|
+
/**
|
|
2784
|
+
* Util class to control access to a resource when it can be called in parallel for the same result.
|
|
2785
|
+
* (E.g. getting today coins-fiat rates from some API).
|
|
2786
|
+
*/
|
|
2787
|
+
class ManagerOfRequestsToTheSameResource {
|
|
2788
|
+
/**
|
|
2789
|
+
* @param bio {string} resource-related identifier for logging
|
|
2790
|
+
* @param [maxPollsCount=100] {number} max number of attempts to wait when waiting for a lock acquisition
|
|
2791
|
+
* @param [timeoutDuration=1000] {number} timeout between the polls for a lock acquisition
|
|
2792
|
+
*/
|
|
2793
|
+
constructor(bio, maxPollsCount = 100, timeoutDuration = 1000) {
|
|
2794
|
+
this.bio = bio;
|
|
2795
|
+
this.maxPollsCount = maxPollsCount;
|
|
2796
|
+
this.timeoutDuration = timeoutDuration;
|
|
2797
|
+
this._activeCalculationsIds = new Map();
|
|
2798
|
+
this._nextCalculationIds = new Map();
|
|
2799
|
+
}
|
|
2800
|
+
|
|
2801
|
+
/**
|
|
2802
|
+
* If there is no active calculation just creates uuid and returns it.
|
|
2803
|
+
* If there is active calculation waits until it removed from the active calculation uuid variable.
|
|
2804
|
+
*
|
|
2805
|
+
* @param requestHash {string}
|
|
2806
|
+
* @return {Promise<string|boolean>} returns uuid of new active calculation or true if waiting for active
|
|
2807
|
+
* calculation succeed or false if max attempts count exceeded
|
|
2808
|
+
*/
|
|
2809
|
+
async startCalculationOrWaitForActiveToFinish(requestHash) {
|
|
2810
|
+
try {
|
|
2811
|
+
const activeCalculationIdForHash = this._activeCalculationsIds.get(requestHash);
|
|
2812
|
+
if (activeCalculationIdForHash == null) {
|
|
2813
|
+
const id = v4();
|
|
2814
|
+
this._activeCalculationsIds.set(requestHash, id);
|
|
2815
|
+
return id;
|
|
2816
|
+
}
|
|
2817
|
+
return await this._waitForCalculationIdToFinish(requestHash, activeCalculationIdForHash, 0);
|
|
2818
|
+
} catch (e) {
|
|
2819
|
+
Logger.logError(e, `startCalculationOrWaitForActiveToFinish_${this.bio}`);
|
|
2820
|
+
}
|
|
2821
|
+
return null;
|
|
2822
|
+
}
|
|
2823
|
+
|
|
2824
|
+
/**
|
|
2825
|
+
* Acquires lock to the resource by the provided hash.
|
|
2826
|
+
*
|
|
2827
|
+
* @param requestHash {string}
|
|
2828
|
+
* @return {Promise<{ result: true, lockId: string }|{ result: false }>} result is true if the lock is successfully
|
|
2829
|
+
* acquired, false if the max allowed time to wait for acquisition expired or any unexpected error occurs
|
|
2830
|
+
* during the waiting.
|
|
2831
|
+
*/
|
|
2832
|
+
async acquireLock(requestHash) {
|
|
2833
|
+
try {
|
|
2834
|
+
var _this$_nextCalculatio;
|
|
2835
|
+
const activeId = this._activeCalculationsIds.get(requestHash);
|
|
2836
|
+
const nextId = v4();
|
|
2837
|
+
if (activeId == null) {
|
|
2838
|
+
this._activeCalculationsIds.set(requestHash, nextId);
|
|
2839
|
+
return {
|
|
2840
|
+
result: true,
|
|
2841
|
+
lockId: nextId
|
|
2842
|
+
};
|
|
2843
|
+
}
|
|
2844
|
+
const currentNext = (_this$_nextCalculatio = this._nextCalculationIds.get(requestHash)) != null ? _this$_nextCalculatio : [];
|
|
2845
|
+
currentNext.push(nextId);
|
|
2846
|
+
this._nextCalculationIds.set(requestHash, currentNext);
|
|
2847
|
+
const waitingResult = await this._waitForCalculationIdToFinish(requestHash, activeId, 0, nextId);
|
|
2848
|
+
return {
|
|
2849
|
+
result: waitingResult,
|
|
2850
|
+
lockId: waitingResult ? nextId : undefined
|
|
2851
|
+
};
|
|
2852
|
+
} catch (e) {
|
|
2853
|
+
improveAndRethrow(e, "acquireLock");
|
|
2854
|
+
}
|
|
2855
|
+
}
|
|
2856
|
+
|
|
2857
|
+
/**
|
|
2858
|
+
* Clears active calculation id.
|
|
2859
|
+
* WARNING: if you forget to call this method the start* one will perform maxPollsCount attempts before finishing
|
|
2860
|
+
* @param requestHash {string} hash of request. Helps to distinct the request for the same resource but
|
|
2861
|
+
* having different request parameters and hold a dedicated calculation id per this hash
|
|
2862
|
+
*/
|
|
2863
|
+
finishActiveCalculation(requestHash = "default") {
|
|
2864
|
+
try {
|
|
2865
|
+
var _this$_nextCalculatio2;
|
|
2866
|
+
this._activeCalculationsIds.delete(requestHash);
|
|
2867
|
+
const next = (_this$_nextCalculatio2 = this._nextCalculationIds.get(requestHash)) != null ? _this$_nextCalculatio2 : [];
|
|
2868
|
+
if (next.length) {
|
|
2869
|
+
this._activeCalculationsIds.set(requestHash, next[0]);
|
|
2870
|
+
this._nextCalculationIds.set(requestHash, next.slice(1));
|
|
2871
|
+
}
|
|
2872
|
+
} catch (e) {
|
|
2873
|
+
improveAndRethrow(e, "finishActiveCalculation");
|
|
2874
|
+
}
|
|
2875
|
+
}
|
|
2876
|
+
finishAllActiveCalculations(keyPart = "") {
|
|
2877
|
+
try {
|
|
2878
|
+
Array.from(this._activeCalculationsIds.keys()).forEach(hash => {
|
|
2879
|
+
if (typeof hash === "string" && new RegExp(keyPart).test(hash)) {
|
|
2880
|
+
this.finishActiveCalculation(hash);
|
|
2881
|
+
}
|
|
2882
|
+
});
|
|
2883
|
+
} catch (e) {
|
|
2884
|
+
improveAndRethrow(e, "finishAllActiveCalculations");
|
|
2885
|
+
}
|
|
2886
|
+
}
|
|
2887
|
+
|
|
2888
|
+
/**
|
|
2889
|
+
* @param requestHash {string}
|
|
2890
|
+
* @param lockId {string}
|
|
2891
|
+
* @return {boolean}
|
|
2892
|
+
*/
|
|
2893
|
+
isTheLockActiveOne(requestHash, lockId) {
|
|
2894
|
+
try {
|
|
2895
|
+
return this._activeCalculationsIds.get(requestHash) === lockId;
|
|
2896
|
+
} catch (e) {
|
|
2897
|
+
improveAndRethrow(e, "isTheLockActiveOne");
|
|
2898
|
+
}
|
|
2899
|
+
}
|
|
2900
|
+
|
|
2901
|
+
/**
|
|
2902
|
+
* @param requestHash {string}
|
|
2903
|
+
* @param activeCalculationId {string|null}
|
|
2904
|
+
* @param [attemptIndex=0] {number}
|
|
2905
|
+
* @param waitForCalculationId {string|null} if you want to wait for an exact id to appear as active then pass this parameter
|
|
2906
|
+
* @return {Promise<boolean>} true
|
|
2907
|
+
* - if the given calculation id is no more an active one
|
|
2908
|
+
* - or it is equal to waitForCalculationId
|
|
2909
|
+
* false
|
|
2910
|
+
* - if waiting period exceeds the max allowed waiting time or unexpected error occurs
|
|
2911
|
+
* @private
|
|
2912
|
+
*/
|
|
2913
|
+
async _waitForCalculationIdToFinish(requestHash, activeCalculationId, attemptIndex = 0, waitForCalculationId = null) {
|
|
2914
|
+
try {
|
|
2915
|
+
if (attemptIndex + 1 > this.maxPollsCount) {
|
|
2916
|
+
// Max number of polls for active calculation id change is achieved. So we return false.
|
|
2917
|
+
return false;
|
|
2918
|
+
}
|
|
2919
|
+
const currentId = this._activeCalculationsIds.get(requestHash);
|
|
2920
|
+
if (waitForCalculationId == null ? currentId !== activeCalculationId : currentId === waitForCalculationId) {
|
|
2921
|
+
/* We return true depending on the usage of this function:
|
|
2922
|
+
* 1. if there is calculation id that we should wait for to become an active then we return true only
|
|
2923
|
+
* if this id becomes the active one.
|
|
2924
|
+
*
|
|
2925
|
+
* Theoretically we can fail to wait for the desired calculation id. This can be caused by wrong use of
|
|
2926
|
+
* this service or by any other mistakes/errors. But this waiting function will return false anyway if
|
|
2927
|
+
* the number of polls done exceeds the max allowed.
|
|
2928
|
+
*
|
|
2929
|
+
* 2. if we just wait for the currently active calculation id to be finished then we return true
|
|
2930
|
+
* when we notice that the current active id differs from the original passed into this function.
|
|
2931
|
+
*/
|
|
2932
|
+
return true;
|
|
2933
|
+
} else {
|
|
2934
|
+
/* The original calculation id is still the active one, so we are scheduling a new attempt to check
|
|
2935
|
+
* whether the active calculation id changed or not in timeoutDuration milliseconds.
|
|
2936
|
+
*/
|
|
2937
|
+
const it = this;
|
|
2938
|
+
return new Promise((resolve, reject) => {
|
|
2939
|
+
setTimeout(function () {
|
|
2940
|
+
try {
|
|
2941
|
+
resolve(it._waitForCalculationIdToFinish(requestHash, activeCalculationId, attemptIndex + 1));
|
|
2942
|
+
} catch (e) {
|
|
2943
|
+
reject(e);
|
|
2944
|
+
}
|
|
2945
|
+
}, this.timeoutDuration);
|
|
2946
|
+
});
|
|
2947
|
+
}
|
|
2948
|
+
} catch (e) {
|
|
2949
|
+
Logger.logError(e, "_waitForCalculationIdToFinish", "Failed to wait for active calculation id change.");
|
|
2950
|
+
return false;
|
|
2951
|
+
}
|
|
2952
|
+
}
|
|
2953
|
+
}
|
|
2954
|
+
|
|
2955
|
+
// TODO: [refactoring, low] Consider removing this logic task_id=c360f2af75764bde8badd9ff1cc00d48
/**
 * Holds metadata (start/end timestamps, uuid) of concurrent calculations grouped by "domain".
 * Used for diagnostics: measuring durations, detecting "late" calculations and long-waiting ones.
 */
class ConcurrentCalculationsMetadataHolder {
    constructor() {
        // Maps a domain string -> ordered array of calculation metadata objects.
        this._calculations = {};
    }

    /**
     * Registers a new calculation for the domain and returns its generated uuid.
     * When the per-domain history exceeds the max length, the oldest ~20% of entries are dropped.
     *
     * @param domain {string} logical group of calculations
     * @param [calculationsHistoryMaxLength=100] {number} max number of retained entries per domain
     * @return {string} uuid of the newly started calculation
     */
    startCalculation(domain, calculationsHistoryMaxLength = 100) {
        if (!this._calculations[domain]) {
            this._calculations[domain] = [];
        }
        if (this._calculations[domain].length > calculationsHistoryMaxLength) {
            // Trim the oldest ~20% to keep the history bounded.
            this._calculations[domain] = this._calculations[domain].slice(Math.round(calculationsHistoryMaxLength * 0.2));
        }
        const newCalculation = {
            startTimestamp: Date.now(),
            endTimestamp: null,
            uuid: v4()
        };
        this._calculations[domain].push(newCalculation);
        return newCalculation.uuid;
    }

    /**
     * Marks the calculation as finished and logs its duration.
     *
     * @param domain {string} domain the calculation was started for
     * @param uuid {string} id returned by startCalculation
     * @param [isFailed=false] {boolean} whether the calculation ended with a failure
     * @return {object|undefined} the updated calculation metadata if found
     */
    endCalculation(domain, uuid, isFailed = false) {
        try {
            var _calculation$endTimes, _calculation$startTim, _calculation$uuid;
            const calculation = this._calculations[domain].find(calculation => (calculation == null ? void 0 : calculation.uuid) === uuid);
            if (calculation) {
                calculation.endTimestamp = Date.now();
                // NOTE(review): "isFiled" looks like a typo of "isFailed" but is preserved
                // because consumers may already read this property name - confirm before renaming.
                calculation.isFiled = isFailed;
            }
            // Elapsed time in seconds with one decimal digit (timestamps are ms, divided by 1000).
            const elapsed = ((((_calculation$endTimes = calculation == null ? void 0 : calculation.endTimestamp) != null ? _calculation$endTimes : 0) - ((_calculation$startTim = calculation == null ? void 0 : calculation.startTimestamp) != null ? _calculation$startTim : 0)) / 1000).toFixed(1);
            // FIX: the logged value is seconds, but the old message labeled it "ms".
            Logger.log("endCalculation", `${elapsed} s: ${domain}.${((_calculation$uuid = calculation == null ? void 0 : calculation.uuid) != null ? _calculation$uuid : "").slice(0, 7)}`);
            return calculation;
        } catch (e) {
            Logger.logError(e, "endCalculation");
        }
    }

    /**
     * Returns whether some other calculation of the same domain started later than the
     * given one and already finished - i.e. the given calculation is "late".
     * NOTE(review): throws if the domain was never started - presumably callers guarantee this; verify.
     *
     * @param domain {string}
     * @param uuid {string}
     * @return {boolean|object|undefined}
     */
    isCalculationLate(domain, uuid) {
        const queue = this._calculations[domain];
        const analysingCalculation = queue.find(item => item.uuid === uuid);
        return analysingCalculation && !!queue.find(calculation => calculation.endTimestamp != null && calculation.startTimestamp > analysingCalculation.startTimestamp);
    }

    /**
     * Logs all still-running calculations that started more than waitingLastsMs milliseconds ago.
     *
     * @param [waitingLastsMs=2000] {number} waiting threshold in milliseconds
     */
    printCalculationsWaitingMoreThanSpecifiedSeconds(waitingLastsMs = 2000) {
        const calculations = Object.keys(this._calculations).map(domain => this._calculations[domain].map(c => _extends({}, c, {
            domain
        }))).flat().filter(c => c.endTimestamp === null && Date.now() - c.startTimestamp > waitingLastsMs);
        Logger.log("printCalculationsWaitingMoreThanSpecifiedSeconds", `Calculations waiting more than ${(waitingLastsMs / 1000).toFixed(1)}s:\n` + calculations.map(c => `${c.domain}.${c.uuid.slice(0, 8)}: ${Date.now() - c.startTimestamp}\n`));
    }
}
// Singleton shared by all services that track their calculations.
const concurrentCalculationsMetadataHolder = new ConcurrentCalculationsMetadataHolder();
|
|
3003
|
+
|
|
3004
|
+
/**
 * Collects per-service call/failure statistics for external API providers.
 * Keyed by a service identifier (API group id or url).
 */
class ExternalServicesStatsCollector {
    constructor() {
        // Maps serviceUrl -> { callsCount, failsCount, errors } where errors is an object
        // with "networkError"/"timeoutExceeded" counters and an "other" array of raw messages.
        this.stats = new Map();
    }

    /**
     * Registers a failed call to the given external service and classifies the error message.
     *
     * @param serviceUrl {string} identifier of the external service
     * @param message {string} error message of the failure
     */
    externalServiceFailed(serviceUrl, message) {
        try {
            // Classifies the message into networkError / timeoutExceeded counters or the "other" bucket.
            const processMessage = (stat, errorMessage) => {
                var _stat$errors, _errorMessage;
                const errors = (_stat$errors = stat.errors) != null ? _stat$errors : {};
                errorMessage = (_errorMessage = errorMessage) != null ? _errorMessage : "";
                if (errorMessage.match(/.*network.+error.*/i)) {
                    errors["networkError"] = (errors["networkError"] || 0) + 1;
                } else if (errorMessage.match(/.*timeout.+exceeded.*/i)) {
                    errors["timeoutExceeded"] = (errors["timeoutExceeded"] || 0) + 1;
                } else if (errors["other"]) {
                    errors["other"].push(message);
                } else {
                    errors["other"] = [message];
                }
                stat.errors = errors;
            };
            if (this.stats.has(serviceUrl)) {
                const stat = this.stats.get(serviceUrl);
                stat.callsCount += 1;
                stat.failsCount += 1;
                processMessage(stat, message);
            } else {
                this.stats.set(serviceUrl, {
                    callsCount: 1,
                    failsCount: 1
                });
                processMessage(this.stats.get(serviceUrl), message);
            }
        } catch (e) {
            improveAndRethrow(e, "externalServiceFailed");
        }
    }

    /**
     * Registers a successful call to the given external service.
     *
     * @param serviceUrl {string} identifier of the external service
     */
    externalServiceCalledWithoutError(serviceUrl) {
        try {
            if (this.stats.has(serviceUrl)) {
                const stat = this.stats.get(serviceUrl);
                stat.callsCount += 1;
            } else {
                this.stats.set(serviceUrl, {
                    callsCount: 1,
                    failsCount: 0
                });
            }
        } catch (e) {
            improveAndRethrow(e, "externalServiceCalledWithoutError");
        }
    }

    /**
     * Returns statistics about external services failures.
     * Provides how many calls were performed and what the percent of failed calls. Also returns errors stat.
     *
     * @return {Array<object>} Array of objects of type { url: string, failsPerCent: string, calls: number, errors: object|[] }
     *         sorted by the highest fails percent desc
     */
    getStats() {
        try {
            return Array.from(this.stats.keys()).map(key => {
                var _stat$errors2;
                const stat = this.stats.get(key);
                return {
                    url: key,
                    failsPerCent: (stat.failsCount / stat.callsCount * 100).toFixed(2),
                    calls: stat.callsCount,
                    errors: (_stat$errors2 = stat.errors) != null ? _stat$errors2 : []
                };
            // FIX: the documented contract is "highest fails percent desc" but the comparator
            // sorted ascending; swapped operands to sort descending.
            }).sort((s1, s2) => s2.failsPerCent - s1.failsPerCent);
        } catch (e) {
            Logger.logError(e, "getStats");
        }
    }
}
|
|
3081
|
+
|
|
3082
|
+
/**
 * TODO: [refactoring, critical] update backend copy of this service. Also there is a task to extract this
 * service and other related to it stuff to dedicated npm package task_id=b008ee5e4a3f42c08c73831c4bb3db4e
 *
 * Template service needed to avoid duplication of the same logic when we need to call
 * external APIs to retrieve some data. The idea is to use several API providers to retrieve the same data. It helps to
 * improve the reliability of a data retrieval.
 */
class RobustExternalAPICallerService {
    /** @return {Array<object>} stats collected for all providers (see ExternalServicesStatsCollector.getStats) */
    static getStats() {
        // FIX: the result was computed but not returned, so this static always yielded undefined.
        return this.statsCollector.getStats();
    }

    /**
     * @param bio {string} service name for logging
     * @param providersData {ExternalApiProvider[]} array of providers
     * @param [logger] {function} function to be used for logging
     * @throws Error when some provider has no endpoint or no httpMethod
     */
    constructor(bio, providersData, logger = Logger.logError) {
        providersData.forEach(provider => {
            // Empty-string endpoint is allowed (the query string can carry the whole URL).
            if (!provider.endpoint && provider.endpoint !== "" || !provider.httpMethod) {
                throw new Error(`Wrong format of providers data for: ${JSON.stringify(provider)}`);
            }
        });

        // We use niceFactor - just a number to order the providers array by. It is helpful to call
        // less robust APIs only if more robust fails.
        this.providers = providersData;
        providersData.forEach(provider => provider.resetNiceFactor());
        this.bio = bio;
        // FIX: the documented "logger" parameter was ignored and Logger.logError was always used.
        this._logger = logger;
    }

    /**
     * Performs data retrieval from external APIs. Tries providers till the data is retrieved.
     *
     * @param parametersValues {array} array of values of the parameters for URL query string [and/or body]
     * @param timeoutMS {number} http timeout to wait for response. If provider has its specific timeout value then it is used
     * @param [cancelToken] {object|undefined} axios token to force-cancel requests from high-level code
     * @param [attemptsCount] {number|undefined} number of attempts to be performed
     * @param [doNotFailForNowData] {boolean|undefined} pass true if you do not want us to throw an error if we retrieved null data from all the providers
     * @return {Promise<any>} resolving to retrieved data (or array of results if specific provider requires
     *         several requests. NOTE: we flatten nested arrays - results of each separate request done for the specific provider)
     * @throws Error if requests to all providers are failed
     */
    async callExternalAPI(parametersValues = [], timeoutMS = 3500, cancelToken = null, attemptsCount = 1, doNotFailForNowData = false) {
        var _this = this;
        let result;
        const calculationUuid = concurrentCalculationsMetadataHolder.startCalculation(this.bio);
        try {
            var _result4, _result5;
            // Keep attempting while attempts remain (or a force-retry is requested) and no data yet.
            for (let i = 0; (i < attemptsCount || (_result = result) != null && _result.shouldBeForceRetried) && ((_result2 = result) == null ? void 0 : _result2.data) == null; ++i) {
                var _result, _result2;
                /**
                 * We use rpsFactor to improve re-attempting to call the providers if the last attempt resulted with
                 * the fail due to abused RPSes of some (most part of) providers.
                 * The _performCallAttempt in such a case will return increased rpsFactor inside the result object.
                 */
                const rpsFactor = result ? result.rpsFactor : RobustExternalAPICallerService.defaultRPSFactor;
                result = null;
                try {
                    var _result3, _result$errors;
                    if (i === 0 && !((_result3 = result) != null && _result3.shouldBeForceRetried)) {
                        // First attempt is performed immediately.
                        result = await this._performCallAttempt(parametersValues, timeoutMS, cancelToken, rpsFactor, doNotFailForNowData);
                    } else {
                        // Subsequent attempts are postponed according to the fastest provider's RPS
                        // (scaled by rpsFactor) to avoid abusing the providers again.
                        const maxRps = Math.max(...this.providers.map(provider => {
                            var _provider$getRps;
                            return (_provider$getRps = provider.getRps()) != null ? _provider$getRps : 0;
                        }));
                        const waitingTimeMs = maxRps ? 1000 / (maxRps / rpsFactor) : 0;
                        result = await new Promise((resolve, reject) => {
                            setTimeout(async function () {
                                try {
                                    resolve(await _this._performCallAttempt(parametersValues, timeoutMS, cancelToken, rpsFactor, doNotFailForNowData));
                                } catch (e) {
                                    reject(e);
                                }
                            }, waitingTimeMs);
                        });
                    }
                    if ((_result$errors = result.errors) != null && _result$errors.length) {
                        const errors = result.errors;
                        this._logger(new Error(`Failed at attempt ${i}. ${errors.length} errors. Messages: ${safeStringify(errors.map(error => error.message))}: ${safeStringify(errors)}.`), `${this.bio}.callExternalAPI`, "", true);
                    }
                } catch (e) {
                    // An attempt failure is logged but does not stop the remaining attempts.
                    this._logger(e, `${this.bio}.callExternalAPI`, "Failed to perform external providers calling");
                }
            }
            if (((_result4 = result) == null ? void 0 : _result4.data) == null) {
                // TODO: [feature, moderate] looks like we should not fail for null data as it is strange - the provider will fail when processing data internally
                const error = new Error(`Failed to retrieve data. It means all attempts have been failed. DEV: add more attempts to this data retrieval`);
                if (!doNotFailForNowData) {
                    throw error;
                } else {
                    this._logger(error, `${this.bio}.callExternalAPI`);
                }
            }
            return (_result5 = result) == null ? void 0 : _result5.data;
        } catch (e) {
            improveAndRethrow(e, `${this.bio}.callExternalAPI`);
        } finally {
            concurrentCalculationsMetadataHolder.endCalculation(this.bio, calculationUuid);
        }
    }

    /**
     * Performs one pass over the providers (ordered by niceFactor) trying to retrieve the data,
     * handling sub-requests and pagination of each provider.
     *
     * @param parametersValues {array} values for query string/body composition
     * @param timeoutMS {number} default http timeout (overridden by provider.timeout if present)
     * @param cancelToken {object|null} axios cancel token
     * @param rpsFactor {number} multiplier slowing down calls when RPS limits were hit before
     * @param doNotFailForNowData {boolean} suppresses punishing a provider for null data
     * @return {Promise<{data: any, shouldBeForceRetried: boolean, rpsFactor: number, errors: Error[]}>}
     * @private
     */
    async _performCallAttempt(parametersValues, timeoutMS, cancelToken, rpsFactor, doNotFailForNowData) {
        var _data;
        const providers = this._reorderProvidersByNiceFactor();
        let data = undefined,
            providerIndex = 0,
            countOfRequestsDeclinedByRps = 0,
            errors = [];
        while (!data && providerIndex < providers.length) {
            let provider = providers[providerIndex];
            if (provider.isRpsExceeded()) {
                /**
                 * Current provider's RPS is exceeded, so we try next provider. Also, we count such cases to make
                 * a decision about the force-retry need.
                 */
                ++providerIndex;
                ++countOfRequestsDeclinedByRps;
                continue;
            }
            try {
                var _provider$specificHea;
                const axiosConfig = _extends({}, cancelToken ? {
                    cancelToken
                } : {}, {
                    timeout: provider.timeout || timeoutMS,
                    headers: (_provider$specificHea = provider.specificHeaders) != null ? _provider$specificHea : {}
                });
                const httpMethods = Array.isArray(provider.httpMethod) ? provider.httpMethod : [provider.httpMethod];
                const iterationsData = [];
                // One iteration per sub-request of the provider.
                for (let subRequestIndex = 0; subRequestIndex < httpMethods.length; ++subRequestIndex) {
                    const query = provider.composeQueryString(parametersValues, subRequestIndex);
                    const endpoint = `${provider.endpoint}${query}`;
                    const axiosParams = [endpoint, axiosConfig];
                    if (["post", "put", "patch"].find(method => method === httpMethods[subRequestIndex])) {
                        var _provider$composeBody;
                        const body = (_provider$composeBody = provider.composeBody(parametersValues, subRequestIndex)) != null ? _provider$composeBody : null;
                        axiosParams.splice(1, 0, body);
                    }
                    let pageNumber = 0;
                    const responsesForPages = [];
                    let hasNextPage = provider.doesSupportPagination();
                    do {
                        if (subRequestIndex === 0 && pageNumber === 0) {
                            // Very first request of this provider - RPS was already checked above.
                            provider.actualizeLastCalledTimestamp();
                            responsesForPages[pageNumber] = await AxiosAdapter.call(httpMethods[subRequestIndex], ...axiosParams);
                            RobustExternalAPICallerService.statsCollector.externalServiceCalledWithoutError(provider.getApiGroupId());
                        } else {
                            if (pageNumber > 0) {
                                // Adjust the query string to address the next page.
                                const actualizedParams = provider.changeQueryParametersForPageNumber(parametersValues, responsesForPages[pageNumber - 1], pageNumber, subRequestIndex);
                                const _query = provider.composeQueryString(actualizedParams, subRequestIndex);
                                axiosParams[0] = `${provider.endpoint}${_query}`;
                            }
                            /**
                             * For second and more request we postpone each request to not exceed RPS
                             * of current provider. We use rpsFactor to dynamically increase the rps to avoid
                             * too frequent calls if we continue failing to retrieve the data due to RPS exceeding.
                             * TODO: [dev] test RPS factor logic (units or integration)
                             */

                            const waitingTimeMS = provider.getRps() ? 1000 / (provider.getRps() / rpsFactor) : 0;
                            const postponeUntilRpsExceeded = async function postponeUntilRpsExceeded(recursionLevel = 0) {
                                return await postponeExecution(async function () {
                                    const maxCountOfPostponingAttempts = 2;
                                    if (provider.isRpsExceeded() && recursionLevel < maxCountOfPostponingAttempts) {
                                        return await postponeUntilRpsExceeded(recursionLevel + 1);
                                    }
                                    provider.actualizeLastCalledTimestamp();
                                    return await AxiosAdapter.call(httpMethods[subRequestIndex], ...axiosParams);
                                }, waitingTimeMS);
                            };
                            responsesForPages[pageNumber] = await postponeUntilRpsExceeded();
                        }
                        if (hasNextPage) {
                            hasNextPage = !provider.checkWhetherResponseIsForLastPage(responsesForPages[pageNumber - 1], responsesForPages[pageNumber], pageNumber, subRequestIndex);
                        }
                        pageNumber++;
                    } while (hasNextPage);
                    const responsesDataForPages = responsesForPages.map(response => provider.getDataByResponse(response, parametersValues, subRequestIndex, iterationsData));
                    // Flatten per-page arrays; a single-page scalar result is unwrapped.
                    let allData = responsesDataForPages;
                    if (Array.isArray(responsesDataForPages[0])) {
                        allData = responsesDataForPages.flat();
                    } else if (responsesDataForPages.length === 1) {
                        allData = responsesDataForPages[0];
                    }
                    iterationsData.push(allData);
                }
                if (iterationsData.length) {
                    if (httpMethods.length > 1) {
                        data = provider.incorporateIterationsData(iterationsData);
                    } else {
                        data = iterationsData[0];
                    }
                } else if (!doNotFailForNowData) {
                    RobustExternalAPICallerService.statsCollector.externalServiceFailed(provider.getApiGroupId(), "Response data was null for some reason");
                    punishProvider(provider);
                }
            } catch (e) {
                punishProvider(provider);
                RobustExternalAPICallerService.statsCollector.externalServiceFailed(provider.getApiGroupId(), e == null ? void 0 : e.message);
                errors.push(e);
            } finally {
                providerIndex++;
            }
        }

        // If we are declining more than 50% of providers (by exceeding RPS) then we note that it better to retry the whole process of providers requesting
        const shouldBeForceRetried = data == null && countOfRequestsDeclinedByRps > Math.floor(providers.length * 0.5);
        const rpsMultiplier = shouldBeForceRetried ? RobustExternalAPICallerService.rpsMultiplier : 1;
        return {
            data: (_data = data) != null ? _data : null,
            shouldBeForceRetried,
            rpsFactor: rpsFactor * rpsMultiplier,
            errors
        };
    }

    /**
     * @return {ExternalApiProvider[]} a copy of the providers list sorted by niceFactor desc
     *         (most reliable providers first)
     * @private
     */
    _reorderProvidersByNiceFactor() {
        const providersCopy = [...this.providers];
        return providersCopy.sort((p1, p2) => p2.niceFactor - p1.niceFactor);
    }
}
|
|
3304
|
+
// Class-level collaborators and tuning constants of RobustExternalAPICallerService.
RobustExternalAPICallerService.statsCollector = new ExternalServicesStatsCollector();
RobustExternalAPICallerService.defaultRPSFactor = 1;
RobustExternalAPICallerService.rpsMultiplier = 1.05;

/**
 * Decreases the provider's nice factor so it is ordered later (tried after
 * better-behaving providers) during the next reordering.
 *
 * @param provider {ExternalApiProvider} provider to punish
 */
function punishProvider(provider) {
    provider.niceFactor -= 1;
}
|
|
3310
|
+
|
|
3311
|
+
/**
 * Extended edit of RobustExternalApiCallerService supporting cache and management of concurrent requests
 * to the same resource.
 * TODO: [tests, critical] Massively used logic
 */
class CachedRobustExternalApiCallerService {
    /**
     * NOTE(review): the "_cahce*" property names below are typos of "_cache*" preserved as-is
     * because they are referenced throughout this class and possibly by external code.
     *
     * @param bio {string} unique service identifier
     * @param cache {Cache} cache instance
     * @param providersData {ExternalApiProvider[]} array of providers
     * @param [cacheTtlMs=10000] {number} time to live for cache ms
     * @param [removeExpiredCacheAutomatically=true] {boolean} whether to remove cached data automatically when ttl exceeds
     * @param [mergeCachedAndNewlyRetrievedData=null] {function} function accepting cached data, newly retrieved data and id field name for list items
     *        and merging them. use if needed
     * @param [maxCallAttemptsToWaitForAlreadyRunningRequest=100] {number} see details in CacheAndConcurrentRequestsResolver
     * @param [timeoutBetweenAttemptsToCheckWhetherAlreadyRunningRequestFinished=1000] {number} see details in CacheAndConcurrentRequestsResolver
     */
    constructor(bio, cache, providersData, cacheTtlMs = 10000, removeExpiredCacheAutomatically = true, mergeCachedAndNewlyRetrievedData = null, maxCallAttemptsToWaitForAlreadyRunningRequest = 100, timeoutBetweenAttemptsToCheckWhetherAlreadyRunningRequestFinished = 1000) {
        this._provider = new RobustExternalAPICallerService(`cached_${bio}`, providersData, Logger.logError);
        this._cacheTtlMs = cacheTtlMs;
        this._cahceAndRequestsResolver = new CacheAndConcurrentRequestsResolver(bio, cache, cacheTtlMs, removeExpiredCacheAutomatically, maxCallAttemptsToWaitForAlreadyRunningRequest, timeoutBetweenAttemptsToCheckWhetherAlreadyRunningRequestFinished);
        // List of cache ids ever written by this service - used by invalidateCaches().
        this._cahceIds = [];
        this._mergeCachedAndNewlyRetrievedData = mergeCachedAndNewlyRetrievedData;
    }

    /**
     * Calls the external API or returns data from cache. Just waits if the same data already requested.
     *
     * @param parametersValues {array} array of values of the parameters for URL query string [and/or body]
     * @param timeoutMS {number} http timeout to wait for response. If provider has its specific timeout value then it is used
     * @param [cancelToken] {object|undefined} axios token to force-cancel requests from high-level code
     * @param [attemptsCount] {number|undefined} number of attempts to be performed
     * @param [customHashFunctionForParams] {function|undefined} function without params calculating the hash to be
     *        added to bio of the service to compose a unique parameters-specific cache id
     * @param [doNotFailForNowData] {boolean|undefined} pass true if you do not want us to throw an error if we retrieved null data from all the providers
     * @return {Promise<any>} resolving to retrieved data (or array of results if specific provider requires
     *         several requests. NOTE: we flatten nested arrays - results of each separate request done for the specific provider)
     * @throws Error if requests to all providers are failed
     */
    async callExternalAPICached(parametersValues = [], timeoutMS = 3500, cancelToken = null, attemptsCount = 1, customHashFunctionForParams = null, doNotFailForNowData = false) {
        const loggerSource = `${this._provider.bio}.callExternalAPICached`;
        let cacheId;
        let result;
        try {
            var _result;
            cacheId = this._calculateCacheId(parametersValues, customHashFunctionForParams);
            // Either returns fresh cached data, waits for a concurrent retrieval, or acquires the lock.
            result = await this._cahceAndRequestsResolver.getCachedOrWaitForCachedOrAcquireLock(cacheId);
            if (!((_result = result) != null && _result.canStartDataRetrieval)) {
                var _result2;
                // Another caller retrieved (or is retrieving) the data - just return what the resolver gave us.
                return (_result2 = result) == null ? void 0 : _result2.cachedData;
            }
            let data = await this._provider.callExternalAPI(parametersValues, timeoutMS, cancelToken, attemptsCount, doNotFailForNowData);
            const canPerformMerge = typeof this._mergeCachedAndNewlyRetrievedData === "function";
            if (canPerformMerge) {
                // Merge with the most recent cache snapshot taken after the retrieval finished.
                const mostRecentCached = this._cahceAndRequestsResolver.getCached(cacheId);
                data = this._mergeCachedAndNewlyRetrievedData(mostRecentCached, data, parametersValues);
            }
            if (data != null) {
                var _result3;
                this._cahceAndRequestsResolver.saveCachedData(cacheId, (_result3 = result) == null ? void 0 : _result3.lockId, data, true, canPerformMerge);
                // Remember the cache id (deduplicated) so invalidateCaches() can clear it later.
                this._cahceIds.indexOf(cacheId) < 0 && this._cahceIds.push(cacheId);
            }
            return data;
        } catch (e) {
            improveAndRethrow(e, loggerSource);
        } finally {
            // Always release the lock (no-op for callers that never acquired it).
            var _result4;
            this._cahceAndRequestsResolver.releaseLock(cacheId, (_result4 = result) == null ? void 0 : _result4.lockId);
        }
    }

    /** Invalidates every cache entry ever written by this service instance. */
    invalidateCaches() {
        this._cahceIds.forEach(key => this._cahceAndRequestsResolver.invalidate(key));
    }

    /**
     * Applies a synchronous processor to the currently cached data for the given params.
     * NOTE(review): the "actualizedAtTimestamp" parameter is accepted but never used - confirm whether
     * it should be forwarded to the resolver.
     *
     * @param params {array} parameters identifying the cache entry
     * @param synchronousCurrentCacheProcessor {function} processor applied to the cached data
     * @param [customHashFunctionForParams=null] {function} custom cache id hash function
     * @param [sessionDependent=true] {boolean} passed through to the resolver
     * @param actualizedAtTimestamp {number} currently unused
     */
    actualizeCachedData(params, synchronousCurrentCacheProcessor, customHashFunctionForParams = null, sessionDependent = true, actualizedAtTimestamp) {
        const cacheId = this._calculateCacheId(params, customHashFunctionForParams);
        this._cahceAndRequestsResolver.actualizeCachedData(cacheId, synchronousCurrentCacheProcessor, sessionDependent);
    }

    /**
     * Marks the cache entry for the given params as expired without removing its data.
     *
     * @param parametersValues {array} parameters identifying the cache entry
     * @param customHashFunctionForParams {function} custom cache id hash function
     */
    markCacheAsExpiredButDontRemove(parametersValues, customHashFunctionForParams) {
        try {
            this._cahceAndRequestsResolver.markAsExpiredButDontRemove(this._calculateCacheId(parametersValues, customHashFunctionForParams));
        } catch (e) {
            improveAndRethrow(e, "markCacheAsExpiredButDontRemove");
        }
    }

    /**
     * Composes the cache id as "<bio>-<hash>" where the hash is either produced by the custom
     * function or is the SHA512 of the stringified parameters (empty for falsy params).
     *
     * @param parametersValues {array} parameters to hash
     * @param [customHashFunctionForParams=null] {function} custom hash function
     * @return {string} cache id
     * @private
     */
    _calculateCacheId(parametersValues, customHashFunctionForParams = null) {
        try {
            const hash = typeof customHashFunctionForParams === "function" ? customHashFunctionForParams(parametersValues) : !parametersValues ? "" : new Hashes.SHA512().hex(safeStringify(parametersValues));
            return `${this._provider.bio}-${hash}`;
        } catch (e) {
            improveAndRethrow(e, this._provider.bio + "_calculateCacheId");
        }
    }
}
|
|
3404
|
+
|
|
3405
|
+
/**
 * Helper encapsulating cancellation of axios requests performed inside some process.
 * Exposes the axios cancel token to pass into HTTP calls and a flag telling whether
 * cancellation was requested.
 */
class CancelProcessing {
    constructor() {
        this._cancelToken = axios.CancelToken.source();
        this._isCanceled = false;
    }

    /** Factory shortcut creating a fresh instance. */
    static instance() {
        return new CancelProcessing();
    }

    /** Marks this processing as canceled and cancels all requests bound to the token. */
    cancel() {
        this._isCanceled = true;
        this._cancelToken.cancel();
    }

    /** @return {boolean} whether cancel() has been called */
    isCanceled() {
        return this._isCanceled;
    }

    /** @return {object} axios cancel token to pass into request configs */
    getToken() {
        return this._cancelToken.token;
    }
}
|
|
3428
|
+
|
|
3429
|
+
class ExternalApiProvider {
|
|
3430
|
+
/**
 * Creates an instance of external api provider.
 *
 * If you need sub-request then use 'subRequestIndex' to check current request index in functions below.
 * Also use array for 'httpMethod'.
 *
 * If the endpoint of dedicated provider has pagination then you should customize the behavior using
 * "changeQueryParametersForPageNumber", "checkWhetherResponseIsForLastPage".
 *
 * We perform RPS counting all over the App to avoid blocking our clients due to abuses of the providers.
 *
 * @param endpoint {string} URL to the provider's endpoint. Note: you can customize it using composeQueryString
 * @param [httpMethod="get"] {string|string[]} one of "get", "post", "put", "patch", "delete" or an array of these values
 *        for request having sub-requests
 * @param [timeout=10000] {number} number of milliseconds to wait for the response
 * @param [apiGroup] {ApiGroup} singleton object containing parameters of API group. Helpful when you use the same
 *        api for different providers to avoid hardcoding RPS inside each provider what can cause mistakes
 * @param [specificHeaders={}] {Object} contains specific keys (headers) and values (their content) if needed for this provider
 * @param [maxPageLength=Number.MAX_SAFE_INTEGER] {number} optional number of items per page if the request supports pagination
 */
constructor(endpoint, httpMethod, timeout, apiGroup, specificHeaders = {}, maxPageLength = Number.MAX_SAFE_INTEGER) {
    this.endpoint = endpoint;
    this.httpMethod = httpMethod != null ? httpMethod : "get"; // plain GET provider by default
    // TODO: [refactoring, critical] We have two timeouts for robust data retrieval - here and inside the service method call, need to remain the only
    this.timeout = timeout != null ? timeout : 10000;
    // TODO: [refactoring, critical] We need single place for all RPSes as we use them as hardcoded constants now inside different services
    this.apiGroup = apiGroup;
    this.maxPageLength = maxPageLength != null ? maxPageLength : Number.MAX_SAFE_INTEGER;
    // niceFactor orders providers during robust retrieval; it is decreased ("punished") on failures.
    this.niceFactor = 1;
    this.specificHeaders = specificHeaders != null ? specificHeaders : {};
}
|
|
3461
|
+
getRps() {
|
|
3462
|
+
var _this$apiGroup$rps;
|
|
3463
|
+
return (_this$apiGroup$rps = this.apiGroup.rps) != null ? _this$apiGroup$rps : 2;
|
|
3464
|
+
}
|
|
3465
|
+
isRpsExceeded() {
|
|
3466
|
+
return this.apiGroup.isRpsExceeded();
|
|
3467
|
+
}
|
|
3468
|
+
actualizeLastCalledTimestamp() {
|
|
3469
|
+
this.apiGroup.actualizeLastCalledTimestamp();
|
|
3470
|
+
}
|
|
3471
|
+
getApiGroupId() {
|
|
3472
|
+
return this.apiGroup.id;
|
|
3473
|
+
}
|
|
3474
|
+
|
|
3475
|
+
/**
|
|
3476
|
+
* Some endpoint can require several sub requests. Example is one request to get confirmed transactions
|
|
3477
|
+
* and another request for unconfirmed transactions. You should override this method to return true for such requests.
|
|
3478
|
+
*
|
|
3479
|
+
* @return {boolean} true if this provider requires several requests to retrieve the data
|
|
3480
|
+
*/
|
|
3481
|
+
doesRequireSubRequests() {
|
|
3482
|
+
return false;
|
|
3483
|
+
}
|
|
3484
|
+
|
|
3485
|
+
/**
|
|
3486
|
+
* Some endpoint support pagination. Override this method if so and implement corresponding methods.
|
|
3487
|
+
*
|
|
3488
|
+
* @return {boolean} true if this provider requires several requests to retrieve the data
|
|
3489
|
+
*/
|
|
3490
|
+
doesSupportPagination() {
|
|
3491
|
+
return false;
|
|
3492
|
+
}
|
|
3493
|
+
|
|
3494
|
+
/**
|
|
3495
|
+
* Composes a query string to be added to the endpoint of this provider.
|
|
3496
|
+
*
|
|
3497
|
+
* @param params {any[]} params array passed to the RobustExternalAPICallerService
|
|
3498
|
+
* @param [subRequestIndex] {number} optional number of the sub-request the call is performed for
|
|
3499
|
+
* @returns {string} query string to be concatenated with endpoint
|
|
3500
|
+
*/
|
|
3501
|
+
composeQueryString(params, subRequestIndex = 0) {
|
|
3502
|
+
return "";
|
|
3503
|
+
}
|
|
3504
|
+
|
|
3505
|
+
/**
|
|
3506
|
+
* Composes a body to be added to the request
|
|
3507
|
+
*
|
|
3508
|
+
* @param params {any[]} params array passed to the RobustExternalAPICallerService
|
|
3509
|
+
* @param [subRequestIndex] {number} optional number of the sub-request the call is performed for
|
|
3510
|
+
* @returns {string}
|
|
3511
|
+
*/
|
|
3512
|
+
composeBody(params, subRequestIndex = 0) {
|
|
3513
|
+
return "";
|
|
3514
|
+
}
|
|
3515
|
+
|
|
3516
|
+
/**
|
|
3517
|
+
* Extracts data from the response and returns it
|
|
3518
|
+
*
|
|
3519
|
+
* @param response {Object} HTTP response returned by provider
|
|
3520
|
+
* @param [params] {any[]} params array passed to the RobustExternalAPICallerService
|
|
3521
|
+
* @param [subRequestIndex] {number} optional number of the sub-request the call is performed for
|
|
3522
|
+
* @param iterationsData {any[]} array of data retrieved from previous sub-requests
|
|
3523
|
+
* @returns {any}
|
|
3524
|
+
*/
|
|
3525
|
+
getDataByResponse(response, params = [], subRequestIndex = 0, iterationsData = []) {
|
|
3526
|
+
return [];
|
|
3527
|
+
}
|
|
3528
|
+
|
|
3529
|
+
/**
|
|
3530
|
+
* Function changing the query string according to page number and previous response
|
|
3531
|
+
* Only for endpoints supporting pagination
|
|
3532
|
+
*
|
|
3533
|
+
* @param params {any[]} params array passed to the RobustExternalAPICallerService
|
|
3534
|
+
* @param previousResponse {Object} HTTP response returned by provider for previous call (previous page)
|
|
3535
|
+
* @param pageNumber {number} new page number. We count from 0. You need to manually increment with 1 if your
|
|
3536
|
+
* provider counts pages starting with 1
|
|
3537
|
+
* @param [subRequestIndex] {number} optional number of the sub-request the call is performed for
|
|
3538
|
+
* @returns {any[]}
|
|
3539
|
+
*/
|
|
3540
|
+
changeQueryParametersForPageNumber(params, previousResponse, pageNumber, subRequestIndex = 0) {
|
|
3541
|
+
return params;
|
|
3542
|
+
}
|
|
3543
|
+
|
|
3544
|
+
/**
|
|
3545
|
+
* Function checking whether the response is for the last page to stop requesting for a next page.
|
|
3546
|
+
* Only for endpoints supporting pagination.
|
|
3547
|
+
*
|
|
3548
|
+
* @param previousResponse {Object} HTTP response returned by provider for previous call (previous page)
|
|
3549
|
+
* @param currentResponse {Object} HTTP response returned by provider for current call (current page, next after the previous)
|
|
3550
|
+
* @param currentPageNumber {number} current page number (for current response)
|
|
3551
|
+
* @param [subRequestIndex] {number} optional number of the sub-request the call is performed for
|
|
3552
|
+
* @returns {boolean}
|
|
3553
|
+
*/
|
|
3554
|
+
checkWhetherResponseIsForLastPage(previousResponse, currentResponse, currentPageNumber, subRequestIndex = 0) {
|
|
3555
|
+
return true;
|
|
3556
|
+
}
|
|
3557
|
+
|
|
3558
|
+
/**
|
|
3559
|
+
* Resets the nice factor to default value
|
|
3560
|
+
*/
|
|
3561
|
+
resetNiceFactor() {
|
|
3562
|
+
this.niceFactor = 1;
|
|
3563
|
+
}
|
|
3564
|
+
|
|
3565
|
+
/**
|
|
3566
|
+
* Internal method used for requests requiring sub-requests.
|
|
3567
|
+
*
|
|
3568
|
+
* @param iterationsData {any[]} iterations data retrieved from getDataByResponse called per sub-request.
|
|
3569
|
+
* @return {any} by default flatten the passed iterations data array. Should be redefined if you need another logic.
|
|
3570
|
+
*/
|
|
3571
|
+
incorporateIterationsData(iterationsData) {
|
|
3572
|
+
return iterationsData.flat();
|
|
3573
|
+
}
|
|
3574
|
+
}
|
|
3575
|
+
|
|
2384
3576
|
class ExistingSwap {
|
|
2385
3577
|
/**
|
|
2386
3578
|
* @param swapId {string}
|
|
@@ -3848,5 +5040,5 @@ PublicSwapService.PUBLIC_SWAP_DETAILS_FAIL_REASONS = {
|
|
|
3848
5040
|
};
|
|
3849
5041
|
PublicSwapService._fiatDecimalsCount = FiatCurrenciesService.getCurrencyDecimalCountByCode("USD");
|
|
3850
5042
|
|
|
3851
|
-
export { AmountUtils, AssetIcon, BaseSwapCreationInfo, Blockchain, Button, Cache, Coin, EmailsApi, ExistingSwap, ExistingSwapWithFiatData, FiatCurrenciesService, LoadingDots, Logger, LogsStorage, Protocol, PublicSwapService, SupportChat, SwapProvider, SwapUtils, SwapspaceSwapProvider, improveAndRethrow, safeStringify, useCallHandlingErrors, useReferredState };
|
|
5043
|
+
export { AmountUtils, AssetIcon, AxiosAdapter, BaseSwapCreationInfo, Blockchain, Button, Cache, CacheAndConcurrentRequestsResolver, CachedRobustExternalApiCallerService, CancelProcessing, Coin, EmailsApi, ExistingSwap, ExistingSwapWithFiatData, ExternalApiProvider, FiatCurrenciesService, LoadingDots, Logger, LogsStorage, Protocol, PublicSwapService, RobustExternalAPICallerService, SupportChat, SwapProvider, SwapUtils, SwapspaceSwapProvider, getQueryParameterSingleValue, getQueryParameterValues, handleClickOutside, improveAndRethrow, logErrorOrOutputToConsole, postponeExecution, removeQueryParameterAndValues, safeStringify, saveQueryParameterAndValues, useCallHandlingErrors, useReferredState };
|
|
3852
5044
|
//# sourceMappingURL=index.modern.js.map
|