@rabbitio/ui-kit 1.0.0-beta.41 → 1.0.0-beta.42

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,9 +1,9 @@
1
1
  import React, { useState, useRef, useEffect, useCallback } from 'react';
2
2
  import { BigNumber } from 'bignumber.js';
3
3
  import axios from 'axios';
4
- import { v4 } from 'uuid';
5
- import Hashes from 'jshashes';
6
4
  import EventBusInstance from 'eventbusjs';
5
+ import Hashes from 'jshashes';
6
+ import { v4 } from 'uuid';
7
7
 
8
8
  function createCommonjsModule(fn) {
9
9
  var module = { exports: {} };
@@ -2501,456 +2501,220 @@ class EmailsApi {
2501
2501
  }
2502
2502
  EmailsApi.serverEndpointEntity = "emails";
2503
2503
 
2504
- /**
2505
- * This util helps to avoid duplicated calls to a shared resource.
2506
- * It tracks is there currently active calculation for the specific cache id and make all other requests
2507
- * with the same cache id waiting for this active calculation to be finished. When the calculation ends
2508
- * the resolver allows all the waiting requesters to get the data from cache and start their own calculations.
2509
- *
2510
- * This class should be instantiated inside some other service where you need to request some resource concurrently.
2511
- * Rules:
2512
- * 1. When you need to make a request inside your main service call 'getCachedOrWaitForCachedOrAcquireLock'
2513
- * on the instance of this class and await for the result. If the flag allowing to start calculation is true
2514
- * then you can request data inside your main service. Otherwise you should use the cached data as an another
2515
- * requester just finished the most resent requesting and there is actual data in the cache that
2516
- * is returned to you here.
2517
- * 1.1 Also you can acquire a lock directly if you don't want to get cached data. Use the corresponding method 'acquireLock'.
2518
- *
2519
- * 2. If you start requesting (when you successfully acquired the lock) then after receiving the result of your
2520
- * requesting you should call the 'saveCachedData' so the retrieved data will appear in the cache.
2521
- *
2522
- * 3. If you successfully acquired the lock then you should after calling the 'saveCachedData' call
2523
- * the 'releaseLock' - this is mandatory to release the lock and allow other requesters to perform their requests.
2524
- * WARNING: If for any reason you forget to call this method then this class instance will wait perpetually for
2525
- * the lock releasing and all your attempts to request the data will constantly fail. So usually call it
2526
- * inside the 'finally' block.
2527
- *
2528
- * TODO: [tests, critical++] add unit tests - massively used logic and can produce sophisticated concurrency bugs
2529
- */
2530
- class CacheAndConcurrentRequestsResolver {
2504
+ class ExternalApiProvider {
2531
2505
  /**
2532
- * @param bio {string} unique identifier for the exact service
2533
- * @param cache {Cache} cache
2534
- * @param cacheTtl {number|null} time to live for cache ms. 0 or null means the cache cannot expire
2535
- * @param [maxCallAttemptsToWaitForAlreadyRunningRequest=100] {number} number of request allowed to do waiting for
2536
- * result before we fail the original request. Use custom value only if you need to make the attempts count
2537
- * and polling interval changes.
2538
- * @param [timeoutBetweenAttemptsToCheckWhetherAlreadyRunningRequestFinished=1000] {number}
2539
- * timeout ms for polling for a result. if you change maxCallAttemptsToWaitForAlreadyRunningRequest
2540
- * then this parameter maybe also require the custom value.
2541
- * @param [removeExpiredCacheAutomatically=true] {boolean}
2506
+ * Creates an instance of external api provider.
2507
+ *
2508
+ * If you need sub-request then use 'subRequestIndex' to check current request index in functions below.
2509
+ * Also use array for 'httpMethod'.
2510
+ *
2511
+ * If the endpoint of dedicated provider has pagination then you should customize the behavior using
2512
+ * "changeQueryParametersForPageNumber", "checkWhetherResponseIsForLastPage".
2513
+ *
2514
+ * We perform RPS counting all over the App to avoid blocking our clients due to abuses of the providers.
2515
+ *
2516
+ * @param endpoint {string} URL to the provider's endpoint. Note: you can customize it using composeQueryString
2517
+ * @param [httpMethod] {string|string[]} one of "get", "post", "put", "patch", "delete" or an array of these values
2518
+ * for request having sub-requests
2519
+ * @param [timeout] {number} number of milliseconds to wait for the response
2520
+ * @param [apiGroup] {ApiGroup} singleton object containing parameters of API group. Helpful when you use the same
2521
+ * api for different providers to avoid hardcoding RPS inside each provider what can cause mistakes
2522
+ * @param [specificHeaders] {Object} contains specific keys (headers) and values (their content) if needed for this provider
2523
+ * @param [maxPageLength] {number} optional number of items per page if the request supports pagination
2542
2524
  */
2543
- constructor(bio, cache, cacheTtl, removeExpiredCacheAutomatically = true, maxCallAttemptsToWaitForAlreadyRunningRequest = 100, timeoutBetweenAttemptsToCheckWhetherAlreadyRunningRequestFinished = 1000) {
2544
- if (cacheTtl != null && cacheTtl < timeoutBetweenAttemptsToCheckWhetherAlreadyRunningRequestFinished * 2) {
2545
- /*
2546
- * During the lifetime of this service e.g. if the data is being retrieved slowly we can get
2547
- * RACE CONDITION when we constantly retrieve data and during retrieval it is expired, so we are trying
2548
- * to retrieve it again and again.
2549
- * We have a protection mechanism that we will wait no more than
2550
- * maxCallAttemptsToWaitForAlreadyRunningRequest * timeoutBetweenAttemptsToCheckWhetherAlreadyRunningRequestFinished
2551
- * but this additional check is aimed to reduce potential loading time for some requests.
2552
- */
2553
- throw new Error(`DEV: Wrong parameters passed to construct ${bio} - TTL ${cacheTtl} should be 2 times greater than ${timeoutBetweenAttemptsToCheckWhetherAlreadyRunningRequestFinished}`);
2554
- }
2555
- this._bio = bio;
2556
- this._cache = cache;
2557
- this._cacheTtlMs = cacheTtl != null ? cacheTtl : null;
2558
- this._maxExecutionTimeMs = maxCallAttemptsToWaitForAlreadyRunningRequest * timeoutBetweenAttemptsToCheckWhetherAlreadyRunningRequestFinished;
2559
- this._removeExpiredCacheAutomatically = removeExpiredCacheAutomatically;
2560
- this._requestsManager = new ManagerOfRequestsToTheSameResource(bio, maxCallAttemptsToWaitForAlreadyRunningRequest, timeoutBetweenAttemptsToCheckWhetherAlreadyRunningRequestFinished);
2525
+ constructor(endpoint, httpMethod, timeout, apiGroup, specificHeaders = {}, maxPageLength = Number.MAX_SAFE_INTEGER) {
2526
+ this.endpoint = endpoint;
2527
+ this.httpMethod = httpMethod != null ? httpMethod : "get";
2528
+ // TODO: [refactoring, critical] We have two timeouts for robust data retrieval - here and inside the service method call, need to remain the only
2529
+ this.timeout = timeout != null ? timeout : 10000;
2530
+ // TODO: [refactoring, critical] We need single place for all RPSes as we use them as hardcoded constants now inside different services
2531
+ this.apiGroup = apiGroup;
2532
+ this.maxPageLength = maxPageLength != null ? maxPageLength : Number.MAX_SAFE_INTEGER;
2533
+ this.niceFactor = 1;
2534
+ this.specificHeaders = specificHeaders != null ? specificHeaders : {};
2535
+ }
2536
+ getRps() {
2537
+ var _this$apiGroup$rps;
2538
+ return (_this$apiGroup$rps = this.apiGroup.rps) != null ? _this$apiGroup$rps : 2;
2539
+ }
2540
+ isRpsExceeded() {
2541
+ return this.apiGroup.isRpsExceeded();
2542
+ }
2543
+ actualizeLastCalledTimestamp() {
2544
+ this.apiGroup.actualizeLastCalledTimestamp();
2545
+ }
2546
+ getApiGroupId() {
2547
+ return this.apiGroup.id;
2561
2548
  }
2562
2549
 
2563
2550
  /**
2564
- * When using this service this is the major method you should call to get data by cache id.
2565
- * This method checks is there cached data and ether
2566
- * - returns you flag that you can start requesting data from the shared resource
2567
- * - or if there is already started calculation waits until it is finished (removed from this service)
2568
- * and returns you the retrieved data
2569
- * - or just returns you the cached data
2570
- *
2571
- * 'canStartDataRetrieval' equal true means that the lock was acquired, and you should manually call 'saveCachedData'
2572
- * if needed and then 'releaseLock' to mark this calculation as finished so other
2573
- * requesters can take their share of the resource.
2551
+ * Some endpoint can require several sub requests. Example is one request to get confirmed transactions
2552
+ * and another request for unconfirmed transactions. You should override this method to return true for such requests.
2574
2553
  *
2575
- * @param cacheId {string}
2576
- * @return {Promise<({
2577
- * canStartDataRetrieval: true,
2578
- * cachedData: any,
2579
- * lockId: string
2580
- * }|{
2581
- * canStartDataRetrieval: false,
2582
- * cachedData: any
2583
- * })>}
2554
+ * @return {boolean} true if this provider requires several requests to retrieve the data
2584
2555
  */
2585
- async getCachedOrWaitForCachedOrAcquireLock(cacheId) {
2586
- try {
2587
- var _cached2;
2588
- const startedAtTimestamp = Date.now();
2589
- let cached = this._cache.get(cacheId);
2590
- let cachedDataBackupIsPresentButExpired = null;
2591
- if (cached != null && !this._removeExpiredCacheAutomatically) {
2592
- const lastUpdateTimestamp = this._cache.getLastUpdateTimestamp(cacheId);
2593
- if ((lastUpdateTimestamp != null ? lastUpdateTimestamp : 0) + this._cacheTtlMs < Date.now()) {
2594
- /*
2595
- * Here we are manually clearing 'cached' value retrieved from cache to force the data loading.
2596
- * But we save its value first to the backup variable to be able to return this value if ongoing
2597
- * requesting fails.
2598
- */
2599
- cachedDataBackupIsPresentButExpired = cached;
2600
- cached = null;
2601
- }
2602
- }
2603
- let calculationId = null;
2604
- let isRetrievedCacheExpired = true;
2605
- let isWaitingForActiveCalculationSucceeded;
2606
- let weStillHaveSomeTimeToProceedExecution = true;
2607
- while (calculationId == null && cached == null && isRetrievedCacheExpired && weStillHaveSomeTimeToProceedExecution) {
2608
- const result = await this._requestsManager.startCalculationOrWaitForActiveToFinish(cacheId);
2609
- calculationId = typeof result === "string" ? result : null;
2610
- isWaitingForActiveCalculationSucceeded = typeof result === "boolean" ? result : null;
2611
- cached = this._cache.get(cacheId);
2612
- isRetrievedCacheExpired = isWaitingForActiveCalculationSucceeded && cached == null;
2613
- weStillHaveSomeTimeToProceedExecution = Date.now() - startedAtTimestamp < this._maxExecutionTimeMs;
2614
- }
2615
- if (calculationId) {
2616
- var _cached;
2617
- return {
2618
- canStartDataRetrieval: true,
2619
- cachedData: (_cached = cached) != null ? _cached : cachedDataBackupIsPresentButExpired,
2620
- lockId: calculationId
2621
- };
2622
- }
2623
- return {
2624
- canStartDataRetrieval: false,
2625
- cachedData: (_cached2 = cached) != null ? _cached2 : cachedDataBackupIsPresentButExpired
2626
- };
2627
- } catch (e) {
2628
- improveAndRethrow(e, `${this._bio}.getCachedOrWaitForCachedOrAcquireLock`);
2629
- }
2556
+ doesRequireSubRequests() {
2557
+ return false;
2630
2558
  }
2631
2559
 
2632
2560
  /**
2633
- * Returns just the current cache value for the given id.
2634
- * Doesn't wait for the active calculation, doesn't acquire lock, just retrieves the current cache as it is.
2561
+ * Some endpoint support pagination. Override this method if so and implement corresponding methods.
2635
2562
  *
2636
- * @param cacheId {string}
2637
- * @return {any}
2563
+ * @return {boolean} true if this provider requires several requests to retrieve the data
2638
2564
  */
2639
- getCached(cacheId) {
2640
- try {
2641
- return this._cache.get(cacheId);
2642
- } catch (e) {
2643
- improveAndRethrow(e, "getCached");
2644
- }
2645
- }
2646
- _getTtl() {
2647
- return this._removeExpiredCacheAutomatically ? this._cacheTtlMs : null;
2565
+ doesSupportPagination() {
2566
+ return false;
2648
2567
  }
2649
2568
 
2650
2569
  /**
2651
- * Directly acquires the lock despite on cached data availability.
2652
- * So if this method returns result === true you can start the data retrieval.
2570
+ * Composes a query string to be added to the endpoint of this provider.
2653
2571
  *
2654
- * @param cacheId {string}
2655
- * @return {Promise<{ result: true, lockId: string }|{ result: false }>}
2572
+ * @param params {any[]} params array passed to the RobustExternalAPICallerService
2573
+ * @param [subRequestIndex] {number} optional number of the sub-request the call is performed for
2574
+ * @returns {string} query string to be concatenated with endpoint
2656
2575
  */
2657
- async acquireLock(cacheId) {
2658
- try {
2659
- return await this._requestsManager.acquireLock(cacheId);
2660
- } catch (e) {
2661
- improveAndRethrow(e, "acquireLock");
2662
- }
2576
+ composeQueryString(params, subRequestIndex = 0) {
2577
+ return "";
2663
2578
  }
2664
2579
 
2665
2580
  /**
2666
- * This method should be called only if you acquired a lock successfully.
2667
- *
2668
- * If the current lock id is not equal to the passed one the passed data will be ignored.
2669
- * Or you can do the synchronous data merging on your side and pass the
2670
- * wasDataMergedSynchronouslyWithMostRecentCacheState=true so your data will be stored
2671
- * despite on the lockId.
2672
- * WARNING: you should do this only if you are sure you perform the synchronous update.
2581
+ * Composes a body to be added to the request
2673
2582
  *
2674
- * @param cacheId {string}
2675
- * @param lockId {string}
2676
- * @param data {any}
2677
- * @param [sessionDependentData=true] {boolean}
2678
- * @param [wasDataMergedSynchronouslyWithMostRecentCacheState=false]
2583
+ * @param params {any[]} params array passed to the RobustExternalAPICallerService
2584
+ * @param [subRequestIndex] {number} optional number of the sub-request the call is performed for
2585
+ * @returns {string}
2679
2586
  */
2680
- saveCachedData(cacheId, lockId, data, sessionDependentData = true, wasDataMergedSynchronouslyWithMostRecentCacheState = false) {
2681
- try {
2682
- if (wasDataMergedSynchronouslyWithMostRecentCacheState || this._requestsManager.isTheLockActiveOne(cacheId, lockId)) {
2683
- /* We save passed data only if the <caller> has the currently acquired lockId.
2684
- * If the passed lockId is not the active one it means that other code cleared/stopped the lock
2685
- * acquired by the <caller> recently due to some urgent/more prior changes.
2686
- *
2687
- * But we allow user to pass the 'wasDataMergedSynchronouslyWithMostRecentCacheState' flag
2688
- * that tells us that the user had taken the most recent cache value and merged his new data
2689
- * with that cached value (AFTER possibly performing async data retrieval). This means that we
2690
- * can ignore the fact that his lockId is no more relevant and save the passed data
2691
- * as it is synchronously merged with the most recent cached data. (Synchronously merged means that
2692
- * the lost update cannot occur during the merge time as JS execute the synchronous functions\
2693
- * till the end).
2694
- */
2695
- if (sessionDependentData) {
2696
- this._cache.putSessionDependentData(cacheId, data, this._getTtl());
2697
- } else {
2698
- this._cache.put(cacheId, data, this._getTtl());
2699
- }
2700
- }
2701
- } catch (e) {
2702
- improveAndRethrow(e, `${this._bio}.saveCachedData`);
2703
- }
2587
+ composeBody(params, subRequestIndex = 0) {
2588
+ return "";
2704
2589
  }
2705
2590
 
2706
2591
  /**
2707
- * Should be called then and only then if you successfully acquired a lock with the lock id.
2592
+ * Extracts data from the response and returns it
2708
2593
  *
2709
- * @param cacheId {string}
2710
- * @param lockId {string}
2594
+ * @param response {Object} HTTP response returned by provider
2595
+ * @param [params] {any[]} params array passed to the RobustExternalAPICallerService
2596
+ * @param [subRequestIndex] {number} optional number of the sub-request the call is performed for
2597
+ * @param iterationsData {any[]} array of data retrieved from previous sub-requests
2598
+ * @returns {any}
2711
2599
  */
2712
- releaseLock(cacheId, lockId) {
2713
- try {
2714
- if (this._requestsManager.isTheLockActiveOne(cacheId, lockId)) {
2715
- this._requestsManager.finishActiveCalculation(cacheId);
2716
- }
2717
- } catch (e) {
2718
- improveAndRethrow(e, `${this._bio}.releaseLock`);
2719
- }
2600
+ getDataByResponse(response, params = [], subRequestIndex = 0, iterationsData = []) {
2601
+ return [];
2720
2602
  }
2721
2603
 
2722
2604
  /**
2723
- * Actualized currently present cached data by key. Applies the provided function to the cached data.
2605
+ * Function changing the query string according to page number and previous response
2606
+ * Only for endpoints supporting pagination
2724
2607
  *
2725
- * @param cacheId {string} id of cache entry
2726
- * @param synchronousCurrentCacheProcessor (function|null} synchronous function accepting cache entry. Should return
2727
- * an object in following format:
2728
- * {
2729
- * isModified: boolean,
2730
- * data: any
2731
- * }
2732
- * the flag signals whether data was changed during the processing or not
2733
- * @param [sessionDependent=true] {boolean} whether to mark the cache entry as session-dependent
2608
+ * @param params {any[]} params array passed to the RobustExternalAPICallerService
2609
+ * @param previousResponse {Object} HTTP response returned by provider for previous call (previous page)
2610
+ * @param pageNumber {number} new page number. We count from 0. You need to manually increment with 1 if your
2611
+ * provider counts pages starting with 1
2612
+ * @param [subRequestIndex] {number} optional number of the sub-request the call is performed for
2613
+ * @returns {any[]}
2734
2614
  */
2735
- actualizeCachedData(cacheId, synchronousCurrentCacheProcessor, sessionDependent = true) {
2736
- try {
2737
- const cached = this._cache.get(cacheId);
2738
- const result = synchronousCurrentCacheProcessor(cached);
2739
- if (result != null && result.isModified && (result == null ? void 0 : result.data) != null) {
2740
- if (sessionDependent) {
2741
- this._cache.putSessionDependentData(cacheId, result == null ? void 0 : result.data, this._getTtl());
2742
- } else {
2743
- this._cache.put(cacheId, result == null ? void 0 : result.data, this._getTtl());
2744
- }
2745
-
2746
- /* Here we call the lock releasing to ensure the currently active calculation will be ignored.
2747
- * This is needed to ensure no 'lost update'.
2748
- * Lost update can occur if we change data in this method and after that some calculation finishes
2749
- * having the earlier data as its base to calculate its data set result. And the earlier data
2750
- * has no changes applied inside this method, so we will lose them.
2751
- *
2752
- * This is not so good solution: ideally, we should acquire lock before performing any data updating.
2753
- * But the goal of this method is to provide an instant ability to update the cached data.
2754
- * And if we start acquiring the lock here the data update can be postponed significantly.
2755
- * And this kills the desired nature of this method.
2756
- * So we better lose some data retrieval (means abusing the resource a bit) than lose
2757
- * the instant update expected after this method execution.
2758
- */
2759
- this._requestsManager.finishActiveCalculation(cacheId);
2760
- }
2761
- } catch (e) {
2762
- improveAndRethrow(e, `${this._bio}.actualizeCachedData`);
2763
- }
2764
- }
2765
- invalidate(key) {
2766
- this._cache.invalidate(key);
2767
- this._requestsManager.finishActiveCalculation(key);
2768
- }
2769
- invalidateContaining(keyPart) {
2770
- this._cache.invalidateContaining(keyPart);
2771
- this._requestsManager.finishAllActiveCalculations(keyPart);
2772
- }
2773
- markAsExpiredButDontRemove(key) {
2774
- if (this._removeExpiredCacheAutomatically) {
2775
- this._cache.markCacheItemAsExpiredButDontRemove(key, this._cacheTtlMs);
2776
- } else {
2777
- this._cache.setLastUpdateTimestamp(key, Date.now() - this._cacheTtlMs - 1);
2778
- }
2779
- this._requestsManager.finishAllActiveCalculations(key);
2615
+ changeQueryParametersForPageNumber(params, previousResponse, pageNumber, subRequestIndex = 0) {
2616
+ return params;
2780
2617
  }
2781
- }
2782
2618
 
2783
- /**
2784
- * Util class to control access to a resource when it can be called in parallel for the same result.
2785
- * (E.g. getting today coins-fiat rates from some API).
2786
- */
2787
- class ManagerOfRequestsToTheSameResource {
2788
2619
  /**
2789
- * @param bio {string} resource-related identifier for logging
2790
- * @param [maxPollsCount=100] {number} max number of attempts to wait when waiting for a lock acquisition
2791
- * @param [timeoutDuration=1000] {number} timeout between the polls for a lock acquisition
2620
+ * Function checking whether the response is for the last page to stop requesting for a next page.
2621
+ * Only for endpoints supporting pagination.
2622
+ *
2623
+ * @param previousResponse {Object} HTTP response returned by provider for previous call (previous page)
2624
+ * @param currentResponse {Object} HTTP response returned by provider for current call (current page, next after the previous)
2625
+ * @param currentPageNumber {number} current page number (for current response)
2626
+ * @param [subRequestIndex] {number} optional number of the sub-request the call is performed for
2627
+ * @returns {boolean}
2792
2628
  */
2793
- constructor(bio, maxPollsCount = 100, timeoutDuration = 1000) {
2794
- this.bio = bio;
2795
- this.maxPollsCount = maxPollsCount;
2796
- this.timeoutDuration = timeoutDuration;
2797
- this._activeCalculationsIds = new Map();
2798
- this._nextCalculationIds = new Map();
2629
+ checkWhetherResponseIsForLastPage(previousResponse, currentResponse, currentPageNumber, subRequestIndex = 0) {
2630
+ return true;
2799
2631
  }
2800
2632
 
2801
2633
  /**
2802
- * If there is no active calculation just creates uuid and returns it.
2803
- * If there is active calculation waits until it removed from the active calculation uuid variable.
2804
- *
2805
- * @param requestHash {string}
2806
- * @return {Promise<string|boolean>} returns uuid of new active calculation or true if waiting for active
2807
- * calculation succeed or false if max attempts count exceeded
2634
+ * Resets the nice factor to default value
2808
2635
  */
2809
- async startCalculationOrWaitForActiveToFinish(requestHash) {
2810
- try {
2811
- const activeCalculationIdForHash = this._activeCalculationsIds.get(requestHash);
2812
- if (activeCalculationIdForHash == null) {
2813
- const id = v4();
2814
- this._activeCalculationsIds.set(requestHash, id);
2815
- return id;
2816
- }
2817
- return await this._waitForCalculationIdToFinish(requestHash, activeCalculationIdForHash, 0);
2818
- } catch (e) {
2819
- Logger.logError(e, `startCalculationOrWaitForActiveToFinish_${this.bio}`);
2820
- }
2821
- return null;
2636
+ resetNiceFactor() {
2637
+ this.niceFactor = 1;
2822
2638
  }
2823
2639
 
2824
2640
  /**
2825
- * Acquires lock to the resource by the provided hash.
2641
+ * Internal method used for requests requiring sub-requests.
2826
2642
  *
2827
- * @param requestHash {string}
2828
- * @return {Promise<{ result: true, lockId: string }|{ result: false }>} result is true if the lock is successfully
2829
- * acquired, false if the max allowed time to wait for acquisition expired or any unexpected error occurs
2830
- * during the waiting.
2643
+ * @param iterationsData {any[]} iterations data retrieved from getDataByResponse called per sub-request.
2644
+ * @return {any} by default flatten the passed iterations data array. Should be redefined if you need another logic.
2831
2645
  */
2832
- async acquireLock(requestHash) {
2833
- try {
2834
- var _this$_nextCalculatio;
2835
- const activeId = this._activeCalculationsIds.get(requestHash);
2836
- const nextId = v4();
2837
- if (activeId == null) {
2838
- this._activeCalculationsIds.set(requestHash, nextId);
2839
- return {
2840
- result: true,
2841
- lockId: nextId
2842
- };
2843
- }
2844
- const currentNext = (_this$_nextCalculatio = this._nextCalculationIds.get(requestHash)) != null ? _this$_nextCalculatio : [];
2845
- currentNext.push(nextId);
2846
- this._nextCalculationIds.set(requestHash, currentNext);
2847
- const waitingResult = await this._waitForCalculationIdToFinish(requestHash, activeId, 0, nextId);
2848
- return {
2849
- result: waitingResult,
2850
- lockId: waitingResult ? nextId : undefined
2851
- };
2852
- } catch (e) {
2853
- improveAndRethrow(e, "acquireLock");
2854
- }
2646
+ incorporateIterationsData(iterationsData) {
2647
+ return iterationsData.flat();
2855
2648
  }
2649
+ }
2856
2650
 
2857
- /**
2858
- * Clears active calculation id.
2859
- * WARNING: if you forget to call this method the start* one will perform maxPollsCount attempts before finishing
2860
- * @param requestHash {string} hash of request. Helps to distinct the request for the same resource but
2861
- * having different request parameters and hold a dedicated calculation id per this hash
2862
- */
2863
- finishActiveCalculation(requestHash = "default") {
2864
- try {
2865
- var _this$_nextCalculatio2;
2866
- this._activeCalculationsIds.delete(requestHash);
2867
- const next = (_this$_nextCalculatio2 = this._nextCalculationIds.get(requestHash)) != null ? _this$_nextCalculatio2 : [];
2868
- if (next.length) {
2869
- this._activeCalculationsIds.set(requestHash, next[0]);
2870
- this._nextCalculationIds.set(requestHash, next.slice(1));
2871
- }
2872
- } catch (e) {
2873
- improveAndRethrow(e, "finishActiveCalculation");
2874
- }
2651
+ /**
2652
+ * Models a group of APIs provided by the same owner and used for different services in our app.
2653
+ * It means we need to mention RPS several times for each usage and also have some holder of last call timestamp per
2654
+ * api group. So this concept allows to use it for exact ExternalApiProvider and make sure that you use the same
2655
+ * RPS value and make decisions on base of the same timestamp of last call to the API group owner.
2656
+ */
2657
+ class ApiGroup {
2658
+ constructor(id, rps, backendProxyIdGenerator = null) {
2659
+ this.id = id;
2660
+ this.rps = rps;
2661
+ this.lastCalledTimestamp = null;
2662
+ this.backendProxyIdGenerator = backendProxyIdGenerator;
2875
2663
  }
2876
- finishAllActiveCalculations(keyPart = "") {
2877
- try {
2878
- Array.from(this._activeCalculationsIds.keys()).forEach(hash => {
2879
- if (typeof hash === "string" && new RegExp(keyPart).test(hash)) {
2880
- this.finishActiveCalculation(hash);
2881
- }
2882
- });
2883
- } catch (e) {
2884
- improveAndRethrow(e, "finishAllActiveCalculations");
2885
- }
2664
+ isRpsExceeded() {
2665
+ var _this$lastCalledTimes;
2666
+ return ((_this$lastCalledTimes = this.lastCalledTimestamp) != null ? _this$lastCalledTimes : 0) + Math.floor(1000 / this.rps) > Date.now();
2886
2667
  }
2887
-
2888
- /**
2889
- * @param requestHash {string}
2890
- * @param lockId {string}
2891
- * @return {boolean}
2892
- */
2893
- isTheLockActiveOne(requestHash, lockId) {
2894
- try {
2895
- return this._activeCalculationsIds.get(requestHash) === lockId;
2896
- } catch (e) {
2897
- improveAndRethrow(e, "isTheLockActiveOne");
2898
- }
2668
+ actualizeLastCalledTimestamp() {
2669
+ this.lastCalledTimestamp = Date.now();
2899
2670
  }
2900
-
2671
+ }
2672
+ const ApiGroups = {
2901
2673
  /**
2902
- * @param requestHash {string}
2903
- * @param activeCalculationId {string|null}
2904
- * @param [attemptIndex=0] {number}
2905
- * @param waitForCalculationId {string|null} if you want to wait for an exact id to appear as active then pass this parameter
2906
- * @return {Promise<boolean>} true
2907
- * - if the given calculation id is no more an active one
2908
- * - or it is equal to waitForCalculationId
2909
- * false
2910
- * - if waiting period exceeds the max allowed waiting time or unexpected error occurs
2911
- * @private
2674
+ * Currently we use free version of etherscan provider with 0.2 RPS. But we have API key with 100k requests free
2675
+ * per month. So we can add it if not enough current RPS.
2912
2676
  */
2913
- async _waitForCalculationIdToFinish(requestHash, activeCalculationId, attemptIndex = 0, waitForCalculationId = null) {
2914
- try {
2915
- if (attemptIndex + 1 > this.maxPollsCount) {
2916
- // Max number of polls for active calculation id change is achieved. So we return false.
2917
- return false;
2918
- }
2919
- const currentId = this._activeCalculationsIds.get(requestHash);
2920
- if (waitForCalculationId == null ? currentId !== activeCalculationId : currentId === waitForCalculationId) {
2921
- /* We return true depending on the usage of this function:
2922
- * 1. if there is calculation id that we should wait for to become an active then we return true only
2923
- * if this id becomes the active one.
2924
- *
2925
- * Theoretically we can fail to wait for the desired calculation id. This can be caused by wrong use of
2926
- * this service or by any other mistakes/errors. But this waiting function will return false anyway if
2927
- * the number of polls done exceeds the max allowed.
2928
- *
2929
- * 2. if we just wait for the currently active calculation id to be finished then we return true
2930
- * when we notice that the current active id differs from the original passed into this function.
2931
- */
2932
- return true;
2933
- } else {
2934
- /* The original calculation id is still the active one, so we are scheduling a new attempt to check
2935
- * whether the active calculation id changed or not in timeoutDuration milliseconds.
2936
- */
2937
- const it = this;
2938
- return new Promise((resolve, reject) => {
2939
- setTimeout(function () {
2940
- try {
2941
- resolve(it._waitForCalculationIdToFinish(requestHash, activeCalculationId, attemptIndex + 1));
2942
- } catch (e) {
2943
- reject(e);
2944
- }
2945
- }, this.timeoutDuration);
2946
- });
2947
- }
2948
- } catch (e) {
2949
- Logger.logError(e, "_waitForCalculationIdToFinish", "Failed to wait for active calculation id change.");
2950
- return false;
2951
- }
2952
- }
2953
- }
2677
+ ETHERSCAN: new ApiGroup("etherscan", 0.17),
2678
+ // Actually 0.2 but fails sometime, so we use smaller
2679
+ ALCHEMY: new ApiGroup("alchemy", 0.3, networkKey => `alchemy-${networkKey}`),
2680
+ BLOCKSTREAM: new ApiGroup("blockstream", 0.2),
2681
+ BLOCKCHAIN_INFO: new ApiGroup("blockchain.info", 1),
2682
+ BLOCKNATIVE: new ApiGroup("blocknative", 0.5),
2683
+ ETHGASSTATION: new ApiGroup("ethgasstation", 0.5),
2684
+ TRONGRID: new ApiGroup("trongrid", 0.3, networkKey => `trongrid-${networkKey}`),
2685
+ TRONSCAN: new ApiGroup("tronscan", 0.3),
2686
+ GETBLOCK: new ApiGroup("getblock", 0.3),
2687
+ COINCAP: new ApiGroup("coincap", 0.5),
2688
+ // 200 per minute without API key
2689
+ COINGECKO: new ApiGroup("coingecko", 0.9),
2690
+ // actually 0.13-0.5 according to the docs but we use smaller due to expirienced frequent abuses
2691
+ MESSARI: new ApiGroup("messari", 0.2),
2692
+ BTCCOM: new ApiGroup("btccom", 0.2),
2693
+ BITAPS: new ApiGroup("bitaps", 0.25),
2694
+ // Docs say that RPS is 3 but using it causes frequent 429 HTTP errors
2695
+ CEX: new ApiGroup("cex", 0.5),
2696
+ // Just assumption for RPS
2697
+ BIGDATACLOUD: new ApiGroup("bigdatacloud", 1),
2698
+ // Just assumption for RPS
2699
+ TRACKIP: new ApiGroup("trackip", 1),
2700
+ // Just assumption for RPS
2701
+ IPIFY: new ApiGroup("ipify", 1),
2702
+ // Just assumption for RPS
2703
+ WHATISMYIPADDRESS: new ApiGroup("whatismyipaddress", 1),
2704
+ // Just assumption for RPS
2705
+ EXCHANGERATE: new ApiGroup("exchangerate", 1),
2706
+ // Just assumption for RPS
2707
+ FRANKFURTER: new ApiGroup("frankfurter", 1),
2708
+ // Just assumption for RPS
2709
+ BITGO: new ApiGroup("bitgo", 1),
2710
+ // Just assumption for RPS
2711
+ BITCOINER: new ApiGroup("bitcoiner", 1),
2712
+ // Just assumption for RPS
2713
+ BITCORE: new ApiGroup("bitcore", 1),
2714
+ // Just assumption for RPS
2715
+ // BLOCKCHAIR: new ApiGroup("blockchair", 0.04), // this provider require API key for commercial use (10usd 10000 reqs), we will add it later
2716
+ MEMPOOL: new ApiGroup("mempool", 0.2) // Just assumption for RPS
2717
+ };
2954
2718
 
2955
2719
  // TODO: [refactoring, low] Consider removing this logic task_id=c360f2af75764bde8badd9ff1cc00d48
2956
2720
  class ConcurrentCalculationsMetadataHolder {
@@ -3089,7 +2853,7 @@ class ExternalServicesStatsCollector {
3089
2853
  */
3090
2854
  class RobustExternalAPICallerService {
3091
2855
/**
 * Returns the stats accumulated by the shared external-services stats collector.
 * @return {any} whatever the collector's 'getStats' produces — shape is defined by ExternalServicesStatsCollector
 */
static getStats() {
    return this.statsCollector.getStats();
}
3094
2858
 
3095
2859
  /**
@@ -3301,12 +3065,463 @@ class RobustExternalAPICallerService {
3301
3065
  return providersCopy.sort((p1, p2) => p2.niceFactor - p1.niceFactor);
3302
3066
  }
3303
3067
  }
3304
// Shared collector of per-provider success/failure stats; exposed via RobustExternalAPICallerService.getStats().
RobustExternalAPICallerService.statsCollector = new ExternalServicesStatsCollector();
// Default multiplier for providers' RPS limits — presumably scales the allowed request rate; confirm usage in the class.
RobustExternalAPICallerService.defaultRPSFactor = 1;
// 5% safety margin — presumably widens the waiting interval between calls; confirm usage in the class.
RobustExternalAPICallerService.rpsMultiplier = 1.05;
3071
/**
 * Penalizes a provider for a failed call by decrementing its nice factor
 * (providers are later sorted by this factor, so punished ones are tried later).
 *
 * @param provider {Object} provider object carrying a numeric 'niceFactor'
 */
function punishProvider(provider) {
    provider.niceFactor -= 1;
}
3074
+
3075
/**
 * This util helps to avoid duplicated calls to a shared resource.
 * It tracks whether there is a currently active calculation for the specific cache id and makes all other requests
 * with the same cache id wait for this active calculation to be finished. When the calculation ends
 * the resolver allows all the waiting requesters to get the data from cache and start their own calculations.
 *
 * This class should be instantiated inside some other service where you need to request some resource concurrently.
 * Rules:
 * 1. When you need to make a request inside your main service call 'getCachedOrWaitForCachedOrAcquireLock'
 *    on the instance of this class and await for the result. If the flag allowing to start calculation is true
 *    then you can request data inside your main service. Otherwise you should use the cached data as another
 *    requester just finished the most recent requesting and there is actual data in the cache that
 *    is returned to you here.
 *    1.1 Also you can acquire a lock directly if you don't want to get cached data. Use the corresponding method 'acquireLock'.
 *
 * 2. If you start requesting (when you successfully acquired the lock) then after receiving the result of your
 *    requesting you should call the 'saveCachedData' so the retrieved data will appear in the cache.
 *
 * 3. If you successfully acquired the lock then you should, after calling the 'saveCachedData', call
 *    the 'releaseLock' - this is mandatory to release the lock and allow other requesters to perform their requests.
 *    WARNING: If for any reason you forget to call this method then this class instance will wait perpetually for
 *    the lock releasing and all your attempts to request the data will constantly fail. So usually call it
 *    inside the 'finally' block.
 *
 * TODO: [tests, critical++] add unit tests - massively used logic and can produce sophisticated concurrency bugs
 */
class CacheAndConcurrentRequestsResolver {
    /**
     * @param bio {string} unique identifier for the exact service
     * @param cache {Cache} cache
     * @param cacheTtl {number|null} time to live for cache ms. 0 or null means the cache cannot expire
     * @param [removeExpiredCacheAutomatically=true] {boolean} when false, expired entries are kept in the cache
     *        and expiration is tracked manually via last-update timestamps (see 'getCachedOrWaitForCachedOrAcquireLock')
     * @param [maxCallAttemptsToWaitForAlreadyRunningRequest=100] {number} number of requests allowed to do waiting for
     *        result before we fail the original request. Use a custom value only if you need to change the attempts
     *        count and polling interval.
     * @param [timeoutBetweenAttemptsToCheckWhetherAlreadyRunningRequestFinished=1000] {number}
     *        timeout ms for polling for a result. If you change maxCallAttemptsToWaitForAlreadyRunningRequest
     *        then this parameter may also require a custom value.
     */
    constructor(bio, cache, cacheTtl, removeExpiredCacheAutomatically = true, maxCallAttemptsToWaitForAlreadyRunningRequest = 100, timeoutBetweenAttemptsToCheckWhetherAlreadyRunningRequestFinished = 1000) {
        if (cacheTtl != null && cacheTtl < timeoutBetweenAttemptsToCheckWhetherAlreadyRunningRequestFinished * 2) {
            /*
             * During the lifetime of this service e.g. if the data is being retrieved slowly we can get a
             * RACE CONDITION when we constantly retrieve data and during retrieval it expires, so we are trying
             * to retrieve it again and again.
             * We have a protection mechanism that we will wait no more than
             * maxCallAttemptsToWaitForAlreadyRunningRequest * timeoutBetweenAttemptsToCheckWhetherAlreadyRunningRequestFinished
             * but this additional check is aimed to reduce potential loading time for some requests.
             */
            throw new Error(`DEV: Wrong parameters passed to construct ${bio} - TTL ${cacheTtl} should be 2 times greater than ${timeoutBetweenAttemptsToCheckWhetherAlreadyRunningRequestFinished}`);
        }
        this._bio = bio;
        this._cache = cache;
        this._cacheTtlMs = cacheTtl != null ? cacheTtl : null;
        // Upper bound for the whole waiting loop in 'getCachedOrWaitForCachedOrAcquireLock'
        this._maxExecutionTimeMs = maxCallAttemptsToWaitForAlreadyRunningRequest * timeoutBetweenAttemptsToCheckWhetherAlreadyRunningRequestFinished;
        this._removeExpiredCacheAutomatically = removeExpiredCacheAutomatically;
        this._requestsManager = new ManagerOfRequestsToTheSameResource(bio, maxCallAttemptsToWaitForAlreadyRunningRequest, timeoutBetweenAttemptsToCheckWhetherAlreadyRunningRequestFinished);
    }

    /**
     * When using this service this is the major method you should call to get data by cache id.
     * This method checks whether there is cached data and either
     * - returns you a flag that you can start requesting data from the shared resource
     * - or, if there is already a started calculation, waits until it is finished (removed from this service)
     *   and returns you the retrieved data
     * - or just returns you the cached data
     *
     * 'canStartDataRetrieval' equal true means that the lock was acquired, and you should manually call 'saveCachedData'
     * if needed and then 'releaseLock' to mark this calculation as finished so other
     * requesters can take their share of the resource.
     *
     * @param cacheId {string}
     * @return {Promise<({
     *             canStartDataRetrieval: true,
     *             cachedData: any,
     *             lockId: string
     *         }|{
     *             canStartDataRetrieval: false,
     *             cachedData: any
     *         })>}
     */
    async getCachedOrWaitForCachedOrAcquireLock(cacheId) {
        try {
            var _cached2;
            const startedAtTimestamp = Date.now();
            let cached = this._cache.get(cacheId);
            let cachedDataBackupIsPresentButExpired = null;
            // Manual-expiration mode: entries stay in the cache past their TTL, so check the timestamp here
            if (cached != null && !this._removeExpiredCacheAutomatically) {
                const lastUpdateTimestamp = this._cache.getLastUpdateTimestamp(cacheId);
                if ((lastUpdateTimestamp != null ? lastUpdateTimestamp : 0) + this._cacheTtlMs < Date.now()) {
                    /*
                     * Here we are manually clearing the 'cached' value retrieved from cache to force the data loading.
                     * But we save its value first to the backup variable to be able to return this value if the ongoing
                     * requesting fails.
                     */
                    cachedDataBackupIsPresentButExpired = cached;
                    cached = null;
                }
            }
            let calculationId = null;
            let isRetrievedCacheExpired = true;
            let isWaitingForActiveCalculationSucceeded;
            let weStillHaveSomeTimeToProceedExecution = true;
            // Loop until we either acquire the lock, observe fresh cache, or run out of the allowed waiting time
            while (calculationId == null && cached == null && isRetrievedCacheExpired && weStillHaveSomeTimeToProceedExecution) {
                // A string result is a freshly acquired lock id; a boolean is the outcome of waiting for another requester
                const result = await this._requestsManager.startCalculationOrWaitForActiveToFinish(cacheId);
                calculationId = typeof result === "string" ? result : null;
                isWaitingForActiveCalculationSucceeded = typeof result === "boolean" ? result : null;
                cached = this._cache.get(cacheId);
                isRetrievedCacheExpired = isWaitingForActiveCalculationSucceeded && cached == null;
                weStillHaveSomeTimeToProceedExecution = Date.now() - startedAtTimestamp < this._maxExecutionTimeMs;
            }
            if (calculationId) {
                var _cached;
                return {
                    canStartDataRetrieval: true,
                    cachedData: (_cached = cached) != null ? _cached : cachedDataBackupIsPresentButExpired,
                    lockId: calculationId
                };
            }
            return {
                canStartDataRetrieval: false,
                cachedData: (_cached2 = cached) != null ? _cached2 : cachedDataBackupIsPresentButExpired
            };
        } catch (e) {
            improveAndRethrow(e, `${this._bio}.getCachedOrWaitForCachedOrAcquireLock`);
        }
    }

    /**
     * Returns just the current cache value for the given id.
     * Doesn't wait for the active calculation, doesn't acquire a lock, just retrieves the current cache as it is.
     *
     * @param cacheId {string}
     * @return {any}
     */
    getCached(cacheId) {
        try {
            return this._cache.get(cacheId);
        } catch (e) {
            improveAndRethrow(e, "getCached");
        }
    }

    // TTL passed to the cache: in manual-expiration mode we store without TTL and track expiration via timestamps
    _getTtl() {
        return this._removeExpiredCacheAutomatically ? this._cacheTtlMs : null;
    }

    /**
     * Directly acquires the lock regardless of cached data availability.
     * So if this method returns result === true you can start the data retrieval.
     *
     * @param cacheId {string}
     * @return {Promise<{ result: true, lockId: string }|{ result: false }>}
     */
    async acquireLock(cacheId) {
        try {
            return await this._requestsManager.acquireLock(cacheId);
        } catch (e) {
            improveAndRethrow(e, "acquireLock");
        }
    }

    /**
     * This method should be called only if you acquired a lock successfully.
     *
     * If the current lock id is not equal to the passed one the passed data will be ignored.
     * Or you can do the synchronous data merging on your side and pass
     * wasDataMergedSynchronouslyWithMostRecentCacheState=true so your data will be stored
     * regardless of the lockId.
     * WARNING: you should do this only if you are sure you perform the synchronous update.
     *
     * @param cacheId {string}
     * @param lockId {string}
     * @param data {any}
     * @param [sessionDependentData=true] {boolean}
     * @param [wasDataMergedSynchronouslyWithMostRecentCacheState=false] {boolean}
     */
    saveCachedData(cacheId, lockId, data, sessionDependentData = true, wasDataMergedSynchronouslyWithMostRecentCacheState = false) {
        try {
            if (wasDataMergedSynchronouslyWithMostRecentCacheState || this._requestsManager.isTheLockActiveOne(cacheId, lockId)) {
                /* We save passed data only if the <caller> has the currently acquired lockId.
                 * If the passed lockId is not the active one it means that other code cleared/stopped the lock
                 * acquired by the <caller> recently due to some urgent/higher-priority changes.
                 *
                 * But we allow the user to pass the 'wasDataMergedSynchronouslyWithMostRecentCacheState' flag
                 * that tells us that the user had taken the most recent cache value and merged his new data
                 * with that cached value (AFTER possibly performing async data retrieval). This means that we
                 * can ignore the fact that his lockId is no longer relevant and save the passed data
                 * as it is synchronously merged with the most recent cached data. (Synchronously merged means that
                 * a lost update cannot occur during the merge time as JS executes synchronous functions
                 * till the end).
                 */
                if (sessionDependentData) {
                    this._cache.putSessionDependentData(cacheId, data, this._getTtl());
                } else {
                    this._cache.put(cacheId, data, this._getTtl());
                }
            }
        } catch (e) {
            improveAndRethrow(e, `${this._bio}.saveCachedData`);
        }
    }

    /**
     * Should be called then and only then if you successfully acquired a lock with the lock id.
     *
     * @param cacheId {string}
     * @param lockId {string}
     */
    releaseLock(cacheId, lockId) {
        try {
            // Only the holder of the currently active lock may finish the calculation
            if (this._requestsManager.isTheLockActiveOne(cacheId, lockId)) {
                this._requestsManager.finishActiveCalculation(cacheId);
            }
        } catch (e) {
            improveAndRethrow(e, `${this._bio}.releaseLock`);
        }
    }

    /**
     * Actualizes currently present cached data by key. Applies the provided function to the cached data.
     *
     * @param cacheId {string} id of cache entry
     * @param synchronousCurrentCacheProcessor {function|null} synchronous function accepting a cache entry. Should return
     *        an object in the following format:
     *        {
     *            isModified: boolean,
     *            data: any
     *        }
     *        the flag signals whether data was changed during the processing or not
     * @param [sessionDependent=true] {boolean} whether to mark the cache entry as session-dependent
     */
    actualizeCachedData(cacheId, synchronousCurrentCacheProcessor, sessionDependent = true) {
        try {
            const cached = this._cache.get(cacheId);
            const result = synchronousCurrentCacheProcessor(cached);
            if (result != null && result.isModified && (result == null ? void 0 : result.data) != null) {
                if (sessionDependent) {
                    this._cache.putSessionDependentData(cacheId, result == null ? void 0 : result.data, this._getTtl());
                } else {
                    this._cache.put(cacheId, result == null ? void 0 : result.data, this._getTtl());
                }

                /* Here we call the lock releasing to ensure the currently active calculation will be ignored.
                 * This is needed to ensure no 'lost update'.
                 * A lost update can occur if we change data in this method and after that some calculation finishes
                 * having the earlier data as its base to calculate its data set result. And the earlier data
                 * has no changes applied inside this method, so we will lose them.
                 *
                 * This is not so good a solution: ideally, we should acquire a lock before performing any data updating.
                 * But the goal of this method is to provide an instant ability to update the cached data.
                 * And if we start acquiring the lock here the data update can be postponed significantly.
                 * And this kills the desired nature of this method.
                 * So we'd rather lose some data retrieval (meaning abusing the resource a bit) than lose
                 * the instant update expected after this method execution.
                 */
                this._requestsManager.finishActiveCalculation(cacheId);
            }
        } catch (e) {
            improveAndRethrow(e, `${this._bio}.actualizeCachedData`);
        }
    }

    // Removes the cache entry and aborts the active calculation for the exact key
    invalidate(key) {
        this._cache.invalidate(key);
        this._requestsManager.finishActiveCalculation(key);
    }

    // Removes all cache entries whose keys contain the given part and aborts their active calculations
    invalidateContaining(keyPart) {
        this._cache.invalidateContaining(keyPart);
        this._requestsManager.finishAllActiveCalculations(keyPart);
    }

    // Marks the entry as expired (so the next requester reloads it) but keeps the stale value available
    markAsExpiredButDontRemove(key) {
        if (this._removeExpiredCacheAutomatically) {
            this._cache.markCacheItemAsExpiredButDontRemove(key, this._cacheTtlMs);
        } else {
            // Manual mode: push the last-update timestamp just past the TTL so the expiration check above triggers
            this._cache.setLastUpdateTimestamp(key, Date.now() - this._cacheTtlMs - 1);
        }
        this._requestsManager.finishAllActiveCalculations(key);
    }
}
3353
+
3354
/**
 * Util class to control access to a resource when it can be called in parallel for the same result.
 * (E.g. getting today's coins-fiat rates from some API).
 */
class ManagerOfRequestsToTheSameResource {
    /**
     * @param bio {string} resource-related identifier for logging
     * @param [maxPollsCount=100] {number} max number of attempts to wait when waiting for a lock acquisition
     * @param [timeoutDuration=1000] {number} timeout ms between the polls for a lock acquisition
     */
    constructor(bio, maxPollsCount = 100, timeoutDuration = 1000) {
        this.bio = bio;
        this.maxPollsCount = maxPollsCount;
        this.timeoutDuration = timeoutDuration;
        // requestHash -> id of the currently active calculation (the acquired lock)
        this._activeCalculationsIds = new Map();
        // requestHash -> FIFO queue of ids waiting to be promoted to active by 'finishActiveCalculation'
        this._nextCalculationIds = new Map();
    }

    /**
     * If there is no active calculation just creates a uuid and returns it.
     * If there is an active calculation waits until it is removed from the active calculations map.
     *
     * @param requestHash {string}
     * @return {Promise<string|boolean|null>} returns uuid of new active calculation, or true if waiting for the active
     *         calculation succeeded, or false if max attempts count exceeded, or null on unexpected error
     */
    async startCalculationOrWaitForActiveToFinish(requestHash) {
        try {
            const activeCalculationIdForHash = this._activeCalculationsIds.get(requestHash);
            if (activeCalculationIdForHash == null) {
                const id = v4();
                this._activeCalculationsIds.set(requestHash, id);
                return id;
            }
            return await this._waitForCalculationIdToFinish(requestHash, activeCalculationIdForHash, 0);
        } catch (e) {
            // Deliberately best-effort: log and fall through to 'null' so the caller's loop can retry
            Logger.logError(e, `startCalculationOrWaitForActiveToFinish_${this.bio}`);
        }
        return null;
    }

    /**
     * Acquires a lock to the resource by the provided hash.
     *
     * @param requestHash {string}
     * @return {Promise<{ result: true, lockId: string }|{ result: false }>} result is true if the lock is successfully
     *         acquired, false if the max allowed time to wait for acquisition expired or any unexpected error occurs
     *         during the waiting.
     */
    async acquireLock(requestHash) {
        try {
            const activeId = this._activeCalculationsIds.get(requestHash);
            const nextId = v4();
            if (activeId == null) {
                // Nobody holds the lock - acquire it immediately
                this._activeCalculationsIds.set(requestHash, nextId);
                return { result: true, lockId: nextId };
            }
            // Somebody holds the lock - enqueue our id and wait for it to be promoted to active
            const queue = this._nextCalculationIds.get(requestHash);
            const currentNext = queue != null ? queue : [];
            currentNext.push(nextId);
            this._nextCalculationIds.set(requestHash, currentNext);
            const waitingResult = await this._waitForCalculationIdToFinish(requestHash, activeId, 0, nextId);
            if (!waitingResult) {
                /* BUG FIX: on timeout the original left 'nextId' queued, so it could later be promoted
                 * to active with no owner to ever release it, blocking the resource perpetually.
                 * We remove our id from the queue; and if we were promoted exactly while timing out,
                 * release immediately because the caller is told the lock was NOT acquired. */
                const staleQueue = this._nextCalculationIds.get(requestHash);
                if (staleQueue != null) {
                    this._nextCalculationIds.set(requestHash, staleQueue.filter(id => id !== nextId));
                }
                if (this._activeCalculationsIds.get(requestHash) === nextId) {
                    this.finishActiveCalculation(requestHash);
                }
                return { result: false, lockId: undefined };
            }
            return { result: true, lockId: nextId };
        } catch (e) {
            improveAndRethrow(e, "acquireLock");
        }
    }

    /**
     * Clears the active calculation id and promotes the first queued waiter (if any) to active.
     * WARNING: if you forget to call this method the start* one will perform maxPollsCount attempts before finishing.
     *
     * @param requestHash {string} hash of request. Helps to distinguish requests for the same resource but
     *        having different request parameters, and hold a dedicated calculation id per this hash
     */
    finishActiveCalculation(requestHash = "default") {
        try {
            this._activeCalculationsIds.delete(requestHash);
            const queue = this._nextCalculationIds.get(requestHash);
            const next = queue != null ? queue : [];
            if (next.length) {
                // Hand the lock over to the first waiter (FIFO) and drop it from the queue
                this._activeCalculationsIds.set(requestHash, next[0]);
                this._nextCalculationIds.set(requestHash, next.slice(1));
            }
        } catch (e) {
            improveAndRethrow(e, "finishActiveCalculation");
        }
    }

    /**
     * Finishes all active calculations whose hash contains the given key part.
     *
     * @param [keyPart=""] {string} plain substring to match against hashes; empty string matches all
     */
    finishAllActiveCalculations(keyPart = "") {
        try {
            Array.from(this._activeCalculationsIds.keys()).forEach(hash => {
                /* BUG FIX: the original built 'new RegExp(keyPart)' from a raw cache key, so regex
                 * metacharacters inside keys ('.', '(', '$', ...) could match wrong hashes or throw.
                 * Plain substring matching is the intended "containing" semantics (and '' still matches all). */
                if (typeof hash === "string" && hash.includes(keyPart)) {
                    this.finishActiveCalculation(hash);
                }
            });
        } catch (e) {
            improveAndRethrow(e, "finishAllActiveCalculations");
        }
    }

    /**
     * @param requestHash {string}
     * @param lockId {string}
     * @return {boolean} true if the given lockId is the currently active calculation for the hash
     */
    isTheLockActiveOne(requestHash, lockId) {
        try {
            return this._activeCalculationsIds.get(requestHash) === lockId;
        } catch (e) {
            improveAndRethrow(e, "isTheLockActiveOne");
        }
    }

    /**
     * @param requestHash {string}
     * @param activeCalculationId {string|null}
     * @param [attemptIndex=0] {number}
     * @param [waitForCalculationId=null] {string|null} if you want to wait for an exact id to appear as active then pass this parameter
     * @return {Promise<boolean>} true
     *         - if the given calculation id is no longer the active one
     *         - or the active id equals waitForCalculationId
     *         false
     *         - if the waiting period exceeds the max allowed waiting time or an unexpected error occurs
     * @private
     */
    async _waitForCalculationIdToFinish(requestHash, activeCalculationId, attemptIndex = 0, waitForCalculationId = null) {
        try {
            if (attemptIndex + 1 > this.maxPollsCount) {
                // Max number of polls for active calculation id change is achieved. So we return false.
                return false;
            }
            const currentId = this._activeCalculationsIds.get(requestHash);
            if (waitForCalculationId == null ? currentId !== activeCalculationId : currentId === waitForCalculationId) {
                /* We return true depending on the usage of this function:
                 * 1. if there is a calculation id that we should wait for to become active then we return true only
                 *    if this id becomes the active one.
                 *
                 *    Theoretically we can fail to wait for the desired calculation id. This can be caused by wrong use of
                 *    this service or by any other mistakes/errors. But this waiting function will return false anyway if
                 *    the number of polls done exceeds the max allowed.
                 *
                 * 2. if we just wait for the currently active calculation id to be finished then we return true
                 *    when we notice that the current active id differs from the original passed into this function.
                 */
                return true;
            } else {
                /* The original calculation id is still the active one, so we are scheduling a new attempt to check
                 * whether the active calculation id changed or not in timeoutDuration milliseconds.
                 */
                const it = this;
                return new Promise((resolve, reject) => {
                    setTimeout(function () {
                        try {
                            /* BUG FIX: the original recursive call dropped 'waitForCalculationId', so after the
                             * first poll an 'acquireLock' waiter degraded to "any id change" mode and could report
                             * success with a lock id that never became the active one. Propagate it through. */
                            resolve(it._waitForCalculationIdToFinish(requestHash, activeCalculationId, attemptIndex + 1, waitForCalculationId));
                        } catch (e) {
                            reject(e);
                        }
                    }, this.timeoutDuration);
                });
            }
        } catch (e) {
            Logger.logError(e, "_waitForCalculationIdToFinish", "Failed to wait for active calculation id change.");
            return false;
        }
    }
}
3310
3525
 
3311
3526
  /**
3312
3527
  * Extended edit of RobustExternalApiCallerService supporting cache and management of concurrent requests
@@ -3402,244 +3617,93 @@ class CachedRobustExternalApiCallerService {
3402
3617
  }
3403
3618
  }
3404
3619
 
3405
- /**
3406
- * Utils class needed to perform cancelling of axios request inside some process.
3407
- * Provides cancel state and axios token for HTTP requests
3408
- */
3409
- class CancelProcessing {
3620
+ class BigdatacloudIpAddressProvider extends ExternalApiProvider {
3410
3621
  constructor() {
3411
- this._cancelToken = axios.CancelToken.source();
3412
- this._isCanceled = false;
3413
- }
3414
- cancel() {
3415
- this._isCanceled = true;
3416
- this._cancelToken.cancel();
3417
- }
3418
- isCanceled() {
3419
- return this._isCanceled;
3420
- }
3421
- getToken() {
3422
- return this._cancelToken.token;
3622
+ super("https://api.bigdatacloud.net/data/client-ip", "get", 15000, ApiGroups.BIGDATACLOUD);
3423
3623
  }
3424
- static instance() {
3425
- return new CancelProcessing();
3624
+ getDataByResponse(response, params = [], subRequestIndex = 0, iterationsData = []) {
3625
+ var _response$data;
3626
+ return (response == null ? void 0 : response.data) && ((_response$data = response.data) == null ? void 0 : _response$data.ipString);
3426
3627
  }
3427
3628
  }
3428
-
3429
- class ExternalApiProvider {
3430
- /**
3431
- * Creates an instance of external api provider.
3432
- *
3433
- * If you need sub-request then use 'subRequestIndex' to check current request index in functions below.
3434
- * Also use array for 'httpMethod'.
3435
- *
3436
- * If the endpoint of dedicated provider has pagination then you should customize the behavior using
3437
- * "changeQueryParametersForPageNumber", "checkWhetherResponseIsForLastPage".
3438
- *
3439
- * We perform RPS counting all over the App to avoid blocking our clients due to abuses of the providers.
3440
- *
3441
- * @param endpoint {string} URL to the provider's endpoint. Note: you can customize it using composeQueryString
3442
- * @param [httpMethod] {string|string[]} one of "get", "post", "put", "patch", "delete" or an array of these values
3443
- * for request having sub-requests
3444
- * @param [timeout] {number} number of milliseconds to wait for the response
3445
- * @param [apiGroup] {ApiGroup} singleton object containing parameters of API group. Helpful when you use the same
3446
- * api for different providers to avoid hardcoding RPS inside each provider what can cause mistakes
3447
- * @param [specificHeaders] {Object} contains specific keys (headers) and values (their content) if needed for this provider
3448
- * @param [maxPageLength] {number} optional number of items per page if the request supports pagination
3449
- */
3450
- constructor(endpoint, httpMethod, timeout, apiGroup, specificHeaders = {}, maxPageLength = Number.MAX_SAFE_INTEGER) {
3451
- this.endpoint = endpoint;
3452
- this.httpMethod = httpMethod != null ? httpMethod : "get";
3453
- // TODO: [refactoring, critical] We have two timeouts for robust data retrieval - here and inside the service method call, need to remain the only
3454
- this.timeout = timeout != null ? timeout : 10000;
3455
- // TODO: [refactoring, critical] We need single place for all RPSes as we use them as hardcoded constants now inside different services
3456
- this.apiGroup = apiGroup;
3457
- this.maxPageLength = maxPageLength != null ? maxPageLength : Number.MAX_SAFE_INTEGER;
3458
- this.niceFactor = 1;
3459
- this.specificHeaders = specificHeaders != null ? specificHeaders : {};
3460
- }
3461
- getRps() {
3462
- var _this$apiGroup$rps;
3463
- return (_this$apiGroup$rps = this.apiGroup.rps) != null ? _this$apiGroup$rps : 2;
3464
- }
3465
- isRpsExceeded() {
3466
- return this.apiGroup.isRpsExceeded();
3629
/**
 * Provider retrieving the client's public IP address from trackip.net.
 */
class TrackipIpAddressProvider extends ExternalApiProvider {
    constructor() {
        super("https://www.trackip.net/ip", "get", 15000, ApiGroups.TRACKIP);
    }

    // The whole response body is the IP address value (plain text endpoint)
    getDataByResponse(response, params = [], subRequestIndex = 0, iterationsData = []) {
        return response == null ? void 0 : response.data;
    }
}
3637
/**
 * Provider retrieving the client's public IP address from the IPv6-capable ipify endpoint.
 * Shares the IPIFY ApiGroup (and thus the RPS budget) with IpifyIpAddressProvider.
 */
class IpifyV6IpAddressProvider extends ExternalApiProvider {
    constructor() {
        super("https://api6.ipify.org/?format=json", "get", 15000, ApiGroups.IPIFY);
    }

    // Extracts the 'ip' field from the JSON body; yields a falsy value when the body is absent
    getDataByResponse(response, params = [], subRequestIndex = 0, iterationsData = []) {
        var _response$data2;
        return (response == null ? void 0 : response.data) && ((_response$data2 = response.data) == null ? void 0 : _response$data2.ip);
    }
}
3646
/**
 * Provider retrieving the client's public IP address from the default ipify endpoint.
 * Shares the IPIFY ApiGroup (and thus the RPS budget) with IpifyV6IpAddressProvider.
 */
class IpifyIpAddressProvider extends ExternalApiProvider {
    constructor() {
        super("https://api.ipify.org/?format=json", "get", 15000, ApiGroups.IPIFY);
    }

    // Extracts the 'ip' field from the JSON body; yields a falsy value when the body is absent
    getDataByResponse(response, params = [], subRequestIndex = 0, iterationsData = []) {
        var _response$data3;
        return (response == null ? void 0 : response.data) && ((_response$data3 = response.data) == null ? void 0 : _response$data3.ip);
    }
}
3655
/**
 * Provider retrieving the client's public IP address from whatismyipaddress.com.
 * NOTE(review): plain 'http://' endpoint - the response can be tampered with in transit;
 * confirm this is acceptable for how the IP is used downstream.
 */
class WhatismyipaddressIpAddressProvider extends ExternalApiProvider {
    constructor() {
        super("http://bot.whatismyipaddress.com/", "get", 15000, ApiGroups.WHATISMYIPADDRESS);
    }

    // The whole response body is the IP address value (plain text endpoint)
    getDataByResponse(response, params = [], subRequestIndex = 0, iterationsData = []) {
        return response == null ? void 0 : response.data;
    }
}
3663
/**
 * Resolves the client's public IP address using a set of free external services.
 */
class IpAddressProvider {
    /**
     * Returns the current public IP address identified by one of the external services.
     *
     * It is easier than manual identification and also (as the IP is needed for the server side to check it)
     * it saves us from issues related to changes of infrastructure configurations (like adding proxies etc.),
     * so we don't need to configure anything on the server side to get the correct client's IP.
     *
     * @returns {Promise<String>} IP address
     * @throws {Error} if fails to retrieve IP address from all the services
     */
    static async getClientIpAddress() {
        try {
            // 7000 is presumably the per-call timeout/TTL ms - confirm against CachedRobustExternalApiCallerService
            return await this.externalIPAddressAPICaller.callExternalAPICached([], 7000);
        } catch (e) {
            improveAndRethrow(e, "getClientIpAddress");
        }
    }
}
// Shared caller trying the providers one by one robustly and caching the result for 300000 ms (5 min)
IpAddressProvider.externalIPAddressAPICaller = new CachedRobustExternalApiCallerService("externalIPAddressAPICaller", new Cache(EventBusInstance), [new BigdatacloudIpAddressProvider(), new TrackipIpAddressProvider(), new IpifyV6IpAddressProvider(), new IpifyIpAddressProvider(), new WhatismyipaddressIpAddressProvider()], 300000);
3575
3683
 
3576
3684
  /**
3577
- * Models a group of APIs provided by the same owner and used for different services in our app.
3578
- * It means we need to mention RPS several times for each usage and also have some holder of last call timestamp per
3579
- * api group. So this concept allows to use it for exact ExternalApiProvider and make sure that you use the same
3580
- * RPS value and make decisions on base of the same timestamp of last call to the API group owner.
3685
+ * Utils class needed to perform cancelling of axios request inside some process.
3686
+ * Provides cancel state and axios token for HTTP requests
3581
3687
  */
3582
- class ApiGroup {
3583
- constructor(id, rps, backendProxyIdGenerator = null) {
3584
- this.id = id;
3585
- this.rps = rps;
3586
- this.lastCalledTimestamp = null;
3587
- this.backendProxyIdGenerator = backendProxyIdGenerator;
3688
+ class CancelProcessing {
3689
+ constructor() {
3690
+ this._cancelToken = axios.CancelToken.source();
3691
+ this._isCanceled = false;
3588
3692
  }
3589
- isRpsExceeded() {
3590
- var _this$lastCalledTimes;
3591
- return ((_this$lastCalledTimes = this.lastCalledTimestamp) != null ? _this$lastCalledTimes : 0) + Math.floor(1000 / this.rps) > Date.now();
3693
+ cancel() {
3694
+ this._isCanceled = true;
3695
+ this._cancelToken.cancel();
3592
3696
  }
3593
- actualizeLastCalledTimestamp() {
3594
- this.lastCalledTimestamp = Date.now();
3697
+ isCanceled() {
3698
+ return this._isCanceled;
3699
+ }
3700
+ getToken() {
3701
+ return this._cancelToken.token;
3702
+ }
3703
+ static instance() {
3704
+ return new CancelProcessing();
3595
3705
  }
3596
3706
  }
3597
- const ApiGroups = {
3598
- /**
3599
- * Currently we use free version of etherscan provider with 0.2 RPS. But we have API key with 100k requests free
3600
- * per month. So we can add it if not enough current RPS.
3601
- */
3602
- ETHERSCAN: new ApiGroup("etherscan", 0.17),
3603
- // Actually 0.2 but fails sometime, so we use smaller
3604
- ALCHEMY: new ApiGroup("alchemy", 0.3, networkKey => `alchemy-${networkKey}`),
3605
- BLOCKSTREAM: new ApiGroup("blockstream", 0.2),
3606
- BLOCKCHAIN_INFO: new ApiGroup("blockchain.info", 1),
3607
- BLOCKNATIVE: new ApiGroup("blocknative", 0.5),
3608
- ETHGASSTATION: new ApiGroup("ethgasstation", 0.5),
3609
- TRONGRID: new ApiGroup("trongrid", 0.3, networkKey => `trongrid-${networkKey}`),
3610
- TRONSCAN: new ApiGroup("tronscan", 0.3),
3611
- GETBLOCK: new ApiGroup("getblock", 0.3),
3612
- COINCAP: new ApiGroup("coincap", 0.5),
3613
- // 200 per minute without API key
3614
- COINGECKO: new ApiGroup("coingecko", 0.9),
3615
- // actually 0.13-0.5 according to the docs but we use smaller due to expirienced frequent abuses
3616
- MESSARI: new ApiGroup("messari", 0.2),
3617
- BTCCOM: new ApiGroup("btccom", 0.2),
3618
- BITAPS: new ApiGroup("bitaps", 0.25),
3619
- // Docs say that RPS is 3 but using it causes frequent 429 HTTP errors
3620
- CEX: new ApiGroup("cex", 0.5),
3621
- // Just assumption for RPS
3622
- BIGDATACLOUD: new ApiGroup("bigdatacloud", 1),
3623
- // Just assumption for RPS
3624
- TRACKIP: new ApiGroup("trackip", 1),
3625
- // Just assumption for RPS
3626
- IPIFY: new ApiGroup("ipify", 1),
3627
- // Just assumption for RPS
3628
- WHATISMYIPADDRESS: new ApiGroup("whatismyipaddress", 1),
3629
- // Just assumption for RPS
3630
- EXCHANGERATE: new ApiGroup("exchangerate", 1),
3631
- // Just assumption for RPS
3632
- FRANKFURTER: new ApiGroup("frankfurter", 1),
3633
- // Just assumption for RPS
3634
- BITGO: new ApiGroup("bitgo", 1),
3635
- // Just assumption for RPS
3636
- BITCOINER: new ApiGroup("bitcoiner", 1),
3637
- // Just assumption for RPS
3638
- BITCORE: new ApiGroup("bitcore", 1),
3639
- // Just assumption for RPS
3640
- // BLOCKCHAIR: new ApiGroup("blockchair", 0.04), // this provider require API key for commercial use (10usd 10000 reqs), we will add it later
3641
- MEMPOOL: new ApiGroup("mempool", 0.2) // Just assumption for RPS
3642
- };
3643
3707
 
3644
3708
  class ExistingSwap {
3645
3709
  /**
@@ -5108,5 +5172,5 @@ PublicSwapService.PUBLIC_SWAP_DETAILS_FAIL_REASONS = {
5108
5172
  };
5109
5173
  PublicSwapService._fiatDecimalsCount = FiatCurrenciesService.getCurrencyDecimalCountByCode("USD");
5110
5174
 
5111
- export { AmountUtils, ApiGroup, ApiGroups, AssetIcon, AxiosAdapter, BaseSwapCreationInfo, Blockchain, Button, Cache, CacheAndConcurrentRequestsResolver, CachedRobustExternalApiCallerService, CancelProcessing, Coin, ConcurrentCalculationsMetadataHolder, EmailsApi, ExistingSwap, ExistingSwapWithFiatData, ExternalApiProvider, FiatCurrenciesService, LoadingDots, Logger, LogsStorage, Protocol, PublicSwapService, RobustExternalAPICallerService, SupportChat, SwapProvider, SwapUtils, SwapspaceSwapProvider, getQueryParameterSingleValue, getQueryParameterValues, handleClickOutside, improveAndRethrow, logErrorOrOutputToConsole, postponeExecution, removeQueryParameterAndValues, safeStringify, saveQueryParameterAndValues, useCallHandlingErrors, useReferredState };
5175
+ export { AmountUtils, ApiGroup, ApiGroups, AssetIcon, AxiosAdapter, BaseSwapCreationInfo, Blockchain, Button, Cache, CacheAndConcurrentRequestsResolver, CachedRobustExternalApiCallerService, CancelProcessing, Coin, ConcurrentCalculationsMetadataHolder, EmailsApi, ExistingSwap, ExistingSwapWithFiatData, ExternalApiProvider, FiatCurrenciesService, IpAddressProvider, LoadingDots, Logger, LogsStorage, Protocol, PublicSwapService, RobustExternalAPICallerService, SupportChat, SwapProvider, SwapUtils, SwapspaceSwapProvider, getQueryParameterSingleValue, getQueryParameterValues, handleClickOutside, improveAndRethrow, logErrorOrOutputToConsole, postponeExecution, removeQueryParameterAndValues, safeStringify, saveQueryParameterAndValues, useCallHandlingErrors, useReferredState };
5112
5176
  //# sourceMappingURL=index.modern.js.map