io-complyance-unify-sdk 3.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/CHANGELOG.md +26 -0
- data/README.md +595 -0
- data/lib/complyance/circuit_breaker.rb +99 -0
- data/lib/complyance/persistent_queue_manager.rb +474 -0
- data/lib/complyance/retry_strategy.rb +198 -0
- data/lib/complyance_sdk/config/retry_config.rb +127 -0
- data/lib/complyance_sdk/config/sdk_config.rb +212 -0
- data/lib/complyance_sdk/exceptions/circuit_breaker_open_error.rb +14 -0
- data/lib/complyance_sdk/exceptions/sdk_exception.rb +93 -0
- data/lib/complyance_sdk/generators/config_generator.rb +67 -0
- data/lib/complyance_sdk/generators/install_generator.rb +22 -0
- data/lib/complyance_sdk/generators/templates/complyance_initializer.rb +36 -0
- data/lib/complyance_sdk/http/authentication_middleware.rb +43 -0
- data/lib/complyance_sdk/http/client.rb +223 -0
- data/lib/complyance_sdk/http/logging_middleware.rb +153 -0
- data/lib/complyance_sdk/jobs/base_job.rb +63 -0
- data/lib/complyance_sdk/jobs/process_document_job.rb +92 -0
- data/lib/complyance_sdk/jobs/sidekiq_job.rb +165 -0
- data/lib/complyance_sdk/middleware/rack_middleware.rb +39 -0
- data/lib/complyance_sdk/models/country.rb +205 -0
- data/lib/complyance_sdk/models/country_policy_registry.rb +159 -0
- data/lib/complyance_sdk/models/document_type.rb +52 -0
- data/lib/complyance_sdk/models/environment.rb +144 -0
- data/lib/complyance_sdk/models/logical_doc_type.rb +228 -0
- data/lib/complyance_sdk/models/mode.rb +47 -0
- data/lib/complyance_sdk/models/operation.rb +47 -0
- data/lib/complyance_sdk/models/policy_result.rb +145 -0
- data/lib/complyance_sdk/models/purpose.rb +52 -0
- data/lib/complyance_sdk/models/source.rb +104 -0
- data/lib/complyance_sdk/models/source_ref.rb +130 -0
- data/lib/complyance_sdk/models/unify_request.rb +208 -0
- data/lib/complyance_sdk/models/unify_response.rb +198 -0
- data/lib/complyance_sdk/queue/persistent_queue_manager.rb +609 -0
- data/lib/complyance_sdk/railtie.rb +29 -0
- data/lib/complyance_sdk/retry/circuit_breaker.rb +159 -0
- data/lib/complyance_sdk/retry/retry_manager.rb +108 -0
- data/lib/complyance_sdk/retry/retry_strategy.rb +225 -0
- data/lib/complyance_sdk/version.rb +5 -0
- data/lib/complyance_sdk.rb +935 -0
- metadata +322 -0
|
@@ -0,0 +1,159 @@
|
|
|
1
|
+
# frozen_string_literal: true

module ComplyanceSDK
  module Retry
    # Circuit breaker pattern implementation
    # Prevents cascading failures by opening the circuit when too many failures occur
    class CircuitBreaker
      # Circuit breaker states
      module State
        CLOSED = :closed
        OPEN = :open
        HALF_OPEN = :half_open
      end

      # Default tuning values; any of these may be overridden through the
      # constructor's config hash.
      DEFAULTS = {
        failure_threshold: 5,
        timeout_seconds: 60,
        success_threshold: 1
      }.freeze

      attr_reader :state, :failure_count, :last_failure_time, :config

      # Initialize a new circuit breaker
      #
      # @param config [Hash] Circuit breaker configuration
      # @option config [Integer] :failure_threshold Number of failures before opening (default: 5)
      # @option config [Integer] :timeout_seconds Timeout before attempting reset (default: 60)
      # @option config [Integer] :success_threshold Successes needed to close from half-open (default: 1)
      def initialize(config = {})
        @config = DEFAULTS.merge(config)
        @state = State::CLOSED
        @failure_count = 0
        @success_count = 0
        @last_failure_time = nil
        @lock = Mutex.new
      end

      # Execute a block with circuit breaker protection
      #
      # @yield The block to execute
      # @return The result of the block
      # @raise [ComplyanceSDK::Exceptions::CircuitBreakerOpenError] If circuit is open
      def execute
        @lock.synchronize { admit_or_raise! }

        begin
          result = yield
        rescue => e
          record_failure
          raise e
        end

        record_success
        result
      end

      # Check if circuit breaker is open
      #
      # @return [Boolean] True if open
      def open?
        @state == State::OPEN
      end

      # Check if circuit breaker is closed
      #
      # @return [Boolean] True if closed
      def closed?
        @state == State::CLOSED
      end

      # Check if circuit breaker is half-open
      #
      # @return [Boolean] True if half-open
      def half_open?
        @state == State::HALF_OPEN
      end

      # Get circuit breaker statistics
      #
      # @return [Hash] Statistics hash
      def stats
        {
          state: @state,
          failure_count: @failure_count,
          last_failure_time: @last_failure_time,
          success_count: @success_count,
          config: @config
        }
      end

      # Reset the circuit breaker to closed state
      def reset!
        @lock.synchronize do
          @state = State::CLOSED
          @failure_count = 0
          @success_count = 0
          @last_failure_time = nil
        end
      end

      private

      # Must be called while holding @lock. Moves OPEN -> HALF_OPEN once the
      # reset timeout has elapsed, or rejects the call while still open.
      def admit_or_raise!
        return unless @state == State::OPEN

        unless should_attempt_reset?
          raise ComplyanceSDK::Exceptions::CircuitBreakerOpenError.new(
            'Circuit breaker is open',
            context: {
              failure_count: @failure_count,
              last_failure_time: @last_failure_time,
              timeout_seconds: @config[:timeout_seconds]
            }
          )
        end

        @state = State::HALF_OPEN
        @success_count = 0
      end

      # Handle successful execution
      def record_success
        @lock.synchronize do
          if @state == State::HALF_OPEN
            @success_count += 1
            close_circuit if @success_count >= @config[:success_threshold]
          elsif @state == State::CLOSED && @failure_count > 0
            # A success in the closed state wipes any accumulated failures.
            @failure_count = 0
          end
        end
      end

      # Handle failed execution. Note: failure_count is intentionally not reset
      # on OPEN -> HALF_OPEN, so a single failure while half-open keeps the
      # count at/above the threshold and re-opens the circuit.
      def record_failure
        @lock.synchronize do
          @failure_count += 1
          @last_failure_time = Time.now
          @state = State::OPEN if @failure_count >= @config[:failure_threshold]
        end
      end

      # Must be called while holding @lock. Returns to CLOSED and clears all
      # counters.
      def close_circuit
        @state = State::CLOSED
        @failure_count = 0
        @success_count = 0
        @last_failure_time = nil
      end

      # Check if circuit breaker should attempt to reset
      #
      # @return [Boolean] True if should attempt reset
      def should_attempt_reset?
        !@last_failure_time.nil? &&
          (Time.now - @last_failure_time) >= @config[:timeout_seconds]
      end
    end
  end
end
|
|
@@ -0,0 +1,108 @@
|
|
|
1
|
+
# frozen_string_literal: true

require_relative "circuit_breaker"
require_relative "retry_strategy"

module ComplyanceSDK
  module Retry
    # Manager class that coordinates retry strategies and circuit breakers.
    # Keeps one in-process CircuitBreaker per operation name.
    class RetryManager
      # Initialize a new retry manager
      #
      # @param config [ComplyanceSDK::Config::SDKConfig] The SDK configuration
      # @param redis_config [Hash] Redis configuration (kept for API
      #   compatibility; the in-process CircuitBreaker in this directory does
      #   not use Redis)
      def initialize(config, redis_config = {})
        @config = config
        @redis_config = redis_config
        @circuit_breakers = {}
      end

      # Execute a block with retry logic and circuit breaker protection
      #
      # @param operation_name [String] Name of the operation for circuit breaker
      # @param context [Hash] Context information for logging/debugging
      # @yield The block to execute
      # @return The result of the block
      def execute(operation_name, context = {})
        circuit_breaker = get_circuit_breaker(operation_name)
        retry_strategy = RetryStrategy.new(@config.retry_config, circuit_breaker)

        # BUG FIX: RetryStrategy#execute takes (operation_name, context); the
        # previous code passed the merged context hash as the operation name,
        # leaving the context argument empty.
        retry_strategy.execute(operation_name, context.merge(operation: operation_name)) do
          yield
        end
      end

      # Execute a block asynchronously using background jobs
      #
      # @param request_data [Hash] The request data
      # @param job_type [Symbol] The job type (:active_job or :sidekiq)
      # @param callback_url [String, nil] Optional callback URL
      # @param callback_headers [Hash] Optional callback headers
      # @return [String, Object] Job ID or job object
      # @raise [ComplyanceSDK::Exceptions::ConfigurationError] if the requested
      #   job backend is not loaded
      # @raise [ArgumentError] if job_type is not :active_job or :sidekiq
      def execute_async(request_data, job_type: :active_job, callback_url: nil, callback_headers: {})
        case job_type
        when :active_job
          if defined?(ActiveJob)
            require_relative "../jobs/process_document_job"
            job = Jobs::ProcessDocumentJob.perform_later(request_data, callback_url, callback_headers)
            job.job_id
          else
            raise ComplyanceSDK::Exceptions::ConfigurationError.new(
              "ActiveJob is not available. Please add 'activejob' to your Gemfile."
            )
          end
        when :sidekiq
          if defined?(Sidekiq)
            require_relative "../jobs/sidekiq_job"
            Jobs::SidekiqJob.perform_async(request_data, callback_url, callback_headers)
          else
            raise ComplyanceSDK::Exceptions::ConfigurationError.new(
              "Sidekiq is not available. Please add 'sidekiq' to your Gemfile."
            )
          end
        else
          raise ArgumentError, "Invalid job_type: #{job_type}. Must be :active_job or :sidekiq"
        end
      end

      # Get circuit breaker status for an operation
      #
      # @param operation_name [String] Name of the operation
      # @return [Hash] Circuit breaker status information
      def circuit_breaker_status(operation_name)
        circuit_breaker = get_circuit_breaker(operation_name)

        {
          name: operation_name,
          # BUG FIX: the sibling CircuitBreaker exposes #state, not
          # #current_state.
          state: circuit_breaker.state,
          failure_count: circuit_breaker.failure_count,
          last_failure_time: circuit_breaker.last_failure_time,
          # The in-process CircuitBreaker has no disabled mode; the key is kept
          # so the hash shape stays backward compatible for callers.
          disabled: false
        }
      end

      # Reset a circuit breaker
      #
      # @param operation_name [String] Name of the operation
      def reset_circuit_breaker(operation_name)
        circuit_breaker = get_circuit_breaker(operation_name)
        circuit_breaker.reset!
      end

      # Reset all circuit breakers
      def reset_all_circuit_breakers
        @circuit_breakers.each_value(&:reset!)
      end

      private

      # Lazily build one circuit breaker per operation name.
      def get_circuit_breaker(operation_name)
        # BUG FIX: CircuitBreaker#initialize accepts a single options hash, not
        # (name, retry_config, redis_config) positional arguments — the old
        # call raised ArgumentError.
        @circuit_breakers[operation_name] ||= CircuitBreaker.new(circuit_breaker_options)
      end

      # Map the SDK retry configuration onto CircuitBreaker options. The exact
      # accessor names on RetryConfig are not visible from this file, so each
      # one is probed with respond_to? and CircuitBreaker defaults apply when
      # an accessor is absent — TODO confirm against config/retry_config.rb.
      def circuit_breaker_options
        retry_config = @config.retry_config
        options = {}
        options[:failure_threshold] = retry_config.failure_threshold if retry_config.respond_to?(:failure_threshold)
        options[:timeout_seconds] = retry_config.timeout_seconds if retry_config.respond_to?(:timeout_seconds)
        options[:success_threshold] = retry_config.success_threshold if retry_config.respond_to?(:success_threshold)
        options
      end
    end
  end
end
|
|
@@ -0,0 +1,225 @@
|
|
|
1
|
+
# frozen_string_literal: true

require 'logger'
# BUG FIX: Timeout::Error is referenced in retryable_error_class? but this file
# never loaded the stdlib that defines it.
require 'timeout'

module ComplyanceSDK
  module Retry
    # Advanced retry strategy with exponential backoff, jitter, and circuit breaker
    # Ruby equivalent of the Java RetryStrategy
    class RetryStrategy
      attr_reader :config, :circuit_breaker, :logger

      # Initialize a new retry strategy
      #
      # @param config [ComplyanceSDK::Config::RetryConfig] The retry configuration
      #   (read here via #max_attempts, #base_delay, #backoff_multiplier,
      #   #jitter_factor, #max_delay, #retryable_error_codes, #retryable_http_codes)
      # @param circuit_breaker [CircuitBreaker, nil] Optional circuit breaker
      # @param logger [Logger, nil] Optional logger instance
      def initialize(config, circuit_breaker = nil, logger = nil)
        @config = config
        @circuit_breaker = circuit_breaker
        @logger = logger || Logger.new(STDOUT)
      end

      # Execute a block with retry logic
      #
      # SDK exceptions are retried (with backoff) when should_retry? says so;
      # any other StandardError is wrapped in an SDKException and raised
      # immediately without retry.
      #
      # @param operation_name [String] Name of the operation for logging
      # @param context [Hash] Context information for logging/debugging
      # @yield The block to execute
      # @return The result of the block
      # @raise [ComplyanceSDK::Exceptions::SDKException] on non-retryable
      #   failure, unexpected failure, or once retries are exhausted
      def execute(operation_name = 'operation', context = {})
        attempt = 1
        last_exception = nil

        while attempt <= @config.max_attempts
          begin
            @logger.debug("Attempting operation '#{operation_name}' (attempt #{attempt}/#{@config.max_attempts})")

            result = if @circuit_breaker
                       @circuit_breaker.execute { yield }
                     else
                       yield
                     end

            if attempt > 1
              @logger.info("Operation '#{operation_name}' succeeded on attempt #{attempt}")
            end

            return result
          rescue ComplyanceSDK::Exceptions::SDKException => e
            last_exception = e

            # Check if this error should be retried
            unless should_retry?(e, attempt)
              @logger.debug("Operation '#{operation_name}' failed with non-retryable error: #{e.message}")
              raise e
            end

            # If this was the last attempt, don't wait
            break if attempt >= @config.max_attempts

            # Calculate delay and wait
            delay = calculate_delay(attempt)
            @logger.warn("Operation '#{operation_name}' failed on attempt #{attempt} (#{e.message}), retrying in #{(delay * 1000).round}ms")

            sleep(delay)
          rescue => e
            # Handle non-SDK exceptions: wrap and raise immediately (no retry).
            @logger.error("Unexpected error in operation '#{operation_name}': #{e.message}")

            error_detail = {
              code: :processing_error,
              message: "Unexpected error: #{e.message}",
              suggestion: "This appears to be an unexpected error. Please contact support if it persists",
              context: { original_exception: e.class.name }
            }

            raise ComplyanceSDK::Exceptions::SDKException.new(
              "Unexpected error: #{e.message}",
              context: error_detail
            )
          end

          attempt += 1
        end

        # All retries exhausted
        @logger.error("Operation '#{operation_name}' failed after #{@config.max_attempts} attempts")

        if last_exception
          raise last_exception
        else
          error_detail = {
            code: :max_retries_exceeded,
            message: "Max retries exceeded",
            suggestion: "Operation failed after #{@config.max_attempts} attempts",
            context: { attempts: @config.max_attempts }
          }

          raise ComplyanceSDK::Exceptions::SDKException.new(
            "Max retries exceeded",
            context: error_detail
          )
        end
      end

      # Get the current circuit breaker state (for monitoring)
      #
      # @return [Symbol, nil] The circuit breaker state or nil if no circuit breaker
      def circuit_breaker_state
        @circuit_breaker&.state
      end

      # Get circuit breaker statistics (for monitoring)
      #
      # @return [String] Circuit breaker statistics string
      def circuit_breaker_stats
        if @circuit_breaker
          stats = @circuit_breaker.stats
          "CircuitBreaker{state=#{stats[:state]}, failures=#{stats[:failure_count]}, last_failure=#{stats[:last_failure_time]}}"
        else
          'Circuit breaker disabled'
        end
      end

      # Reset the circuit breaker (for testing/administrative purposes)
      def reset_circuit_breaker!
        if @circuit_breaker
          @circuit_breaker.reset!
          @logger.info('Circuit breaker reset')
        else
          @logger.warn('Circuit breaker reset requested but no circuit breaker configured')
        end
      end

      private

      # Determine if an error should be retried
      #
      # An error is retryable when its context marks it retryable, its code is
      # in the configured retryable list, or its HTTP status is in the
      # configured retryable list.
      #
      # @param error [ComplyanceSDK::Exceptions::SDKException] The error
      # @param attempt [Integer] Current attempt number
      # @return [Boolean] True if should retry
      def should_retry?(error, attempt)
        return false if attempt >= @config.max_attempts

        error_detail = error.context
        return false unless error_detail

        # Check if explicitly marked as retryable
        return true if error_detail[:retryable] == true

        # Check if error code is in retryable list
        return true if @config.retryable_error_codes.include?(error_detail[:code])

        # Check if HTTP status is retryable
        http_status = error_detail.dig(:context, :http_status) || error_detail[:http_status]
        if http_status
          status_code = http_status.to_i
          return true if @config.retryable_http_codes.include?(status_code)
        end

        false
      end

      # Calculate delay for next retry with exponential backoff and jitter
      #
      # base_delay and max_delay are interpreted as milliseconds (the result is
      # divided by 1000 before returning) — TODO confirm against RetryConfig.
      #
      # @param attempt [Integer] Current attempt number
      # @return [Float] Delay in seconds
      def calculate_delay(attempt)
        # Start with base delay and apply exponential backoff
        delay_ms = @config.base_delay * (@config.backoff_multiplier ** (attempt - 1))

        # Apply jitter to avoid thundering herd
        if @config.jitter_factor > 0
          jitter = (rand * 2 - 1) * @config.jitter_factor # -jitter_factor to +jitter_factor
          delay_ms = delay_ms * (1 + jitter)
        end

        # Cap at max delay
        delay_ms = [delay_ms, @config.max_delay].min

        # Ensure minimum delay
        delay_ms = [delay_ms, 0].max

        # Convert to seconds
        delay_ms / 1000.0
      end

      # Check if error is retryable based on class
      #
      # @param error [Exception] The error to check
      # @return [Boolean] True if retryable
      def retryable_error_class?(error)
        retryable_classes = [
          ComplyanceSDK::Exceptions::NetworkError,
          Timeout::Error,
          Errno::ECONNREFUSED,
          Errno::ECONNRESET,
          Errno::EHOSTUNREACH,
          Errno::ENETUNREACH
        ]

        # BUG FIX: the Faraday constants were referenced unconditionally,
        # raising NameError whenever faraday has not been loaded. Include them
        # only when Faraday is available.
        if defined?(Faraday)
          retryable_classes.concat(
            [Faraday::ConnectionFailed, Faraday::TimeoutError, Faraday::SSLError]
          )
        end

        retryable_classes.any? { |klass| error.is_a?(klass) }
      end

      # Check if API error is retryable based on status code
      #
      # @param error [ComplyanceSDK::Exceptions::APIError] The API error
      # @return [Boolean] True if retryable
      def retryable_api_error?(error)
        return false unless error.respond_to?(:status_code) && error.status_code

        @config.retryable_http_codes.include?(error.status_code)
      end
    end
  end
end
|
|
217
|
+
|
|
218
|
+
module ComplyanceSDK
  module Retry
    # Exception raised when all retry attempts are exhausted
    class RetryExhaustedError < ComplyanceSDK::Exceptions::SDKException
      # @param message [String] human-readable description of the failure
      # @param kwargs [Hash] extra fields merged into the exception context;
      #   a :code entry here overrides the default :retry_exhausted
      def initialize(message = "All retry attempts exhausted", **kwargs)
        merged_context = { code: :retry_exhausted }.merge(kwargs)
        super(message, context: merged_context)
      end
    end
  end
end
|