buda_api 1.0.0 → 1.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +101 -4
- data/buda_api.gemspec +4 -1
- data/examples/ai/README.md +314 -0
- data/examples/ai/anomaly_detection_example.rb +412 -0
- data/examples/ai/natural_language_trading.rb +369 -0
- data/examples/ai/report_generation_example.rb +605 -0
- data/examples/ai/risk_management_example.rb +300 -0
- data/examples/ai/trading_assistant_example.rb +295 -0
- data/lib/buda_api/ai/anomaly_detector.rb +787 -0
- data/lib/buda_api/ai/natural_language_trader.rb +541 -0
- data/lib/buda_api/ai/report_generator.rb +1054 -0
- data/lib/buda_api/ai/risk_manager.rb +789 -0
- data/lib/buda_api/ai/trading_assistant.rb +404 -0
- data/lib/buda_api/version.rb +1 -1
- data/lib/buda_api.rb +37 -0
- metadata +32 -1
data/lib/buda_api/ai/anomaly_detector.rb
@@ -0,0 +1,787 @@
# frozen_string_literal: true

module BudaApi
  module AI
    # AI-powered anomaly detection for trading patterns and market behavior
    class AnomalyDetector
      ANOMALY_TYPES = {
        price_spike: {
          severity: :high,
          description: "Unusual price movement detected",
          threshold: 15.0 # 15% price change
        },
        volume_anomaly: {
          severity: :medium,
          description: "Abnormal trading volume",
          threshold: 3.0 # 3x average volume
        },
        spread_anomaly: {
          severity: :medium,
          description: "Unusual bid-ask spread",
          threshold: 2.0 # 2x normal spread
        },
        trading_pattern: {
          severity: :low,
          description: "Unusual trading pattern detected",
          threshold: 1.5 # 1.5x normal pattern deviation
        },
        market_correlation: {
          severity: :medium,
          description: "Abnormal market correlation",
          threshold: 0.7 # Correlation coefficient threshold
        },
        whale_activity: {
          severity: :high,
          description: "Large order activity detected",
          threshold: 10.0 # 10x average order size
        }
      }.freeze

      def initialize(client, llm_provider: :openai)
        @client = client
        @llm = RubyLLM.new(
          provider: llm_provider,
          system_prompt: build_anomaly_system_prompt
        )
        @historical_data = {}

        BudaApi::Logger.info("Anomaly Detector initialized")
      end

      # Detect real-time anomalies across all markets
      #
      # @param options [Hash] detection options
      # @option options [Array<String>] :markets specific markets to monitor
      # @option options [Array<Symbol>] :anomaly_types types to detect
      # @option options [Boolean] :include_ai_analysis include AI insights
      # @return [Hash] anomaly detection results
      def detect_market_anomalies(options = {})
        markets = options[:markets] || BudaApi::Constants::Market::MAJOR
        anomaly_types = options[:anomaly_types] || ANOMALY_TYPES.keys
        include_ai = options[:include_ai_analysis] != false

        BudaApi::Logger.info("Detecting market anomalies across #{markets.length} markets")

        begin
          anomalies = []
          market_data = {}

          # Analyze each market
          markets.each do |market_id|
            market_analysis = analyze_market_for_anomalies(market_id, anomaly_types)
            market_data[market_id] = market_analysis[:data]

            if market_analysis[:anomalies].any?
              anomalies.concat(market_analysis[:anomalies])
            end
          end

          # Cross-market anomaly detection
          cross_market_anomalies = detect_cross_market_anomalies(market_data)
          anomalies.concat(cross_market_anomalies)

          # Sort by severity
          anomalies.sort_by! { |anomaly| anomaly[:severity_score] }.reverse!

          result = {
            type: :anomaly_detection,
            timestamp: Time.now,
            markets_analyzed: markets.length,
            anomalies_detected: anomalies.length,
            anomalies: anomalies,
            market_data: market_data,
            severity_summary: calculate_severity_summary(anomalies),
            recommendations: generate_anomaly_recommendations(anomalies)
          }

          # Add AI analysis if requested
          if include_ai && anomalies.any?
            result[:ai_analysis] = generate_ai_anomaly_analysis(result)
          end

          result

        rescue => e
          error_msg = "Anomaly detection failed: #{e.message}"
          BudaApi::Logger.error(error_msg)

          {
            type: :anomaly_detection_error,
            error: error_msg,
            timestamp: Time.now
          }
        end
      end

      # Monitor specific market for anomalies in real-time
      #
      # @param market_id [String] market to monitor
      # @param duration [Integer] monitoring duration in seconds
      # @param callback [Proc] callback for real-time alerts
      # @return [Hash] monitoring results
      def monitor_market_realtime(market_id, duration = 3600, &callback)
        BudaApi::Logger.info("Starting real-time monitoring of #{market_id} for #{duration} seconds")

        start_time = Time.now
        anomalies_detected = []
        monitoring_active = true

        # Background monitoring thread
        monitoring_thread = Thread.new do
          while monitoring_active && (Time.now - start_time) < duration
            begin
              anomalies = detect_single_market_anomalies(market_id)

              anomalies.each do |anomaly|
                anomalies_detected << anomaly
                callback&.call(anomaly)

                # Log significant anomalies
                if anomaly[:severity_score] >= 7.0
                  BudaApi::Logger.warn("High severity anomaly detected in #{market_id}: #{anomaly[:type]}")
                end
              end

              # Check every 30 seconds
              sleep(30)

            rescue => e
              BudaApi::Logger.error("Real-time monitoring error: #{e.message}")
              sleep(60) # Wait longer on error
            end
          end
        end

        # Return monitoring control object
        {
          type: :realtime_monitoring,
          market_id: market_id,
          start_time: start_time,
          duration: duration,
          thread: monitoring_thread,
          anomalies_detected: anomalies_detected,
          stop: -> { monitoring_active = false; monitoring_thread.join },
          status: -> { monitoring_active ? :active : :stopped }
        }
      end

      # Analyze historical data for patterns and anomalies
      #
      # @param market_id [String] market to analyze
      # @param lookback_hours [Integer] hours of history to analyze
      # @return [Hash] historical anomaly analysis
      def analyze_historical_anomalies(market_id, lookback_hours = 24)
        BudaApi::Logger.info("Analyzing historical anomalies for #{market_id} (#{lookback_hours}h lookback)")

        begin
          # Get historical ticker data (simulated - Buda API might not provide full historical data)
          historical_data = fetch_historical_data(market_id, lookback_hours)

          if historical_data.empty?
            return {
              type: :historical_analysis,
              market_id: market_id,
              message: "Insufficient historical data for analysis",
              timestamp: Time.now
            }
          end

          # Detect various anomaly patterns
          anomalies = []

          # Price spike detection
          price_anomalies = detect_price_spikes(historical_data)
          anomalies.concat(price_anomalies)

          # Volume pattern analysis
          volume_anomalies = detect_volume_anomalies(historical_data)
          anomalies.concat(volume_anomalies)

          # Trend anomalies
          trend_anomalies = detect_trend_anomalies(historical_data)
          anomalies.concat(trend_anomalies)

          # Statistical analysis
          statistical_anomalies = detect_statistical_anomalies(historical_data)
          anomalies.concat(statistical_anomalies)

          {
            type: :historical_analysis,
            market_id: market_id,
            lookback_hours: lookback_hours,
            data_points: historical_data.length,
            anomalies_found: anomalies.length,
            anomalies: anomalies.sort_by { |a| a[:severity_score] }.reverse,
            summary: generate_historical_summary(anomalies),
            timestamp: Time.now
          }

        rescue => e
          error_msg = "Historical anomaly analysis failed: #{e.message}"
          BudaApi::Logger.error(error_msg)

          {
            type: :historical_analysis_error,
            error: error_msg,
            timestamp: Time.now
          }
        end
      end

      # Set up automated alerts for specific anomaly types
      #
      # @param alert_config [Hash] alert configuration
      # @return [Hash] alert system status
      def setup_anomaly_alerts(alert_config = {})
        default_config = {
          markets: BudaApi::Constants::Market::MAJOR,
          anomaly_types: [:price_spike, :volume_anomaly, :whale_activity],
          severity_threshold: 6.0,
          notification_methods: [:log, :callback],
          check_interval: 60 # seconds
        }

        config = default_config.merge(alert_config)

        BudaApi::Logger.info("Setting up anomaly alerts with config: #{config}")

        {
          type: :alert_system,
          config: config,
          status: :configured,
          start_monitoring: -> { start_alert_monitoring(config) },
          timestamp: Time.now
        }
      end

      private

      def build_anomaly_system_prompt
        """
        You are an expert cryptocurrency market analyst specializing in anomaly detection.

        Your expertise includes:
        - Identifying unusual price movements and market patterns
        - Detecting trading volume anomalies
        - Recognizing whale activity and large order impacts
        - Analyzing market correlations and divergences
        - Assessing systemic risks and market manipulation
        - Understanding Chilean cryptocurrency market dynamics

        When analyzing anomalies:
        1. Consider both technical and fundamental factors
        2. Assess the potential market impact and risk level
        3. Provide clear explanations for detected anomalies
        4. Suggest appropriate responses or precautions
        5. Consider market context and recent events
        6. Differentiate between normal volatility and true anomalies

        Always prioritize accuracy and provide actionable insights.
        """
      end

      def analyze_market_for_anomalies(market_id, anomaly_types)
        begin
          # Get current market data
          ticker = @client.ticker(market_id)
          order_book = @client.order_book(market_id)

          market_data = {
            ticker: ticker,
            order_book: order_book,
            timestamp: Time.now
          }

          # Store historical reference
          store_market_reference(market_id, market_data)

          detected_anomalies = []

          # Check each requested anomaly type
          anomaly_types.each do |anomaly_type|
            anomaly = case anomaly_type
                      when :price_spike
                        detect_price_spike_anomaly(market_id, ticker)
                      when :volume_anomaly
                        detect_volume_anomaly(market_id, ticker)
                      when :spread_anomaly
                        detect_spread_anomaly(market_id, order_book)
                      when :whale_activity
                        detect_whale_activity(market_id, order_book)
                      when :trading_pattern
                        detect_trading_pattern_anomaly(market_id, ticker)
                      end

            detected_anomalies << anomaly if anomaly
          end

          {
            market_id: market_id,
            data: market_data,
            anomalies: detected_anomalies
          }

        rescue => e
          BudaApi::Logger.warn("Failed to analyze #{market_id} for anomalies: #{e.message}")
          {
            market_id: market_id,
            data: nil,
            anomalies: [],
            error: e.message
          }
        end
      end

      def detect_price_spike_anomaly(market_id, ticker)
        change_24h = ticker.price_variation_24h.abs
        threshold = ANOMALY_TYPES[:price_spike][:threshold]

        return nil unless change_24h > threshold

        severity_score = calculate_severity_score(:price_spike, change_24h, threshold)

        {
          type: :price_spike,
          market_id: market_id,
          severity: determine_severity_level(severity_score),
          severity_score: severity_score,
          description: "Price spike detected: #{ticker.price_variation_24h.round(2)}% change",
          details: {
            current_price: ticker.last_price.amount,
            change_24h: ticker.price_variation_24h,
            threshold_exceeded: change_24h - threshold
          },
          timestamp: Time.now,
          recommendation: generate_price_spike_recommendation(change_24h, ticker.price_variation_24h)
        }
      end

      def detect_volume_anomaly(market_id, ticker)
        current_volume = ticker.volume.amount

        # Get historical average (simplified - using stored reference)
        historical_avg = get_historical_average_volume(market_id)
        return nil unless historical_avg && historical_avg > 0

        volume_ratio = current_volume / historical_avg
        threshold = ANOMALY_TYPES[:volume_anomaly][:threshold]

        return nil unless volume_ratio > threshold

        severity_score = calculate_severity_score(:volume_anomaly, volume_ratio, threshold)

        {
          type: :volume_anomaly,
          market_id: market_id,
          severity: determine_severity_level(severity_score),
          severity_score: severity_score,
          description: "Volume anomaly: #{volume_ratio.round(1)}x normal volume",
          details: {
            current_volume: current_volume,
            average_volume: historical_avg,
            volume_ratio: volume_ratio
          },
          timestamp: Time.now,
          recommendation: "Monitor for potential market movements or news events"
        }
      end

      def detect_spread_anomaly(market_id, order_book)
        return nil if order_book.asks.empty? || order_book.bids.empty?

        best_ask = order_book.asks.first.price
        best_bid = order_book.bids.first.price
        spread_percent = ((best_ask - best_bid) / best_ask * 100)

        # Get normal spread reference
        normal_spread = get_historical_average_spread(market_id)
        return nil unless normal_spread && normal_spread > 0

        spread_ratio = spread_percent / normal_spread
        threshold = ANOMALY_TYPES[:spread_anomaly][:threshold]

        return nil unless spread_ratio > threshold

        severity_score = calculate_severity_score(:spread_anomaly, spread_ratio, threshold)

        {
          type: :spread_anomaly,
          market_id: market_id,
          severity: determine_severity_level(severity_score),
          severity_score: severity_score,
          description: "Unusual spread: #{spread_percent.round(3)}% (#{spread_ratio.round(1)}x normal)",
          details: {
            current_spread: spread_percent,
            normal_spread: normal_spread,
            best_bid: best_bid,
            best_ask: best_ask
          },
          timestamp: Time.now,
          recommendation: "Caution with market orders - consider using limit orders"
        }
      end

      def detect_whale_activity(market_id, order_book)
        # Analyze order book for unusually large orders
        large_orders = []

        # Check asks
        order_book.asks.each do |ask|
          if ask.amount > calculate_whale_threshold(market_id, :ask)
            large_orders << { side: :ask, price: ask.price, amount: ask.amount }
          end
        end

        # Check bids
        order_book.bids.each do |bid|
          if bid.amount > calculate_whale_threshold(market_id, :bid)
            large_orders << { side: :bid, price: bid.price, amount: bid.amount }
          end
        end

        return nil if large_orders.empty?

        total_whale_value = large_orders.sum { |order| order[:amount] * order[:price] }
        severity_score = calculate_whale_severity(large_orders, total_whale_value)

        {
          type: :whale_activity,
          market_id: market_id,
          severity: determine_severity_level(severity_score),
          severity_score: severity_score,
          description: "Large order activity detected: #{large_orders.length} whale orders",
          details: {
            large_orders: large_orders,
            total_value: total_whale_value,
            largest_order: large_orders.max_by { |o| o[:amount] }
          },
          timestamp: Time.now,
          recommendation: "Monitor for potential price impact from large orders"
        }
      end

      def detect_trading_pattern_anomaly(market_id, ticker)
        # This would require more historical data for pattern recognition
        # Placeholder implementation

        change_24h = ticker.price_variation_24h.abs
        volume = ticker.volume.amount

        # Simple pattern check: high volume with low price change (accumulation/distribution)
        if volume > get_historical_average_volume(market_id).to_f * 2 && change_24h < 2.0
          severity_score = 5.0

          {
            type: :trading_pattern,
            market_id: market_id,
            severity: determine_severity_level(severity_score),
            severity_score: severity_score,
            description: "Accumulation/distribution pattern detected",
            details: {
              volume: volume,
              price_change: change_24h,
              pattern: "high_volume_low_change"
            },
            timestamp: Time.now,
            recommendation: "Potential institutional activity - monitor for breakout"
          }
        end
      end

      def detect_cross_market_anomalies(market_data)
        anomalies = []

        # Correlation anomalies between similar markets
        btc_markets = market_data.select { |market, _| market.start_with?("BTC-") }

        if btc_markets.length > 1
          correlation_anomaly = detect_correlation_anomaly(btc_markets)
          anomalies << correlation_anomaly if correlation_anomaly
        end

        # Market divergence detection
        divergence_anomaly = detect_market_divergence(market_data)
        anomalies << divergence_anomaly if divergence_anomaly

        anomalies
      end

      def detect_correlation_anomaly(btc_markets)
        # Simplified correlation analysis
        changes = btc_markets.values.map { |data| data[:ticker].price_variation_24h }

        # Check if changes have unusual divergence
        max_change = changes.max
        min_change = changes.min
        divergence = (max_change - min_change).abs

        if divergence > 10.0 # 10% divergence threshold
          {
            type: :market_correlation,
            severity: :medium,
            severity_score: 6.0,
            description: "Unusual BTC market divergence detected",
            details: {
              markets: btc_markets.keys,
              changes: changes,
              divergence: divergence
            },
            timestamp: Time.now,
            recommendation: "Investigate potential arbitrage opportunities"
          }
        end
      end

      def detect_market_divergence(market_data)
        # Check for overall market divergence from expected correlations
        all_changes = market_data.values.map { |data| data[:ticker].price_variation_24h rescue 0 }

        positive_markets = all_changes.count { |change| change > 0 }
        negative_markets = all_changes.count { |change| change < 0 }
        total_markets = all_changes.length

        # Unusual if markets are heavily skewed in one direction
        skew_ratio = [positive_markets, negative_markets].max / total_markets.to_f

        if skew_ratio > 0.8 # 80% of markets moving in same direction
          direction = positive_markets > negative_markets ? "bullish" : "bearish"

          {
            type: :market_divergence,
            severity: :low,
            severity_score: 4.0,
            description: "Strong #{direction} market consensus detected",
            details: {
              positive_markets: positive_markets,
              negative_markets: negative_markets,
              total_markets: total_markets,
              skew_ratio: skew_ratio,
              direction: direction
            },
            timestamp: Time.now,
            recommendation: "Monitor for potential trend reversal or continuation"
          }
        end
      end

      def store_market_reference(market_id, market_data)
        @historical_data[market_id] ||= []
        @historical_data[market_id] << {
          price: market_data[:ticker].last_price.amount,
          volume: market_data[:ticker].volume.amount,
          spread: calculate_current_spread(market_data[:order_book]),
          timestamp: Time.now
        }

        # Keep only last 100 data points
        @historical_data[market_id] = @historical_data[market_id].last(100)
      end

      def calculate_current_spread(order_book)
        return 0.0 if order_book.asks.empty? || order_book.bids.empty?

        best_ask = order_book.asks.first.price
        best_bid = order_book.bids.first.price
        ((best_ask - best_bid) / best_ask * 100)
      end

      def get_historical_average_volume(market_id)
        history = @historical_data[market_id]
        return nil if !history || history.empty?

        volumes = history.map { |data| data[:volume] }
        volumes.sum / volumes.length.to_f
      end

      def get_historical_average_spread(market_id)
        history = @historical_data[market_id]
        return nil if !history || history.empty?

        spreads = history.map { |data| data[:spread] }
        spreads.sum / spreads.length.to_f
      end

      def calculate_whale_threshold(market_id, side)
        # Simplified whale detection - would use more sophisticated analysis in production
        avg_volume = get_historical_average_volume(market_id) || 1000.0
        avg_volume * 0.1 # 10% of daily volume in single order
      end

      def calculate_severity_score(anomaly_type, value, threshold)
        config = ANOMALY_TYPES[anomaly_type]
        base_score = case config[:severity]
                     when :low then 3.0
                     when :medium then 5.0
                     when :high then 7.0
                     end

        # Adjust based on how much threshold was exceeded
        excess_ratio = value / threshold
        adjusted_score = base_score * excess_ratio

        [adjusted_score, 10.0].min # Cap at 10.0
      end

      def calculate_whale_severity(large_orders, total_value)
        # Base severity on number and size of whale orders
        order_count_factor = [large_orders.length / 3.0, 1.0].min
        value_factor = Math.log10([total_value / 100000.0, 1.0].max)

        5.0 + (order_count_factor * 2.0) + (value_factor * 1.5)
      end

      def determine_severity_level(score)
        case score
        when 0..3 then :low
        when 3..6 then :medium
        when 6..8 then :high
        else :critical
        end
      end

      def calculate_severity_summary(anomalies)
        {
          critical: anomalies.count { |a| a[:severity] == :critical },
          high: anomalies.count { |a| a[:severity] == :high },
          medium: anomalies.count { |a| a[:severity] == :medium },
          low: anomalies.count { |a| a[:severity] == :low }
        }
      end

      def generate_anomaly_recommendations(anomalies)
        recommendations = []

        if anomalies.any? { |a| a[:severity] == :critical }
          recommendations << "🚨 CRITICAL: Immediate attention required - consider halting trading"
        end

        if anomalies.count { |a| a[:type] == :price_spike } > 2
          recommendations << "📈 Multiple price spikes detected - market volatility high"
        end

        if anomalies.any? { |a| a[:type] == :whale_activity }
          recommendations << "🐋 Large order activity - monitor for price impact"
        end

        if anomalies.count { |a| a[:severity] == :high } > 3
          recommendations << "⚠️ Multiple high-severity anomalies - increased caution advised"
        end

        recommendations << "✅ Continue normal monitoring" if recommendations.empty?

        recommendations
      end

      def generate_price_spike_recommendation(change_magnitude, change_direction)
        if change_magnitude > 20.0
          if change_direction > 0
            "🚀 Major pump detected - consider taking profits or wait for pullback"
          else
            "📉 Major dump detected - avoid panic selling, look for support levels"
          end
        elsif change_magnitude > 10.0
          "📊 Significant price movement - verify with news and volume"
        else
          "👀 Monitor for continuation or reversal"
        end
      end

      def detect_single_market_anomalies(market_id)
        analysis = analyze_market_for_anomalies(market_id, ANOMALY_TYPES.keys)
        analysis[:anomalies]
      end

      def start_alert_monitoring(config)
        # Implementation would start background monitoring with the given configuration
        BudaApi::Logger.info("Starting alert monitoring with config: #{config}")
        {
          status: :monitoring_started,
          config: config,
          timestamp: Time.now
        }
      end

      # Historical data analysis methods (simplified implementations)
      def fetch_historical_data(market_id, lookback_hours)
        # In a real implementation, this would fetch actual historical data
        # For now, return empty array as Buda API might not provide extensive historical data
        []
      end

      def detect_price_spikes(historical_data)
        [] # Placeholder
      end

      def detect_volume_anomalies(historical_data)
        [] # Placeholder
      end

      def detect_trend_anomalies(historical_data)
        [] # Placeholder
      end

      def detect_statistical_anomalies(historical_data)
        [] # Placeholder
      end

      def generate_historical_summary(anomalies)
        {
          total_anomalies: anomalies.length,
          by_type: anomalies.group_by { |a| a[:type] }.transform_values(&:length),
          avg_severity: anomalies.map { |a| a[:severity_score] }.sum / [anomalies.length, 1].max
        }
      end

      def generate_ai_anomaly_analysis(result)
        return nil unless defined?(RubyLLM)

        prompt = build_ai_anomaly_prompt(result)

        begin
          response = @llm.complete(
            messages: [{ role: "user", content: prompt }],
            max_tokens: 400
          )

          {
            analysis: response.content,
            generated_at: Time.now
          }
        rescue => e
          BudaApi::Logger.error("AI anomaly analysis failed: #{e.message}")
          nil
        end
      end

      def build_ai_anomaly_prompt(result)
        anomaly_summary = result[:anomalies].map do |anomaly|
          "- #{anomaly[:type]}: #{anomaly[:description]} (Severity: #{anomaly[:severity]})"
        end.join("\n")

        """
        Analyze these cryptocurrency market anomalies detected on the Buda exchange:

        Markets Analyzed: #{result[:markets_analyzed]}
        Total Anomalies: #{result[:anomalies_detected]}

        Detected Anomalies:
        #{anomaly_summary}

        Severity Summary:
        - Critical: #{result[:severity_summary][:critical]}
        - High: #{result[:severity_summary][:high]}
        - Medium: #{result[:severity_summary][:medium]}
        - Low: #{result[:severity_summary][:low]}

        Please provide:
        1. Overall market risk assessment
        2. Potential causes for the anomalies
        3. Specific trading recommendations
        4. Risk mitigation strategies

        Focus on actionable insights for Chilean crypto traders.
        """
      end
    end
  end
end
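For orientation, here is a minimal usage sketch of the new `BudaApi::AI::AnomalyDetector`, based only on the public methods visible in the diff above. The client setup, market ids, and LLM configuration are placeholder assumptions; the shipped `data/examples/ai/anomaly_detection_example.rb` and `data/README.md` are the authoritative examples.

```ruby
# Hypothetical usage sketch (not part of the diff). Assumes a configured
# BudaApi client exposing #ticker and #order_book, and that the LLM
# dependency used in the constructor (RubyLLM) is available.
require "buda_api"

client   = BudaApi::Client.new  # placeholder: actual client setup may differ
detector = BudaApi::AI::AnomalyDetector.new(client, llm_provider: :openai)

# One-off scan of selected markets, skipping the AI commentary
result = detector.detect_market_anomalies(
  markets: ["BTC-CLP", "ETH-CLP"],                 # example market ids
  anomaly_types: [:price_spike, :whale_activity],
  include_ai_analysis: false
)
puts "#{result[:anomalies_detected]} anomalies found"
puts result[:severity_summary].inspect

# Real-time monitoring for 10 minutes; the returned hash exposes a stop lambda
monitor = detector.monitor_market_realtime("BTC-CLP", 600) do |anomaly|
  puts "[#{anomaly[:severity]}] #{anomaly[:market_id]}: #{anomaly[:description]}"
end
sleep(120)
monitor[:stop].call
```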