aidp 0.5.0 → 0.8.0

This diff shows the changes between publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only.
Files changed (122)
  1. checksums.yaml +4 -4
  2. data/README.md +128 -151
  3. data/bin/aidp +1 -1
  4. data/lib/aidp/analysis/kb_inspector.rb +471 -0
  5. data/lib/aidp/analysis/seams.rb +159 -0
  6. data/lib/aidp/analysis/tree_sitter_grammar_loader.rb +480 -0
  7. data/lib/aidp/analysis/tree_sitter_scan.rb +686 -0
  8. data/lib/aidp/analyze/error_handler.rb +2 -78
  9. data/lib/aidp/analyze/json_file_storage.rb +292 -0
  10. data/lib/aidp/analyze/progress.rb +12 -0
  11. data/lib/aidp/analyze/progress_visualizer.rb +12 -17
  12. data/lib/aidp/analyze/ruby_maat_integration.rb +13 -31
  13. data/lib/aidp/analyze/runner.rb +256 -87
  14. data/lib/aidp/analyze/steps.rb +6 -0
  15. data/lib/aidp/cli/jobs_command.rb +103 -435
  16. data/lib/aidp/cli.rb +317 -191
  17. data/lib/aidp/config.rb +298 -10
  18. data/lib/aidp/debug_logger.rb +195 -0
  19. data/lib/aidp/debug_mixin.rb +187 -0
  20. data/lib/aidp/execute/progress.rb +9 -0
  21. data/lib/aidp/execute/runner.rb +221 -40
  22. data/lib/aidp/execute/steps.rb +17 -7
  23. data/lib/aidp/execute/workflow_selector.rb +211 -0
  24. data/lib/aidp/harness/completion_checker.rb +268 -0
  25. data/lib/aidp/harness/condition_detector.rb +1526 -0
  26. data/lib/aidp/harness/config_loader.rb +373 -0
  27. data/lib/aidp/harness/config_manager.rb +382 -0
  28. data/lib/aidp/harness/config_schema.rb +1006 -0
  29. data/lib/aidp/harness/config_validator.rb +355 -0
  30. data/lib/aidp/harness/configuration.rb +477 -0
  31. data/lib/aidp/harness/enhanced_runner.rb +494 -0
  32. data/lib/aidp/harness/error_handler.rb +616 -0
  33. data/lib/aidp/harness/provider_config.rb +423 -0
  34. data/lib/aidp/harness/provider_factory.rb +306 -0
  35. data/lib/aidp/harness/provider_manager.rb +1269 -0
  36. data/lib/aidp/harness/provider_type_checker.rb +88 -0
  37. data/lib/aidp/harness/runner.rb +411 -0
  38. data/lib/aidp/harness/state/errors.rb +28 -0
  39. data/lib/aidp/harness/state/metrics.rb +219 -0
  40. data/lib/aidp/harness/state/persistence.rb +128 -0
  41. data/lib/aidp/harness/state/provider_state.rb +132 -0
  42. data/lib/aidp/harness/state/ui_state.rb +68 -0
  43. data/lib/aidp/harness/state/workflow_state.rb +123 -0
  44. data/lib/aidp/harness/state_manager.rb +586 -0
  45. data/lib/aidp/harness/status_display.rb +888 -0
  46. data/lib/aidp/harness/ui/base.rb +16 -0
  47. data/lib/aidp/harness/ui/enhanced_tui.rb +545 -0
  48. data/lib/aidp/harness/ui/enhanced_workflow_selector.rb +252 -0
  49. data/lib/aidp/harness/ui/error_handler.rb +132 -0
  50. data/lib/aidp/harness/ui/frame_manager.rb +361 -0
  51. data/lib/aidp/harness/ui/job_monitor.rb +500 -0
  52. data/lib/aidp/harness/ui/navigation/main_menu.rb +311 -0
  53. data/lib/aidp/harness/ui/navigation/menu_formatter.rb +120 -0
  54. data/lib/aidp/harness/ui/navigation/menu_item.rb +142 -0
  55. data/lib/aidp/harness/ui/navigation/menu_state.rb +139 -0
  56. data/lib/aidp/harness/ui/navigation/submenu.rb +202 -0
  57. data/lib/aidp/harness/ui/navigation/workflow_selector.rb +176 -0
  58. data/lib/aidp/harness/ui/progress_display.rb +280 -0
  59. data/lib/aidp/harness/ui/question_collector.rb +141 -0
  60. data/lib/aidp/harness/ui/spinner_group.rb +184 -0
  61. data/lib/aidp/harness/ui/spinner_helper.rb +152 -0
  62. data/lib/aidp/harness/ui/status_manager.rb +312 -0
  63. data/lib/aidp/harness/ui/status_widget.rb +280 -0
  64. data/lib/aidp/harness/ui/workflow_controller.rb +312 -0
  65. data/lib/aidp/harness/user_interface.rb +2381 -0
  66. data/lib/aidp/provider_manager.rb +131 -7
  67. data/lib/aidp/providers/anthropic.rb +28 -109
  68. data/lib/aidp/providers/base.rb +170 -0
  69. data/lib/aidp/providers/cursor.rb +52 -183
  70. data/lib/aidp/providers/gemini.rb +24 -109
  71. data/lib/aidp/providers/macos_ui.rb +99 -5
  72. data/lib/aidp/providers/opencode.rb +194 -0
  73. data/lib/aidp/storage/csv_storage.rb +172 -0
  74. data/lib/aidp/storage/file_manager.rb +214 -0
  75. data/lib/aidp/storage/json_storage.rb +140 -0
  76. data/lib/aidp/version.rb +1 -1
  77. data/lib/aidp.rb +56 -35
  78. data/templates/ANALYZE/06a_tree_sitter_scan.md +217 -0
  79. data/templates/COMMON/AGENT_BASE.md +11 -0
  80. data/templates/EXECUTE/00_PRD.md +4 -4
  81. data/templates/EXECUTE/02_ARCHITECTURE.md +5 -4
  82. data/templates/EXECUTE/07_TEST_PLAN.md +4 -1
  83. data/templates/EXECUTE/08_TASKS.md +4 -4
  84. data/templates/EXECUTE/10_IMPLEMENTATION_AGENT.md +4 -4
  85. data/templates/README.md +279 -0
  86. data/templates/aidp-development.yml.example +373 -0
  87. data/templates/aidp-minimal.yml.example +48 -0
  88. data/templates/aidp-production.yml.example +475 -0
  89. data/templates/aidp.yml.example +598 -0
  90. metadata +106 -64
  91. data/lib/aidp/analyze/agent_personas.rb +0 -71
  92. data/lib/aidp/analyze/agent_tool_executor.rb +0 -445
  93. data/lib/aidp/analyze/data_retention_manager.rb +0 -426
  94. data/lib/aidp/analyze/database.rb +0 -260
  95. data/lib/aidp/analyze/dependencies.rb +0 -335
  96. data/lib/aidp/analyze/export_manager.rb +0 -425
  97. data/lib/aidp/analyze/focus_guidance.rb +0 -517
  98. data/lib/aidp/analyze/incremental_analyzer.rb +0 -543
  99. data/lib/aidp/analyze/language_analysis_strategies.rb +0 -897
  100. data/lib/aidp/analyze/large_analysis_progress.rb +0 -504
  101. data/lib/aidp/analyze/memory_manager.rb +0 -365
  102. data/lib/aidp/analyze/metrics_storage.rb +0 -336
  103. data/lib/aidp/analyze/parallel_processor.rb +0 -460
  104. data/lib/aidp/analyze/performance_optimizer.rb +0 -694
  105. data/lib/aidp/analyze/repository_chunker.rb +0 -704
  106. data/lib/aidp/analyze/static_analysis_detector.rb +0 -577
  107. data/lib/aidp/analyze/storage.rb +0 -662
  108. data/lib/aidp/analyze/tool_configuration.rb +0 -456
  109. data/lib/aidp/analyze/tool_modernization.rb +0 -750
  110. data/lib/aidp/database/pg_adapter.rb +0 -148
  111. data/lib/aidp/database_config.rb +0 -69
  112. data/lib/aidp/database_connection.rb +0 -72
  113. data/lib/aidp/database_migration.rb +0 -158
  114. data/lib/aidp/job_manager.rb +0 -41
  115. data/lib/aidp/jobs/base_job.rb +0 -47
  116. data/lib/aidp/jobs/provider_execution_job.rb +0 -96
  117. data/lib/aidp/project_detector.rb +0 -117
  118. data/lib/aidp/providers/agent_supervisor.rb +0 -348
  119. data/lib/aidp/providers/supervised_base.rb +0 -317
  120. data/lib/aidp/providers/supervised_cursor.rb +0 -22
  121. data/lib/aidp/sync.rb +0 -13
  122. data/lib/aidp/workspace.rb +0 -19
data/lib/aidp/analyze/memory_manager.rb
@@ -1,365 +0,0 @@
- # frozen_string_literal: true
-
- require "json"
- require "yaml"
- require "digest"
-
- module Aidp
-   class MemoryManager
-     # Memory management strategies
-     MEMORY_STRATEGIES = %w[streaming chunking caching garbage_collection].freeze
-
-     # Default configuration
-     DEFAULT_CONFIG = {
-       max_memory_usage: 1024 * 1024 * 1024, # 1GB
-       chunk_size: 1000,
-       cache_size: 100,
-       gc_threshold: 0.8, # 80% memory usage triggers GC
-       streaming_enabled: true,
-       compression_enabled: false
-     }.freeze
-
-     def initialize(config = {})
-       @config = DEFAULT_CONFIG.merge(config)
-       @cache = {}
-       @memory_usage = 0
-       @peak_memory_usage = 0
-       @gc_count = 0
-       @streaming_data = []
-     end
-
-     # Process large dataset with memory management
-     def process_large_dataset(dataset, processor_method, options = {})
-       strategy = options[:strategy] || "streaming"
-
-       case strategy
-       when "streaming"
-         process_with_streaming(dataset, processor_method, options)
-       when "chunking"
-         process_with_chunking(dataset, processor_method, options)
-       when "caching"
-         process_with_caching(dataset, processor_method, options)
-       else
-         raise "Unknown memory management strategy: #{strategy}"
-       end
-     end
-
-     # Process data with streaming approach
-     def process_with_streaming(dataset, processor_method, options = {})
-       results = {
-         processed_items: 0,
-         memory_usage: [],
-         gc_count: 0,
-         results: [],
-         errors: []
-       }
-
-       begin
-         dataset.each_with_index do |item, index|
-           # Check memory usage
-           current_memory = get_memory_usage
-           results[:memory_usage] << current_memory
-
-           # Trigger garbage collection if needed
-           if should_trigger_gc?(current_memory)
-             trigger_garbage_collection
-             results[:gc_count] += 1
-           end
-
-           # Process item
-           begin
-             result = processor_method.call(item, options)
-             results[:results] << result
-             results[:processed_items] += 1
-           rescue => e
-             results[:errors] << {
-               item_index: index,
-               error: e.message
-             }
-           end
-
-           # Update memory tracking
-           update_memory_tracking(current_memory)
-         end
-       rescue => e
-         results[:errors] << {
-           type: "streaming_error",
-           message: e.message
-         }
-       end
-
-       results
-     end
-
-     # Process data with chunking approach
-     def process_with_chunking(dataset, processor_method, options = {})
-       chunk_size = options[:chunk_size] || @config[:chunk_size]
-       results = {
-         processed_chunks: 0,
-         processed_items: 0,
-         memory_usage: [],
-         gc_count: 0,
-         results: [],
-         errors: []
-       }
-
-       begin
-         dataset.each_slice(chunk_size) do |chunk|
-           # Check memory before processing chunk
-           pre_chunk_memory = get_memory_usage
-           results[:memory_usage] << pre_chunk_memory
-
-           # Process chunk
-           chunk_results = process_chunk(chunk, processor_method, options)
-           results[:results].concat(chunk_results[:results])
-           results[:errors].concat(chunk_results[:errors])
-           results[:processed_items] += chunk_results[:processed_items]
-
-           # Trigger garbage collection after chunk
-           if should_trigger_gc?(pre_chunk_memory)
-             trigger_garbage_collection
-             results[:gc_count] += 1
-           end
-
-           results[:processed_chunks] += 1
-           update_memory_tracking(pre_chunk_memory)
-         end
-       rescue => e
-         results[:errors] << {
-           type: "chunking_error",
-           message: e.message
-         }
-       end
-
-       results
-     end
-
-     # Process data with caching approach
-     def process_with_caching(dataset, processor_method, options = {})
-       cache_size = options[:cache_size] || @config[:cache_size]
-       results = {
-         processed_items: 0,
-         cache_hits: 0,
-         cache_misses: 0,
-         memory_usage: [],
-         gc_count: 0,
-         results: [],
-         errors: []
-       }
-
-       begin
-         dataset.each_with_index do |item, index|
-           # Check memory usage
-           current_memory = get_memory_usage
-           results[:memory_usage] << current_memory
-
-           # Check cache
-           cache_key = generate_cache_key(item)
-           if @cache.key?(cache_key)
-             results[:cache_hits] += 1
-             result = @cache[cache_key]
-           else
-             results[:cache_misses] += 1
-             begin
-               result = processor_method.call(item, options)
-               cache_result(cache_key, result, cache_size)
-             rescue => e
-               results[:errors] << {
-                 item_index: index,
-                 error: e.message
-               }
-               next
-             end
-           end
-
-           results[:results] << result
-           results[:processed_items] += 1
-
-           # Trigger garbage collection if needed
-           if should_trigger_gc?(current_memory)
-             trigger_garbage_collection
-             results[:gc_count] += 1
-           end
-
-           update_memory_tracking(current_memory)
-         end
-       rescue => e
-         results[:errors] << {
-           type: "caching_error",
-           message: e.message
-         }
-       end
-
-       results
-     end
-
-     # Optimize memory usage
-     def optimize_memory_usage(options = {})
-       optimizations = {
-         memory_before: get_memory_usage,
-         optimizations_applied: [],
-         memory_after: 0,
-         memory_saved: 0
-       }
-
-       # Clear cache if memory usage is high
-       if get_memory_usage > @config[:max_memory_usage] * 0.8
-         clear_cache
-         optimizations[:optimizations_applied] << "cache_cleared"
-       end
-
-       # Trigger garbage collection
-       trigger_garbage_collection
-       optimizations[:optimizations_applied] << "garbage_collection"
-
-       # Compress data if enabled
-       if @config[:compression_enabled]
-         compress_data
-         optimizations[:optimizations_applied] << "data_compression"
-       end
-
-       optimizations[:memory_after] = get_memory_usage
-       optimizations[:memory_saved] = optimizations[:memory_before] - optimizations[:memory_after]
-
-       optimizations
-     end
-
-     # Get memory statistics
-     def get_memory_statistics
-       {
-         current_memory: get_memory_usage,
-         peak_memory: @peak_memory_usage,
-         cache_size: @cache.length,
-         gc_count: @gc_count,
-         streaming_data_size: @streaming_data.length,
-         memory_limit: @config[:max_memory_usage],
-         memory_usage_percentage: (get_memory_usage.to_f / @config[:max_memory_usage] * 100).round(2)
-       }
-     end
-
-     # Clear memory
-     def clear_memory
-       clear_cache
-       @streaming_data.clear
-       trigger_garbage_collection
-
-       {
-         memory_cleared: true,
-         memory_after_clear: get_memory_usage
-       }
-     end
-
-     # Monitor memory usage
-     def monitor_memory_usage(duration = 60, interval = 1)
-       monitoring_data = {
-         start_time: Time.now,
-         duration: duration,
-         interval: interval,
-         measurements: [],
-         alerts: []
-       }
-
-       start_time = Time.now
-       end_time = start_time + duration
-
-       while Time.now < end_time
-         current_memory = get_memory_usage
-         current_time = Time.now
-
-         measurement = {
-           timestamp: current_time,
-           memory_usage: current_memory,
-           memory_percentage: (current_memory.to_f / @config[:max_memory_usage] * 100).round(2)
-         }
-
-         monitoring_data[:measurements] << measurement
-
-         # Check for memory alerts
-         if current_memory > @config[:max_memory_usage] * 0.9
-           monitoring_data[:alerts] << {
-             timestamp: current_time,
-             type: "high_memory_usage",
-             message: "Memory usage is at #{measurement[:memory_percentage]}%"
-           }
-         end
-
-         sleep(interval)
-       end
-
-       monitoring_data[:end_time] = Time.now
-       monitoring_data
-     end
-
-     private
-
-     def process_chunk(chunk, processor_method, options)
-       results = {
-         processed_items: 0,
-         results: [],
-         errors: []
-       }
-
-       chunk.each_with_index do |item, index|
-         result = processor_method.call(item, options)
-         results[:results] << result
-         results[:processed_items] += 1
-       rescue => e
-         results[:errors] << {
-           item_index: index,
-           error: e.message
-         }
-       end
-
-       results
-     end
-
-     def should_trigger_gc?(current_memory)
-       current_memory > @config[:max_memory_usage] * @config[:gc_threshold]
-     end
-
-     def trigger_garbage_collection
-       GC.start
-       @gc_count += 1
-     end
-
-     def get_memory_usage
-       # Get current memory usage in bytes
-       Process.getrusage(:SELF).maxrss * 1024
-     end
-
-     def update_memory_tracking(current_memory)
-       @memory_usage = current_memory
-       @peak_memory_usage = [@peak_memory_usage, current_memory].max
-     end
-
-     def generate_cache_key(item)
-       # Generate a cache key for the item
-       Digest::MD5.hexdigest(item.to_json)
-     rescue
-       # Fallback to object_id if JSON serialization fails
-       "item_#{item.object_id}"
-     end
-
-     def cache_result(key, result, max_cache_size)
-       # Add result to cache
-       @cache[key] = result
-
-       # Remove oldest entries if cache is full
-       return unless @cache.length > max_cache_size
-
-       oldest_key = @cache.keys.first
-       @cache.delete(oldest_key)
-     end
-
-     def clear_cache
-       @cache.clear
-     end
-
-     def compress_data
-       # Compress streaming data if it's large
-       return unless @streaming_data.length > 1000
-
-       @streaming_data = @streaming_data.last(500) # Keep only recent data
-     end
-   end
- end
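
The class above was self-contained, so its whole public API is visible in the hunk. For context, a minimal usage sketch against the 0.5.0 interface (the input data and processor proc are hypothetical; note that get_memory_usage calls Process.getrusage, which core Ruby does not define, so actually running this also required a third-party extension supplying that method):

  require "aidp" # assumes the 0.5.0 gem's top-level require loads the analyze classes

  manager = Aidp::MemoryManager.new(chunk_size: 500)
  # Any callable that takes (item, options) can serve as the processor.
  square = proc { |item, _options| item * item }
  report = manager.process_large_dataset((1..10_000).to_a, square, strategy: "chunking")
  report[:processed_chunks] # => 20 (10_000 items in slices of 500)
  report[:processed_items]  # => 10_000
  report[:errors]           # => []
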
data/lib/aidp/analyze/metrics_storage.rb
@@ -1,336 +0,0 @@
- # frozen_string_literal: true
-
- require "pg"
- require "json"
-
- module Aidp
-   module Analyze
-     class MetricsStorage
-       # Database schema version
-       SCHEMA_VERSION = 1
-
-       def initialize(project_dir = Dir.pwd, db_config = nil)
-         @project_dir = project_dir
-         @db_config = db_config || default_db_config
-         @db = nil
-
-         ensure_database_exists
-       end
-
-       # Store step execution metrics
-       def store_step_metrics(step_name, provider_name, duration, success, metadata = {})
-         ensure_connection
-
-         timestamp = Time.now
-
-         result = @db.exec_params(
-           "INSERT INTO step_executions (step_name, provider_name, duration, success, metadata, created_at) VALUES ($1, $2, $3, $4, $5, $6) RETURNING id",
-           [step_name, provider_name, duration, success, metadata.to_json, timestamp]
-         )
-
-         {
-           id: result[0]["id"],
-           step_name: step_name,
-           provider_name: provider_name,
-           duration: duration,
-           success: success,
-           stored_at: timestamp
-         }
-       end
-
-       # Store provider activity metrics
-       def store_provider_activity(provider_name, step_name, activity_summary)
-         ensure_connection
-
-         timestamp = Time.now
-
-         result = @db.exec_params(
-           "INSERT INTO provider_activities (provider_name, step_name, start_time, end_time, duration, final_state, stuck_detected, created_at) VALUES ($1, $2, $3, $4, $5, $6, $7, $8) RETURNING id",
-           [
-             provider_name,
-             step_name,
-             activity_summary[:start_time],
-             activity_summary[:end_time],
-             activity_summary[:duration],
-             activity_summary[:final_state].to_s,
-             activity_summary[:stuck_detected],
-             timestamp
-           ]
-         )
-
-         {
-           id: result[0]["id"],
-           provider_name: provider_name,
-           step_name: step_name,
-           stored_at: timestamp
-         }
-       end
-
-       # Get step execution statistics
-       def get_step_statistics(step_name = nil, provider_name = nil, limit = 100)
-         ensure_connection
-
-         query = "SELECT * FROM step_executions WHERE 1=1"
-         params = []
-         param_index = 1
-
-         if step_name
-           query += " AND step_name = $#{param_index}"
-           params << step_name
-           param_index += 1
-         end
-
-         if provider_name
-           query += " AND provider_name = $#{param_index}"
-           params << provider_name
-           param_index += 1
-         end
-
-         query += " ORDER BY created_at DESC LIMIT $#{param_index}"
-         params << limit
-
-         results = @db.exec_params(query, params)
-         results.map { |row| parse_step_execution(row) }
-       end
-
-       # Get provider activity statistics
-       def get_provider_activity_statistics(provider_name = nil, step_name = nil, limit = 100)
-         ensure_connection
-
-         query = "SELECT * FROM provider_activities WHERE 1=1"
-         params = []
-         param_index = 1
-
-         if provider_name
-           query += " AND provider_name = $#{param_index}"
-           params << provider_name
-           param_index += 1
-         end
-
-         if step_name
-           query += " AND step_name = $#{param_index}"
-           params << step_name
-           param_index += 1
-         end
-
-         query += " ORDER BY created_at DESC LIMIT $#{param_index}"
-         params << limit
-
-         results = @db.exec_params(query, params)
-         results.map { |row| parse_provider_activity(row) }
-       end
-
-       # Calculate timeout recommendations based on p95 of execution times
-       def calculate_timeout_recommendations
-         ensure_connection
-
-         recommendations = {}
-
-         # Get all step names
-         step_names = @db.exec("SELECT DISTINCT step_name FROM step_executions WHERE success = true")
-
-         step_names.each do |row|
-           step_name = row["step_name"]
-
-           # Get successful executions for this step
-           durations = @db.exec_params(
-             "SELECT duration FROM step_executions WHERE step_name = $1 AND success = true ORDER BY duration",
-             [step_name]
-           ).map { |r| r["duration"].to_f }
-
-           next if durations.empty?
-
-           # Calculate p95
-           p95_index = (durations.length * 0.95).ceil - 1
-           p95_duration = durations[p95_index]
-
-           # Round up to nearest second and add 10% buffer
-           recommended_timeout = (p95_duration * 1.1).ceil
-
-           recommendations[step_name] = {
-             p95_duration: p95_duration,
-             recommended_timeout: recommended_timeout,
-             sample_count: durations.length,
-             min_duration: durations.first,
-             max_duration: durations.last,
-             avg_duration: durations.sum.to_f / durations.length
-           }
-         end
-
-         recommendations
-       end
-
-       # Get overall metrics summary
-       def get_metrics_summary
-         ensure_connection
-
-         summary = {}
-
-         # Total executions
-         total_executions = @db.exec("SELECT COUNT(*) FROM step_executions").first["count"].to_i
-         summary[:total_executions] = total_executions
-
-         # Successful executions
-         successful_executions = @db.exec("SELECT COUNT(*) FROM step_executions WHERE success = true").first["count"].to_i
-         summary[:successful_executions] = successful_executions
-
-         # Success rate
-         summary[:success_rate] = (total_executions > 0) ? (successful_executions.to_f / total_executions * 100).round(2) : 0
-
-         # Average duration
-         avg_duration = @db.exec("SELECT AVG(duration) FROM step_executions WHERE success = true").first["avg"]
-         summary[:average_duration] = avg_duration ? avg_duration.to_f.round(2) : 0
-
-         # Stuck detections
-         stuck_count = @db.exec("SELECT COUNT(*) FROM provider_activities WHERE stuck_detected = true").first["count"].to_i
-         summary[:stuck_detections] = stuck_count
-
-         # Date range
-         date_range = @db.exec("SELECT MIN(created_at), MAX(created_at) FROM step_executions").first
-         if date_range && date_range["min"]
-           summary[:date_range] = {
-             start: Time.parse(date_range["min"]),
-             end: Time.parse(date_range["max"])
-           }
-         end
-
-         summary
-       end
-
-       # Clean up old metrics data
-       def cleanup_old_metrics(retention_days = 30)
-         ensure_connection
-
-         cutoff_time = Time.now - (retention_days * 24 * 60 * 60)
-
-         # Delete old step executions
-         deleted_executions = @db.exec_params(
-           "DELETE FROM step_executions WHERE created_at < $1 RETURNING id",
-           [cutoff_time]
-         ).ntuples
-
-         # Delete old provider activities
-         deleted_activities = @db.exec_params(
-           "DELETE FROM provider_activities WHERE created_at < $1 RETURNING id",
-           [cutoff_time]
-         ).ntuples
-
-         {
-           deleted_executions: deleted_executions,
-           deleted_activities: deleted_activities,
-           cutoff_time: cutoff_time
-         }
-       end
-
-       # Export metrics data
-       def export_metrics(format = :json)
-         ensure_connection
-
-         case format
-         when :json
-           {
-             step_executions: get_step_statistics(nil, nil, 1000),
-             provider_activities: get_provider_activity_statistics(nil, nil, 1000),
-             summary: get_metrics_summary,
-             recommendations: calculate_timeout_recommendations,
-             exported_at: Time.now.iso8601
-           }
-         when :csv
-           # TODO: Implement CSV export
-           raise NotImplementedError, "CSV export not yet implemented"
-         else
-           raise ArgumentError, "Unsupported export format: #{format}"
-         end
-       end
-
-       private
-
-       def default_db_config
-         {
-           host: ENV["AIDP_DB_HOST"] || "localhost",
-           port: ENV["AIDP_DB_PORT"] || 5432,
-           dbname: ENV["AIDP_DB_NAME"] || "aidp",
-           user: ENV["AIDP_DB_USER"] || ENV["USER"],
-           password: ENV["AIDP_DB_PASSWORD"]
-         }
-       end
-
-       def ensure_connection
-         return if @db
-
-         @db = PG.connect(@db_config)
-         @db.type_map_for_results = PG::BasicTypeMapForResults.new(@db)
-       end
-
-       def ensure_database_exists
-         ensure_connection
-
-         # Create step_executions table if it doesn't exist
-         @db.exec(<<~SQL)
-           CREATE TABLE IF NOT EXISTS step_executions (
-             id SERIAL PRIMARY KEY,
-             step_name TEXT NOT NULL,
-             provider_name TEXT NOT NULL,
-             duration REAL NOT NULL,
-             success BOOLEAN NOT NULL,
-             metadata JSONB,
-             created_at TIMESTAMP WITH TIME ZONE NOT NULL
-           )
-         SQL
-
-         # Create provider_activities table if it doesn't exist
-         @db.exec(<<~SQL)
-           CREATE TABLE IF NOT EXISTS provider_activities (
-             id SERIAL PRIMARY KEY,
-             provider_name TEXT NOT NULL,
-             step_name TEXT NOT NULL,
-             start_time TIMESTAMP WITH TIME ZONE,
-             end_time TIMESTAMP WITH TIME ZONE,
-             duration REAL,
-             final_state TEXT,
-             stuck_detected BOOLEAN DEFAULT FALSE,
-             created_at TIMESTAMP WITH TIME ZONE NOT NULL
-           )
-         SQL
-
-         # Create indexes separately
-         @db.exec("CREATE INDEX IF NOT EXISTS idx_step_executions_step_name ON step_executions(step_name)")
-         @db.exec("CREATE INDEX IF NOT EXISTS idx_step_executions_provider_name ON step_executions(provider_name)")
-         @db.exec("CREATE INDEX IF NOT EXISTS idx_step_executions_created_at ON step_executions(created_at)")
-         @db.exec("CREATE INDEX IF NOT EXISTS idx_provider_activities_provider_name ON provider_activities(provider_name)")
-         @db.exec("CREATE INDEX IF NOT EXISTS idx_provider_activities_step_name ON provider_activities(step_name)")
-         @db.exec("CREATE INDEX IF NOT EXISTS idx_provider_activities_created_at ON provider_activities(created_at)")
-
-         # Create metrics_schema_version table if it doesn't exist
-         @db.exec("CREATE TABLE IF NOT EXISTS metrics_schema_version (version INTEGER NOT NULL)")
-         @db.exec_params("INSERT INTO metrics_schema_version (version) VALUES ($1) ON CONFLICT DO NOTHING", [SCHEMA_VERSION])
-       end
-
-       def parse_step_execution(row)
-         {
-           id: row["id"].to_i,
-           step_name: row["step_name"],
-           provider_name: row["provider_name"],
-           duration: row["duration"].to_f,
-           success: row["success"],
-           metadata: row["metadata"] ? JSON.parse(row["metadata"]) : {},
-           created_at: Time.parse(row["created_at"])
-         }
-       end
-
-       def parse_provider_activity(row)
-         {
-           id: row["id"].to_i,
-           provider_name: row["provider_name"],
-           step_name: row["step_name"],
-           start_time: row["start_time"] ? Time.parse(row["start_time"]) : nil,
-           end_time: row["end_time"] ? Time.parse(row["end_time"]) : nil,
-           duration: row["duration"].to_f,
-           final_state: row["final_state"]&.to_sym,
-           stuck_detected: row["stuck_detected"],
-           created_at: Time.parse(row["created_at"])
-         }
-       end
-     end
-   end
- end
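
MetricsStorage was the PostgreSQL half of the metrics pipeline; 0.8.0 removes it along with the rest of the database layer (pg_adapter.rb, database_connection.rb, and related files in the list above). A minimal sketch of the 0.5.0 interface, assuming a reachable PostgreSQL server and the pg gem; the step name, provider, duration, and database name are illustrative:

  require "aidp" # assumes the 0.5.0 gem's top-level require loads the analyze classes

  storage = Aidp::Analyze::MetricsStorage.new(Dir.pwd, {dbname: "aidp_dev"})
  # Record one successful step execution (duration in seconds).
  storage.store_step_metrics("00_PRD", "anthropic", 42.5, true, {tokens: 1200})
  # Suggested timeouts: p95 of successful durations per step, plus a 10% buffer.
  storage.calculate_timeout_recommendations.each do |step, rec|
    puts "#{step}: ~#{rec[:recommended_timeout]}s (p95 #{rec[:p95_duration]}s, n=#{rec[:sample_count]})"
  end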