aidp 0.5.0 → 0.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (122)
  1. checksums.yaml +4 -4
  2. data/README.md +128 -151
  3. data/bin/aidp +1 -1
  4. data/lib/aidp/analysis/kb_inspector.rb +471 -0
  5. data/lib/aidp/analysis/seams.rb +159 -0
  6. data/lib/aidp/analysis/tree_sitter_grammar_loader.rb +480 -0
  7. data/lib/aidp/analysis/tree_sitter_scan.rb +686 -0
  8. data/lib/aidp/analyze/error_handler.rb +2 -78
  9. data/lib/aidp/analyze/json_file_storage.rb +292 -0
  10. data/lib/aidp/analyze/progress.rb +12 -0
  11. data/lib/aidp/analyze/progress_visualizer.rb +12 -17
  12. data/lib/aidp/analyze/ruby_maat_integration.rb +13 -31
  13. data/lib/aidp/analyze/runner.rb +256 -87
  14. data/lib/aidp/analyze/steps.rb +6 -0
  15. data/lib/aidp/cli/jobs_command.rb +103 -435
  16. data/lib/aidp/cli.rb +317 -191
  17. data/lib/aidp/config.rb +298 -10
  18. data/lib/aidp/debug_logger.rb +195 -0
  19. data/lib/aidp/debug_mixin.rb +187 -0
  20. data/lib/aidp/execute/progress.rb +9 -0
  21. data/lib/aidp/execute/runner.rb +221 -40
  22. data/lib/aidp/execute/steps.rb +17 -7
  23. data/lib/aidp/execute/workflow_selector.rb +211 -0
  24. data/lib/aidp/harness/completion_checker.rb +268 -0
  25. data/lib/aidp/harness/condition_detector.rb +1526 -0
  26. data/lib/aidp/harness/config_loader.rb +373 -0
  27. data/lib/aidp/harness/config_manager.rb +382 -0
  28. data/lib/aidp/harness/config_schema.rb +1006 -0
  29. data/lib/aidp/harness/config_validator.rb +355 -0
  30. data/lib/aidp/harness/configuration.rb +477 -0
  31. data/lib/aidp/harness/enhanced_runner.rb +494 -0
  32. data/lib/aidp/harness/error_handler.rb +616 -0
  33. data/lib/aidp/harness/provider_config.rb +423 -0
  34. data/lib/aidp/harness/provider_factory.rb +306 -0
  35. data/lib/aidp/harness/provider_manager.rb +1269 -0
  36. data/lib/aidp/harness/provider_type_checker.rb +88 -0
  37. data/lib/aidp/harness/runner.rb +411 -0
  38. data/lib/aidp/harness/state/errors.rb +28 -0
  39. data/lib/aidp/harness/state/metrics.rb +219 -0
  40. data/lib/aidp/harness/state/persistence.rb +128 -0
  41. data/lib/aidp/harness/state/provider_state.rb +132 -0
  42. data/lib/aidp/harness/state/ui_state.rb +68 -0
  43. data/lib/aidp/harness/state/workflow_state.rb +123 -0
  44. data/lib/aidp/harness/state_manager.rb +586 -0
  45. data/lib/aidp/harness/status_display.rb +888 -0
  46. data/lib/aidp/harness/ui/base.rb +16 -0
  47. data/lib/aidp/harness/ui/enhanced_tui.rb +545 -0
  48. data/lib/aidp/harness/ui/enhanced_workflow_selector.rb +252 -0
  49. data/lib/aidp/harness/ui/error_handler.rb +132 -0
  50. data/lib/aidp/harness/ui/frame_manager.rb +361 -0
  51. data/lib/aidp/harness/ui/job_monitor.rb +500 -0
  52. data/lib/aidp/harness/ui/navigation/main_menu.rb +311 -0
  53. data/lib/aidp/harness/ui/navigation/menu_formatter.rb +120 -0
  54. data/lib/aidp/harness/ui/navigation/menu_item.rb +142 -0
  55. data/lib/aidp/harness/ui/navigation/menu_state.rb +139 -0
  56. data/lib/aidp/harness/ui/navigation/submenu.rb +202 -0
  57. data/lib/aidp/harness/ui/navigation/workflow_selector.rb +176 -0
  58. data/lib/aidp/harness/ui/progress_display.rb +280 -0
  59. data/lib/aidp/harness/ui/question_collector.rb +141 -0
  60. data/lib/aidp/harness/ui/spinner_group.rb +184 -0
  61. data/lib/aidp/harness/ui/spinner_helper.rb +152 -0
  62. data/lib/aidp/harness/ui/status_manager.rb +312 -0
  63. data/lib/aidp/harness/ui/status_widget.rb +280 -0
  64. data/lib/aidp/harness/ui/workflow_controller.rb +312 -0
  65. data/lib/aidp/harness/user_interface.rb +2381 -0
  66. data/lib/aidp/provider_manager.rb +131 -7
  67. data/lib/aidp/providers/anthropic.rb +28 -109
  68. data/lib/aidp/providers/base.rb +170 -0
  69. data/lib/aidp/providers/cursor.rb +52 -183
  70. data/lib/aidp/providers/gemini.rb +24 -109
  71. data/lib/aidp/providers/macos_ui.rb +99 -5
  72. data/lib/aidp/providers/opencode.rb +194 -0
  73. data/lib/aidp/storage/csv_storage.rb +172 -0
  74. data/lib/aidp/storage/file_manager.rb +214 -0
  75. data/lib/aidp/storage/json_storage.rb +140 -0
  76. data/lib/aidp/version.rb +1 -1
  77. data/lib/aidp.rb +56 -35
  78. data/templates/ANALYZE/06a_tree_sitter_scan.md +217 -0
  79. data/templates/COMMON/AGENT_BASE.md +11 -0
  80. data/templates/EXECUTE/00_PRD.md +4 -4
  81. data/templates/EXECUTE/02_ARCHITECTURE.md +5 -4
  82. data/templates/EXECUTE/07_TEST_PLAN.md +4 -1
  83. data/templates/EXECUTE/08_TASKS.md +4 -4
  84. data/templates/EXECUTE/10_IMPLEMENTATION_AGENT.md +4 -4
  85. data/templates/README.md +279 -0
  86. data/templates/aidp-development.yml.example +373 -0
  87. data/templates/aidp-minimal.yml.example +48 -0
  88. data/templates/aidp-production.yml.example +475 -0
  89. data/templates/aidp.yml.example +598 -0
  90. metadata +106 -64
  91. data/lib/aidp/analyze/agent_personas.rb +0 -71
  92. data/lib/aidp/analyze/agent_tool_executor.rb +0 -445
  93. data/lib/aidp/analyze/data_retention_manager.rb +0 -426
  94. data/lib/aidp/analyze/database.rb +0 -260
  95. data/lib/aidp/analyze/dependencies.rb +0 -335
  96. data/lib/aidp/analyze/export_manager.rb +0 -425
  97. data/lib/aidp/analyze/focus_guidance.rb +0 -517
  98. data/lib/aidp/analyze/incremental_analyzer.rb +0 -543
  99. data/lib/aidp/analyze/language_analysis_strategies.rb +0 -897
  100. data/lib/aidp/analyze/large_analysis_progress.rb +0 -504
  101. data/lib/aidp/analyze/memory_manager.rb +0 -365
  102. data/lib/aidp/analyze/metrics_storage.rb +0 -336
  103. data/lib/aidp/analyze/parallel_processor.rb +0 -460
  104. data/lib/aidp/analyze/performance_optimizer.rb +0 -694
  105. data/lib/aidp/analyze/repository_chunker.rb +0 -704
  106. data/lib/aidp/analyze/static_analysis_detector.rb +0 -577
  107. data/lib/aidp/analyze/storage.rb +0 -662
  108. data/lib/aidp/analyze/tool_configuration.rb +0 -456
  109. data/lib/aidp/analyze/tool_modernization.rb +0 -750
  110. data/lib/aidp/database/pg_adapter.rb +0 -148
  111. data/lib/aidp/database_config.rb +0 -69
  112. data/lib/aidp/database_connection.rb +0 -72
  113. data/lib/aidp/database_migration.rb +0 -158
  114. data/lib/aidp/job_manager.rb +0 -41
  115. data/lib/aidp/jobs/base_job.rb +0 -47
  116. data/lib/aidp/jobs/provider_execution_job.rb +0 -96
  117. data/lib/aidp/project_detector.rb +0 -117
  118. data/lib/aidp/providers/agent_supervisor.rb +0 -348
  119. data/lib/aidp/providers/supervised_base.rb +0 -317
  120. data/lib/aidp/providers/supervised_cursor.rb +0 -22
  121. data/lib/aidp/sync.rb +0 -13
  122. data/lib/aidp/workspace.rb +0 -19
@@ -1,662 +0,0 @@
1
- # frozen_string_literal: true
2
-
3
- require "pg"
4
- require "json"
5
- require "yaml"
6
-
7
- module Aidp
8
- class AnalysisStorage
9
- # Database schema version
10
- SCHEMA_VERSION = 1
11
-
12
- def initialize(project_dir = Dir.pwd, config = {})
13
- @project_dir = project_dir
14
- @config = config
15
- @db = nil
16
-
17
- ensure_database_exists
18
- end
19
-
20
- # Store analysis result
21
- def store_analysis_result(step_name, data, options = {})
22
- ensure_connection
23
-
24
- timestamp = Time.now
25
- execution_id = options[:execution_id] || generate_execution_id
26
-
27
- # Store main analysis data
28
- analysis_data = {
29
- execution_id: execution_id,
30
- step_name: step_name,
31
- data: data,
32
- metadata: options[:metadata] || {},
33
- created_at: timestamp,
34
- updated_at: timestamp
35
- }
36
-
37
- # Insert or update analysis result
38
- @db.exec_params(
39
- <<~SQL,
40
- INSERT INTO analysis_results (execution_id, step_name, data, metadata, created_at, updated_at)
41
- VALUES ($1, $2, $3, $4, $5, $6)
42
- ON CONFLICT (execution_id, step_name)
43
- DO UPDATE SET
44
- data = EXCLUDED.data,
45
- metadata = EXCLUDED.metadata,
46
- updated_at = EXCLUDED.updated_at
47
- SQL
48
- [
49
- execution_id,
50
- step_name,
51
- data.to_json,
52
- analysis_data[:metadata].to_json,
53
- timestamp,
54
- timestamp
55
- ]
56
- )
57
-
58
- # Store metrics with indefinite retention
59
- store_metrics(execution_id, step_name, data, options)
60
-
61
- {
62
- execution_id: execution_id,
63
- step_name: step_name,
64
- stored_at: timestamp,
65
- success: true
66
- }
67
- rescue => e
68
- {
69
- success: false,
70
- error: e.message,
71
- execution_id: execution_id,
72
- step_name: step_name
73
- }
74
- end
75
-
76
- # Store metrics with indefinite retention
77
- def store_metrics(execution_id, step_name, data, options = {})
78
- ensure_connection
79
-
80
- timestamp = Time.now
81
- metrics = extract_metrics(data)
82
-
83
- metrics.each do |metric_name, metric_value|
84
- @db.exec_params(
85
- <<~SQL,
86
- INSERT INTO metrics (execution_id, step_name, metric_name, metric_value, metric_type, created_at)
87
- VALUES ($1, $2, $3, $4, $5, $6)
88
- SQL
89
- [
90
- execution_id,
91
- step_name,
92
- metric_name,
93
- metric_value.to_s,
94
- metric_value.class.name,
95
- timestamp
96
- ]
97
- )
98
- end
99
-
100
- # Store aggregated metrics
101
- store_aggregated_metrics(execution_id, step_name, metrics, timestamp)
102
- end
103
-
104
- # Retrieve analysis result
105
- def get_analysis_result(execution_id, step_name = nil)
106
- ensure_connection
107
-
108
- result = if step_name
109
- # Get specific step result
110
- @db.exec_params(
111
- <<~SQL,
112
- SELECT * FROM analysis_results
113
- WHERE execution_id = $1 AND step_name = $2
114
- ORDER BY updated_at DESC
115
- LIMIT 1
116
- SQL
117
- [execution_id, step_name]
118
- )
119
- else
120
- # Get all results for execution
121
- @db.exec_params(
122
- <<~SQL,
123
- SELECT * FROM analysis_results
124
- WHERE execution_id = $1
125
- ORDER BY updated_at DESC
126
- SQL
127
- [execution_id]
128
- )
129
- end
130
-
131
- return nil if result.ntuples.zero?
132
-
133
- if result.ntuples > 1
134
- # Multiple results
135
- result.map { |row| parse_analysis_result(row) }
136
- else
137
- # Single result
138
- parse_analysis_result(result[0])
139
- end
140
- end
141
-
142
- # Retrieve metrics
143
- def get_metrics(execution_id = nil, step_name = nil, metric_name = nil, limit = 100)
144
- ensure_connection
145
-
146
- query = "SELECT * FROM metrics WHERE 1=1"
147
- params = []
148
- param_index = 1
149
-
150
- if execution_id
151
- query += " AND execution_id = $#{param_index}"
152
- params << execution_id
153
- param_index += 1
154
- end
155
-
156
- if step_name
157
- query += " AND step_name = $#{param_index}"
158
- params << step_name
159
- param_index += 1
160
- end
161
-
162
- if metric_name
163
- query += " AND metric_name = $#{param_index}"
164
- params << metric_name
165
- param_index += 1
166
- end
167
-
168
- query += " ORDER BY created_at DESC"
169
- query += " LIMIT $#{param_index}"
170
- params << limit
171
-
172
- results = @db.exec_params(query, params)
173
- results.map { |row| parse_metric(row) }
174
- end
175
-
176
- # Get aggregated metrics
177
- def get_aggregated_metrics(execution_id = nil, step_name = nil, metric_name = nil)
178
- ensure_connection
179
-
180
- query = "SELECT * FROM aggregated_metrics WHERE 1=1"
181
- params = []
182
- param_index = 1
183
-
184
- if execution_id
185
- query += " AND execution_id = $#{param_index}"
186
- params << execution_id
187
- param_index += 1
188
- end
189
-
190
- if step_name
191
- query += " AND step_name = $#{param_index}"
192
- params << step_name
193
- param_index += 1
194
- end
195
-
196
- if metric_name
197
- query += " AND metric_name = $#{param_index}"
198
- params << metric_name
199
- param_index + 1
200
- end
201
-
202
- query += " ORDER BY created_at DESC"
203
-
204
- results = @db.exec_params(query, params)
205
- results.map { |row| parse_aggregated_metric(row) }
206
- end
207
-
208
- # Get execution history
209
- def get_execution_history(limit = 50)
210
- ensure_connection
211
-
212
- results = @db.exec_params(
213
- <<~SQL,
214
- SELECT DISTINCT execution_id, step_name, created_at, updated_at
215
- FROM analysis_results
216
- ORDER BY created_at DESC
217
- LIMIT $1
218
- SQL
219
- [limit]
220
- )
221
-
222
- results.map { |row| parse_execution_history(row) }
223
- end
224
-
225
- # Get analysis statistics
226
- def get_analysis_statistics
227
- ensure_connection
228
-
229
- stats = {}
230
-
231
- # Total executions
232
- total_executions = @db.exec("SELECT COUNT(DISTINCT execution_id) FROM analysis_results").first["count"].to_i
233
- stats[:total_executions] = total_executions
234
-
235
- # Total steps
236
- total_steps = @db.exec("SELECT COUNT(*) FROM analysis_results").first["count"].to_i
237
- stats[:total_steps] = total_steps
238
-
239
- # Steps by type
240
- steps_by_type = @db.exec("SELECT step_name, COUNT(*) FROM analysis_results GROUP BY step_name")
241
- stats[:steps_by_type] = steps_by_type.each_with_object({}) do |row, hash|
242
- hash[row["step_name"]] = row["count"].to_i
243
- end
244
-
245
- # Total metrics
246
- total_metrics = @db.exec("SELECT COUNT(*) FROM metrics").first["count"].to_i
247
- stats[:total_metrics] = total_metrics
248
-
249
- # Metrics by type
250
- metrics_by_type = @db.exec("SELECT metric_name, COUNT(*) FROM metrics GROUP BY metric_name")
251
- stats[:metrics_by_type] = metrics_by_type.each_with_object({}) do |row, hash|
252
- hash[row["metric_name"]] = row["count"].to_i
253
- end
254
-
255
- # Date range
256
- date_range = @db.exec("SELECT MIN(created_at), MAX(created_at) FROM analysis_results").first
257
- stats[:date_range] = {
258
- earliest: date_range["min"] ? Time.parse(date_range["min"]) : nil,
259
- latest: date_range["max"] ? Time.parse(date_range["max"]) : nil
260
- }
261
-
262
- stats
263
- end
264
-
265
- # Force overwrite analysis data (retains metrics)
266
- def force_overwrite(execution_id, step_name, data, options = {})
267
- ensure_connection
268
-
269
- # Delete existing analysis result
270
- @db.exec_params(
271
- "DELETE FROM analysis_results WHERE execution_id = $1 AND step_name = $2",
272
- [execution_id, step_name]
273
- )
274
-
275
- # Store new analysis result
276
- store_analysis_result(step_name, data, options.merge(execution_id: execution_id))
277
- end
278
-
279
- # Delete analysis data (retains metrics)
280
- def delete_analysis_data(execution_id = nil, step_name = nil)
281
- ensure_connection
282
-
283
- if execution_id && step_name
284
- @db.exec_params(
285
- "DELETE FROM analysis_results WHERE execution_id = $1 AND step_name = $2",
286
- [execution_id, step_name]
287
- )
288
- elsif execution_id
289
- @db.exec_params("DELETE FROM analysis_results WHERE execution_id = $1", [execution_id])
290
- elsif step_name
291
- @db.exec_params("DELETE FROM analysis_results WHERE step_name = $1", [step_name])
292
- else
293
- @db.exec("DELETE FROM analysis_results")
294
- end
295
-
296
- {success: true, deleted_execution_id: execution_id, deleted_step_name: step_name}
297
- end
298
-
299
- # Export data
300
- def export_data(format = "json", options = {})
301
- ensure_connection
302
-
303
- data = {
304
- analysis_results: export_analysis_results(options),
305
- metrics: export_metrics(options),
306
- aggregated_metrics: export_aggregated_metrics(options),
307
- statistics: get_analysis_statistics
308
- }
309
-
310
- case format.downcase
311
- when "json"
312
- JSON.pretty_generate(data)
313
- when "yaml"
314
- YAML.dump(data)
315
- else
316
- raise "Unsupported export format: #{format}"
317
- end
318
- end
319
-
320
- # Import data
321
- def import_data(data, format = "json")
322
- ensure_connection
323
-
324
- parsed_data = case format.downcase
325
- when "json"
326
- JSON.parse(data)
327
- when "yaml"
328
- YAML.safe_load(data)
329
- else
330
- raise "Unsupported import format: #{format}"
331
- end
332
-
333
- # Import analysis results
334
- parsed_data["analysis_results"]&.each do |result|
335
- @db.exec_params(
336
- <<~SQL,
337
- INSERT INTO analysis_results (execution_id, step_name, data, metadata, created_at, updated_at)
338
- VALUES ($1, $2, $3, $4, $5, $6)
339
- ON CONFLICT (execution_id, step_name)
340
- DO UPDATE SET
341
- data = EXCLUDED.data,
342
- metadata = EXCLUDED.metadata,
343
- updated_at = EXCLUDED.updated_at
344
- SQL
345
- [
346
- result["execution_id"],
347
- result["step_name"],
348
- result["data"],
349
- result["metadata"],
350
- result["created_at"],
351
- result["updated_at"]
352
- ]
353
- )
354
- end
355
-
356
- # Import metrics
357
- parsed_data["metrics"]&.each do |metric|
358
- @db.exec_params(
359
- <<~SQL,
360
- INSERT INTO metrics (execution_id, step_name, metric_name, metric_value, metric_type, created_at)
361
- VALUES ($1, $2, $3, $4, $5, $6)
362
- ON CONFLICT DO NOTHING
363
- SQL
364
- [
365
- metric["execution_id"],
366
- metric["step_name"],
367
- metric["metric_name"],
368
- metric["metric_value"],
369
- metric["metric_type"],
370
- metric["created_at"]
371
- ]
372
- )
373
- end
374
-
375
- {success: true, imported_records: parsed_data.length}
376
- end
377
-
378
- # Close database connection
379
- def close
380
- @db&.close
381
- @db = nil
382
- end
383
-
384
- private
385
-
386
- def ensure_database_exists
387
- ensure_connection
388
- create_schema
389
- end
390
-
391
- def ensure_connection
392
- return if @db
393
-
394
- @db = PG.connect(
395
- host: ENV["AIDP_DB_HOST"] || "localhost",
396
- port: ENV["AIDP_DB_PORT"] || 5432,
397
- dbname: ENV["AIDP_DB_NAME"] || "aidp",
398
- user: ENV["AIDP_DB_USER"] || ENV["USER"],
399
- password: ENV["AIDP_DB_PASSWORD"]
400
- )
401
- @db.type_map_for_results = PG::BasicTypeMapForResults.new(@db)
402
- end
403
-
404
- def create_schema
405
- # Create analysis_results table
406
- @db.exec(<<~SQL)
407
- CREATE TABLE IF NOT EXISTS analysis_results (
408
- id SERIAL PRIMARY KEY,
409
- execution_id TEXT NOT NULL,
410
- step_name TEXT NOT NULL,
411
- data JSONB NOT NULL,
412
- metadata JSONB,
413
- created_at TIMESTAMP WITH TIME ZONE NOT NULL,
414
- updated_at TIMESTAMP WITH TIME ZONE NOT NULL,
415
- UNIQUE(execution_id, step_name)
416
- )
417
- SQL
418
-
419
- # Create metrics table (indefinite retention)
420
- @db.exec(<<~SQL)
421
- CREATE TABLE IF NOT EXISTS metrics (
422
- id SERIAL PRIMARY KEY,
423
- execution_id TEXT NOT NULL,
424
- step_name TEXT NOT NULL,
425
- metric_name TEXT NOT NULL,
426
- metric_value TEXT NOT NULL,
427
- metric_type TEXT NOT NULL,
428
- created_at TIMESTAMP WITH TIME ZONE NOT NULL
429
- )
430
- SQL
431
-
432
- # Create aggregated_metrics table
433
- @db.exec(<<~SQL)
434
- CREATE TABLE IF NOT EXISTS aggregated_metrics (
435
- id SERIAL PRIMARY KEY,
436
- execution_id TEXT NOT NULL,
437
- step_name TEXT NOT NULL,
438
- metric_name TEXT NOT NULL,
439
- min_value DOUBLE PRECISION,
440
- max_value DOUBLE PRECISION,
441
- avg_value DOUBLE PRECISION,
442
- count INTEGER NOT NULL,
443
- created_at TIMESTAMP WITH TIME ZONE NOT NULL
444
- )
445
- SQL
446
-
447
- # Create indexes
448
- @db.exec("CREATE INDEX IF NOT EXISTS idx_analysis_results_execution_id ON analysis_results(execution_id)")
449
- @db.exec("CREATE INDEX IF NOT EXISTS idx_analysis_results_step_name ON analysis_results(step_name)")
450
- @db.exec("CREATE INDEX IF NOT EXISTS idx_analysis_results_created_at ON analysis_results(created_at)")
451
- @db.exec("CREATE INDEX IF NOT EXISTS idx_metrics_execution_id ON metrics(execution_id)")
452
- @db.exec("CREATE INDEX IF NOT EXISTS idx_metrics_step_name ON metrics(step_name)")
453
- @db.exec("CREATE INDEX IF NOT EXISTS idx_metrics_metric_name ON metrics(metric_name)")
454
- @db.exec("CREATE INDEX IF NOT EXISTS idx_metrics_created_at ON metrics(created_at)")
455
-
456
- # Store schema version
457
- @db.exec("CREATE TABLE IF NOT EXISTS schema_version (version INTEGER NOT NULL)")
458
- @db.exec_params("INSERT INTO schema_version (version) VALUES ($1) ON CONFLICT DO NOTHING", [SCHEMA_VERSION])
459
- end
460
-
461
- def generate_execution_id
462
- "exec_#{Time.now.to_i}_#{rand(1000)}"
463
- end
464
-
465
- def extract_metrics(data)
466
- metrics = {}
467
-
468
- case data
469
- when Hash
470
- data.each do |key, value|
471
- if value.is_a?(Numeric)
472
- metrics[key] = value
473
- elsif value.is_a?(Hash)
474
- metrics.merge!(extract_metrics(value))
475
- elsif value.is_a?(Array) && value.all?(Numeric)
476
- metrics["#{key}_count"] = value.length
477
- metrics["#{key}_sum"] = value.sum
478
- metrics["#{key}_avg"] = value.sum.to_f / value.length
479
- end
480
- end
481
- when Array
482
- metrics["count"] = data.length
483
- if data.all?(Numeric)
484
- metrics["sum"] = data.sum
485
- metrics["avg"] = data.sum.to_f / data.length
486
- end
487
- end
488
-
489
- metrics
490
- end
491
-
492
- def store_aggregated_metrics(execution_id, step_name, metrics, timestamp)
493
- ensure_connection
494
-
495
- metrics.each do |metric_name, metric_value|
496
- next unless metric_value.is_a?(Numeric)
497
-
498
- # Get existing aggregated metric
499
- existing = @db.exec_params(
500
- <<~SQL,
501
- SELECT * FROM aggregated_metrics
502
- WHERE execution_id = $1 AND step_name = $2 AND metric_name = $3
503
- SQL
504
- [execution_id, step_name, metric_name]
505
- ).first
506
-
507
- if existing
508
- # Update existing aggregated metric
509
- count = existing["count"].to_i + 1
510
- min_value = [existing["min_value"].to_f, metric_value].min
511
- max_value = [existing["max_value"].to_f, metric_value].max
512
- avg_value = ((existing["avg_value"].to_f * existing["count"].to_i) + metric_value) / count
513
-
514
- @db.exec_params(
515
- <<~SQL,
516
- UPDATE aggregated_metrics
517
- SET min_value = $1, max_value = $2, avg_value = $3, count = $4, created_at = $5
518
- WHERE id = $6
519
- SQL
520
- [min_value, max_value, avg_value, count, timestamp, existing["id"]]
521
- )
522
- else
523
- # Create new aggregated metric
524
- @db.exec_params(
525
- <<~SQL,
526
- INSERT INTO aggregated_metrics (execution_id, step_name, metric_name, min_value, max_value, avg_value, count, created_at)
527
- VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
528
- SQL
529
- [execution_id, step_name, metric_name, metric_value, metric_value, metric_value, 1, timestamp]
530
- )
531
- end
532
- end
533
- end
534
-
535
- def parse_analysis_result(row)
536
- return nil unless row
537
-
538
- {
539
- id: row["id"].to_i,
540
- execution_id: row["execution_id"],
541
- step_name: row["step_name"],
542
- data: JSON.parse(row["data"]),
543
- metadata: JSON.parse(row["metadata"] || "{}"),
544
- created_at: Time.parse(row["created_at"]),
545
- updated_at: Time.parse(row["updated_at"])
546
- }
547
- end
548
-
549
- def parse_metric(row)
550
- return nil unless row
551
-
552
- {
553
- id: row["id"].to_i,
554
- execution_id: row["execution_id"],
555
- step_name: row["step_name"],
556
- metric_name: row["metric_name"],
557
- metric_value: row["metric_value"],
558
- metric_type: row["metric_type"],
559
- created_at: Time.parse(row["created_at"])
560
- }
561
- end
562
-
563
- def parse_aggregated_metric(row)
564
- return nil unless row
565
-
566
- {
567
- id: row["id"].to_i,
568
- execution_id: row["execution_id"],
569
- step_name: row["step_name"],
570
- metric_name: row["metric_name"],
571
- min_value: row["min_value"].to_f,
572
- max_value: row["max_value"].to_f,
573
- avg_value: row["avg_value"].to_f,
574
- count: row["count"].to_i,
575
- created_at: Time.parse(row["created_at"])
576
- }
577
- end
578
-
579
- def parse_execution_history(row)
580
- return nil unless row
581
-
582
- {
583
- execution_id: row["execution_id"],
584
- step_name: row["step_name"],
585
- created_at: Time.parse(row["created_at"]),
586
- updated_at: Time.parse(row["updated_at"])
587
- }
588
- end
589
-
590
- def export_analysis_results(options = {})
591
- ensure_connection
592
-
593
- query = "SELECT * FROM analysis_results"
594
- params = []
595
- param_index = 1
596
-
597
- if options[:execution_id]
598
- query += " WHERE execution_id = $#{param_index}"
599
- params << options[:execution_id]
600
- param_index += 1
601
- end
602
-
603
- query += " ORDER BY created_at DESC"
604
-
605
- if options[:limit]
606
- query += " LIMIT $#{param_index}"
607
- params << options[:limit]
608
- end
609
-
610
- results = @db.exec_params(query, params)
611
- results.map { |row| parse_analysis_result(row) }
612
- end
613
-
614
- def export_metrics(options = {})
615
- ensure_connection
616
-
617
- query = "SELECT * FROM metrics"
618
- params = []
619
- param_index = 1
620
-
621
- if options[:execution_id]
622
- query += " WHERE execution_id = $#{param_index}"
623
- params << options[:execution_id]
624
- param_index += 1
625
- end
626
-
627
- query += " ORDER BY created_at DESC"
628
-
629
- if options[:limit]
630
- query += " LIMIT $#{param_index}"
631
- params << options[:limit]
632
- end
633
-
634
- results = @db.exec_params(query, params)
635
- results.map { |row| parse_metric(row) }
636
- end
637
-
638
- def export_aggregated_metrics(options = {})
639
- ensure_connection
640
-
641
- query = "SELECT * FROM aggregated_metrics"
642
- params = []
643
- param_index = 1
644
-
645
- if options[:execution_id]
646
- query += " WHERE execution_id = $#{param_index}"
647
- params << options[:execution_id]
648
- param_index += 1
649
- end
650
-
651
- query += " ORDER BY created_at DESC"
652
-
653
- if options[:limit]
654
- query += " LIMIT $#{param_index}"
655
- params << options[:limit]
656
- end
657
-
658
- results = @db.exec_params(query, params)
659
- results.map { |row| parse_aggregated_metric(row) }
660
- end
661
- end
662
- end