aidp 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79)
  1. checksums.yaml +7 -0
  2. data/LICENSE +21 -0
  3. data/README.md +210 -0
  4. data/bin/aidp +5 -0
  5. data/lib/aidp/analyze/agent_personas.rb +71 -0
  6. data/lib/aidp/analyze/agent_tool_executor.rb +445 -0
  7. data/lib/aidp/analyze/data_retention_manager.rb +426 -0
  8. data/lib/aidp/analyze/database.rb +243 -0
  9. data/lib/aidp/analyze/dependencies.rb +335 -0
  10. data/lib/aidp/analyze/error_handler.rb +486 -0
  11. data/lib/aidp/analyze/export_manager.rb +425 -0
  12. data/lib/aidp/analyze/feature_analyzer.rb +397 -0
  13. data/lib/aidp/analyze/focus_guidance.rb +517 -0
  14. data/lib/aidp/analyze/incremental_analyzer.rb +543 -0
  15. data/lib/aidp/analyze/language_analysis_strategies.rb +897 -0
  16. data/lib/aidp/analyze/large_analysis_progress.rb +504 -0
  17. data/lib/aidp/analyze/memory_manager.rb +365 -0
  18. data/lib/aidp/analyze/parallel_processor.rb +460 -0
  19. data/lib/aidp/analyze/performance_optimizer.rb +694 -0
  20. data/lib/aidp/analyze/prioritizer.rb +402 -0
  21. data/lib/aidp/analyze/progress.rb +75 -0
  22. data/lib/aidp/analyze/progress_visualizer.rb +320 -0
  23. data/lib/aidp/analyze/report_generator.rb +582 -0
  24. data/lib/aidp/analyze/repository_chunker.rb +702 -0
  25. data/lib/aidp/analyze/ruby_maat_integration.rb +572 -0
  26. data/lib/aidp/analyze/runner.rb +245 -0
  27. data/lib/aidp/analyze/static_analysis_detector.rb +577 -0
  28. data/lib/aidp/analyze/steps.rb +53 -0
  29. data/lib/aidp/analyze/storage.rb +600 -0
  30. data/lib/aidp/analyze/tool_configuration.rb +456 -0
  31. data/lib/aidp/analyze/tool_modernization.rb +750 -0
  32. data/lib/aidp/execute/progress.rb +76 -0
  33. data/lib/aidp/execute/runner.rb +135 -0
  34. data/lib/aidp/execute/steps.rb +113 -0
  35. data/lib/aidp/shared/cli.rb +117 -0
  36. data/lib/aidp/shared/config.rb +35 -0
  37. data/lib/aidp/shared/project_detector.rb +119 -0
  38. data/lib/aidp/shared/providers/anthropic.rb +26 -0
  39. data/lib/aidp/shared/providers/base.rb +17 -0
  40. data/lib/aidp/shared/providers/cursor.rb +102 -0
  41. data/lib/aidp/shared/providers/gemini.rb +26 -0
  42. data/lib/aidp/shared/providers/macos_ui.rb +26 -0
  43. data/lib/aidp/shared/sync.rb +15 -0
  44. data/lib/aidp/shared/util.rb +41 -0
  45. data/lib/aidp/shared/version.rb +7 -0
  46. data/lib/aidp/shared/workspace.rb +21 -0
  47. data/lib/aidp.rb +53 -0
  48. data/templates/ANALYZE/01_REPOSITORY_ANALYSIS.md +100 -0
  49. data/templates/ANALYZE/02_ARCHITECTURE_ANALYSIS.md +151 -0
  50. data/templates/ANALYZE/03_TEST_ANALYSIS.md +182 -0
  51. data/templates/ANALYZE/04_FUNCTIONALITY_ANALYSIS.md +200 -0
  52. data/templates/ANALYZE/05_DOCUMENTATION_ANALYSIS.md +202 -0
  53. data/templates/ANALYZE/06_STATIC_ANALYSIS.md +233 -0
  54. data/templates/ANALYZE/07_REFACTORING_RECOMMENDATIONS.md +316 -0
  55. data/templates/COMMON/AGENT_BASE.md +129 -0
  56. data/templates/COMMON/CONVENTIONS.md +19 -0
  57. data/templates/COMMON/TEMPLATES/ADR_TEMPLATE.md +21 -0
  58. data/templates/COMMON/TEMPLATES/DOMAIN_CHARTER.md +27 -0
  59. data/templates/COMMON/TEMPLATES/EVENT_EXAMPLE.yaml +16 -0
  60. data/templates/COMMON/TEMPLATES/MERMAID_C4.md +46 -0
  61. data/templates/COMMON/TEMPLATES/OPENAPI_STUB.yaml +11 -0
  62. data/templates/EXECUTE/00_PRD.md +36 -0
  63. data/templates/EXECUTE/01_NFRS.md +27 -0
  64. data/templates/EXECUTE/02A_ARCH_GATE_QUESTIONS.md +13 -0
  65. data/templates/EXECUTE/02_ARCHITECTURE.md +42 -0
  66. data/templates/EXECUTE/03_ADR_FACTORY.md +22 -0
  67. data/templates/EXECUTE/04_DOMAIN_DECOMPOSITION.md +24 -0
  68. data/templates/EXECUTE/05_CONTRACTS.md +27 -0
  69. data/templates/EXECUTE/06_THREAT_MODEL.md +23 -0
  70. data/templates/EXECUTE/07_TEST_PLAN.md +24 -0
  71. data/templates/EXECUTE/08_TASKS.md +29 -0
  72. data/templates/EXECUTE/09_SCAFFOLDING_DEVEX.md +25 -0
  73. data/templates/EXECUTE/10_IMPLEMENTATION_AGENT.md +30 -0
  74. data/templates/EXECUTE/11_STATIC_ANALYSIS.md +22 -0
  75. data/templates/EXECUTE/12_OBSERVABILITY_SLOS.md +21 -0
  76. data/templates/EXECUTE/13_DELIVERY_ROLLOUT.md +21 -0
  77. data/templates/EXECUTE/14_DOCS_PORTAL.md +23 -0
  78. data/templates/EXECUTE/15_POST_RELEASE.md +25 -0
  79. metadata +301 -0
@@ -0,0 +1,600 @@
1
+ # frozen_string_literal: true
2
+
3
+ require "sqlite3"
4
+ require "json"
5
+ require "yaml"
6
+
7
+ module Aidp
8
+ class AnalysisStorage
9
+ # Database schema version
10
+ SCHEMA_VERSION = 1
11
+
12
# Set up analysis storage rooted at +project_dir+.
# The SQLite file defaults to .aidp-analysis.db inside the project unless
# config[:db_path] overrides it. Opens/creates the database eagerly.
def initialize(project_dir = Dir.pwd, config = {})
  @project_dir = project_dir
  @config = config
  default_path = File.join(project_dir, ".aidp-analysis.db")
  @db_path = config[:db_path] || default_path
  @db = nil

  ensure_database_exists
end
20
+
21
+ # Store analysis result
22
# Persist one step's analysis payload, upserting on (execution_id, step_name).
#
# @param step_name [String] pipeline step identifier
# @param data [Object] JSON-serializable analysis payload
# @param options [Hash] :execution_id (reuse an id), :metadata (extra JSON hash)
# @return [Hash] {success: true, execution_id:, step_name:, stored_at:} on
#   success, or {success: false, error:} — errors are reported, not raised.
def store_analysis_result(step_name, data, options = {})
  ensure_connection

  now = Time.now
  execution_id = options[:execution_id] || generate_execution_id
  metadata = options[:metadata] || {}

  # Upsert: the table has UNIQUE(execution_id, step_name).
  @db.execute(
    "INSERT OR REPLACE INTO analysis_results (execution_id, step_name, data, metadata, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?)",
    execution_id,
    step_name,
    JSON.generate(data),
    JSON.generate(metadata),
    now.to_i,
    now.to_i
  )

  # Metrics are stored separately and retained indefinitely.
  store_metrics(execution_id, step_name, data, options)

  {
    execution_id: execution_id,
    step_name: step_name,
    stored_at: now,
    success: true
  }
rescue => e
  # Deliberate API choice: report failures as data rather than raising.
  {
    success: false,
    error: e.message,
    execution_id: execution_id,
    step_name: step_name
  }
end
66
+
67
+ # Store metrics with indefinite retention
68
# Record every numeric value found in +data+ as an individual metric row
# (append-only, indefinite retention), then fold them into the running
# aggregates.
def store_metrics(execution_id, step_name, data, options = {})
  ensure_connection

  now = Time.now
  extracted = extract_metrics(data)

  extracted.each do |name, value|
    @db.execute(
      "INSERT INTO metrics (execution_id, step_name, metric_name, metric_value, metric_type, created_at) VALUES (?, ?, ?, ?, ?, ?)",
      execution_id,
      step_name,
      name,
      value.to_s,
      value.class.name,
      now.to_i
    )
  end

  store_aggregated_metrics(execution_id, step_name, extracted, now)
end
89
+
90
+ # Retrieve analysis result
91
# Retrieve stored analysis results.
#
# With +step_name+: the most recent result hash for that step, or nil.
# Without: an Array of result hashes for the execution, newest first
# (empty Array when the execution is unknown).
#
# BUG FIX: the original inspected the query result with
# `result.is_a?(Array) && result.length > 1`, but a single sqlite3 row is
# itself an Array of column values, so the step-specific path mapped
# parse_analysis_result over individual *columns* and blew up on JSON.parse.
# Branch on the arguments instead of the shape of the result. This also
# normalizes the no-step case to always return an Array (the original
# returned a bare hash when exactly one row matched).
def get_analysis_result(execution_id, step_name = nil)
  ensure_connection

  if step_name
    row = @db.execute(
      "SELECT * FROM analysis_results WHERE execution_id = ? AND step_name = ? ORDER BY updated_at DESC LIMIT 1",
      execution_id,
      step_name
    ).first
    row && parse_analysis_result(row)
  else
    @db.execute(
      "SELECT * FROM analysis_results WHERE execution_id = ? ORDER BY updated_at DESC",
      execution_id
    ).map { |r| parse_analysis_result(r) }
  end
end
120
+
121
+ # Retrieve metrics
122
# Query raw metric rows, optionally filtered by execution, step and/or
# metric name; newest first, capped at +limit+.
def get_metrics(execution_id = nil, step_name = nil, metric_name = nil, limit = 100)
  ensure_connection

  # Keep only the filters the caller actually supplied.
  filters = {
    "execution_id" => execution_id,
    "step_name" => step_name,
    "metric_name" => metric_name
  }.compact

  sql = +"SELECT * FROM metrics WHERE 1=1"
  binds = []
  filters.each do |column, value|
    sql << " AND #{column} = ?"
    binds << value
  end
  sql << " ORDER BY created_at DESC LIMIT ?"
  binds << limit

  @db.execute(sql, *binds).map { |row| parse_metric(row) }
end
149
+
150
+ # Get aggregated metrics
151
# Query aggregated (min/max/avg/count) metric rows, optionally filtered by
# execution, step and/or metric name; newest first.
def get_aggregated_metrics(execution_id = nil, step_name = nil, metric_name = nil)
  ensure_connection

  filters = {
    "execution_id" => execution_id,
    "step_name" => step_name,
    "metric_name" => metric_name
  }.compact

  sql = +"SELECT * FROM aggregated_metrics WHERE 1=1"
  binds = []
  filters.each do |column, value|
    sql << " AND #{column} = ?"
    binds << value
  end
  sql << " ORDER BY created_at DESC"

  @db.execute(sql, *binds).map { |row| parse_aggregated_metric(row) }
end
177
+
178
+ # Get execution history
179
# List recent (execution_id, step_name) entries, newest first.
# NOTE: DISTINCT applies to the whole selected tuple, so one execution can
# appear once per step.
def get_execution_history(limit = 50)
  ensure_connection

  sql = "SELECT DISTINCT execution_id, step_name, created_at, updated_at FROM analysis_results ORDER BY created_at DESC LIMIT ?"
  @db.execute(sql, limit).map { |row| parse_execution_history(row) }
end
189
+
190
+ # Get analysis statistics
191
# Summarize the database: totals, per-step and per-metric counts, and the
# stored time range. Timestamps come back as Time (or nil when empty).
def get_analysis_statistics
  ensure_connection

  oldest, newest = @db.execute("SELECT MIN(created_at), MAX(created_at) FROM analysis_results").first

  {
    total_executions: @db.execute("SELECT COUNT(DISTINCT execution_id) FROM analysis_results").first[0],
    total_steps: @db.execute("SELECT COUNT(*) FROM analysis_results").first[0],
    steps_by_type: @db.execute("SELECT step_name, COUNT(*) FROM analysis_results GROUP BY step_name").to_h,
    total_metrics: @db.execute("SELECT COUNT(*) FROM metrics").first[0],
    metrics_by_type: @db.execute("SELECT metric_name, COUNT(*) FROM metrics GROUP BY metric_name").to_h,
    date_range: {
      earliest: oldest && Time.at(oldest),
      latest: newest && Time.at(newest)
    }
  }
end
225
+
226
+ # Force overwrite analysis data (retains metrics)
227
# Drop the stored analysis row for (execution_id, step_name) and write a
# fresh one. Raw metrics are never deleted here — retention is indefinite.
def force_overwrite(execution_id, step_name, data, options = {})
  ensure_connection

  @db.execute(
    "DELETE FROM analysis_results WHERE execution_id = ? AND step_name = ?",
    execution_id,
    step_name
  )

  store_analysis_result(step_name, data, options.merge(execution_id: execution_id))
end
240
+
241
+ # Delete analysis data (retains metrics)
242
# Delete analysis rows scoped by whichever of execution_id / step_name are
# given; with neither, ALL analysis rows go. Metrics are intentionally kept.
def delete_analysis_data(execution_id = nil, step_name = nil)
  ensure_connection

  conditions = []
  binds = []
  if execution_id
    conditions << "execution_id = ?"
    binds << execution_id
  end
  if step_name
    conditions << "step_name = ?"
    binds << step_name
  end

  sql = "DELETE FROM analysis_results"
  sql += " WHERE #{conditions.join(" AND ")}" unless conditions.empty?
  @db.execute(sql, *binds)

  {success: true, deleted_execution_id: execution_id, deleted_step_name: step_name}
end
261
+
262
+ # Export data
263
# Serialize the whole store (results, metrics, aggregates, statistics) as
# "json" or "yaml". Raises on any other format.
def export_data(format = "json", options = {})
  ensure_connection

  payload = {
    analysis_results: export_analysis_results(options),
    metrics: export_metrics(options),
    aggregated_metrics: export_aggregated_metrics(options),
    statistics: get_analysis_statistics
  }

  case format.downcase
  when "json" then JSON.pretty_generate(payload)
  when "yaml" then YAML.dump(payload)
  else raise "Unsupported export format: #{format}"
  end
end
282
+
283
+ # Import data
284
# Import a previously exported dump ("json" or "yaml"), upserting analysis
# results and metric rows.
#
# BUG FIX: the original reported `imported_records: parsed_data.length`,
# which is the number of top-level hash keys (at most 4), not the number of
# records written. Count the actual rows instead.
#
# SECURITY NOTE(review): YAML.load can instantiate arbitrary Ruby objects.
# If import files may come from untrusted sources, switch to
# YAML.safe_load(data, permitted_classes: [...]). Left unchanged here to
# avoid breaking existing dumps.
def import_data(data, format = "json")
  ensure_connection

  parsed = case format.downcase
  when "json"
    JSON.parse(data)
  when "yaml"
    YAML.load(data)
  else
    raise "Unsupported import format: #{format}"
  end

  results = parsed["analysis_results"] || []
  metrics = parsed["metrics"] || []

  results.each do |result|
    @db.execute(
      "INSERT OR REPLACE INTO analysis_results (execution_id, step_name, data, metadata, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?)",
      result["execution_id"],
      result["step_name"],
      result["data"],
      result["metadata"],
      result["created_at"],
      result["updated_at"]
    )
  end

  metrics.each do |metric|
    @db.execute(
      "INSERT OR REPLACE INTO metrics (execution_id, step_name, metric_name, metric_value, metric_type, created_at) VALUES (?, ?, ?, ?, ?, ?)",
      metric["execution_id"],
      metric["step_name"],
      metric["metric_name"],
      metric["metric_value"],
      metric["metric_type"],
      metric["created_at"]
    )
  end

  {success: true, imported_records: results.length + metrics.length}
end
324
+
325
+ # Close database connection
326
# Release the SQLite handle; safe to call repeatedly or before any
# connection was opened.
def close
  return if @db.nil?

  @db.close
  @db = nil
end
330
+
331
+ private
332
+
333
# First-run bootstrap: when no database file exists yet, opening the
# connection creates it, after which we lay down the schema.
# NOTE(review): a pre-existing file is assumed to already contain the
# schema — an empty/partial file would be left without tables; confirm
# whether that case needs handling (create_schema is IF NOT EXISTS safe).
def ensure_database_exists
  return if File.exist?(@db_path)

  @db = SQLite3::Database.new(@db_path)
  create_schema
end
339
+
340
# Lazily open the SQLite connection; reuses the existing handle once open.
def ensure_connection
  @db = SQLite3::Database.new(@db_path) if @db.nil?
  @db
end
343
+
344
# Create all tables and indexes; every statement uses IF NOT EXISTS so the
# method is idempotent against a database that already has the schema.
#
# BUG FIX: schema_version had no primary key, so the INSERT OR REPLACE at
# the bottom *appended* a duplicate row on every run ("OR REPLACE" only
# replaces on a uniqueness conflict). Making version the PRIMARY KEY turns
# it into a real upsert. (Databases created with the old table keep their
# layout — IF NOT EXISTS does not alter existing tables.)
def create_schema
  # analysis_results: one row per (execution, step); upserted on re-run.
  @db.execute(<<~SQL)
    CREATE TABLE IF NOT EXISTS analysis_results (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      execution_id TEXT NOT NULL,
      step_name TEXT NOT NULL,
      data TEXT NOT NULL,
      metadata TEXT,
      created_at INTEGER NOT NULL,
      updated_at INTEGER NOT NULL,
      UNIQUE(execution_id, step_name)
    )
  SQL

  # metrics: append-only rows, retained indefinitely.
  @db.execute(<<~SQL)
    CREATE TABLE IF NOT EXISTS metrics (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      execution_id TEXT NOT NULL,
      step_name TEXT NOT NULL,
      metric_name TEXT NOT NULL,
      metric_value TEXT NOT NULL,
      metric_type TEXT NOT NULL,
      created_at INTEGER NOT NULL
    )
  SQL

  # aggregated_metrics: running min/max/avg/count per metric.
  @db.execute(<<~SQL)
    CREATE TABLE IF NOT EXISTS aggregated_metrics (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      execution_id TEXT NOT NULL,
      step_name TEXT NOT NULL,
      metric_name TEXT NOT NULL,
      min_value REAL,
      max_value REAL,
      avg_value REAL,
      count INTEGER NOT NULL,
      created_at INTEGER NOT NULL
    )
  SQL

  # Indexes for the filter columns used by the query helpers.
  @db.execute("CREATE INDEX IF NOT EXISTS idx_analysis_results_execution_id ON analysis_results(execution_id)")
  @db.execute("CREATE INDEX IF NOT EXISTS idx_analysis_results_step_name ON analysis_results(step_name)")
  @db.execute("CREATE INDEX IF NOT EXISTS idx_analysis_results_created_at ON analysis_results(created_at)")
  @db.execute("CREATE INDEX IF NOT EXISTS idx_metrics_execution_id ON metrics(execution_id)")
  @db.execute("CREATE INDEX IF NOT EXISTS idx_metrics_step_name ON metrics(step_name)")
  @db.execute("CREATE INDEX IF NOT EXISTS idx_metrics_metric_name ON metrics(metric_name)")
  @db.execute("CREATE INDEX IF NOT EXISTS idx_metrics_created_at ON metrics(created_at)")

  # Single-row schema version marker (see BUG FIX above).
  @db.execute("CREATE TABLE IF NOT EXISTS schema_version (version INTEGER PRIMARY KEY)")
  @db.execute("INSERT OR REPLACE INTO schema_version (version) VALUES (?)", SCHEMA_VERSION)
end
400
+
401
# Build a loosely-unique execution id from the epoch second plus a random
# 0..999 suffix. NOTE(review): two ids minted in the same second can collide
# with probability 1/1000 per pair — consider SecureRandom if that matters.
def generate_execution_id
  format("exec_%d_%d", Time.now.to_i, rand(1000))
end
404
+
405
# Recursively pull numeric values out of an analysis payload.
#
# Hash input: numeric leaves keep their key; nested hashes are flattened
# into the same namespace (NOTE: colliding keys from different nesting
# levels overwrite each other); all-numeric arrays become
# "<key>_count"/"<key>_sum"/"<key>_avg" entries. Array input yields
# "count" (plus "sum"/"avg" when all-numeric). Anything else: {}.
#
# BUG FIX: an empty array satisfies `all?(Numeric)`, so the original
# computed 0.0 / 0 and stored avg = NaN. Averages are now only emitted for
# non-empty numeric arrays (count/sum are still recorded).
def extract_metrics(data)
  metrics = {}

  case data
  when Hash
    data.each do |key, value|
      case value
      when Numeric
        metrics[key] = value
      when Hash
        metrics.merge!(extract_metrics(value))
      when Array
        next unless value.all?(Numeric)

        metrics["#{key}_count"] = value.length
        metrics["#{key}_sum"] = value.sum
        metrics["#{key}_avg"] = value.sum.to_f / value.length unless value.empty?
      end
    end
  when Array
    metrics["count"] = data.length
    if data.all?(Numeric)
      metrics["sum"] = data.sum
      metrics["avg"] = data.sum.to_f / data.length unless data.empty?
    end
  end

  metrics
end
431
+
432
# Maintain a running min/max/avg/count per (execution_id, step_name,
# metric_name). Non-numeric values are skipped.
#
# BUG FIX: the original read min/max/avg from columns 3/4/5 of the existing
# row, but the table layout is
#   0:id 1:execution_id 2:step_name 3:metric_name 4:min_value 5:max_value
#   6:avg_value 7:count 8:created_at
# so it compared the metric *name* (a String) against the numeric value and
# blended the wrong columns. Also forces float division so integer metrics
# do not truncate the running average.
def store_aggregated_metrics(execution_id, step_name, metrics, timestamp)
  ensure_connection

  metrics.each do |metric_name, metric_value|
    next unless metric_value.is_a?(Numeric)

    existing = @db.execute(
      "SELECT * FROM aggregated_metrics WHERE execution_id = ? AND step_name = ? AND metric_name = ?",
      execution_id,
      step_name,
      metric_name
    ).first

    if existing
      row_id = existing[0]
      prev_min = existing[4]
      prev_max = existing[5]
      prev_avg = existing[6]
      prev_count = existing[7]

      count = prev_count + 1
      min_value = [prev_min, metric_value].min
      max_value = [prev_max, metric_value].max
      # Incremental mean: (old_mean * old_count + new_value) / new_count.
      avg_value = ((prev_avg * prev_count) + metric_value) / count.to_f

      @db.execute(
        "UPDATE aggregated_metrics SET min_value = ?, max_value = ?, avg_value = ?, count = ?, created_at = ? WHERE id = ?",
        min_value,
        max_value,
        avg_value,
        count,
        timestamp.to_i,
        row_id
      )
    else
      # First sample: min = max = avg = value, count = 1.
      @db.execute(
        "INSERT INTO aggregated_metrics (execution_id, step_name, metric_name, min_value, max_value, avg_value, count, created_at) VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
        execution_id,
        step_name,
        metric_name,
        metric_value,
        metric_value,
        metric_value,
        1,
        timestamp.to_i
      )
    end
  end
end
478
+
479
# Convert a raw analysis_results row (positional column Array) into a hash
# with decoded JSON payloads and Time objects. Returns nil for a nil row.
def parse_analysis_result(row)
  return nil if row.nil?

  id, execution_id, step_name, data_json, metadata_json, created, updated = row
  {
    id: id,
    execution_id: execution_id,
    step_name: step_name,
    data: JSON.parse(data_json),
    metadata: JSON.parse(metadata_json || "{}"),
    created_at: Time.at(created),
    updated_at: Time.at(updated)
  }
end
492
+
493
# Convert a raw metrics row (positional column Array) into a hash.
# metric_value stays a String as stored; metric_type names the original
# Ruby class. Returns nil for a nil row.
def parse_metric(row)
  return nil if row.nil?

  id, execution_id, step_name, name, value, type, created = row
  {
    id: id,
    execution_id: execution_id,
    step_name: step_name,
    metric_name: name,
    metric_value: value,
    metric_type: type,
    created_at: Time.at(created)
  }
end
506
+
507
# Convert a raw aggregated_metrics row (positional column Array) into a
# hash. Returns nil for a nil row.
def parse_aggregated_metric(row)
  return nil if row.nil?

  id, execution_id, step_name, name, min_v, max_v, avg_v, count, created = row
  {
    id: id,
    execution_id: execution_id,
    step_name: step_name,
    metric_name: name,
    min_value: min_v,
    max_value: max_v,
    avg_value: avg_v,
    count: count,
    created_at: Time.at(created)
  }
end
522
+
523
# Convert a history row (execution_id, step_name, created_at, updated_at)
# into a hash with Time objects. Returns nil for a nil row.
def parse_execution_history(row)
  return nil if row.nil?

  execution_id, step_name, created, updated = row
  {
    execution_id: execution_id,
    step_name: step_name,
    created_at: Time.at(created),
    updated_at: Time.at(updated)
  }
end
533
+
534
# Dump analysis_results rows (optionally scoped to options[:execution_id]
# and capped by options[:limit]) as parsed hashes, newest first.
def export_analysis_results(options = {})
  ensure_connection

  sql = +"SELECT * FROM analysis_results"
  binds = []
  if (exec_id = options[:execution_id])
    sql << " WHERE execution_id = ?"
    binds << exec_id
  end
  sql << " ORDER BY created_at DESC"
  if (max_rows = options[:limit])
    sql << " LIMIT ?"
    binds << max_rows
  end

  @db.execute(sql, *binds).map { |row| parse_analysis_result(row) }
end
555
+
556
# Dump metric rows (optionally scoped to options[:execution_id] and capped
# by options[:limit]) as parsed hashes, newest first.
def export_metrics(options = {})
  ensure_connection

  sql = +"SELECT * FROM metrics"
  binds = []
  if (exec_id = options[:execution_id])
    sql << " WHERE execution_id = ?"
    binds << exec_id
  end
  sql << " ORDER BY created_at DESC"
  if (max_rows = options[:limit])
    sql << " LIMIT ?"
    binds << max_rows
  end

  @db.execute(sql, *binds).map { |row| parse_metric(row) }
end
577
+
578
# Dump aggregated metric rows (optionally scoped to options[:execution_id]
# and capped by options[:limit]) as parsed hashes, newest first.
def export_aggregated_metrics(options = {})
  ensure_connection

  sql = +"SELECT * FROM aggregated_metrics"
  binds = []
  if (exec_id = options[:execution_id])
    sql << " WHERE execution_id = ?"
    binds << exec_id
  end
  sql << " ORDER BY created_at DESC"
  if (max_rows = options[:limit])
    sql << " LIMIT ?"
    binds << max_rows
  end

  @db.execute(sql, *binds).map { |row| parse_aggregated_metric(row) }
end
599
+ end
600
+ end