aidp 0.7.0 → 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +60 -214
- data/bin/aidp +1 -1
- data/lib/aidp/analysis/kb_inspector.rb +38 -23
- data/lib/aidp/analysis/seams.rb +2 -31
- data/lib/aidp/analysis/tree_sitter_grammar_loader.rb +0 -13
- data/lib/aidp/analysis/tree_sitter_scan.rb +3 -20
- data/lib/aidp/analyze/error_handler.rb +2 -75
- data/lib/aidp/analyze/json_file_storage.rb +292 -0
- data/lib/aidp/analyze/progress.rb +12 -0
- data/lib/aidp/analyze/progress_visualizer.rb +12 -17
- data/lib/aidp/analyze/ruby_maat_integration.rb +13 -31
- data/lib/aidp/analyze/runner.rb +256 -87
- data/lib/aidp/cli/jobs_command.rb +100 -432
- data/lib/aidp/cli.rb +309 -239
- data/lib/aidp/config.rb +298 -10
- data/lib/aidp/debug_logger.rb +195 -0
- data/lib/aidp/debug_mixin.rb +187 -0
- data/lib/aidp/execute/progress.rb +9 -0
- data/lib/aidp/execute/runner.rb +221 -40
- data/lib/aidp/execute/steps.rb +17 -7
- data/lib/aidp/execute/workflow_selector.rb +211 -0
- data/lib/aidp/harness/completion_checker.rb +268 -0
- data/lib/aidp/harness/condition_detector.rb +1526 -0
- data/lib/aidp/harness/config_loader.rb +373 -0
- data/lib/aidp/harness/config_manager.rb +382 -0
- data/lib/aidp/harness/config_schema.rb +1006 -0
- data/lib/aidp/harness/config_validator.rb +355 -0
- data/lib/aidp/harness/configuration.rb +477 -0
- data/lib/aidp/harness/enhanced_runner.rb +494 -0
- data/lib/aidp/harness/error_handler.rb +616 -0
- data/lib/aidp/harness/provider_config.rb +423 -0
- data/lib/aidp/harness/provider_factory.rb +306 -0
- data/lib/aidp/harness/provider_manager.rb +1269 -0
- data/lib/aidp/harness/provider_type_checker.rb +88 -0
- data/lib/aidp/harness/runner.rb +411 -0
- data/lib/aidp/harness/state/errors.rb +28 -0
- data/lib/aidp/harness/state/metrics.rb +219 -0
- data/lib/aidp/harness/state/persistence.rb +128 -0
- data/lib/aidp/harness/state/provider_state.rb +132 -0
- data/lib/aidp/harness/state/ui_state.rb +68 -0
- data/lib/aidp/harness/state/workflow_state.rb +123 -0
- data/lib/aidp/harness/state_manager.rb +586 -0
- data/lib/aidp/harness/status_display.rb +888 -0
- data/lib/aidp/harness/ui/base.rb +16 -0
- data/lib/aidp/harness/ui/enhanced_tui.rb +545 -0
- data/lib/aidp/harness/ui/enhanced_workflow_selector.rb +252 -0
- data/lib/aidp/harness/ui/error_handler.rb +132 -0
- data/lib/aidp/harness/ui/frame_manager.rb +361 -0
- data/lib/aidp/harness/ui/job_monitor.rb +500 -0
- data/lib/aidp/harness/ui/navigation/main_menu.rb +311 -0
- data/lib/aidp/harness/ui/navigation/menu_formatter.rb +120 -0
- data/lib/aidp/harness/ui/navigation/menu_item.rb +142 -0
- data/lib/aidp/harness/ui/navigation/menu_state.rb +139 -0
- data/lib/aidp/harness/ui/navigation/submenu.rb +202 -0
- data/lib/aidp/harness/ui/navigation/workflow_selector.rb +176 -0
- data/lib/aidp/harness/ui/progress_display.rb +280 -0
- data/lib/aidp/harness/ui/question_collector.rb +141 -0
- data/lib/aidp/harness/ui/spinner_group.rb +184 -0
- data/lib/aidp/harness/ui/spinner_helper.rb +152 -0
- data/lib/aidp/harness/ui/status_manager.rb +312 -0
- data/lib/aidp/harness/ui/status_widget.rb +280 -0
- data/lib/aidp/harness/ui/workflow_controller.rb +312 -0
- data/lib/aidp/harness/user_interface.rb +2381 -0
- data/lib/aidp/provider_manager.rb +131 -7
- data/lib/aidp/providers/anthropic.rb +28 -103
- data/lib/aidp/providers/base.rb +170 -0
- data/lib/aidp/providers/cursor.rb +52 -181
- data/lib/aidp/providers/gemini.rb +24 -107
- data/lib/aidp/providers/macos_ui.rb +99 -5
- data/lib/aidp/providers/opencode.rb +194 -0
- data/lib/aidp/storage/csv_storage.rb +172 -0
- data/lib/aidp/storage/file_manager.rb +214 -0
- data/lib/aidp/storage/json_storage.rb +140 -0
- data/lib/aidp/version.rb +1 -1
- data/lib/aidp.rb +54 -39
- data/templates/COMMON/AGENT_BASE.md +11 -0
- data/templates/EXECUTE/00_PRD.md +4 -4
- data/templates/EXECUTE/02_ARCHITECTURE.md +5 -4
- data/templates/EXECUTE/07_TEST_PLAN.md +4 -1
- data/templates/EXECUTE/08_TASKS.md +4 -4
- data/templates/EXECUTE/10_IMPLEMENTATION_AGENT.md +4 -4
- data/templates/README.md +279 -0
- data/templates/aidp-development.yml.example +373 -0
- data/templates/aidp-minimal.yml.example +48 -0
- data/templates/aidp-production.yml.example +475 -0
- data/templates/aidp.yml.example +598 -0
- metadata +93 -69
- data/lib/aidp/analyze/agent_personas.rb +0 -71
- data/lib/aidp/analyze/agent_tool_executor.rb +0 -439
- data/lib/aidp/analyze/data_retention_manager.rb +0 -421
- data/lib/aidp/analyze/database.rb +0 -260
- data/lib/aidp/analyze/dependencies.rb +0 -335
- data/lib/aidp/analyze/export_manager.rb +0 -418
- data/lib/aidp/analyze/focus_guidance.rb +0 -517
- data/lib/aidp/analyze/incremental_analyzer.rb +0 -533
- data/lib/aidp/analyze/language_analysis_strategies.rb +0 -897
- data/lib/aidp/analyze/large_analysis_progress.rb +0 -499
- data/lib/aidp/analyze/memory_manager.rb +0 -339
- data/lib/aidp/analyze/metrics_storage.rb +0 -336
- data/lib/aidp/analyze/parallel_processor.rb +0 -454
- data/lib/aidp/analyze/performance_optimizer.rb +0 -691
- data/lib/aidp/analyze/repository_chunker.rb +0 -697
- data/lib/aidp/analyze/static_analysis_detector.rb +0 -577
- data/lib/aidp/analyze/storage.rb +0 -655
- data/lib/aidp/analyze/tool_configuration.rb +0 -441
- data/lib/aidp/analyze/tool_modernization.rb +0 -750
- data/lib/aidp/database/pg_adapter.rb +0 -148
- data/lib/aidp/database_config.rb +0 -69
- data/lib/aidp/database_connection.rb +0 -72
- data/lib/aidp/job_manager.rb +0 -41
- data/lib/aidp/jobs/base_job.rb +0 -45
- data/lib/aidp/jobs/provider_execution_job.rb +0 -83
- data/lib/aidp/project_detector.rb +0 -117
- data/lib/aidp/providers/agent_supervisor.rb +0 -348
- data/lib/aidp/providers/supervised_base.rb +0 -317
- data/lib/aidp/providers/supervised_cursor.rb +0 -22
- data/lib/aidp/sync.rb +0 -13
- data/lib/aidp/workspace.rb +0 -19
data/lib/aidp/analyze/storage.rb
DELETED
@@ -1,655 +0,0 @@
# frozen_string_literal: true

require "pg"
require "json"
require "yaml"

module Aidp
  class AnalysisStorage
    # Database schema version
    SCHEMA_VERSION = 1

    def initialize(project_dir = Dir.pwd, config = {})
      @project_dir = project_dir
      @config = config
      @db = nil

      ensure_database_exists
    end

    # Store analysis result
    def store_analysis_result(step_name, data, options = {})
      ensure_connection

      timestamp = Time.now
      execution_id = options[:execution_id] || generate_execution_id

      # Store main analysis data
      analysis_data = {
        execution_id: execution_id,
        step_name: step_name,
        data: data,
        metadata: options[:metadata] || {},
        created_at: timestamp,
        updated_at: timestamp
      }

      # Insert or update analysis result
      @db.exec_params(
        <<~SQL,
          INSERT INTO analysis_results (execution_id, step_name, data, metadata, created_at, updated_at)
          VALUES ($1, $2, $3, $4, $5, $6)
          ON CONFLICT (execution_id, step_name)
          DO UPDATE SET
            data = EXCLUDED.data,
            metadata = EXCLUDED.metadata,
            updated_at = EXCLUDED.updated_at
        SQL
        [
          execution_id,
          step_name,
          data.to_json,
          analysis_data[:metadata].to_json,
          timestamp,
          timestamp
        ]
      )

      # Store metrics with indefinite retention
      store_metrics(execution_id, step_name, data, options)

      {
        execution_id: execution_id,
        step_name: step_name,
        stored_at: timestamp,
        success: true
      }
    end

    # Store metrics with indefinite retention
    def store_metrics(execution_id, step_name, data, options = {})
      ensure_connection

      timestamp = Time.now
      metrics = extract_metrics(data)

      metrics.each do |metric_name, metric_value|
        @db.exec_params(
          <<~SQL,
            INSERT INTO metrics (execution_id, step_name, metric_name, metric_value, metric_type, created_at)
            VALUES ($1, $2, $3, $4, $5, $6)
          SQL
          [
            execution_id,
            step_name,
            metric_name,
            metric_value.to_s,
            metric_value.class.name,
            timestamp
          ]
        )
      end

      # Store aggregated metrics
      store_aggregated_metrics(execution_id, step_name, metrics, timestamp)
    end

    # Retrieve analysis result
    def get_analysis_result(execution_id, step_name = nil)
      ensure_connection

      result = if step_name
        # Get specific step result
        @db.exec_params(
          <<~SQL,
            SELECT * FROM analysis_results
            WHERE execution_id = $1 AND step_name = $2
            ORDER BY updated_at DESC
            LIMIT 1
          SQL
          [execution_id, step_name]
        )
      else
        # Get all results for execution
        @db.exec_params(
          <<~SQL,
            SELECT * FROM analysis_results
            WHERE execution_id = $1
            ORDER BY updated_at DESC
          SQL
          [execution_id]
        )
      end

      return nil if result.ntuples.zero?

      if result.ntuples > 1
        # Multiple results
        result.map { |row| parse_analysis_result(row) }
      else
        # Single result
        parse_analysis_result(result[0])
      end
    end

    # Retrieve metrics
    def get_metrics(execution_id = nil, step_name = nil, metric_name = nil, limit = 100)
      ensure_connection

      query = "SELECT * FROM metrics WHERE 1=1"
      params = []
      param_index = 1

      if execution_id
        query += " AND execution_id = $#{param_index}"
        params << execution_id
        param_index += 1
      end

      if step_name
        query += " AND step_name = $#{param_index}"
        params << step_name
        param_index += 1
      end

      if metric_name
        query += " AND metric_name = $#{param_index}"
        params << metric_name
        param_index += 1
      end

      query += " ORDER BY created_at DESC"
      query += " LIMIT $#{param_index}"
      params << limit

      results = @db.exec_params(query, params)
      results.map { |row| parse_metric(row) }
    end

    # Get aggregated metrics
    def get_aggregated_metrics(execution_id = nil, step_name = nil, metric_name = nil)
      ensure_connection

      query = "SELECT * FROM aggregated_metrics WHERE 1=1"
      params = []
      param_index = 1

      if execution_id
        query += " AND execution_id = $#{param_index}"
        params << execution_id
        param_index += 1
      end

      if step_name
        query += " AND step_name = $#{param_index}"
        params << step_name
        param_index += 1
      end

      if metric_name
        query += " AND metric_name = $#{param_index}"
        params << metric_name
        param_index += 1
      end

      query += " ORDER BY created_at DESC"

      results = @db.exec_params(query, params)
      results.map { |row| parse_aggregated_metric(row) }
    end

    # Get execution history
    def get_execution_history(limit = 50)
      ensure_connection

      results = @db.exec_params(
        <<~SQL,
          SELECT DISTINCT execution_id, step_name, created_at, updated_at
          FROM analysis_results
          ORDER BY created_at DESC
          LIMIT $1
        SQL
        [limit]
      )

      results.map { |row| parse_execution_history(row) }
    end

    # Get analysis statistics
    def get_analysis_statistics
      ensure_connection

      stats = {}

      # Total executions
      total_executions = @db.exec("SELECT COUNT(DISTINCT execution_id) FROM analysis_results").first["count"].to_i
      stats[:total_executions] = total_executions

      # Total steps
      total_steps = @db.exec("SELECT COUNT(*) FROM analysis_results").first["count"].to_i
      stats[:total_steps] = total_steps

      # Steps by type
      steps_by_type = @db.exec("SELECT step_name, COUNT(*) FROM analysis_results GROUP BY step_name")
      stats[:steps_by_type] = steps_by_type.each_with_object({}) do |row, hash|
        hash[row["step_name"]] = row["count"].to_i
      end

      # Total metrics
      total_metrics = @db.exec("SELECT COUNT(*) FROM metrics").first["count"].to_i
      stats[:total_metrics] = total_metrics

      # Metrics by type
      metrics_by_type = @db.exec("SELECT metric_name, COUNT(*) FROM metrics GROUP BY metric_name")
      stats[:metrics_by_type] = metrics_by_type.each_with_object({}) do |row, hash|
        hash[row["metric_name"]] = row["count"].to_i
      end

      # Date range
      date_range = @db.exec("SELECT MIN(created_at), MAX(created_at) FROM analysis_results").first
      stats[:date_range] = {
        earliest: date_range["min"] ? Time.parse(date_range["min"]) : nil,
        latest: date_range["max"] ? Time.parse(date_range["max"]) : nil
      }

      stats
    end

    # Force overwrite analysis data (retains metrics)
    def force_overwrite(execution_id, step_name, data, options = {})
      ensure_connection

      # Delete existing analysis result
      @db.exec_params(
        "DELETE FROM analysis_results WHERE execution_id = $1 AND step_name = $2",
        [execution_id, step_name]
      )

      # Store new analysis result
      store_analysis_result(step_name, data, options.merge(execution_id: execution_id))
    end

    # Delete analysis data (retains metrics)
    def delete_analysis_data(execution_id = nil, step_name = nil)
      ensure_connection

      if execution_id && step_name
        @db.exec_params(
          "DELETE FROM analysis_results WHERE execution_id = $1 AND step_name = $2",
          [execution_id, step_name]
        )
      elsif execution_id
        @db.exec_params("DELETE FROM analysis_results WHERE execution_id = $1", [execution_id])
      elsif step_name
        @db.exec_params("DELETE FROM analysis_results WHERE step_name = $1", [step_name])
      else
        @db.exec("DELETE FROM analysis_results")
      end

      {success: true, deleted_execution_id: execution_id, deleted_step_name: step_name}
    end

    # Export data
    def export_data(format = "json", options = {})
      ensure_connection

      data = {
        analysis_results: export_analysis_results(options),
        metrics: export_metrics(options),
        aggregated_metrics: export_aggregated_metrics(options),
        statistics: get_analysis_statistics
      }

      case format.downcase
      when "json"
        JSON.pretty_generate(data)
      when "yaml"
        YAML.dump(data)
      else
        raise "Unsupported export format: #{format}"
      end
    end

    # Import data
    def import_data(data, format = "json")
      ensure_connection

      parsed_data = case format.downcase
      when "json"
        JSON.parse(data)
      when "yaml"
        YAML.safe_load(data)
      else
        raise "Unsupported import format: #{format}"
      end

      # Import analysis results
      parsed_data["analysis_results"]&.each do |result|
        @db.exec_params(
          <<~SQL,
            INSERT INTO analysis_results (execution_id, step_name, data, metadata, created_at, updated_at)
            VALUES ($1, $2, $3, $4, $5, $6)
            ON CONFLICT (execution_id, step_name)
            DO UPDATE SET
              data = EXCLUDED.data,
              metadata = EXCLUDED.metadata,
              updated_at = EXCLUDED.updated_at
          SQL
          [
            result["execution_id"],
            result["step_name"],
            result["data"],
            result["metadata"],
            result["created_at"],
            result["updated_at"]
          ]
        )
      end

      # Import metrics
      parsed_data["metrics"]&.each do |metric|
        @db.exec_params(
          <<~SQL,
            INSERT INTO metrics (execution_id, step_name, metric_name, metric_value, metric_type, created_at)
            VALUES ($1, $2, $3, $4, $5, $6)
            ON CONFLICT DO NOTHING
          SQL
          [
            metric["execution_id"],
            metric["step_name"],
            metric["metric_name"],
            metric["metric_value"],
            metric["metric_type"],
            metric["created_at"]
          ]
        )
      end

      imported = (parsed_data["analysis_results"]&.size || 0) + (parsed_data["metrics"]&.size || 0)
      {success: true, imported_records: imported}
    end

    # Close database connection
    def close
      @db&.close
      @db = nil
    end

    private

    def ensure_database_exists
      ensure_connection
      create_schema
    end

    def ensure_connection
      return if @db

      @db = PG.connect(
        host: ENV["AIDP_DB_HOST"] || "localhost",
        port: ENV["AIDP_DB_PORT"] || 5432,
        dbname: ENV["AIDP_DB_NAME"] || "aidp",
        user: ENV["AIDP_DB_USER"] || ENV["USER"],
        password: ENV["AIDP_DB_PASSWORD"]
      )
      @db.type_map_for_results = PG::BasicTypeMapForResults.new(@db)
    end

    def create_schema
      # Create analysis_results table
      @db.exec(<<~SQL)
        CREATE TABLE IF NOT EXISTS analysis_results (
          id SERIAL PRIMARY KEY,
          execution_id TEXT NOT NULL,
          step_name TEXT NOT NULL,
          data JSONB NOT NULL,
          metadata JSONB,
          created_at TIMESTAMP WITH TIME ZONE NOT NULL,
          updated_at TIMESTAMP WITH TIME ZONE NOT NULL,
          UNIQUE(execution_id, step_name)
        )
      SQL

      # Create metrics table (indefinite retention)
      @db.exec(<<~SQL)
        CREATE TABLE IF NOT EXISTS metrics (
          id SERIAL PRIMARY KEY,
          execution_id TEXT NOT NULL,
          step_name TEXT NOT NULL,
          metric_name TEXT NOT NULL,
          metric_value TEXT NOT NULL,
          metric_type TEXT NOT NULL,
          created_at TIMESTAMP WITH TIME ZONE NOT NULL
        )
      SQL

      # Create aggregated_metrics table
      @db.exec(<<~SQL)
        CREATE TABLE IF NOT EXISTS aggregated_metrics (
          id SERIAL PRIMARY KEY,
          execution_id TEXT NOT NULL,
          step_name TEXT NOT NULL,
          metric_name TEXT NOT NULL,
          min_value DOUBLE PRECISION,
          max_value DOUBLE PRECISION,
          avg_value DOUBLE PRECISION,
          count INTEGER NOT NULL,
          created_at TIMESTAMP WITH TIME ZONE NOT NULL
        )
      SQL

      # Create indexes
      @db.exec("CREATE INDEX IF NOT EXISTS idx_analysis_results_execution_id ON analysis_results(execution_id)")
      @db.exec("CREATE INDEX IF NOT EXISTS idx_analysis_results_step_name ON analysis_results(step_name)")
      @db.exec("CREATE INDEX IF NOT EXISTS idx_analysis_results_created_at ON analysis_results(created_at)")
      @db.exec("CREATE INDEX IF NOT EXISTS idx_metrics_execution_id ON metrics(execution_id)")
      @db.exec("CREATE INDEX IF NOT EXISTS idx_metrics_step_name ON metrics(step_name)")
      @db.exec("CREATE INDEX IF NOT EXISTS idx_metrics_metric_name ON metrics(metric_name)")
      @db.exec("CREATE INDEX IF NOT EXISTS idx_metrics_created_at ON metrics(created_at)")

      # Store schema version
      @db.exec("CREATE TABLE IF NOT EXISTS schema_version (version INTEGER NOT NULL)")
      @db.exec_params("INSERT INTO schema_version (version) VALUES ($1) ON CONFLICT DO NOTHING", [SCHEMA_VERSION])
    end

    def generate_execution_id
      "exec_#{Time.now.to_i}_#{rand(1000)}"
    end

    def extract_metrics(data)
      metrics = {}

      case data
      when Hash
        data.each do |key, value|
          if value.is_a?(Numeric)
            metrics[key] = value
          elsif value.is_a?(Hash)
            metrics.merge!(extract_metrics(value))
          elsif value.is_a?(Array) && value.all?(Numeric)
            metrics["#{key}_count"] = value.length
            metrics["#{key}_sum"] = value.sum
            metrics["#{key}_avg"] = value.sum.to_f / value.length
          end
        end
      when Array
        metrics["count"] = data.length
        if data.all?(Numeric)
          metrics["sum"] = data.sum
          metrics["avg"] = data.sum.to_f / data.length
        end
      end

      metrics
    end

    def store_aggregated_metrics(execution_id, step_name, metrics, timestamp)
      ensure_connection

      metrics.each do |metric_name, metric_value|
        next unless metric_value.is_a?(Numeric)

        # Get existing aggregated metric
        existing = @db.exec_params(
          <<~SQL,
            SELECT * FROM aggregated_metrics
            WHERE execution_id = $1 AND step_name = $2 AND metric_name = $3
          SQL
          [execution_id, step_name, metric_name]
        ).first

        if existing
          # Update existing aggregated metric
          count = existing["count"].to_i + 1
          min_value = [existing["min_value"].to_f, metric_value].min
          max_value = [existing["max_value"].to_f, metric_value].max
          avg_value = ((existing["avg_value"].to_f * existing["count"].to_i) + metric_value) / count

          @db.exec_params(
            <<~SQL,
              UPDATE aggregated_metrics
              SET min_value = $1, max_value = $2, avg_value = $3, count = $4, created_at = $5
              WHERE id = $6
            SQL
            [min_value, max_value, avg_value, count, timestamp, existing["id"]]
          )
        else
          # Create new aggregated metric
          @db.exec_params(
            <<~SQL,
              INSERT INTO aggregated_metrics (execution_id, step_name, metric_name, min_value, max_value, avg_value, count, created_at)
              VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
            SQL
            [execution_id, step_name, metric_name, metric_value, metric_value, metric_value, 1, timestamp]
          )
        end
      end
    end

    def parse_analysis_result(row)
      return nil unless row

      {
        id: row["id"].to_i,
        execution_id: row["execution_id"],
        step_name: row["step_name"],
        data: JSON.parse(row["data"]),
        metadata: JSON.parse(row["metadata"] || "{}"),
        created_at: Time.parse(row["created_at"]),
        updated_at: Time.parse(row["updated_at"])
      }
    end

    def parse_metric(row)
      return nil unless row

      {
        id: row["id"].to_i,
        execution_id: row["execution_id"],
        step_name: row["step_name"],
        metric_name: row["metric_name"],
        metric_value: row["metric_value"],
        metric_type: row["metric_type"],
        created_at: Time.parse(row["created_at"])
      }
    end

    def parse_aggregated_metric(row)
      return nil unless row

      {
        id: row["id"].to_i,
        execution_id: row["execution_id"],
        step_name: row["step_name"],
        metric_name: row["metric_name"],
        min_value: row["min_value"].to_f,
        max_value: row["max_value"].to_f,
        avg_value: row["avg_value"].to_f,
        count: row["count"].to_i,
        created_at: Time.parse(row["created_at"])
      }
    end

    def parse_execution_history(row)
      return nil unless row

      {
        execution_id: row["execution_id"],
        step_name: row["step_name"],
        created_at: Time.parse(row["created_at"]),
        updated_at: Time.parse(row["updated_at"])
      }
    end

    def export_analysis_results(options = {})
      ensure_connection

      query = "SELECT * FROM analysis_results"
      params = []
      param_index = 1

      if options[:execution_id]
        query += " WHERE execution_id = $#{param_index}"
        params << options[:execution_id]
        param_index += 1
      end

      query += " ORDER BY created_at DESC"

      if options[:limit]
        query += " LIMIT $#{param_index}"
        params << options[:limit]
      end

      results = @db.exec_params(query, params)
      results.map { |row| parse_analysis_result(row) }
    end

    def export_metrics(options = {})
      ensure_connection

      query = "SELECT * FROM metrics"
      params = []
      param_index = 1

      if options[:execution_id]
        query += " WHERE execution_id = $#{param_index}"
        params << options[:execution_id]
        param_index += 1
      end

      query += " ORDER BY created_at DESC"

      if options[:limit]
        query += " LIMIT $#{param_index}"
        params << options[:limit]
      end

      results = @db.exec_params(query, params)
      results.map { |row| parse_metric(row) }
    end

    def export_aggregated_metrics(options = {})
      ensure_connection

      query = "SELECT * FROM aggregated_metrics"
      params = []
      param_index = 1

      if options[:execution_id]
        query += " WHERE execution_id = $#{param_index}"
        params << options[:execution_id]
        param_index += 1
      end

      query += " ORDER BY created_at DESC"

      if options[:limit]
        query += " LIMIT $#{param_index}"
        params << options[:limit]
      end

      results = @db.exec_params(query, params)
      results.map { |row| parse_aggregated_metric(row) }
    end
  end
end
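
The file above was the PostgreSQL-backed storage layer, apparently superseded in 0.8.0 by the file-based stores added in this release (data/lib/aidp/analyze/json_file_storage.rb and the data/lib/aidp/storage/* classes). For reference, a minimal usage sketch of the removed API, assuming aidp <= 0.7.0 with a reachable PostgreSQL instance configured through the AIDP_DB_* environment variables; the step name and payload below are illustrative, not taken from the gem:

# Hypothetical usage of the deleted Aidp::AnalysisStorage (aidp <= 0.7.0);
# require path assumed from the gem's lib layout.
require "aidp/analyze/storage"

storage = Aidp::AnalysisStorage.new(Dir.pwd)

# Persist one step's result; numeric leaves of the payload are also
# recorded individually in the metrics table.
receipt = storage.store_analysis_result(
  "repository_analysis",                          # illustrative step name
  {"files" => 120, "complexity" => {"avg" => 3.4}}
)

# Fetch the stored row back, then dump everything as pretty-printed JSON.
result = storage.get_analysis_result(receipt[:execution_id], "repository_analysis")
puts result[:data]
puts storage.export_data("json")

storage.close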