aidp 0.7.0 → 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +60 -214
- data/bin/aidp +1 -1
- data/lib/aidp/analysis/kb_inspector.rb +38 -23
- data/lib/aidp/analysis/seams.rb +2 -31
- data/lib/aidp/analysis/tree_sitter_grammar_loader.rb +0 -13
- data/lib/aidp/analysis/tree_sitter_scan.rb +3 -20
- data/lib/aidp/analyze/error_handler.rb +2 -75
- data/lib/aidp/analyze/json_file_storage.rb +292 -0
- data/lib/aidp/analyze/progress.rb +12 -0
- data/lib/aidp/analyze/progress_visualizer.rb +12 -17
- data/lib/aidp/analyze/ruby_maat_integration.rb +13 -31
- data/lib/aidp/analyze/runner.rb +256 -87
- data/lib/aidp/cli/jobs_command.rb +100 -432
- data/lib/aidp/cli.rb +309 -239
- data/lib/aidp/config.rb +298 -10
- data/lib/aidp/debug_logger.rb +195 -0
- data/lib/aidp/debug_mixin.rb +187 -0
- data/lib/aidp/execute/progress.rb +9 -0
- data/lib/aidp/execute/runner.rb +221 -40
- data/lib/aidp/execute/steps.rb +17 -7
- data/lib/aidp/execute/workflow_selector.rb +211 -0
- data/lib/aidp/harness/completion_checker.rb +268 -0
- data/lib/aidp/harness/condition_detector.rb +1526 -0
- data/lib/aidp/harness/config_loader.rb +373 -0
- data/lib/aidp/harness/config_manager.rb +382 -0
- data/lib/aidp/harness/config_schema.rb +1006 -0
- data/lib/aidp/harness/config_validator.rb +355 -0
- data/lib/aidp/harness/configuration.rb +477 -0
- data/lib/aidp/harness/enhanced_runner.rb +494 -0
- data/lib/aidp/harness/error_handler.rb +616 -0
- data/lib/aidp/harness/provider_config.rb +423 -0
- data/lib/aidp/harness/provider_factory.rb +306 -0
- data/lib/aidp/harness/provider_manager.rb +1269 -0
- data/lib/aidp/harness/provider_type_checker.rb +88 -0
- data/lib/aidp/harness/runner.rb +411 -0
- data/lib/aidp/harness/state/errors.rb +28 -0
- data/lib/aidp/harness/state/metrics.rb +219 -0
- data/lib/aidp/harness/state/persistence.rb +128 -0
- data/lib/aidp/harness/state/provider_state.rb +132 -0
- data/lib/aidp/harness/state/ui_state.rb +68 -0
- data/lib/aidp/harness/state/workflow_state.rb +123 -0
- data/lib/aidp/harness/state_manager.rb +586 -0
- data/lib/aidp/harness/status_display.rb +888 -0
- data/lib/aidp/harness/ui/base.rb +16 -0
- data/lib/aidp/harness/ui/enhanced_tui.rb +545 -0
- data/lib/aidp/harness/ui/enhanced_workflow_selector.rb +252 -0
- data/lib/aidp/harness/ui/error_handler.rb +132 -0
- data/lib/aidp/harness/ui/frame_manager.rb +361 -0
- data/lib/aidp/harness/ui/job_monitor.rb +500 -0
- data/lib/aidp/harness/ui/navigation/main_menu.rb +311 -0
- data/lib/aidp/harness/ui/navigation/menu_formatter.rb +120 -0
- data/lib/aidp/harness/ui/navigation/menu_item.rb +142 -0
- data/lib/aidp/harness/ui/navigation/menu_state.rb +139 -0
- data/lib/aidp/harness/ui/navigation/submenu.rb +202 -0
- data/lib/aidp/harness/ui/navigation/workflow_selector.rb +176 -0
- data/lib/aidp/harness/ui/progress_display.rb +280 -0
- data/lib/aidp/harness/ui/question_collector.rb +141 -0
- data/lib/aidp/harness/ui/spinner_group.rb +184 -0
- data/lib/aidp/harness/ui/spinner_helper.rb +152 -0
- data/lib/aidp/harness/ui/status_manager.rb +312 -0
- data/lib/aidp/harness/ui/status_widget.rb +280 -0
- data/lib/aidp/harness/ui/workflow_controller.rb +312 -0
- data/lib/aidp/harness/user_interface.rb +2381 -0
- data/lib/aidp/provider_manager.rb +131 -7
- data/lib/aidp/providers/anthropic.rb +28 -103
- data/lib/aidp/providers/base.rb +170 -0
- data/lib/aidp/providers/cursor.rb +52 -181
- data/lib/aidp/providers/gemini.rb +24 -107
- data/lib/aidp/providers/macos_ui.rb +99 -5
- data/lib/aidp/providers/opencode.rb +194 -0
- data/lib/aidp/storage/csv_storage.rb +172 -0
- data/lib/aidp/storage/file_manager.rb +214 -0
- data/lib/aidp/storage/json_storage.rb +140 -0
- data/lib/aidp/version.rb +1 -1
- data/lib/aidp.rb +54 -39
- data/templates/COMMON/AGENT_BASE.md +11 -0
- data/templates/EXECUTE/00_PRD.md +4 -4
- data/templates/EXECUTE/02_ARCHITECTURE.md +5 -4
- data/templates/EXECUTE/07_TEST_PLAN.md +4 -1
- data/templates/EXECUTE/08_TASKS.md +4 -4
- data/templates/EXECUTE/10_IMPLEMENTATION_AGENT.md +4 -4
- data/templates/README.md +279 -0
- data/templates/aidp-development.yml.example +373 -0
- data/templates/aidp-minimal.yml.example +48 -0
- data/templates/aidp-production.yml.example +475 -0
- data/templates/aidp.yml.example +598 -0
- metadata +93 -69
- data/lib/aidp/analyze/agent_personas.rb +0 -71
- data/lib/aidp/analyze/agent_tool_executor.rb +0 -439
- data/lib/aidp/analyze/data_retention_manager.rb +0 -421
- data/lib/aidp/analyze/database.rb +0 -260
- data/lib/aidp/analyze/dependencies.rb +0 -335
- data/lib/aidp/analyze/export_manager.rb +0 -418
- data/lib/aidp/analyze/focus_guidance.rb +0 -517
- data/lib/aidp/analyze/incremental_analyzer.rb +0 -533
- data/lib/aidp/analyze/language_analysis_strategies.rb +0 -897
- data/lib/aidp/analyze/large_analysis_progress.rb +0 -499
- data/lib/aidp/analyze/memory_manager.rb +0 -339
- data/lib/aidp/analyze/metrics_storage.rb +0 -336
- data/lib/aidp/analyze/parallel_processor.rb +0 -454
- data/lib/aidp/analyze/performance_optimizer.rb +0 -691
- data/lib/aidp/analyze/repository_chunker.rb +0 -697
- data/lib/aidp/analyze/static_analysis_detector.rb +0 -577
- data/lib/aidp/analyze/storage.rb +0 -655
- data/lib/aidp/analyze/tool_configuration.rb +0 -441
- data/lib/aidp/analyze/tool_modernization.rb +0 -750
- data/lib/aidp/database/pg_adapter.rb +0 -148
- data/lib/aidp/database_config.rb +0 -69
- data/lib/aidp/database_connection.rb +0 -72
- data/lib/aidp/job_manager.rb +0 -41
- data/lib/aidp/jobs/base_job.rb +0 -45
- data/lib/aidp/jobs/provider_execution_job.rb +0 -83
- data/lib/aidp/project_detector.rb +0 -117
- data/lib/aidp/providers/agent_supervisor.rb +0 -348
- data/lib/aidp/providers/supervised_base.rb +0 -317
- data/lib/aidp/providers/supervised_cursor.rb +0 -22
- data/lib/aidp/sync.rb +0 -13
- data/lib/aidp/workspace.rb +0 -19
data/lib/aidp/providers/opencode.rb
@@ -0,0 +1,194 @@
+# frozen_string_literal: true
+
+require "timeout"
+require_relative "base"
+require_relative "../util"
+require_relative "../debug_mixin"
+
+module Aidp
+  module Providers
+    class Opencode < Base
+      include Aidp::DebugMixin
+
+      def self.available?
+        !!Aidp::Util.which("opencode")
+      end
+
+      def name
+        "opencode"
+      end
+
+      def send(prompt:, session: nil)
+        raise "opencode not available" unless self.class.available?
+
+        # Smart timeout calculation
+        timeout_seconds = calculate_timeout
+
+        debug_provider("opencode", "Starting execution", {timeout: timeout_seconds})
+        debug_log("📝 Sending prompt to opencode (length: #{prompt.length})", level: :info)
+
+        # Check if prompt is too large and warn
+        if prompt.length > 3000
+          debug_log("⚠️ Large prompt detected (#{prompt.length} chars) - this may cause rate limiting", level: :warn)
+        end
+
+        # Set up activity monitoring
+        setup_activity_monitoring("opencode", method(:activity_callback))
+        record_activity("Starting opencode execution")
+
+        # Start activity display thread with timeout
+        activity_display_thread = Thread.new do
+          start_time = Time.now
+          loop do
+            sleep 0.5 # Update every 500ms to reduce spam
+            elapsed = Time.now - start_time
+
+            # Break if we've been running too long or state changed
+            break if elapsed > timeout_seconds || @activity_state == :completed || @activity_state == :failed
+
+            print_activity_status(elapsed)
+          end
+        end
+
+        begin
+          # Use debug_execute_command for better debugging
+          # opencode run command with prompt and model
+          model = ENV["OPENCODE_MODEL"] || "github-copilot/claude-3.5-sonnet"
+          result = debug_execute_command("opencode", args: ["run", "-m", model, prompt], timeout: timeout_seconds)
+
+          # Log the results
+          debug_command("opencode", args: ["run", "-m", model, prompt], input: nil, output: result.out, error: result.err, exit_code: result.exit_status)
+
+          # Stop activity display
+          activity_display_thread.kill if activity_display_thread.alive?
+          activity_display_thread.join(0.1) # Give it 100ms to finish
+          clear_activity_status
+
+          if result.exit_status == 0
+            mark_completed
+            result.out
+          else
+            mark_failed("opencode failed with exit code #{result.exit_status}")
+            debug_error(StandardError.new("opencode failed"), {exit_code: result.exit_status, stderr: result.err})
+            raise "opencode failed with exit code #{result.exit_status}: #{result.err}"
+          end
+        rescue => e
+          # Stop activity display
+          activity_display_thread.kill if activity_display_thread.alive?
+          activity_display_thread.join(0.1) # Give it 100ms to finish
+          clear_activity_status
+          mark_failed("opencode execution failed: #{e.message}")
+          debug_error(e, {provider: "opencode", prompt_length: prompt.length})
+          raise
+        end
+      end
+
+      private
+
+      def print_activity_status(elapsed)
+        # Print activity status during opencode execution with elapsed time
+        minutes = (elapsed / 60).to_i
+        seconds = (elapsed % 60).to_i
+
+        if minutes > 0
+          print "\r🔄 opencode is running... (#{minutes}m #{seconds}s)"
+        else
+          print "\r🔄 opencode is running... (#{seconds}s)"
+        end
+        $stdout.flush
+      end
+
+      def clear_activity_status
+        # Clear the activity status line
+        print "\r" + " " * 50 + "\r"
+        $stdout.flush
+      end
+
+      def calculate_timeout
+        # Priority order for timeout calculation:
+        # 1. Quick mode (for testing)
+        # 2. Environment variable override
+        # 3. Adaptive timeout based on step type
+        # 4. Default timeout
+
+        if ENV["AIDP_QUICK_MODE"]
+          puts "⚡ Quick mode enabled - 2 minute timeout"
+          return 120
+        end
+
+        if ENV["AIDP_OPENCODE_TIMEOUT"]
+          return ENV["AIDP_OPENCODE_TIMEOUT"].to_i
+        end
+
+        # Adaptive timeout based on step type
+        step_timeout = get_adaptive_timeout
+        if step_timeout
+          puts "🧠 Using adaptive timeout: #{step_timeout} seconds"
+          return step_timeout
+        end
+
+        # Default timeout (5 minutes for interactive use)
+        puts "📋 Using default timeout: 5 minutes"
+        300
+      end
+
+      def get_adaptive_timeout
+        # Timeout recommendations based on step type patterns
+        step_name = ENV["AIDP_CURRENT_STEP"] || ""
+
+        case step_name
+        when /REPOSITORY_ANALYSIS/
+          180 # 3 minutes - repository analysis can be quick
+        when /ARCHITECTURE_ANALYSIS/
+          600 # 10 minutes - architecture analysis needs more time
+        when /TEST_ANALYSIS/
+          300 # 5 minutes - test analysis is moderate
+        when /FUNCTIONALITY_ANALYSIS/
+          600 # 10 minutes - functionality analysis is complex
+        when /DOCUMENTATION_ANALYSIS/
+          300 # 5 minutes - documentation analysis is moderate
+        when /STATIC_ANALYSIS/
+          450 # 7.5 minutes - static analysis can be intensive
+        when /REFACTORING_RECOMMENDATIONS/
+          600 # 10 minutes - refactoring recommendations are complex
+        else
+          nil # Use default
+        end
+      end
+
+      def activity_callback(state, message, provider)
+        # This is now handled by the animated display thread
+        # Only print static messages for state changes
+        case state
+        when :starting
+          puts "🚀 Starting opencode execution..."
+        when :completed
+          puts "✅ opencode execution completed"
+        when :failed
+          puts "❌ opencode execution failed: #{message}"
+        end
+      end
+
+      def setup_activity_monitoring(provider_name, callback)
+        @activity_callback = callback
+        @activity_state = :starting
+        @activity_start_time = Time.now
+      end
+
+      def record_activity(message)
+        @activity_state = :running
+        @activity_callback&.call(:running, message, "opencode")
+      end
+
+      def mark_completed
+        @activity_state = :completed
+        @activity_callback&.call(:completed, "Execution completed", "opencode")
+      end
+
+      def mark_failed(reason)
+        @activity_state = :failed
+        @activity_callback&.call(:failed, reason, "opencode")
+      end
+    end
+  end
+end
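
For orientation, a minimal sketch of driving this provider directly. It assumes the gem's lib directory is on the load path, that the opencode CLI is installed, and that Opencode.new takes no required arguments (the Base constructor is not part of this diff); the environment variables are the ones consulted by #send and #calculate_timeout above.

# Hypothetical driver script, not part of the gem.
require "aidp/providers/opencode"

if Aidp::Providers::Opencode.available?
  ENV["OPENCODE_MODEL"] ||= "github-copilot/claude-3.5-sonnet" # same default #send falls back to
  ENV["AIDP_OPENCODE_TIMEOUT"] = "240"                         # overrides the adaptive per-step timeout

  provider = Aidp::Providers::Opencode.new # assumption: no required constructor arguments
  puts provider.send(prompt: "Summarize the repository layout", session: nil)
else
  warn "opencode CLI not found on PATH"
end
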
data/lib/aidp/storage/csv_storage.rb
@@ -0,0 +1,172 @@
+# frozen_string_literal: true
+
+require "csv"
+require "fileutils"
+
+module Aidp
+  module Storage
+    # Simple CSV file storage for tabular data
+    class CsvStorage
+      def initialize(base_dir = ".aidp")
+        @base_dir = base_dir
+        ensure_directory_exists
+      end
+
+      # Append a row to CSV file
+      def append(filename, row_data)
+        file_path = get_file_path(filename)
+        FileUtils.mkdir_p(File.dirname(file_path))
+
+        # Add timestamp if not present
+        row_data["created_at"] ||= Time.now.iso8601
+
+        # Convert all values to strings
+        row_data = row_data.transform_values(&:to_s)
+
+        # If file doesn't exist, write headers first
+        if !File.exist?(file_path)
+          CSV.open(file_path, "w") do |csv|
+            csv << row_data.keys
+          end
+        end
+
+        # Append the row
+        CSV.open(file_path, "a") do |csv|
+          csv << row_data.values
+        end
+
+        {
+          filename: filename,
+          file_path: file_path,
+          row_count: count_rows(filename),
+          success: true
+        }
+      rescue => error
+        {
+          filename: filename,
+          error: error.message,
+          success: false
+        }
+      end
+
+      # Read all rows from CSV file
+      def read_all(filename)
+        file_path = get_file_path(filename)
+        return [] unless File.exist?(file_path)
+
+        rows = []
+        CSV.foreach(file_path, headers: true) do |row|
+          rows << row.to_h
+        end
+        rows
+      rescue => error
+        puts "Error reading #{filename}: #{error.message}" if ENV["AIDP_DEBUG"]
+        []
+      end
+
+      # Read rows with filtering
+      def read_filtered(filename, filters = {})
+        all_rows = read_all(filename)
+        return all_rows if filters.empty?
+
+        all_rows.select do |row|
+          filters.all? { |key, value| row[key.to_s] == value.to_s }
+        end
+      end
+
+      # Count rows in CSV file
+      def count_rows(filename)
+        file_path = get_file_path(filename)
+        return 0 unless File.exist?(file_path)
+
+        count = 0
+        CSV.foreach(file_path) { count += 1 }
+        count - 1 # Subtract 1 for header row
+      rescue => error
+        puts "Error counting rows in #{filename}: #{error.message}" if ENV["AIDP_DEBUG"]
+        0
+      end
+
+      # Get unique values for a column
+      def unique_values(filename, column)
+        all_rows = read_all(filename)
+        all_rows.map { |row| row[column.to_s] }.compact.uniq
+      end
+
+      # Get summary statistics
+      def summary(filename)
+        file_path = get_file_path(filename)
+        return nil unless File.exist?(file_path)
+
+        rows = read_all(filename)
+        return nil if rows.empty?
+
+        headers = rows.first.keys
+        numeric_columns = headers.select do |col|
+          rows.all? { |row| row[col] =~ /^-?\d+\.?\d*$/ }
+        end
+
+        summary_data = {
+          filename: filename,
+          file_path: file_path,
+          total_rows: rows.length,
+          columns: headers,
+          numeric_columns: numeric_columns,
+          file_size: File.size(file_path)
+        }
+
+        # Add basic stats for numeric columns
+        numeric_columns.each do |col|
+          values = rows.map { |row| row[col].to_f }
+          summary_data["#{col}_stats"] = {
+            min: values.min,
+            max: values.max,
+            avg: values.sum / values.length
+          }
+        end
+
+        summary_data
+      rescue => error
+        puts "Error generating summary for #{filename}: #{error.message}" if ENV["AIDP_DEBUG"]
+        nil
+      end
+
+      # Check if file exists
+      def exists?(filename)
+        File.exist?(get_file_path(filename))
+      end
+
+      # Delete file
+      def delete(filename)
+        file_path = get_file_path(filename)
+        return {success: true, message: "File does not exist"} unless File.exist?(file_path)
+
+        File.delete(file_path)
+        {success: true, message: "File deleted"}
+      rescue => error
+        {success: false, error: error.message}
+      end
+
+      # List all CSV files
+      def list
+        return [] unless Dir.exist?(@base_dir)
+
+        Dir.glob(File.join(@base_dir, "**", "*.csv")).map do |file|
+          File.basename(file, ".csv")
+        end
+      end
+
+      private
+
+      def get_file_path(filename)
+        # Ensure filename has .csv extension
+        filename += ".csv" unless filename.end_with?(".csv")
+        File.join(@base_dir, filename)
+      end
+
+      def ensure_directory_exists
+        FileUtils.mkdir_p(@base_dir) unless Dir.exist?(@base_dir)
+      end
+    end
+  end
+end
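
A short usage sketch of this class, assuming the gem's lib directory is on the load path; the "metrics" filename and row values are illustrative:

require "aidp/storage/csv_storage"

storage = Aidp::Storage::CsvStorage.new(".aidp")

# First append writes the header row; created_at is filled in automatically.
storage.append("metrics", {"step_name" => "ARCHITECTURE_ANALYSIS", "value" => "42"})

storage.count_rows("metrics")                                        # => 1
storage.read_filtered("metrics", step_name: "ARCHITECTURE_ANALYSIS") # => array of row hashes
storage.summary("metrics")                                           # column list plus min/max/avg for numeric columns
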
data/lib/aidp/storage/file_manager.rb
@@ -0,0 +1,214 @@
+# frozen_string_literal: true
+
+require_relative "json_storage"
+require_relative "csv_storage"
+
+module Aidp
+  module Storage
+    # Simple file manager that provides easy access to JSON and CSV storage
+    class FileManager
+      def initialize(base_dir = ".aidp")
+        @base_dir = base_dir
+        @json_storage = JsonStorage.new(base_dir)
+        @csv_storage = CsvStorage.new(base_dir)
+      end
+
+      # JSON operations for structured data
+      def store_json(filename, data)
+        @json_storage.store(filename, data)
+      end
+
+      def load_json(filename)
+        @json_storage.load(filename)
+      end
+
+      def update_json(filename, data)
+        @json_storage.update(filename, data)
+      end
+
+      def json_exists?(filename)
+        @json_storage.exists?(filename)
+      end
+
+      def json_metadata(filename)
+        @json_storage.metadata(filename)
+      end
+
+      # CSV operations for tabular data
+      def append_csv(filename, row_data)
+        @csv_storage.append(filename, row_data)
+      end
+
+      def read_csv(filename, filters = {})
+        if filters.empty?
+          @csv_storage.read_all(filename)
+        else
+          @csv_storage.read_filtered(filename, filters)
+        end
+      end
+
+      def csv_summary(filename)
+        @csv_storage.summary(filename)
+      end
+
+      def csv_exists?(filename)
+        @csv_storage.exists?(filename)
+      end
+
+      # Convenience methods for common data types
+
+      # Analysis results (structured data)
+      def store_analysis_result(step_name, data, metadata = {})
+        result = {
+          step_name: step_name,
+          data: data,
+          metadata: metadata
+        }
+        store_json("analysis_results", result)
+      end
+
+      def load_analysis_result
+        load_json("analysis_results")
+      end
+
+      # Embeddings (structured data)
+      def store_embeddings(step_name, embeddings_data)
+        result = {
+          step_name: step_name,
+          embeddings_data: embeddings_data
+        }
+        store_json("embeddings", result)
+      end
+
+      def load_embeddings
+        load_json("embeddings")
+      end
+
+      # Metrics (tabular data)
+      def record_metric(step_name, metric_name, value, metadata = {})
+        row_data = {
+          step_name: step_name,
+          metric_name: metric_name,
+          value: value,
+          recorded_at: Time.now.iso8601
+        }.merge(metadata)
+
+        append_csv("metrics", row_data)
+      end
+
+      def get_metrics(filters = {})
+        read_csv("metrics", filters)
+      end
+
+      def get_metrics_summary
+        csv_summary("metrics")
+      end
+
+      # Step executions (tabular data)
+      def record_step_execution(step_name, provider_name, duration, success, metadata = {})
+        row_data = {
+          step_name: step_name,
+          provider_name: provider_name,
+          duration: duration,
+          success: success,
+          created_at: Time.now.iso8601
+        }.merge(metadata)
+
+        append_csv("step_executions", row_data)
+      end
+
+      def get_step_executions(filters = {})
+        read_csv("step_executions", filters)
+      end
+
+      def get_step_executions_summary
+        csv_summary("step_executions")
+      end
+
+      # Provider activities (tabular data)
+      def record_provider_activity(provider_name, step_name, start_time, end_time, duration, final_state, stuck_detected = false)
+        row_data = {
+          provider_name: provider_name,
+          step_name: step_name,
+          start_time: start_time&.iso8601,
+          end_time: end_time&.iso8601,
+          duration: duration,
+          final_state: final_state,
+          stuck_detected: stuck_detected,
+          created_at: Time.now.iso8601
+        }
+
+        append_csv("provider_activities", row_data)
+      end
+
+      def get_provider_activities(filters = {})
+        read_csv("provider_activities", filters)
+      end
+
+      def get_provider_activities_summary
+        csv_summary("provider_activities")
+      end
+
+      # Configuration and status (structured data)
+      def store_config(config_data)
+        store_json("config", config_data)
+      end
+
+      def load_config
+        load_json("config")
+      end
+
+      def store_status(status_data)
+        store_json("status", status_data)
+      end
+
+      def load_status
+        load_json("status")
+      end
+
+      # List all files
+      def list_json_files
+        @json_storage.list
+      end
+
+      def list_csv_files
+        @csv_storage.list
+      end
+
+      def list_all_files
+        {
+          json_files: list_json_files,
+          csv_files: list_csv_files
+        }
+      end
+
+      # Backup and restore
+      def backup_to(destination_dir)
+        FileUtils.mkdir_p(destination_dir)
+        # Copy contents of base_dir to destination_dir, avoiding recursive copying
+        if Dir.exist?(@base_dir)
+          Dir.glob(File.join(@base_dir, "*")).each do |item|
+            next if File.expand_path(item) == File.expand_path(destination_dir)
+            FileUtils.cp_r(item, destination_dir)
+          end
+        end
+        {success: true, backup_location: destination_dir}
+      rescue => error
+        {success: false, error: error.message}
+      end
+
+      def restore_from(source_dir)
+        return {success: false, error: "Source directory does not exist"} unless Dir.exist?(source_dir)
+
+        # Clear existing data
+        FileUtils.rm_rf(@base_dir) if Dir.exist?(@base_dir)
+
+        # Copy from source
+        FileUtils.cp_r(source_dir, @base_dir)
+        {success: true, restored_from: source_dir}
+      rescue => error
+        {success: false, error: error.message}
+      end
+    end
+  end
+end
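
A usage sketch for this facade, again assuming the gem's lib directory is on the load path; the step and provider names are placeholders:

require "aidp/storage/file_manager"

files = Aidp::Storage::FileManager.new(".aidp")

# Structured results go to JSON files, metrics and execution history to CSV files.
files.store_analysis_result("REPOSITORY_ANALYSIS", {hotspots: ["lib/aidp/cli.rb"]})
files.record_metric("REPOSITORY_ANALYSIS", "duration_seconds", 12.4)
files.record_step_execution("REPOSITORY_ANALYSIS", "opencode", 12.4, true)

files.load_analysis_result                          # JSON round-trip, so keys come back as strings
files.get_metrics(step_name: "REPOSITORY_ANALYSIS") # filtered CSV rows
files.list_all_files                                # => {json_files: [...], csv_files: [...]}
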
data/lib/aidp/storage/json_storage.rb
@@ -0,0 +1,140 @@
+# frozen_string_literal: true
+
+require "json"
+require "fileutils"
+
+module Aidp
+  module Storage
+    # Simple JSON file storage for structured data
+    class JsonStorage
+      def initialize(base_dir = ".aidp")
+        @base_dir = base_dir
+        ensure_directory_exists
+      end
+
+      # Store data as JSON file
+      def store(filename, data)
+        file_path = get_file_path(filename)
+        FileUtils.mkdir_p(File.dirname(file_path))
+
+        json_data = {
+          "data" => data,
+          "created_at" => Time.now.iso8601,
+          "updated_at" => Time.now.iso8601
+        }
+
+        File.write(file_path, JSON.pretty_generate(json_data))
+
+        {
+          filename: filename,
+          file_path: file_path,
+          stored_at: Time.now,
+          success: true
+        }
+      rescue => error
+        {
+          filename: filename,
+          error: error.message,
+          success: false
+        }
+      end
+
+      # Load data from JSON file
+      def load(filename)
+        file_path = get_file_path(filename)
+        return nil unless File.exist?(file_path)
+
+        content = File.read(file_path)
+        json_data = JSON.parse(content)
+        json_data["data"]
+      rescue => error
+        puts "Error loading #{filename}: #{error.message}" if ENV["AIDP_DEBUG"]
+        nil
+      end
+
+      # Update existing data
+      def update(filename, data)
+        existing = load(filename)
+        return store(filename, data) unless existing
+
+        file_path = get_file_path(filename)
+        json_data = JSON.parse(File.read(file_path))
+
+        json_data["data"] = data
+        json_data["updated_at"] = Time.now.iso8601
+
+        File.write(file_path, JSON.pretty_generate(json_data))
+
+        {
+          filename: filename,
+          file_path: file_path,
+          updated_at: Time.now,
+          success: true
+        }
+      rescue => error
+        {
+          filename: filename,
+          error: error.message,
+          success: false
+        }
+      end
+
+      # Check if file exists
+      def exists?(filename)
+        File.exist?(get_file_path(filename))
+      end
+
+      # Delete file
+      def delete(filename)
+        file_path = get_file_path(filename)
+        return {success: true, message: "File does not exist"} unless File.exist?(file_path)
+
+        File.delete(file_path)
+        {success: true, message: "File deleted"}
+      rescue => error
+        {success: false, error: error.message}
+      end
+
+      # List all JSON files
+      def list
+        return [] unless Dir.exist?(@base_dir)
+
+        Dir.glob(File.join(@base_dir, "**", "*.json")).map do |file|
+          File.basename(file, ".json")
+        end
+      end
+
+      # Get file metadata
+      def metadata(filename)
+        file_path = get_file_path(filename)
+        return nil unless File.exist?(file_path)
+
+        content = File.read(file_path)
+        json_data = JSON.parse(content)
+
+        {
+          filename: filename,
+          file_path: file_path,
+          created_at: json_data["created_at"],
+          updated_at: json_data["updated_at"],
+          size: File.size(file_path)
+        }
+      rescue => error
+        puts "Error getting metadata for #{filename}: #{error.message}" if ENV["AIDP_DEBUG"]
+        nil
+      end
+
+      private
+
+      def get_file_path(filename)
+        # Ensure filename has .json extension
+        filename += ".json" unless filename.end_with?(".json")
+        File.join(@base_dir, filename)
+      end
+
+      def ensure_directory_exists
+        FileUtils.mkdir_p(@base_dir) unless Dir.exist?(@base_dir)
+      end
+    end
+  end
+end
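
And a sketch for the JSON store, with the same load-path assumption; the "status" payload is illustrative:

require "aidp/storage/json_storage"

storage = Aidp::Storage::JsonStorage.new(".aidp")

storage.store("status", {current_step: "ARCHITECTURE_ANALYSIS", completed: false})
storage.load("status")     # => {"current_step" => "ARCHITECTURE_ANALYSIS", "completed" => false}
storage.update("status", {current_step: "ARCHITECTURE_ANALYSIS", completed: true})
storage.metadata("status") # created_at/updated_at timestamps and file size
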