aidp 0.3.0 → 0.5.0
This diff shows the changes between publicly released versions of this package, as published to its public registry. It is provided for informational purposes only.
- checksums.yaml +4 -4
- data/README.md +59 -4
- data/lib/aidp/analyze/agent_personas.rb +1 -1
- data/lib/aidp/analyze/database.rb +99 -82
- data/lib/aidp/analyze/error_handler.rb +12 -76
- data/lib/aidp/analyze/focus_guidance.rb +2 -2
- data/lib/aidp/analyze/metrics_storage.rb +336 -0
- data/lib/aidp/analyze/prioritizer.rb +2 -2
- data/lib/aidp/analyze/ruby_maat_integration.rb +6 -102
- data/lib/aidp/analyze/runner.rb +107 -191
- data/lib/aidp/analyze/steps.rb +29 -30
- data/lib/aidp/analyze/storage.rb +233 -171
- data/lib/aidp/cli/jobs_command.rb +489 -0
- data/lib/aidp/cli/terminal_io.rb +52 -0
- data/lib/aidp/cli.rb +104 -45
- data/lib/aidp/core_ext/class_attribute.rb +36 -0
- data/lib/aidp/database/pg_adapter.rb +148 -0
- data/lib/aidp/database_config.rb +69 -0
- data/lib/aidp/database_connection.rb +72 -0
- data/lib/aidp/database_migration.rb +158 -0
- data/lib/aidp/execute/runner.rb +65 -92
- data/lib/aidp/execute/steps.rb +81 -82
- data/lib/aidp/job_manager.rb +41 -0
- data/lib/aidp/jobs/base_job.rb +47 -0
- data/lib/aidp/jobs/provider_execution_job.rb +96 -0
- data/lib/aidp/provider_manager.rb +25 -0
- data/lib/aidp/providers/agent_supervisor.rb +348 -0
- data/lib/aidp/providers/anthropic.rb +166 -3
- data/lib/aidp/providers/base.rb +153 -6
- data/lib/aidp/providers/cursor.rb +247 -43
- data/lib/aidp/providers/gemini.rb +166 -3
- data/lib/aidp/providers/supervised_base.rb +317 -0
- data/lib/aidp/providers/supervised_cursor.rb +22 -0
- data/lib/aidp/version.rb +1 -1
- data/lib/aidp.rb +25 -34
- data/templates/ANALYZE/01_REPOSITORY_ANALYSIS.md +4 -4
- metadata +72 -35
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 26282ecc76062d41d599eb9bc8190303bcef16257fb98fa25250eecaf894270a
+  data.tar.gz: ee50c920a5aa61d2883f906c7bea5d09c6835725c27c359bd2bce7d698f594e2
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 9fa312b72678f5bba517ee9add8c9fcd1bc06237f65781ddf429bb687b7f3a85fb3e031ebe992cb768d53885a09648d74c6f9d57282a6f988eeada7994ff534f
+  data.tar.gz: e15204813d179e130dea804b89dc0a5225e1410af19fc5420ed2e82a3992d19b7a6e9e2054dce0269dce957f1ebee42e6c881f301e8554643f0eff1167a6fd61
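To verify a downloaded copy against the published SHA256 values above, a quick sketch; the local data.tar.gz path is assumed (it can be extracted from the package with `tar -xf aidp-0.5.0.gem data.tar.gz`):

```ruby
require "digest"

# Compare the local archive against the published checksum above.
expected = "ee50c920a5aa61d2883f906c7bea5d09c6835725c27c359bd2bce7d698f594e2"
actual = Digest::SHA256.file("data.tar.gz").hexdigest
puts(actual == expected ? "checksum OK" : "checksum MISMATCH")
```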
data/README.md
CHANGED
@@ -76,6 +76,7 @@ The pipeline includes 15 steps total:
 aidp status           # Show progress of all steps
 aidp execute next     # Run next pending step
 aidp approve current  # Approve current gate step
+aidp jobs             # Monitor background jobs (real-time)
 aidp detect           # See which AI provider will be used
 aidp execute <step>   # Run specific step (e.g., prd, arch, tasks)
 aidp approve <step>   # Approve specific step
@@ -97,6 +98,56 @@ AIDP_PROVIDER=anthropic aidp execute next
 AIDP_LLM_CMD=/usr/local/bin/claude aidp execute next
 ```
 
+## Background Jobs
+
+AIDP uses background jobs to handle all AI provider executions, providing better reliability and real-time monitoring capabilities.
+
+### Job Monitoring
+
+Monitor running and completed jobs in real-time:
+
+```bash
+aidp jobs  # Show job status with real-time updates
+```
+
+The jobs view displays:
+
+- **Running jobs** with live progress updates
+- **Queued jobs** waiting to be processed
+- **Completed jobs** with execution results
+- **Failed jobs** with error details
+
+### Job Controls
+
+From the jobs view, you can:
+
+- **Retry failed jobs** by pressing `r` on a failed job
+- **View job details** by pressing `d` on any job
+- **Exit monitoring** by pressing `q`
+
+### Job Persistence
+
+- Jobs persist across CLI restarts
+- Job history is preserved for analysis
+- Failed jobs can be retried at any time
+- All job metadata and logs are stored
+
+### Database Setup
+
+AIDP uses PostgreSQL for job management. Ensure PostgreSQL is installed and running:
+
+```bash
+# macOS (using Homebrew)
+brew install postgresql
+brew services start postgresql
+
+# Ubuntu/Debian
+sudo apt-get install postgresql postgresql-contrib
+sudo systemctl start postgresql
+
+# The database will be created automatically on first use
+```
+
 ## File-Based Interaction
 
 At gate steps, the AI creates files for interaction instead of requiring real-time chat:
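The Database Setup instructions above assume a reachable PostgreSQL server. A minimal connectivity check in Ruby, assuming the pg gem and the same AIDP_DB_* variables the new storage layer reads (see data/lib/aidp/analyze/database.rb below); this particular check also assumes the aidp database already exists:

```ruby
require "pg"

# Uses aidp's defaults; any AIDP_DB_* environment variable overrides them.
conn = PG.connect(
  host: ENV["AIDP_DB_HOST"] || "localhost",
  port: ENV["AIDP_DB_PORT"] || 5432,
  dbname: ENV["AIDP_DB_NAME"] || "aidp",
  user: ENV["AIDP_DB_USER"] || ENV["USER"],
  password: ENV["AIDP_DB_PASSWORD"]
)
puts conn.exec("SELECT version()").first["version"]
conn.close
```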
@@ -137,20 +188,24 @@ Here's a typical session:
 aidp execute next
 # → Creates docs/PRD.md and PRD_QUESTIONS.md
 
-# 2.
+# 2. Monitor job progress (optional)
+aidp jobs
+# → Shows real-time job status and progress
+
+# 3. Review the questions (if any)
 cat PRD_QUESTIONS.md
 # → If questions exist, edit the file with your answers, then re-run
 
-#
+# 4. Review the PRD
 cat docs/PRD.md
 # → Edit if needed
 
-#
+# 5. Approve and continue
 aidp approve current
 aidp execute next
 # → Creates docs/NFRs.md automatically
 
-#
+# 6. Continue through gates
 aidp execute next
 # → Creates docs/Architecture.md and ARCH_QUESTIONS.md
 # → Repeat review/approve cycle
data/lib/aidp/analyze/agent_personas.rb
CHANGED
@@ -9,7 +9,7 @@ module Aidp
       "name" => "Repository Analyst",
       "description" => "Expert in analyzing version control data, code evolution patterns, and repository metrics. Specializes in identifying hotspots, technical debt, and code quality trends over time.",
       "expertise" => ["Git analysis", "Code metrics", "Temporal patterns", "Hotspot identification"],
-      "tools" => ["
+      "tools" => ["ruby-maat", "Git log analysis", "Statistical analysis"]
     },
     "Architecture Analyst" => {
       "name" => "Architecture Analyst",
data/lib/aidp/analyze/database.rb
CHANGED
@@ -1,6 +1,6 @@
 # frozen_string_literal: true
 
-require "
+require "pg"
 require "json"
 require "fileutils"
 
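The storage layer now loads pg instead of sqlite3, so the PostgreSQL client gem must be available. A minimal Gemfile line for consumers who load these classes directly; the gemspec declares the authoritative dependency and version:

```ruby
gem "pg" # PostgreSQL client; replaces the former sqlite3 dependency
```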
@@ -8,7 +8,6 @@ module Aidp
   class AnalysisDatabase
     def initialize(project_dir = Dir.pwd)
       @project_dir = project_dir
-      @db_path = File.join(project_dir, ".aidp-analysis.db")
       ensure_database_exists
     end
 
@@ -17,15 +16,21 @@ module Aidp
       db = connect
 
       # Store the main analysis result
-      db.
-
-
+      db.exec_params(
+        <<~SQL,
+          INSERT INTO analysis_results (step_name, data, metadata, created_at, updated_at)
+          VALUES ($1, $2, $3, $4, $5)
+          ON CONFLICT (step_name)
+          DO UPDATE SET
+            data = EXCLUDED.data,
+            metadata = EXCLUDED.metadata,
+            updated_at = EXCLUDED.updated_at
+        SQL
+        [step_name, data.to_json, metadata.to_json, Time.now, Time.now]
       )
 
       # Store metrics for indefinite retention
       store_metrics(step_name, metadata[:metrics]) if metadata[:metrics]
-
-      db.close
     end
 
     # Store metrics that should be retained indefinitely
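The rewritten writes use parameterized upserts through PG::Connection#exec_params. A self-contained sketch of the same pattern against a scratch table; the connection details and table name here are illustrative, not the gem's:

```ruby
require "pg"
require "json"

conn = PG.connect(dbname: "aidp") # illustrative connection

conn.exec(<<~SQL)
  CREATE TABLE IF NOT EXISTS scratch_results (
    step_name TEXT PRIMARY KEY,
    data JSONB NOT NULL
  )
SQL

# $1/$2 placeholders keep values out of the SQL string, and
# ON CONFLICT turns the INSERT into an upsert on repeated runs.
conn.exec_params(
  "INSERT INTO scratch_results (step_name, data) VALUES ($1, $2) " \
  "ON CONFLICT (step_name) DO UPDATE SET data = EXCLUDED.data",
  ["01_REPOSITORY_ANALYSIS", {files: 120}.to_json]
)
```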
@@ -33,57 +38,66 @@ module Aidp
       db = connect
 
       metrics.each do |metric_name, value|
-        db.
-
-
+        db.exec_params(
+          <<~SQL,
+            INSERT INTO analysis_metrics (step_name, metric_name, value, recorded_at)
+            VALUES ($1, $2, $3, $4)
+            ON CONFLICT (step_name, metric_name, recorded_at)
+            DO UPDATE SET value = EXCLUDED.value
+          SQL
+          [step_name, metric_name.to_s, value.to_json, Time.now]
        )
      end
-
-      db.close
    end
 
     # Store embedding vectors for future semantic analysis
     def store_embeddings(step_name, embeddings_data)
       db = connect
 
-      db.
-
-
+      db.exec_params(
+        <<~SQL,
+          INSERT INTO embeddings (step_name, embeddings_data, created_at)
+          VALUES ($1, $2, $3)
+          ON CONFLICT (step_name)
+          DO UPDATE SET
+            embeddings_data = EXCLUDED.embeddings_data,
+            created_at = EXCLUDED.created_at
+        SQL
+        [step_name, embeddings_data.to_json, Time.now]
       )
-
-      db.close
     end
 
     # Retrieve analysis results
     def get_analysis_result(step_name)
       db = connect
-      result = db.
-
-
+      result = db.exec_params(
+        "SELECT data, metadata, created_at, updated_at FROM analysis_results WHERE step_name = $1",
+        [step_name]
+      ).first
 
       return nil unless result
 
       {
-        data: JSON.parse(result[
-        metadata: JSON.parse(result[
-        created_at: result[
-        updated_at: result[
+        data: JSON.parse(result["data"]),
+        metadata: JSON.parse(result["metadata"]),
+        created_at: result["created_at"],
+        updated_at: result["updated_at"]
       }
     end
 
     # Retrieve metrics for a step
     def get_metrics(step_name)
       db = connect
-      results = db.
-        "SELECT metric_name, value, recorded_at FROM analysis_metrics WHERE step_name =
+      results = db.exec_params(
+        "SELECT metric_name, value, recorded_at FROM analysis_metrics WHERE step_name = $1 ORDER BY recorded_at DESC",
+        [step_name]
       )
-      db.close
 
       results.map do |row|
         {
-          metric_name: row[
-          value: JSON.parse(row[
-          recorded_at: row[
+          metric_name: row["metric_name"],
+          value: JSON.parse(row["value"]),
+          recorded_at: row["recorded_at"]
         }
       end
     end
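On the read side, PG::Result rows behave like hashes keyed by column name, which is what the reworked getters map over. Continuing the illustrative scratch table from the previous sketch:

```ruby
require "pg"
require "json"

conn = PG.connect(dbname: "aidp") # illustrative connection, as above

rows = conn.exec_params(
  "SELECT step_name, data FROM scratch_results WHERE step_name = $1",
  ["01_REPOSITORY_ANALYSIS"]
)

rows.each do |row|
  # Without a result type map, every column value arrives as a String,
  # hence the JSON.parse calls above; connect (later in this file)
  # installs PG::BasicTypeMapForResults.
  puts JSON.parse(row["data"])["files"]
end
```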
@@ -91,15 +105,14 @@ module Aidp
     # Get all metrics for trend analysis
     def get_all_metrics
       db = connect
-      results = db.
-      db.close
+      results = db.exec("SELECT step_name, metric_name, value, recorded_at FROM analysis_metrics ORDER BY recorded_at DESC")
 
       results.map do |row|
         {
-          step_name: row[
-          metric_name: row[
-          value: JSON.parse(row[
-          recorded_at: row[
+          step_name: row["step_name"],
+          metric_name: row["metric_name"],
+          value: JSON.parse(row["value"]),
+          recorded_at: row["recorded_at"]
         }
       end
     end
@@ -109,28 +122,28 @@ module Aidp
       db = connect
 
       # Delete existing data
-      db.
-      db.
+      db.exec_params("DELETE FROM analysis_results WHERE step_name = $1", [step_name])
+      db.exec_params("DELETE FROM embeddings WHERE step_name = $1", [step_name])
 
       # Store new data
-      db.
-
-
+      db.exec_params(
+        <<~SQL,
+          INSERT INTO analysis_results (step_name, data, metadata, created_at, updated_at)
+          VALUES ($1, $2, $3, $4, $5)
+        SQL
+        [step_name, data.to_json, metadata.to_json, Time.now, Time.now]
       )
 
       # Store metrics (these are retained indefinitely)
       store_metrics(step_name, metadata[:metrics]) if metadata[:metrics]
-
-      db.close
     end
 
     # Delete analysis data (for user cleanup)
     def delete_analysis_data(step_name)
       db = connect
-      db.
-      db.
+      db.exec_params("DELETE FROM analysis_results WHERE step_name = $1", [step_name])
+      db.exec_params("DELETE FROM embeddings WHERE step_name = $1", [step_name])
       # NOTE: metrics are NOT deleted as they should be retained indefinitely
-      db.close
     end
 
     # Export data in different formats
@@ -152,68 +165,72 @@ module Aidp
     def get_statistics
       db = connect
 
-
-        total_analysis_results: db.
-        total_metrics: db.
-        total_embeddings: db.
-        steps_analyzed: db.
-        oldest_metric: db.
-        newest_metric: db.
+      {
+        total_analysis_results: db.exec("SELECT COUNT(*) FROM analysis_results").first["count"].to_i,
+        total_metrics: db.exec("SELECT COUNT(*) FROM analysis_metrics").first["count"].to_i,
+        total_embeddings: db.exec("SELECT COUNT(*) FROM embeddings").first["count"].to_i,
+        steps_analyzed: db.exec("SELECT DISTINCT step_name FROM analysis_results").map { |row| row["step_name"] },
+        oldest_metric: db.exec("SELECT MIN(recorded_at) FROM analysis_metrics").first["min"],
+        newest_metric: db.exec("SELECT MAX(recorded_at) FROM analysis_metrics").first["max"]
       }
-
-      db.close
-      stats
     end
 
     private
 
     def ensure_database_exists
-
+      db = connect
+      create_schema(db)
+    end
 
-
+    def connect
+      @db ||= PG.connect(
+        host: ENV["AIDP_DB_HOST"] || "localhost",
+        port: ENV["AIDP_DB_PORT"] || 5432,
+        dbname: ENV["AIDP_DB_NAME"] || "aidp",
+        user: ENV["AIDP_DB_USER"] || ENV["USER"],
+        password: ENV["AIDP_DB_PASSWORD"]
+      )
+      @db.type_map_for_results = PG::BasicTypeMapForResults.new(@db)
+      @db
+    end
 
+    def create_schema(db)
       # Create analysis_results table
-      db.
-        CREATE TABLE analysis_results (
+      db.exec(<<~SQL)
+        CREATE TABLE IF NOT EXISTS analysis_results (
           step_name TEXT PRIMARY KEY,
-          data
-          metadata
-          created_at
-          updated_at
+          data JSONB NOT NULL,
+          metadata JSONB,
+          created_at TIMESTAMP WITH TIME ZONE NOT NULL,
+          updated_at TIMESTAMP WITH TIME ZONE NOT NULL
         )
       SQL
 
       # Create analysis_metrics table (indefinite retention)
-      db.
-        CREATE TABLE analysis_metrics (
-          id
+      db.exec(<<~SQL)
+        CREATE TABLE IF NOT EXISTS analysis_metrics (
+          id SERIAL PRIMARY KEY,
           step_name TEXT NOT NULL,
           metric_name TEXT NOT NULL,
-          value
-          recorded_at
+          value JSONB NOT NULL,
+          recorded_at TIMESTAMP WITH TIME ZONE NOT NULL,
           UNIQUE(step_name, metric_name, recorded_at)
         )
       SQL
 
       # Create embeddings table (for future semantic analysis)
-      db.
-        CREATE TABLE embeddings (
+      db.exec(<<~SQL)
+        CREATE TABLE IF NOT EXISTS embeddings (
          step_name TEXT PRIMARY KEY,
-          embeddings_data
-          created_at
+          embeddings_data JSONB NOT NULL,
+          created_at TIMESTAMP WITH TIME ZONE NOT NULL
        )
       SQL
 
       # Create indexes for better performance
-      db.
-      db.
-      db.
-
-      db.close
-    end
-
-    def connect
-      SQLite3::Database.new(@db_path)
+      db.exec("CREATE INDEX IF NOT EXISTS idx_analysis_metrics_step_name ON analysis_metrics(step_name)")
+      db.exec("CREATE INDEX IF NOT EXISTS idx_analysis_metrics_recorded_at ON analysis_metrics(recorded_at)")
+      db.exec("CREATE INDEX IF NOT EXISTS idx_analysis_results_updated_at ON analysis_results(updated_at)")
     end
 
     def export_to_csv(data)
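connect now memoizes a single PG connection per AnalysisDatabase instance and is configured entirely through environment variables. A hypothetical end-to-end sketch, assuming lib/aidp.rb loads the analyze classes; the host and database names here are placeholders:

```ruby
# Unset variables fall back to localhost:5432, dbname "aidp", ENV["USER"].
ENV["AIDP_DB_HOST"] = "db.internal" # hypothetical host
ENV["AIDP_DB_NAME"] = "aidp_ci"     # hypothetical database

require "aidp"

store = Aidp::AnalysisDatabase.new(Dir.pwd) # connects and creates the schema
p store.get_statistics
```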
data/lib/aidp/analyze/error_handler.rb
CHANGED
@@ -105,13 +105,6 @@ module Aidp
       end
     end
 
-    def fallback_to_mock_data(operation, fallback_data)
-      operation.call
-    rescue => e
-      logger.warn("Operation failed, using fallback data: #{e.message}")
-      fallback_data
-    end
-
     def skip_step_with_warning(step_name, error)
       logger.warn("Skipping step '#{step_name}' due to error: #{error.message}")
       {
@@ -190,7 +183,7 @@ module Aidp
         SQLite3::CorruptException => :critical_error,
         AnalysisTimeoutError => :chunk_and_retry,
         AnalysisDataError => :continue_with_partial_data,
-        AnalysisToolError => :
+        AnalysisToolError => :log_and_continue
       }
     end
 
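The table above maps exception classes to recovery-strategy symbols. A minimal self-contained sketch of that dispatch style; the names are illustrative, not the gem's exact internals:

```ruby
class AnalysisTimeoutError < StandardError; end
class AnalysisToolError < StandardError; end

STRATEGIES = {
  AnalysisTimeoutError => :chunk_and_retry,
  AnalysisToolError    => :log_and_continue
}.freeze

# Pick the first strategy whose class matches the error; escalate otherwise.
def strategy_for(error)
  STRATEGIES.find { |klass, _| error.is_a?(klass) }&.last || :critical_error
end

puts strategy_for(AnalysisToolError.new("ruby-maat not found")) # => log_and_continue
```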
@@ -236,8 +229,6 @@ module Aidp
         chunk_and_retry(error_info)
       when :continue_with_partial_data
         continue_with_partial(error_info)
-      when :fallback_to_mock_data
-        fallback_to_mock(error_info)
       when :log_and_continue
         log_and_continue(error_info)
       else
@@ -273,7 +264,8 @@ module Aidp
       if context[:network_required]
         raise_critical_error({error: error, context: context})
       else
-
+        logger.error("Network connection error: #{error.message}")
+        raise error
       end
     end
 
@@ -329,11 +321,15 @@ module Aidp
     end
 
     def handle_analysis_tool_error(error, context)
-      logger.
-
-
-
-
+      logger.error("Analysis tool error: #{error.message}")
+      tool_name = context[:tool_name] || "analysis tool"
+      error_msg = "#{tool_name} failed: #{error.message}"
+
+      if context[:installation_guide]
+        error_msg += "\n\nTo install #{tool_name}:\n#{context[:installation_guide]}"
+      end
+
+      raise AnalysisToolError.new(error_msg)
     end
 
     # Recovery strategy implementations
@@ -390,72 +386,12 @@ module Aidp
       continue_with_partial_data(operation, partial_handler)
     end
 
-    def fallback_to_mock(error_info)
-      context = error_info[:context]
-      operation = context[:operation]
-      mock_data = context[:mock_data] || generate_mock_data(context)
-
-      fallback_to_mock_data(operation, mock_data)
-    end
-
     def log_and_continue(error_info)
       error = error_info[:error]
       logger.warn("Continuing after error: #{error.message}")
       {status: "continued_with_error", error: error.message}
     end
 
-    def generate_mock_data(context)
-      case context[:analysis_type]
-      when "repository"
-        generate_mock_repository_data
-      when "architecture"
-        generate_mock_architecture_data
-      when "test_coverage"
-        generate_mock_test_data
-      else
-        {status: "mock_data", message: "Mock data generated due to error"}
-      end
-    end
-
-    def generate_mock_repository_data
-      {
-        analysis_type: "repository",
-        status: "completed",
-        data: [
-          {entity: "mock_file.rb", nrev: 5, nloc: 100, churn: 20}
-        ],
-        statistics: {
-          total_files: 1,
-          total_commits: 5,
-          total_lines: 100
-        }
-      }
-    end
-
-    def generate_mock_architecture_data
-      {
-        analysis_type: "architecture",
-        status: "completed",
-        data: {
-          pattern: "monolithic",
-          components: ["mock_component"],
-          dependencies: []
-        }
-      }
-    end
-
-    def generate_mock_test_data
-      {
-        analysis_type: "test_coverage",
-        status: "completed",
-        data: {
-          coverage: 75.0,
-          tests: 10,
-          files: 5
-        }
-      }
-    end
-
     def calculate_recovery_success_rate
       return 0.0 if @error_history.empty?
 
data/lib/aidp/analyze/focus_guidance.rb
CHANGED
@@ -53,7 +53,7 @@ module Aidp
       generate_focused_plan(selected_areas, recommendations)
     end
 
-    # Get focus areas based on
+    # Get focus areas based on ruby-maat analysis
     def get_code_maat_focus_areas
      @code_maat.run_comprehensive_analysis
 
@@ -490,7 +490,7 @@ module Aidp
 
     ### Phase 1: Baseline Analysis
     - Repository analysis to establish current state
-    -
+    - Ruby-maat analysis for historical patterns
     - Feature analysis for current structure
 
     ### Phase 2: Focused Analysis