aidp 0.3.0 → 0.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. checksums.yaml +4 -4
  2. data/README.md +191 -5
  3. data/lib/aidp/analysis/kb_inspector.rb +456 -0
  4. data/lib/aidp/analysis/seams.rb +188 -0
  5. data/lib/aidp/analysis/tree_sitter_grammar_loader.rb +493 -0
  6. data/lib/aidp/analysis/tree_sitter_scan.rb +703 -0
  7. data/lib/aidp/analyze/agent_personas.rb +1 -1
  8. data/lib/aidp/analyze/agent_tool_executor.rb +5 -11
  9. data/lib/aidp/analyze/data_retention_manager.rb +0 -5
  10. data/lib/aidp/analyze/database.rb +99 -82
  11. data/lib/aidp/analyze/error_handler.rb +12 -79
  12. data/lib/aidp/analyze/export_manager.rb +0 -7
  13. data/lib/aidp/analyze/focus_guidance.rb +2 -2
  14. data/lib/aidp/analyze/incremental_analyzer.rb +1 -11
  15. data/lib/aidp/analyze/large_analysis_progress.rb +0 -5
  16. data/lib/aidp/analyze/memory_manager.rb +34 -60
  17. data/lib/aidp/analyze/metrics_storage.rb +336 -0
  18. data/lib/aidp/analyze/parallel_processor.rb +0 -6
  19. data/lib/aidp/analyze/performance_optimizer.rb +0 -3
  20. data/lib/aidp/analyze/prioritizer.rb +2 -2
  21. data/lib/aidp/analyze/repository_chunker.rb +14 -21
  22. data/lib/aidp/analyze/ruby_maat_integration.rb +6 -102
  23. data/lib/aidp/analyze/runner.rb +107 -191
  24. data/lib/aidp/analyze/steps.rb +35 -30
  25. data/lib/aidp/analyze/storage.rb +233 -178
  26. data/lib/aidp/analyze/tool_configuration.rb +21 -36
  27. data/lib/aidp/cli/jobs_command.rb +489 -0
  28. data/lib/aidp/cli/terminal_io.rb +52 -0
  29. data/lib/aidp/cli.rb +160 -45
  30. data/lib/aidp/core_ext/class_attribute.rb +36 -0
  31. data/lib/aidp/database/pg_adapter.rb +148 -0
  32. data/lib/aidp/database_config.rb +69 -0
  33. data/lib/aidp/database_connection.rb +72 -0
  34. data/lib/aidp/execute/runner.rb +65 -92
  35. data/lib/aidp/execute/steps.rb +81 -82
  36. data/lib/aidp/job_manager.rb +41 -0
  37. data/lib/aidp/jobs/base_job.rb +45 -0
  38. data/lib/aidp/jobs/provider_execution_job.rb +83 -0
  39. data/lib/aidp/provider_manager.rb +25 -0
  40. data/lib/aidp/providers/agent_supervisor.rb +348 -0
  41. data/lib/aidp/providers/anthropic.rb +160 -3
  42. data/lib/aidp/providers/base.rb +153 -6
  43. data/lib/aidp/providers/cursor.rb +245 -43
  44. data/lib/aidp/providers/gemini.rb +164 -3
  45. data/lib/aidp/providers/supervised_base.rb +317 -0
  46. data/lib/aidp/providers/supervised_cursor.rb +22 -0
  47. data/lib/aidp/version.rb +1 -1
  48. data/lib/aidp.rb +31 -34
  49. data/templates/ANALYZE/01_REPOSITORY_ANALYSIS.md +4 -4
  50. data/templates/ANALYZE/06a_tree_sitter_scan.md +217 -0
  51. metadata +91 -36
@@ -30,17 +30,11 @@ module Aidp
 
   log_execution_start(execution_id, tool_name, options)
 
-  begin
-    result = execute_tool_with_timeout(tool_name, options)
-    execution_time = Time.now - start_time
-
-    log_execution_success(execution_id, tool_name, execution_time, result)
-    result
-  rescue => e
-    execution_time = Time.now - start_time
-    log_execution_error(execution_id, tool_name, execution_time, e)
-    raise
-  end
+  result = execute_tool_with_timeout(tool_name, options)
+  execution_time = Time.now - start_time
+
+  log_execution_success(execution_id, tool_name, execution_time, result)
+  result
 end
 
 # Execute multiple tools in parallel
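With the begin/rescue removed, failures are no longer logged and re-raised inside the executor; exceptions from execute_tool_with_timeout now propagate directly to callers. A hedged sketch of what callers take on (the executor object and execute_tool name are stand-ins, not the aidp API):

    def run_tool(executor, tool_name, options = {})
      executor.execute_tool(tool_name, options)
    rescue => e
      # The executor no longer records the failure itself, so the caller
      # decides whether to retry, skip the step, or abort the run.
      warn "#{tool_name} failed: #{e.message}"
      raise
    end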
@@ -51,11 +51,6 @@ module Aidp
     when "immediate"
       results[:cleaned_data][data_type] = clean_immediately(data_type, dry_run)
     end
-  rescue => e
-    results[:errors] << {
-      data_type: data_type,
-      error: e.message
-    }
   end
 
   results
@@ -1,6 +1,6 @@
 # frozen_string_literal: true
 
-require "sqlite3"
+require "pg"
 require "json"
 require "fileutils"
 
@@ -8,7 +8,6 @@ module Aidp
   class AnalysisDatabase
     def initialize(project_dir = Dir.pwd)
       @project_dir = project_dir
-      @db_path = File.join(project_dir, ".aidp-analysis.db")
       ensure_database_exists
     end
 
@@ -17,15 +16,21 @@ module Aidp
   db = connect
 
   # Store the main analysis result
-  db.execute(
-    "INSERT OR REPLACE INTO analysis_results (step_name, data, metadata, created_at, updated_at) VALUES (?, ?, ?, ?, ?)",
-    [step_name, data.to_json, metadata.to_json, Time.now.iso8601, Time.now.iso8601]
+  db.exec_params(
+    <<~SQL,
+      INSERT INTO analysis_results (step_name, data, metadata, created_at, updated_at)
+      VALUES ($1, $2, $3, $4, $5)
+      ON CONFLICT (step_name)
+      DO UPDATE SET
+        data = EXCLUDED.data,
+        metadata = EXCLUDED.metadata,
+        updated_at = EXCLUDED.updated_at
+    SQL
+    [step_name, data.to_json, metadata.to_json, Time.now, Time.now]
   )
 
   # Store metrics for indefinite retention
   store_metrics(step_name, metadata[:metrics]) if metadata[:metrics]
-
-  db.close
 end
 
 # Store metrics that should be retained indefinitely
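The storage layer moves from the embedded, file-backed sqlite3 driver to the pg client here, so SQLite's INSERT OR REPLACE becomes PostgreSQL's INSERT ... ON CONFLICT, which updates the conflicting row in place rather than deleting and re-inserting it. A self-contained sketch of the pattern, assuming a reachable PostgreSQL server (the kv table and values are illustrative, not taken from aidp):

    require "pg"

    conn = PG.connect(dbname: "aidp")
    conn.exec("CREATE TABLE IF NOT EXISTS kv (k TEXT PRIMARY KEY, v JSONB NOT NULL)")

    2.times do |i|
      # The second iteration hits the conflict branch and updates in place.
      conn.exec_params(<<~SQL, ["answer", {n: i}.to_json])
        INSERT INTO kv (k, v) VALUES ($1, $2)
        ON CONFLICT (k) DO UPDATE SET v = EXCLUDED.v
      SQL
    end

    conn.exec("SELECT v FROM kv WHERE k = 'answer'").first["v"] # => {"n": 1}

The `?` placeholders becoming `$1, $2, ...` is the same parameterized-query idea in pg's numbered-placeholder syntax.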
@@ -33,57 +38,66 @@ module Aidp
   db = connect
 
   metrics.each do |metric_name, value|
-    db.execute(
-      "INSERT OR REPLACE INTO analysis_metrics (step_name, metric_name, value, recorded_at) VALUES (?, ?, ?, ?)",
-      [step_name, metric_name.to_s, value.to_json, Time.now.iso8601]
+    db.exec_params(
+      <<~SQL,
+        INSERT INTO analysis_metrics (step_name, metric_name, value, recorded_at)
+        VALUES ($1, $2, $3, $4)
+        ON CONFLICT (step_name, metric_name, recorded_at)
+        DO UPDATE SET value = EXCLUDED.value
+      SQL
+      [step_name, metric_name.to_s, value.to_json, Time.now]
     )
   end
-
-  db.close
 end
 
 # Store embedding vectors for future semantic analysis
 def store_embeddings(step_name, embeddings_data)
   db = connect
 
-  db.execute(
-    "INSERT OR REPLACE INTO embeddings (step_name, embeddings_data, created_at) VALUES (?, ?, ?)",
-    [step_name, embeddings_data.to_json, Time.now.iso8601]
+  db.exec_params(
+    <<~SQL,
+      INSERT INTO embeddings (step_name, embeddings_data, created_at)
+      VALUES ($1, $2, $3)
+      ON CONFLICT (step_name)
+      DO UPDATE SET
+        embeddings_data = EXCLUDED.embeddings_data,
+        created_at = EXCLUDED.created_at
+    SQL
+    [step_name, embeddings_data.to_json, Time.now]
   )
-
-  db.close
 end
 
 # Retrieve analysis results
 def get_analysis_result(step_name)
   db = connect
-  result = db.execute("SELECT data, metadata, created_at, updated_at FROM analysis_results WHERE step_name = ?",
-    [step_name]).first
-  db.close
+  result = db.exec_params(
+    "SELECT data, metadata, created_at, updated_at FROM analysis_results WHERE step_name = $1",
+    [step_name]
+  ).first
 
   return nil unless result
 
   {
-    data: JSON.parse(result[0]),
-    metadata: JSON.parse(result[1]),
-    created_at: result[2],
-    updated_at: result[3]
+    data: JSON.parse(result["data"]),
+    metadata: JSON.parse(result["metadata"]),
+    created_at: result["created_at"],
+    updated_at: result["updated_at"]
   }
 end
 
 # Retrieve metrics for a step
 def get_metrics(step_name)
   db = connect
-  results = db.execute(
-    "SELECT metric_name, value, recorded_at FROM analysis_metrics WHERE step_name = ? ORDER BY recorded_at DESC", [step_name]
+  results = db.exec_params(
+    "SELECT metric_name, value, recorded_at FROM analysis_metrics WHERE step_name = $1 ORDER BY recorded_at DESC",
+    [step_name]
   )
-  db.close
 
   results.map do |row|
     {
-      metric_name: row[0],
-      value: JSON.parse(row[1]),
-      recorded_at: row[2]
+      metric_name: row["metric_name"],
+      value: JSON.parse(row["value"]),
+      recorded_at: row["recorded_at"]
    }
  end
 end
@@ -91,15 +105,14 @@ module Aidp
 # Get all metrics for trend analysis
 def get_all_metrics
   db = connect
-  results = db.execute("SELECT step_name, metric_name, value, recorded_at FROM analysis_metrics ORDER BY recorded_at DESC")
-  db.close
+  results = db.exec("SELECT step_name, metric_name, value, recorded_at FROM analysis_metrics ORDER BY recorded_at DESC")
 
   results.map do |row|
     {
-      step_name: row[0],
-      metric_name: row[1],
-      value: JSON.parse(row[2]),
-      recorded_at: row[2]
+      step_name: row["step_name"],
+      metric_name: row["metric_name"],
+      value: JSON.parse(row["value"]),
+      recorded_at: row["recorded_at"]
     }
   end
 end
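The row-access change running through these hunks follows from the driver swap: the sqlite3 gem yields rows as positional arrays, while PG::Result yields hashes keyed by column name. Note that the keyed access also quietly fixes the old get_all_metrics, which read recorded_at from row[2], the same index as value. A small sketch of the difference, assuming a reachable server:

    require "pg"

    conn = PG.connect(dbname: "aidp")
    row = conn.exec("SELECT 1 AS one, now() AS ts").first
    row["one"] # => "1" (raw strings unless a result type map is installed;
               #    the connect hunk further down installs one)
    row["ts"]  # => a "2024-01-01 00:00:00+00"-style string without the map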
@@ -109,28 +122,28 @@ module Aidp
   db = connect
 
   # Delete existing data
-  db.execute("DELETE FROM analysis_results WHERE step_name = ?", [step_name])
-  db.execute("DELETE FROM embeddings WHERE step_name = ?", [step_name])
+  db.exec_params("DELETE FROM analysis_results WHERE step_name = $1", [step_name])
+  db.exec_params("DELETE FROM embeddings WHERE step_name = $1", [step_name])
 
   # Store new data
-  db.execute(
-    "INSERT INTO analysis_results (step_name, data, metadata, created_at, updated_at) VALUES (?, ?, ?, ?, ?)",
-    [step_name, data.to_json, metadata.to_json, Time.now.iso8601, Time.now.iso8601]
+  db.exec_params(
+    <<~SQL,
+      INSERT INTO analysis_results (step_name, data, metadata, created_at, updated_at)
+      VALUES ($1, $2, $3, $4, $5)
+    SQL
+    [step_name, data.to_json, metadata.to_json, Time.now, Time.now]
   )
 
   # Store metrics (these are retained indefinitely)
   store_metrics(step_name, metadata[:metrics]) if metadata[:metrics]
-
-  db.close
 end
 
 # Delete analysis data (for user cleanup)
 def delete_analysis_data(step_name)
   db = connect
-  db.execute("DELETE FROM analysis_results WHERE step_name = ?", [step_name])
-  db.execute("DELETE FROM embeddings WHERE step_name = ?", [step_name])
+  db.exec_params("DELETE FROM analysis_results WHERE step_name = $1", [step_name])
+  db.exec_params("DELETE FROM embeddings WHERE step_name = $1", [step_name])
   # NOTE: metrics are NOT deleted as they should be retained indefinitely
-  db.close
 end
 
 # Export data in different formats
@@ -152,68 +165,72 @@ module Aidp
 def get_statistics
   db = connect
 
-  stats = {
-    total_analysis_results: db.execute("SELECT COUNT(*) FROM analysis_results").first[0],
-    total_metrics: db.execute("SELECT COUNT(*) FROM analysis_metrics").first[0],
-    total_embeddings: db.execute("SELECT COUNT(*) FROM embeddings").first[0],
-    steps_analyzed: db.execute("SELECT DISTINCT step_name FROM analysis_results").map { |row| row[0] },
-    oldest_metric: db.execute("SELECT MIN(recorded_at) FROM analysis_metrics").first[0],
-    newest_metric: db.execute("SELECT MAX(recorded_at) FROM analysis_metrics").first[0]
+  {
+    total_analysis_results: db.exec("SELECT COUNT(*) FROM analysis_results").first["count"].to_i,
+    total_metrics: db.exec("SELECT COUNT(*) FROM analysis_metrics").first["count"].to_i,
+    total_embeddings: db.exec("SELECT COUNT(*) FROM embeddings").first["count"].to_i,
+    steps_analyzed: db.exec("SELECT DISTINCT step_name FROM analysis_results").map { |row| row["step_name"] },
+    oldest_metric: db.exec("SELECT MIN(recorded_at) FROM analysis_metrics").first["min"],
+    newest_metric: db.exec("SELECT MAX(recorded_at) FROM analysis_metrics").first["max"]
   }
-
-  db.close
-  stats
 end
 
 private
 
 def ensure_database_exists
-  return if File.exist?(@db_path)
+  db = connect
+  create_schema(db)
+end
 
-  db = SQLite3::Database.new(@db_path)
+def connect
+  @db ||= PG.connect(
+    host: ENV["AIDP_DB_HOST"] || "localhost",
+    port: ENV["AIDP_DB_PORT"] || 5432,
+    dbname: ENV["AIDP_DB_NAME"] || "aidp",
+    user: ENV["AIDP_DB_USER"] || ENV["USER"],
+    password: ENV["AIDP_DB_PASSWORD"]
+  )
+  @db.type_map_for_results = PG::BasicTypeMapForResults.new(@db)
+  @db
+end
 
+def create_schema(db)
   # Create analysis_results table
-  db.execute(<<~SQL)
-    CREATE TABLE analysis_results (
+  db.exec(<<~SQL)
+    CREATE TABLE IF NOT EXISTS analysis_results (
       step_name TEXT PRIMARY KEY,
-      data TEXT NOT NULL,
-      metadata TEXT,
-      created_at TEXT NOT NULL,
-      updated_at TEXT NOT NULL
+      data JSONB NOT NULL,
+      metadata JSONB,
+      created_at TIMESTAMP WITH TIME ZONE NOT NULL,
+      updated_at TIMESTAMP WITH TIME ZONE NOT NULL
     )
   SQL
 
   # Create analysis_metrics table (indefinite retention)
-  db.execute(<<~SQL)
-    CREATE TABLE analysis_metrics (
-      id INTEGER PRIMARY KEY AUTOINCREMENT,
+  db.exec(<<~SQL)
+    CREATE TABLE IF NOT EXISTS analysis_metrics (
+      id SERIAL PRIMARY KEY,
       step_name TEXT NOT NULL,
      metric_name TEXT NOT NULL,
-      value TEXT NOT NULL,
-      recorded_at TEXT NOT NULL,
+      value JSONB NOT NULL,
+      recorded_at TIMESTAMP WITH TIME ZONE NOT NULL,
      UNIQUE(step_name, metric_name, recorded_at)
    )
  SQL
 
   # Create embeddings table (for future semantic analysis)
-  db.execute(<<~SQL)
-    CREATE TABLE embeddings (
+  db.exec(<<~SQL)
+    CREATE TABLE IF NOT EXISTS embeddings (
      step_name TEXT PRIMARY KEY,
-      embeddings_data TEXT NOT NULL,
-      created_at TEXT NOT NULL
+      embeddings_data JSONB NOT NULL,
+      created_at TIMESTAMP WITH TIME ZONE NOT NULL
    )
  SQL
 
   # Create indexes for better performance
-  db.execute("CREATE INDEX idx_analysis_metrics_step_name ON analysis_metrics(step_name)")
-  db.execute("CREATE INDEX idx_analysis_metrics_recorded_at ON analysis_metrics(recorded_at)")
-  db.execute("CREATE INDEX idx_analysis_results_updated_at ON analysis_results(updated_at)")
-
-  db.close
-end
-
-def connect
-  SQLite3::Database.new(@db_path)
+  db.exec("CREATE INDEX IF NOT EXISTS idx_analysis_metrics_step_name ON analysis_metrics(step_name)")
+  db.exec("CREATE INDEX IF NOT EXISTS idx_analysis_metrics_recorded_at ON analysis_metrics(recorded_at)")
+  db.exec("CREATE INDEX IF NOT EXISTS idx_analysis_results_updated_at ON analysis_results(updated_at)")
 end
 
 def export_to_csv(data)
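The connection is now memoized and configured entirely through AIDP_DB_* environment variables, and PG::BasicTypeMapForResults decodes result columns into native Ruby types (integers, floats, Time) rather than raw strings. A usage sketch with illustrative values, assuming the gem's files are loaded via its top-level require:

    require "aidp"

    ENV["AIDP_DB_HOST"] = "127.0.0.1" # illustrative; defaults to "localhost"
    ENV["AIDP_DB_NAME"] = "aidp"      # illustrative; this is also the default

    db = Aidp::AnalysisDatabase.new(Dir.pwd)
    # initialize runs ensure_database_exists, so the IF NOT EXISTS schema and
    # indexes above are applied idempotently on every startup.
    stats = db.get_statistics
    stats[:total_metrics]  # => Integer
    stats[:oldest_metric]  # => Time (the type map decodes timestamptz)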
@@ -105,13 +105,6 @@ module Aidp
   end
 end
 
-def fallback_to_mock_data(operation, fallback_data)
-  operation.call
-rescue => e
-  logger.warn("Operation failed, using fallback data: #{e.message}")
-  fallback_data
-end
-
 def skip_step_with_warning(step_name, error)
   logger.warn("Skipping step '#{step_name}' due to error: #{error.message}")
   {
@@ -123,9 +116,6 @@ module Aidp
 
 def continue_with_partial_data(operation, partial_data_handler)
   operation.call
-rescue => e
-  logger.warn("Operation failed, continuing with partial data: #{e.message}")
-  partial_data_handler.call(e)
 end
 
 # Error reporting and statistics
@@ -190,7 +180,7 @@ module Aidp
   SQLite3::CorruptException => :critical_error,
   AnalysisTimeoutError => :chunk_and_retry,
   AnalysisDataError => :continue_with_partial_data,
-  AnalysisToolError => :fallback_to_mock_data
+  AnalysisToolError => :log_and_continue
 }
 end
 
@@ -236,8 +226,6 @@ module Aidp
   chunk_and_retry(error_info)
 when :continue_with_partial_data
   continue_with_partial(error_info)
-when :fallback_to_mock_data
-  fallback_to_mock(error_info)
 when :log_and_continue
   log_and_continue(error_info)
 else
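With :fallback_to_mock_data gone from both the strategy map and this dispatch, tool failures now route through :log_and_continue. A condensed, self-contained sketch of the new mapping (the real class dispatches through its error-handling entry point, and the rubocop message is hypothetical):

    class AnalysisToolError < StandardError; end

    RECOVERY = {AnalysisToolError => :log_and_continue}.freeze

    def recover(error)
      case RECOVERY[error.class]
      when :log_and_continue
        warn "Continuing after error: #{error.message}"
        {status: "continued_with_error", error: error.message}
      else
        raise error
      end
    end

    recover(AnalysisToolError.new("rubocop failed"))
    # => {status: "continued_with_error", error: "rubocop failed"}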
@@ -273,7 +261,8 @@ module Aidp
   if context[:network_required]
     raise_critical_error({error: error, context: context})
   else
-    fallback_to_mock_data(-> { context[:operation].call }, context[:fallback_data])
+    logger.error("Network connection error: #{error.message}")
+    raise error
   end
 end
 
@@ -329,11 +318,15 @@ module Aidp
 end
 
 def handle_analysis_tool_error(error, context)
-  logger.warn("Analysis tool error: #{error.message}")
-  fallback_to_mock_data(
-    -> { context[:operation].call },
-    context[:mock_data] || generate_mock_data(context)
-  )
+  logger.error("Analysis tool error: #{error.message}")
+  tool_name = context[:tool_name] || "analysis tool"
+  error_msg = "#{tool_name} failed: #{error.message}"
+
+  if context[:installation_guide]
+    error_msg += "\n\nTo install #{tool_name}:\n#{context[:installation_guide]}"
+  end
+
+  raise AnalysisToolError.new(error_msg)
 end
 
 # Recovery strategy implementations
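Tool errors now fail loudly with installation guidance instead of silently substituting mock data. A self-contained sketch of the message a caller ends up seeing (the context keys come from the hunk above; the rubocop values are hypothetical):

    class AnalysisToolError < StandardError; end

    def tool_failure(error, context)
      tool_name = context[:tool_name] || "analysis tool"
      message = "#{tool_name} failed: #{error.message}"
      message += "\n\nTo install #{tool_name}:\n#{context[:installation_guide]}" if context[:installation_guide]
      raise AnalysisToolError.new(message)
    end

    tool_failure(StandardError.new("command not found"),
      tool_name: "rubocop", installation_guide: "gem install rubocop")
    # => AnalysisToolError: rubocop failed: command not found
    #
    #    To install rubocop:
    #    gem install rubocop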
@@ -390,72 +383,12 @@ module Aidp
   continue_with_partial_data(operation, partial_handler)
 end
 
-def fallback_to_mock(error_info)
-  context = error_info[:context]
-  operation = context[:operation]
-  mock_data = context[:mock_data] || generate_mock_data(context)
-
-  fallback_to_mock_data(operation, mock_data)
-end
-
 def log_and_continue(error_info)
   error = error_info[:error]
   logger.warn("Continuing after error: #{error.message}")
   {status: "continued_with_error", error: error.message}
 end
 
-def generate_mock_data(context)
-  case context[:analysis_type]
-  when "repository"
-    generate_mock_repository_data
-  when "architecture"
-    generate_mock_architecture_data
-  when "test_coverage"
-    generate_mock_test_data
-  else
-    {status: "mock_data", message: "Mock data generated due to error"}
-  end
-end
-
-def generate_mock_repository_data
-  {
-    analysis_type: "repository",
-    status: "completed",
-    data: [
-      {entity: "mock_file.rb", nrev: 5, nloc: 100, churn: 20}
-    ],
-    statistics: {
-      total_files: 1,
-      total_commits: 5,
-      total_lines: 100
-    }
-  }
-end
-
-def generate_mock_architecture_data
-  {
-    analysis_type: "architecture",
-    status: "completed",
-    data: {
-      pattern: "monolithic",
-      components: ["mock_component"],
-      dependencies: []
-    }
-  }
-end
-
-def generate_mock_test_data
-  {
-    analysis_type: "test_coverage",
-    status: "completed",
-    data: {
-      coverage: 75.0,
-      tests: 10,
-      files: 5
-    }
-  }
-end
-
 def calculate_recovery_success_rate
   return 0.0 if @error_history.empty?
@@ -158,13 +158,6 @@ module Aidp
     size: File.size(output_path),
     generated_at: Time.now
   }
-rescue => e
-  {
-    success: false,
-    error: e.message,
-    format: format,
-    generated_at: Time.now
-  }
 end
 
 def format_export_data(data, options)
@@ -53,7 +53,7 @@ module Aidp
   generate_focused_plan(selected_areas, recommendations)
 end
 
-# Get focus areas based on Code Maat analysis
+# Get focus areas based on ruby-maat analysis
 def get_code_maat_focus_areas
   @code_maat.run_comprehensive_analysis
 
@@ -490,7 +490,7 @@ module Aidp
 
 ### Phase 1: Baseline Analysis
 - Repository analysis to establish current state
-- Code Maat analysis for historical patterns
+- Ruby-maat analysis for historical patterns
 - Feature analysis for current structure
 
 ### Phase 2: Focused Analysis
@@ -171,12 +171,7 @@ module Aidp
   state_file = get_state_file_path(analysis_type)
   return create_initial_state(analysis_type) unless File.exist?(state_file)
 
-  begin
-    YAML.load_file(state_file) || create_initial_state(analysis_type)
-  rescue => e
-    puts "Warning: Could not load analysis state: #{e.message}"
-    create_initial_state(analysis_type)
-  end
+  YAML.load_file(state_file) || create_initial_state(analysis_type)
 end
 
 def save_analysis_state(analysis_type, state)
@@ -367,11 +362,6 @@ module Aidp
   plan[:components].each do |component|
     component_result = analyze_component(component, plan[:analysis_type], options)
     results[:results][component] = component_result
-  rescue => e
-    results[:errors] << {
-      component: component,
-      error: e.message
-    }
   end
 
   results[:end_time] = Time.now
@@ -292,11 +292,6 @@ module Aidp
     success: true,
     imported_progress: @current_progress
   }
-rescue => e
-  {
-    success: false,
-    error: e.message
-  }
 end
 
 private
@@ -54,38 +54,24 @@ module Aidp
   errors: []
 }
 
-begin
-  dataset.each_with_index do |item, index|
-    # Check memory usage
-    current_memory = get_memory_usage
-    results[:memory_usage] << current_memory
+dataset.each_with_index do |item, index|
+  # Check memory usage
+  current_memory = get_memory_usage
+  results[:memory_usage] << current_memory
 
-    # Trigger garbage collection if needed
-    if should_trigger_gc?(current_memory)
-      trigger_garbage_collection
-      results[:gc_count] += 1
-    end
+  # Trigger garbage collection if needed
+  if should_trigger_gc?(current_memory)
+    trigger_garbage_collection
+    results[:gc_count] += 1
+  end
 
-    # Process item
-    begin
-      result = processor_method.call(item, options)
-      results[:results] << result
-      results[:processed_items] += 1
-    rescue => e
-      results[:errors] << {
-        item_index: index,
-        error: e.message
-      }
-    end
+  # Process item
+  result = processor_method.call(item, options)
+  results[:results] << result
+  results[:processed_items] += 1
 
-    # Update memory tracking
-    update_memory_tracking(current_memory)
-  end
-rescue => e
-  results[:errors] << {
-    type: "streaming_error",
-    message: e.message
-  }
+  # Update memory tracking
+  update_memory_tracking(current_memory)
 end
 
 results
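Behaviorally, a single failing item now aborts the whole stream instead of being appended to results[:errors] as an {item_index:, error:} entry. A hedged caller-side sketch (the manager object and stream_process name are stand-ins for the real method, whose definition is not shown in this diff):

    begin
      stats = manager.stream_process(dataset, processor_method, options)
    rescue => e
      # Previously recorded as {type: "streaming_error", ...} in the results;
      # now the exception surfaces here and the partial results are lost.
      warn "Streaming aborted: #{e.message}"
      raise
    end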
@@ -103,32 +89,25 @@ module Aidp
   errors: []
 }
 
-begin
-  dataset.each_slice(chunk_size) do |chunk|
-    # Check memory before processing chunk
-    pre_chunk_memory = get_memory_usage
-    results[:memory_usage] << pre_chunk_memory
-
-    # Process chunk
-    chunk_results = process_chunk(chunk, processor_method, options)
-    results[:results].concat(chunk_results[:results])
-    results[:errors].concat(chunk_results[:errors])
-    results[:processed_items] += chunk_results[:processed_items]
-
-    # Trigger garbage collection after chunk
-    if should_trigger_gc?(pre_chunk_memory)
-      trigger_garbage_collection
-      results[:gc_count] += 1
-    end
-
-    results[:processed_chunks] += 1
-    update_memory_tracking(pre_chunk_memory)
+dataset.each_slice(chunk_size) do |chunk|
+  # Check memory before processing chunk
+  pre_chunk_memory = get_memory_usage
+  results[:memory_usage] << pre_chunk_memory
+
+  # Process chunk
+  chunk_results = process_chunk(chunk, processor_method, options)
+  results[:results].concat(chunk_results[:results])
+  results[:errors].concat(chunk_results[:errors])
+  results[:processed_items] += chunk_results[:processed_items]
+
+  # Trigger garbage collection after chunk
+  if should_trigger_gc?(pre_chunk_memory)
+    trigger_garbage_collection
+    results[:gc_count] += 1
   end
-rescue => e
-  results[:errors] << {
-    type: "chunking_error",
-    message: e.message
-  }
+
+  results[:processed_chunks] += 1
+  update_memory_tracking(pre_chunk_memory)
 end
 
 results
@@ -303,11 +282,6 @@ module Aidp
   result = processor_method.call(item, options)
   results[:results] << result
   results[:processed_items] += 1
-rescue => e
-  results[:errors] << {
-    item_index: index,
-    error: e.message
-  }
 end
 
 results
@@ -335,7 +309,7 @@ module Aidp
 def generate_cache_key(item)
   # Generate a cache key for the item
   Digest::MD5.hexdigest(item.to_json)
-rescue
+rescue JSON::GeneratorError
   # Fallback to object_id if JSON serialization fails
   "item_#{item.object_id}"
 end
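The bare rescue used to swallow any StandardError, including genuine bugs such as NoMethodError; it is now narrowed to JSON::GeneratorError, which to_json raises for values with no JSON representation. A small sketch of the case the fallback still covers:

    require "json"
    require "digest"

    item = {score: Float::NAN} # NaN cannot be serialized to JSON
    begin
      Digest::MD5.hexdigest(item.to_json)
    rescue JSON::GeneratorError
      "item_#{item.object_id}" # same fallback as the method above
    end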