aidp 0.5.0 → 0.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +132 -1
- data/lib/aidp/analysis/kb_inspector.rb +456 -0
- data/lib/aidp/analysis/seams.rb +188 -0
- data/lib/aidp/analysis/tree_sitter_grammar_loader.rb +493 -0
- data/lib/aidp/analysis/tree_sitter_scan.rb +703 -0
- data/lib/aidp/analyze/agent_tool_executor.rb +5 -11
- data/lib/aidp/analyze/data_retention_manager.rb +0 -5
- data/lib/aidp/analyze/error_handler.rb +0 -3
- data/lib/aidp/analyze/export_manager.rb +0 -7
- data/lib/aidp/analyze/incremental_analyzer.rb +1 -11
- data/lib/aidp/analyze/large_analysis_progress.rb +0 -5
- data/lib/aidp/analyze/memory_manager.rb +34 -60
- data/lib/aidp/analyze/parallel_processor.rb +0 -6
- data/lib/aidp/analyze/performance_optimizer.rb +0 -3
- data/lib/aidp/analyze/repository_chunker.rb +14 -21
- data/lib/aidp/analyze/steps.rb +6 -0
- data/lib/aidp/analyze/storage.rb +0 -7
- data/lib/aidp/analyze/tool_configuration.rb +21 -36
- data/lib/aidp/cli/jobs_command.rb +9 -9
- data/lib/aidp/cli.rb +56 -0
- data/lib/aidp/jobs/base_job.rb +0 -2
- data/lib/aidp/jobs/provider_execution_job.rb +11 -24
- data/lib/aidp/providers/agent_supervisor.rb +2 -2
- data/lib/aidp/providers/anthropic.rb +15 -21
- data/lib/aidp/providers/cursor.rb +2 -4
- data/lib/aidp/providers/gemini.rb +4 -6
- data/lib/aidp/version.rb +1 -1
- data/lib/aidp.rb +6 -0
- data/templates/ANALYZE/06a_tree_sitter_scan.md +217 -0
- metadata +22 -4
- data/lib/aidp/database_migration.rb +0 -158

data/lib/aidp/analyze/agent_tool_executor.rb
CHANGED
@@ -30,17 +30,11 @@ module Aidp

       log_execution_start(execution_id, tool_name, options)

-
-
-
-
-
-      result
-    rescue => e
-      execution_time = Time.now - start_time
-      log_execution_error(execution_id, tool_name, execution_time, e)
-      raise
-    end
+      result = execute_tool_with_timeout(tool_name, options)
+      execution_time = Time.now - start_time
+
+      log_execution_success(execution_id, tool_name, execution_time, result)
+      result
     end

     # Execute multiple tools in parallel
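The reworked execution path above drops the blanket rescue/re-raise and lets failures propagate to the caller while still timing and logging the happy path. A minimal, self-contained sketch of that shape — the logger calls stand in for aidp's `log_execution_*` helpers, and the tool invocation is a placeholder:

```ruby
require "securerandom"
require "logger"

# Minimal sketch: time the call, log success, and let any exception bubble up.
# The log calls stand in for aidp's log_execution_* helpers; `run` is a placeholder.
class ToolRunner
  def initialize(logger: Logger.new($stdout))
    @logger = logger
  end

  def execute_tool(tool_name, options = {})
    execution_id = SecureRandom.uuid
    start_time = Time.now
    @logger.info("[#{execution_id}] starting #{tool_name}")

    result = run(tool_name, options)            # exceptions propagate to the caller
    execution_time = Time.now - start_time

    @logger.info("[#{execution_id}] #{tool_name} finished in #{execution_time.round(2)}s")
    result
  end

  private

  def run(tool_name, _options)
    "#{tool_name} output"
  end
end

puts ToolRunner.new.execute_tool("rubocop")
```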

data/lib/aidp/analyze/error_handler.rb
CHANGED
@@ -116,9 +116,6 @@ module Aidp

     def continue_with_partial_data(operation, partial_data_handler)
       operation.call
-    rescue => e
-      logger.warn("Operation failed, continuing with partial data: #{e.message}")
-      partial_data_handler.call(e)
     end

     # Error reporting and statistics

data/lib/aidp/analyze/export_manager.rb
CHANGED
@@ -158,13 +158,6 @@ module Aidp
         size: File.size(output_path),
         generated_at: Time.now
       }
-    rescue => e
-      {
-        success: false,
-        error: e.message,
-        format: format,
-        generated_at: Time.now
-      }
     end

     def format_export_data(data, options)

data/lib/aidp/analyze/incremental_analyzer.rb
CHANGED
@@ -171,12 +171,7 @@ module Aidp
       state_file = get_state_file_path(analysis_type)
       return create_initial_state(analysis_type) unless File.exist?(state_file)

-
-        YAML.load_file(state_file) || create_initial_state(analysis_type)
-      rescue => e
-        puts "Warning: Could not load analysis state: #{e.message}"
-        create_initial_state(analysis_type)
-      end
+      YAML.load_file(state_file) || create_initial_state(analysis_type)
     end

     def save_analysis_state(analysis_type, state)
@@ -367,11 +362,6 @@ module Aidp
       plan[:components].each do |component|
         component_result = analyze_component(component, plan[:analysis_type], options)
         results[:results][component] = component_result
-      rescue => e
-        results[:errors] << {
-          component: component,
-          error: e.message
-        }
       end

       results[:end_time] = Time.now
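The surviving load path relies on the guard clause for missing files and on `|| create_initial_state(...)` for empty ones, since `YAML.load_file` returns a falsy value (nil or false) for an empty document. A small stdlib-only sketch of that pattern, with illustrative names:

```ruby
require "yaml"
require "tempfile"

# Guard for a missing file, then fall back to a default when YAML.load_file
# returns a falsy value for an empty document.
def load_state(state_file, default)
  return default unless File.exist?(state_file)

  YAML.load_file(state_file) || default
end

default_state = {"completed_steps" => [], "last_run" => nil}

Tempfile.create(["state", ".yml"]) do |f|
  puts load_state(f.path, default_state).inspect   # empty file -> default state
  f.write({"completed_steps" => ["01_PRD"]}.to_yaml)
  f.flush
  puts load_state(f.path, default_state).inspect   # => {"completed_steps"=>["01_PRD"]}
end
```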

data/lib/aidp/analyze/memory_manager.rb
CHANGED
@@ -54,38 +54,24 @@ module Aidp
         errors: []
       }

-
-
-
-
-        results[:memory_usage] << current_memory
+      dataset.each_with_index do |item, index|
+        # Check memory usage
+        current_memory = get_memory_usage
+        results[:memory_usage] << current_memory

-
-
-
-
-
+        # Trigger garbage collection if needed
+        if should_trigger_gc?(current_memory)
+          trigger_garbage_collection
+          results[:gc_count] += 1
+        end

-
-
-
-
-        results[:processed_items] += 1
-      rescue => e
-        results[:errors] << {
-          item_index: index,
-          error: e.message
-        }
-      end
+        # Process item
+        result = processor_method.call(item, options)
+        results[:results] << result
+        results[:processed_items] += 1

-
-
-        end
-    rescue => e
-      results[:errors] << {
-        type: "streaming_error",
-        message: e.message
-      }
+        # Update memory tracking
+        update_memory_tracking(current_memory)
       end

       results
@@ -103,32 +89,25 @@ module Aidp
         errors: []
       }

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-          results[:gc_count] += 1
-        end
-
-        results[:processed_chunks] += 1
-        update_memory_tracking(pre_chunk_memory)
+      dataset.each_slice(chunk_size) do |chunk|
+        # Check memory before processing chunk
+        pre_chunk_memory = get_memory_usage
+        results[:memory_usage] << pre_chunk_memory
+
+        # Process chunk
+        chunk_results = process_chunk(chunk, processor_method, options)
+        results[:results].concat(chunk_results[:results])
+        results[:errors].concat(chunk_results[:errors])
+        results[:processed_items] += chunk_results[:processed_items]
+
+        # Trigger garbage collection after chunk
+        if should_trigger_gc?(pre_chunk_memory)
+          trigger_garbage_collection
+          results[:gc_count] += 1
        end
-
-        results[:
-
-        message: e.message
-        }
+
+        results[:processed_chunks] += 1
+        update_memory_tracking(pre_chunk_memory)
       end

       results
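A self-contained sketch of the chunked path shown above: slice the dataset, process each chunk, and trigger GC when memory has grown. Measuring memory via `GC.stat` against a fixed slot threshold is an assumption made for illustration; aidp's own `get_memory_usage` / `should_trigger_gc?` may be implemented differently.

```ruby
# Sketch of chunked processing with periodic GC. The GC.stat threshold below is
# an illustrative stand-in for aidp's get_memory_usage / should_trigger_gc? logic.
def process_in_chunks(dataset, chunk_size: 100, heap_slot_limit: 500_000)
  results = {results: [], processed_items: 0, processed_chunks: 0, gc_count: 0}

  dataset.each_slice(chunk_size) do |chunk|
    chunk.each do |item|
      results[:results] << yield(item)
      results[:processed_items] += 1
    end

    # Trigger garbage collection once the live heap grows past the threshold
    if GC.stat(:heap_live_slots) > heap_slot_limit
      GC.start
      results[:gc_count] += 1
    end

    results[:processed_chunks] += 1
  end

  results
end

summary = process_in_chunks(1..1_000, chunk_size: 250) { |n| n * n }
puts summary.reject { |k, _| k == :results }.inspect
```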

@@ -303,11 +282,6 @@ module Aidp
         result = processor_method.call(item, options)
         results[:results] << result
         results[:processed_items] += 1
-      rescue => e
-        results[:errors] << {
-          item_index: index,
-          error: e.message
-        }
       end

       results

@@ -335,7 +309,7 @@ module Aidp
     def generate_cache_key(item)
       # Generate a cache key for the item
       Digest::MD5.hexdigest(item.to_json)
-    rescue
+    rescue JSON::GeneratorError
       # Fallback to object_id if JSON serialization fails
       "item_#{item.object_id}"
     end
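The narrowed rescue can be exercised directly: `#to_json` raises `JSON::GeneratorError` for values such as non-finite floats, and only then does the key fall back to `object_id`. A small mirror of the method above:

```ruby
require "json"
require "digest"

# Only JSON serialization failures fall back to the object_id-based key.
def cache_key(item)
  Digest::MD5.hexdigest(item.to_json)
rescue JSON::GeneratorError
  "item_#{item.object_id}"
end

puts cache_key({name: "aidp", version: "0.7.0"})   # 32-character MD5 digest
puts cache_key({value: Float::NAN})                # NaN is not valid JSON -> "item_..." fallback
```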

data/lib/aidp/analyze/repository_chunker.rb
CHANGED
@@ -264,29 +264,22 @@ module Aidp
         status: "running"
       }

-
-
-
-
-
-
-
-
-
-
-          results[:data] = analyze_feature_chunk(chunk, analysis_type, options)
-        end
-
-        results[:status] = "completed"
-        results[:end_time] = Time.now
-        results[:duration] = results[:end_time] - results[:start_time]
-      rescue => e
-        results[:status] = "failed"
-        results[:error] = e.message
-        results[:end_time] = Time.now
-        results[:duration] = results[:end_time] - results[:start_time]
+      # Perform analysis based on chunk type
+      case chunk[:strategy]
+      when "time_based"
+        results[:data] = analyze_time_chunk(chunk, analysis_type, options)
+      when "commit_count"
+        results[:data] = analyze_commit_chunk(chunk, analysis_type, options)
+      when "size_based"
+        results[:data] = analyze_size_chunk(chunk, analysis_type, options)
+      when "feature_based"
+        results[:data] = analyze_feature_chunk(chunk, analysis_type, options)
       end

+      results[:status] = "completed"
+      results[:end_time] = Time.now
+      results[:duration] = results[:end_time] - results[:start_time]
+
       results
     end

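The rescue that marked a chunk as "failed" is gone, so the success-path bookkeeping (status, end time, duration) now runs only when the strategy dispatch completes. A hedged sketch of that flow with placeholder analyzers, not aidp's real methods:

```ruby
# Dispatch on the chunk's strategy and record timing afterwards.
# The analyzer lambdas are placeholders, not aidp's real implementations.
ANALYZERS = {
  "time_based"    => ->(chunk) { "#{chunk[:commits].size} commits analyzed by time window" },
  "commit_count"  => ->(chunk) { "#{chunk[:commits].size} commits analyzed by count" },
  "size_based"    => ->(chunk) { "#{chunk[:commits].size} commits analyzed by size" },
  "feature_based" => ->(chunk) { "#{chunk[:commits].size} commits analyzed by feature" }
}.freeze

def analyze_chunk(chunk)
  results = {start_time: Time.now, status: "running", data: nil}

  analyzer = ANALYZERS[chunk[:strategy]]
  results[:data] = analyzer.call(chunk) if analyzer

  results[:status] = "completed"
  results[:end_time] = Time.now
  results[:duration] = results[:end_time] - results[:start_time]
  results
end

puts analyze_chunk({strategy: "commit_count", commits: %w[a1b2c3 d4e5f6]}).inspect
```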
data/lib/aidp/analyze/steps.rb
CHANGED
@@ -40,6 +40,12 @@ module Aidp
       "outs" => ["docs/analysis/static_analysis.md"],
       "gate" => false
     },
+    "06A_TREE_SITTER_SCAN" => {
+      "templates" => ["06a_tree_sitter_scan.md"],
+      "description" => "Tree-sitter powered static analysis to build knowledge base",
+      "outs" => [".aidp/kb/symbols.json", ".aidp/kb/seams.json", ".aidp/kb/hotspots.json"],
+      "gate" => false
+    },
     "07_REFACTORING_RECOMMENDATIONS" => {
       "templates" => ["07_refactoring_recommendations.md"],
       "description" => "Provide actionable refactoring guidance",
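The new `06A_TREE_SITTER_SCAN` entry is plain data: a template, a description, the knowledge-base files it writes under `.aidp/kb/`, and no gate. A hypothetical lookup over an entry shaped like the one above — the constant and helper names are illustrative, not aidp's actual API:

```ruby
# Illustrative only: a registry entry shaped like the hunk above.
# The constant name and the helper are hypothetical, not aidp's actual API.
ANALYZE_STEPS = {
  "06A_TREE_SITTER_SCAN" => {
    "templates" => ["06a_tree_sitter_scan.md"],
    "description" => "Tree-sitter powered static analysis to build knowledge base",
    "outs" => [".aidp/kb/symbols.json", ".aidp/kb/seams.json", ".aidp/kb/hotspots.json"],
    "gate" => false
  }
}.freeze

def step_summary(name)
  spec = ANALYZE_STEPS.fetch(name)
  "#{name}: #{spec["description"]} " \
    "(template: #{spec["templates"].join(", ")}; outputs: #{spec["outs"].join(", ")}; " \
    "gated: #{spec["gate"]})"
end

puts step_summary("06A_TREE_SITTER_SCAN")
```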
data/lib/aidp/analyze/storage.rb
CHANGED

data/lib/aidp/analyze/tool_configuration.rb
CHANGED
@@ -251,32 +251,27 @@ module Aidp
     def import_config(file_path, scope = :project)
       return false unless File.exist?(file_path)

-
-
-
-
-
-
-
-
-
-      end
-
-      case scope
-      when :project
-        @project_config = config_data
-        save_project_config
-      when :user
-        @user_config = config_data
-        save_user_config
-      else
-        raise ArgumentError, "Invalid scope: #{scope}"
+      config_data = case File.extname(file_path)
+      when ".yml", ".yaml"
+        YAML.load_file(file_path)
+      when ".json"
+        JSON.parse(File.read(file_path))
+      else
+        raise ArgumentError, "Unsupported file format: #{File.extname(file_path)}"
+      end

-
-
-
-
+      case scope
+      when :project
+        @project_config = config_data
+        save_project_config
+      when :user
+        @user_config = config_data
+        save_user_config
+      else
+        raise ArgumentError, "Invalid scope: #{scope}"
       end
+
+      true
     end

     # Validate configuration

@@ -297,24 +292,14 @@ module Aidp
     def load_user_config
       return {} unless File.exist?(USER_CONFIG_FILE)

-
-        YAML.load_file(USER_CONFIG_FILE) || {}
-      rescue => e
-        warn "Failed to load user config: #{e.message}"
-        {}
-      end
+      YAML.load_file(USER_CONFIG_FILE) || {}
     end

     def load_project_config
       config_path = project_config_path
       return {} unless File.exist?(config_path)

-
-        YAML.load_file(config_path) || {}
-      rescue => e
-        warn "Failed to load project config: #{e.message}"
-        {}
-      end
+      YAML.load_file(config_path) || {}
     end

     def project_config_path
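The import path added earlier in this file dispatches on the file extension before touching the config at all, so unsupported formats fail fast. A stdlib-only sketch of that dispatch:

```ruby
require "yaml"
require "json"
require "tempfile"

# Pick a parser from the file extension; anything else raises immediately.
def read_config(file_path)
  case File.extname(file_path)
  when ".yml", ".yaml"
    YAML.load_file(file_path)
  when ".json"
    JSON.parse(File.read(file_path))
  else
    raise ArgumentError, "Unsupported file format: #{File.extname(file_path)}"
  end
end

Tempfile.create(["tools", ".json"]) do |f|
  f.write({"static_analysis" => {"tool" => "rubocop"}}.to_json)
  f.flush
  puts read_config(f.path).inspect   # => {"static_analysis"=>{"tool"=>"rubocop"}}
end
```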

data/lib/aidp/cli/jobs_command.rb
CHANGED
@@ -87,9 +87,6 @@ module Aidp
     rescue Timeout::Error
       @io.puts "Database connection timed out"
       raise
-    rescue => e
-      @io.puts "Error connecting to database: #{e.message}"
-      raise
     end

     def render_job_list
@@ -396,8 +393,6 @@ module Aidp
       else
         "pending"
       end
-    rescue => e
-      "error (#{e.message})"
     end

     def truncate_error(error)
@@ -426,8 +421,12 @@ module Aidp
         data = JSON.parse(result["data"])
         output << "Result: #{data["output"]}" if data["output"]
       end
-    rescue
-      #
+    rescue Sequel::DatabaseError, PG::Error => e
+      # Database error - table might not exist
+      @io.puts "Warning: Could not fetch job result: #{e.message}" if ENV["AIDP_DEBUG"]
+    rescue JSON::ParserError => e
+      # JSON parse error
+      @io.puts "Warning: Could not parse job result data: #{e.message}" if ENV["AIDP_DEBUG"]
     end

     # 2. Check for any recent log entries
@@ -440,8 +439,9 @@ module Aidp
       if logs && logs["last_error_message"]
         output << "Error: #{logs["last_error_message"]}"
       end
-    rescue
-      #
+    rescue Sequel::DatabaseError, PG::Error => e
+      # Database error fetching logs - continue with diagnostic
+      @io.puts "Warning: Could not fetch job logs: #{e.message}" if ENV["AIDP_DEBUG"]
     end

     # 3. Check if job appears to be hung
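Both diagnostics now rescue specific error classes (`Sequel::DatabaseError`, `PG::Error`, `JSON::ParserError`) and only print when `AIDP_DEBUG` is set. The sketch below keeps the same shape using only the stdlib `JSON::ParserError`; the database error classes come from the sequel and pg gems and are left out so the snippet runs anywhere:

```ruby
require "json"

# Rescue a specific error class and surface the warning only when AIDP_DEBUG is
# set, so normal command output stays quiet.
def describe_job_result(raw)
  data = JSON.parse(raw)
  data["output"] ? "Result: #{data["output"]}" : "Result: (empty)"
rescue JSON::ParserError => e
  warn "Warning: Could not parse job result data: #{e.message}" if ENV["AIDP_DEBUG"]
  "Result: (unavailable)"
end

puts describe_job_result('{"output": "42 files analyzed"}')
puts describe_job_result("not json")   # silent fallback unless AIDP_DEBUG=1
```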
data/lib/aidp/cli.rb
CHANGED
@@ -196,6 +196,62 @@ module Aidp
       command.run
     end

+    desc "analyze code", "Run Tree-sitter static analysis to build knowledge base"
+    option :langs, type: :string, desc: "Comma-separated list of languages to analyze (default: ruby)"
+    option :threads, type: :numeric, desc: "Number of threads for parallel processing (default: CPU count)"
+    option :rebuild, type: :boolean, desc: "Rebuild knowledge base from scratch"
+    option :kb_dir, type: :string, desc: "Knowledge base directory (default: .aidp/kb)"
+    def analyze_code
+      require_relative "analysis/tree_sitter_scan"
+
+      langs = options[:langs] ? options[:langs].split(",").map(&:strip) : %w[ruby]
+      threads = options[:threads] || Etc.nprocessors
+      kb_dir = options[:kb_dir] || ".aidp/kb"
+
+      if options[:rebuild]
+        kb_path = File.expand_path(kb_dir, Dir.pwd)
+        FileUtils.rm_rf(kb_path) if File.exist?(kb_path)
+        puts "🗑️ Rebuilt knowledge base directory"
+      end
+
+      scanner = Aidp::Analysis::TreeSitterScan.new(
+        root: Dir.pwd,
+        kb_dir: kb_dir,
+        langs: langs,
+        threads: threads
+      )
+
+      scanner.run
+    end
+
+    desc "kb show [TYPE]", "Show knowledge base contents"
+    option :format, type: :string, desc: "Output format (json, table, summary)"
+    option :kb_dir, type: :string, desc: "Knowledge base directory (default: .aidp/kb)"
+    def kb_show(type = "summary")
+      require_relative "analysis/kb_inspector"
+
+      kb_dir = options[:kb_dir] || ".aidp/kb"
+      format = options[:format] || "summary"
+
+      inspector = Aidp::Analysis::KBInspector.new(kb_dir)
+      inspector.show(type, format: format)
+    end
+
+    desc "kb graph [TYPE]", "Generate graph visualization from knowledge base"
+    option :format, type: :string, desc: "Graph format (dot, json, mermaid)"
+    option :output, type: :string, desc: "Output file path"
+    option :kb_dir, type: :string, desc: "Knowledge base directory (default: .aidp/kb)"
+    def kb_graph(type = "imports")
+      require_relative "analysis/kb_inspector"
+
+      kb_dir = options[:kb_dir] || ".aidp/kb"
+      format = options[:format] || "dot"
+      output = options[:output]
+
+      inspector = Aidp::Analysis::KBInspector.new(kb_dir)
+      inspector.generate_graph(type, format: format, output: output)
+    end
+
     desc "version", "Show version information"
     def version
       puts "Aidp version #{Aidp::VERSION}"
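The three new commands are thin wrappers over `Aidp::Analysis::TreeSitterScan` and `Aidp::Analysis::KBInspector`. A programmatic sketch that mirrors the command bodies above, assuming aidp >= 0.7.0 is installed, the require paths follow the file layout in the listing, and the script runs from a project root:

```ruby
# Mirrors the new CLI commands; constructor keywords and method calls are taken
# from the command bodies above. Assumes aidp >= 0.7.0 and a project checkout.
require "aidp"
require "aidp/analysis/tree_sitter_scan"
require "aidp/analysis/kb_inspector"

scanner = Aidp::Analysis::TreeSitterScan.new(
  root: Dir.pwd,
  kb_dir: ".aidp/kb",
  langs: %w[ruby],
  threads: 4
)
scanner.run   # populates .aidp/kb (symbols.json, seams.json, hotspots.json, ...)

inspector = Aidp::Analysis::KBInspector.new(".aidp/kb")
inspector.show("summary", format: "table")
inspector.generate_graph("imports", format: "dot", output: "imports.dot")
```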
data/lib/aidp/jobs/base_job.rb
CHANGED

data/lib/aidp/jobs/provider_execution_job.rb
CHANGED
@@ -18,32 +18,19 @@ module Aidp
       provider = Aidp::ProviderManager.get_provider(provider_type)
       raise "Provider #{provider_type} not available" unless provider

-
-
-      result = provider.send(prompt: prompt, session: session)
+      # Execute provider
+      result = provider.send(prompt: prompt, session: session)

-
-
+      # Store result
+      store_result(result, metadata)

-
-
-
-
-
-
-
-    rescue => error
-      # Record metrics
-      record_metrics(
-        provider_type: provider_type,
-        duration: Time.now - start_time,
-        success: false,
-        error: error.message
-      )
-
-      # Re-raise error to trigger Que's retry mechanism
-      raise
-    end
+      # Record metrics
+      record_metrics(
+        provider_type: provider_type,
+        duration: Time.now - start_time,
+        success: true,
+        error: nil
+      )
     end

     private
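With the rescue gone, a failing provider call simply raises out of the job and the queue's retry policy takes over (the removed comment notes this was already the intent with Que); metrics are now recorded only for successful runs. A standalone sketch of that success-path flow, without Que:

```ruby
# Standalone sketch of the success-only metrics flow. A provider failure raises
# out of #run so the surrounding job queue (Que, in aidp) can retry it.
class ProviderRun
  Metric = Struct.new(:provider_type, :duration, :success, :error, keyword_init: true)

  attr_reader :recorded

  def initialize(&provider_call)
    @provider_call = provider_call
    @recorded = []
  end

  def run(provider_type, prompt)
    start_time = Time.now
    result = @provider_call.call(prompt)   # exceptions propagate to the caller

    @recorded << Metric.new(
      provider_type: provider_type,
      duration: Time.now - start_time,
      success: true,
      error: nil
    )
    result
  end
end

job = ProviderRun.new { |prompt| "echo: #{prompt}" }
puts job.run("anthropic", "summarize the diff")
puts job.recorded.first.to_h.inspect
```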

data/lib/aidp/providers/anthropic.rb
CHANGED
@@ -57,8 +57,6 @@ module Aidp

         # Stop checking if the process is done
         break if wait.value
-      rescue
-        break
       end
     end

@@ -88,8 +86,8 @@ module Aidp
       # Kill the process if it's taking too long
       begin
         Process.kill("TERM", wait.pid)
-      rescue
-
+      rescue Errno::ESRCH
+        # Process already terminated
       end

       mark_failed("claude timed out after #{timeout_seconds} seconds")
@@ -101,8 +99,8 @@ module Aidp
       # Kill the process
       begin
         Process.kill("TERM", wait.pid)
-      rescue
-
+      rescue Errno::ESRCH
+        # Process already terminated
       end

       mark_failed("claude execution was interrupted")

@@ -143,21 +141,17 @@ module Aidp

     def get_adaptive_timeout
       # Try to get timeout recommendations from metrics storage
-
-
-
-
-
-
-
-
-
-
-
-          return (recommended * 1.2).ceil
-        end
-      rescue => e
-        puts "⚠️ Could not get adaptive timeout: #{e.message}" if ENV["AIDP_DEBUG"]
+      require_relative "../analyze/metrics_storage"
+      storage = Aidp::Analyze::MetricsStorage.new(Dir.pwd)
+      recommendations = storage.calculate_timeout_recommendations
+
+      # Get current step name from environment or context
+      step_name = ENV["AIDP_CURRENT_STEP"] || "unknown"
+
+      if recommendations[step_name]
+        recommended = recommendations[step_name][:recommended_timeout]
+        # Add 20% buffer for safety
+        return (recommended * 1.2).ceil
       end

       # Fallback timeouts based on step type patterns
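The adaptive path looks up a per-step recommendation from recorded metrics and pads it by 20% before falling back to pattern-based defaults. A sketch with the metrics lookup stubbed as a plain hash (the real values come from `Aidp::Analyze::MetricsStorage`):

```ruby
# The recommendations hash stands in for
# Aidp::Analyze::MetricsStorage#calculate_timeout_recommendations.
FALLBACK_TIMEOUT = 300 # seconds; illustrative default, not aidp's actual fallback table

def adaptive_timeout(recommendations, step_name)
  if (rec = recommendations[step_name])
    # Add a 20% buffer on top of the recommended timeout
    (rec[:recommended_timeout] * 1.2).ceil
  else
    FALLBACK_TIMEOUT
  end
end

recommendations = {"06A_TREE_SITTER_SCAN" => {recommended_timeout: 240}}
puts adaptive_timeout(recommendations, "06A_TREE_SITTER_SCAN")                  # => 288
puts adaptive_timeout(recommendations, ENV["AIDP_CURRENT_STEP"] || "unknown")   # => 300 (fallback)
```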

@@ -121,8 +121,6 @@ module Aidp

         # Stop checking if the process is done
         break if wait.value
-      rescue
-        break
       end
     end

@@ -135,7 +133,7 @@ module Aidp
         Process.kill("TERM", wait.pid)
         sleep 2
         Process.kill("KILL", wait.pid) if wait.value.nil?
-      rescue
+      rescue Errno::ESRCH
         # Process already terminated
       end
     end
@@ -156,7 +154,7 @@ module Aidp
         Process.kill("TERM", wait.pid)
         sleep 1
         Process.kill("KILL", wait.pid) if wait.value.nil?
-      rescue
+      rescue Errno::ESRCH
        # Process already terminated
       end

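Narrowing these rescues to `Errno::ESRCH` keeps one specific race harmless: the child exiting between the TERM and the follow-up KILL. A runnable Unix-only sketch of that escalation, using the system `sleep` binary in place of a provider CLI and a plain `Process.kill` in place of the waiter thread's `wait.value` check:

```ruby
# TERM -> KILL escalation with the narrowed rescue. Unix-only; `sleep` stands in
# for a provider CLI, and the liveness handling is simplified versus the diff.
pid = Process.spawn("sleep", "60")

begin
  Process.kill("TERM", pid)
  sleep 1
  Process.kill("KILL", pid)   # raises Errno::ESRCH if the child is already gone
rescue Errno::ESRCH
  # Process already terminated between the two signals - nothing to do
end

Process.wait(pid)
puts "child #{pid} reaped"
```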