aidp 0.7.0 → 0.8.1

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (119)
  1. checksums.yaml +4 -4
  2. data/README.md +60 -214
  3. data/bin/aidp +1 -1
  4. data/lib/aidp/analysis/kb_inspector.rb +38 -23
  5. data/lib/aidp/analysis/seams.rb +2 -31
  6. data/lib/aidp/analysis/tree_sitter_grammar_loader.rb +1 -13
  7. data/lib/aidp/analysis/tree_sitter_scan.rb +3 -20
  8. data/lib/aidp/analyze/error_handler.rb +2 -75
  9. data/lib/aidp/analyze/json_file_storage.rb +292 -0
  10. data/lib/aidp/analyze/progress.rb +12 -0
  11. data/lib/aidp/analyze/progress_visualizer.rb +12 -17
  12. data/lib/aidp/analyze/ruby_maat_integration.rb +13 -31
  13. data/lib/aidp/analyze/runner.rb +256 -87
  14. data/lib/aidp/cli/jobs_command.rb +100 -432
  15. data/lib/aidp/cli.rb +309 -239
  16. data/lib/aidp/config.rb +298 -10
  17. data/lib/aidp/debug_logger.rb +195 -0
  18. data/lib/aidp/debug_mixin.rb +187 -0
  19. data/lib/aidp/execute/progress.rb +9 -0
  20. data/lib/aidp/execute/runner.rb +221 -40
  21. data/lib/aidp/execute/steps.rb +17 -7
  22. data/lib/aidp/execute/workflow_selector.rb +211 -0
  23. data/lib/aidp/harness/completion_checker.rb +268 -0
  24. data/lib/aidp/harness/condition_detector.rb +1526 -0
  25. data/lib/aidp/harness/config_loader.rb +373 -0
  26. data/lib/aidp/harness/config_manager.rb +382 -0
  27. data/lib/aidp/harness/config_schema.rb +1006 -0
  28. data/lib/aidp/harness/config_validator.rb +355 -0
  29. data/lib/aidp/harness/configuration.rb +477 -0
  30. data/lib/aidp/harness/enhanced_runner.rb +494 -0
  31. data/lib/aidp/harness/error_handler.rb +616 -0
  32. data/lib/aidp/harness/provider_config.rb +423 -0
  33. data/lib/aidp/harness/provider_factory.rb +306 -0
  34. data/lib/aidp/harness/provider_manager.rb +1269 -0
  35. data/lib/aidp/harness/provider_type_checker.rb +88 -0
  36. data/lib/aidp/harness/runner.rb +411 -0
  37. data/lib/aidp/harness/state/errors.rb +28 -0
  38. data/lib/aidp/harness/state/metrics.rb +219 -0
  39. data/lib/aidp/harness/state/persistence.rb +128 -0
  40. data/lib/aidp/harness/state/provider_state.rb +132 -0
  41. data/lib/aidp/harness/state/ui_state.rb +68 -0
  42. data/lib/aidp/harness/state/workflow_state.rb +123 -0
  43. data/lib/aidp/harness/state_manager.rb +586 -0
  44. data/lib/aidp/harness/status_display.rb +888 -0
  45. data/lib/aidp/harness/ui/base.rb +16 -0
  46. data/lib/aidp/harness/ui/enhanced_tui.rb +545 -0
  47. data/lib/aidp/harness/ui/enhanced_workflow_selector.rb +252 -0
  48. data/lib/aidp/harness/ui/error_handler.rb +132 -0
  49. data/lib/aidp/harness/ui/frame_manager.rb +361 -0
  50. data/lib/aidp/harness/ui/job_monitor.rb +500 -0
  51. data/lib/aidp/harness/ui/navigation/main_menu.rb +311 -0
  52. data/lib/aidp/harness/ui/navigation/menu_formatter.rb +120 -0
  53. data/lib/aidp/harness/ui/navigation/menu_item.rb +142 -0
  54. data/lib/aidp/harness/ui/navigation/menu_state.rb +139 -0
  55. data/lib/aidp/harness/ui/navigation/submenu.rb +202 -0
  56. data/lib/aidp/harness/ui/navigation/workflow_selector.rb +176 -0
  57. data/lib/aidp/harness/ui/progress_display.rb +280 -0
  58. data/lib/aidp/harness/ui/question_collector.rb +141 -0
  59. data/lib/aidp/harness/ui/spinner_group.rb +184 -0
  60. data/lib/aidp/harness/ui/spinner_helper.rb +152 -0
  61. data/lib/aidp/harness/ui/status_manager.rb +312 -0
  62. data/lib/aidp/harness/ui/status_widget.rb +280 -0
  63. data/lib/aidp/harness/ui/workflow_controller.rb +312 -0
  64. data/lib/aidp/harness/user_interface.rb +2381 -0
  65. data/lib/aidp/provider_manager.rb +131 -7
  66. data/lib/aidp/providers/anthropic.rb +28 -103
  67. data/lib/aidp/providers/base.rb +170 -0
  68. data/lib/aidp/providers/cursor.rb +52 -181
  69. data/lib/aidp/providers/gemini.rb +24 -107
  70. data/lib/aidp/providers/macos_ui.rb +99 -5
  71. data/lib/aidp/providers/opencode.rb +194 -0
  72. data/lib/aidp/storage/csv_storage.rb +172 -0
  73. data/lib/aidp/storage/file_manager.rb +214 -0
  74. data/lib/aidp/storage/json_storage.rb +140 -0
  75. data/lib/aidp/version.rb +1 -1
  76. data/lib/aidp.rb +54 -39
  77. data/templates/COMMON/AGENT_BASE.md +11 -0
  78. data/templates/EXECUTE/00_PRD.md +4 -4
  79. data/templates/EXECUTE/02_ARCHITECTURE.md +5 -4
  80. data/templates/EXECUTE/07_TEST_PLAN.md +4 -1
  81. data/templates/EXECUTE/08_TASKS.md +4 -4
  82. data/templates/EXECUTE/10_IMPLEMENTATION_AGENT.md +4 -4
  83. data/templates/README.md +279 -0
  84. data/templates/aidp-development.yml.example +373 -0
  85. data/templates/aidp-minimal.yml.example +48 -0
  86. data/templates/aidp-production.yml.example +475 -0
  87. data/templates/aidp.yml.example +598 -0
  88. metadata +93 -69
  89. data/lib/aidp/analyze/agent_personas.rb +0 -71
  90. data/lib/aidp/analyze/agent_tool_executor.rb +0 -439
  91. data/lib/aidp/analyze/data_retention_manager.rb +0 -421
  92. data/lib/aidp/analyze/database.rb +0 -260
  93. data/lib/aidp/analyze/dependencies.rb +0 -335
  94. data/lib/aidp/analyze/export_manager.rb +0 -418
  95. data/lib/aidp/analyze/focus_guidance.rb +0 -517
  96. data/lib/aidp/analyze/incremental_analyzer.rb +0 -533
  97. data/lib/aidp/analyze/language_analysis_strategies.rb +0 -897
  98. data/lib/aidp/analyze/large_analysis_progress.rb +0 -499
  99. data/lib/aidp/analyze/memory_manager.rb +0 -339
  100. data/lib/aidp/analyze/metrics_storage.rb +0 -336
  101. data/lib/aidp/analyze/parallel_processor.rb +0 -454
  102. data/lib/aidp/analyze/performance_optimizer.rb +0 -691
  103. data/lib/aidp/analyze/repository_chunker.rb +0 -697
  104. data/lib/aidp/analyze/static_analysis_detector.rb +0 -577
  105. data/lib/aidp/analyze/storage.rb +0 -655
  106. data/lib/aidp/analyze/tool_configuration.rb +0 -441
  107. data/lib/aidp/analyze/tool_modernization.rb +0 -750
  108. data/lib/aidp/database/pg_adapter.rb +0 -148
  109. data/lib/aidp/database_config.rb +0 -69
  110. data/lib/aidp/database_connection.rb +0 -72
  111. data/lib/aidp/job_manager.rb +0 -41
  112. data/lib/aidp/jobs/base_job.rb +0 -45
  113. data/lib/aidp/jobs/provider_execution_job.rb +0 -83
  114. data/lib/aidp/project_detector.rb +0 -117
  115. data/lib/aidp/providers/agent_supervisor.rb +0 -348
  116. data/lib/aidp/providers/supervised_base.rb +0 -317
  117. data/lib/aidp/providers/supervised_cursor.rb +0 -22
  118. data/lib/aidp/sync.rb +0 -13
  119. data/lib/aidp/workspace.rb +0 -19
data/lib/aidp/analyze/performance_optimizer.rb
@@ -1,691 +0,0 @@
- # frozen_string_literal: true
-
- require "concurrent"
- require "digest"
- require "json"
-
- module Aidp
-   # Performance optimization system for large codebases
-   class PerformanceOptimizer
-     attr_reader :cache, :memory_manager, :parallel_executor, :config
-
-     def initialize(project_dir, config = {})
-       @project_dir = project_dir
-       @config = DEFAULT_CONFIG.merge(config)
-       @cache = setup_cache
-       @memory_manager = MemoryManager.new(@config[:memory])
-       @parallel_executor = setup_parallel_executor
-       @performance_metrics = {}
-     end
-
-     # Optimize analysis performance for large codebases
-     def optimize_analysis(analysis_type, data, options = {})
-       start_time = Time.current
-
-       # Check cache first
-       cache_key = generate_cache_key(analysis_type, data)
-       cached_result = @cache.get(cache_key)
-
-       if cached_result && !options[:force_refresh]
-         log_performance_metric(analysis_type, "cache_hit", Time.current - start_time)
-         return cached_result
-       end
-
-       # Apply optimization strategies
-       optimized_data = apply_optimization_strategies(analysis_type, data, options)
-
-       # Execute analysis with optimizations
-       result = execute_optimized_analysis(analysis_type, optimized_data, options)
-
-       # Cache result
-       @cache.set(cache_key, result, @config[:cache_ttl])
-
-       # Record performance metrics
-       duration = Time.current - start_time
-       log_performance_metric(analysis_type, "analysis", duration)
-
-       result
-     end
-
-     # Optimize file processing for large repositories
-     def optimize_file_processing(files, processor, options = {})
-       return process_files_sequentially(files, processor) if files.length < @config[:parallel_threshold]
-
-       # Determine optimal chunk size
-       chunk_size = calculate_optimal_chunk_size(files.length)
-
-       # Split files into chunks
-       chunks = files.each_slice(chunk_size).to_a
-
-       # Process chunks in parallel
-       results = process_chunks_parallel(chunks, processor, options)
-
-       # Merge results
-       merge_parallel_results(results)
-     end
-
-     # Optimize database operations
-     def optimize_database_operations(operations, options = {})
-       return execute_operations_sequentially(operations) if operations.length < @config[:batch_threshold]
-
-       # Group operations by type
-       grouped_operations = group_operations_by_type(operations)
-
-       # Execute in batches
-       results = execute_batched_operations(grouped_operations, options)
-
-       # Merge results
-       merge_database_results(results)
-     end
-
-     # Optimize memory usage
-     def optimize_memory_usage(operation, options = {})
-       @memory_manager.process_large_dataset(operation, options)
-     end
-
-     # Get performance statistics
-     def get_performance_statistics
-       {
-         cache_stats: @cache.statistics,
-         memory_stats: @memory_manager.get_memory_statistics,
-         parallel_stats: @parallel_executor.statistics,
-         analysis_metrics: @performance_metrics,
-         recommendations: generate_performance_recommendations
-       }
-     end
-
-     # Clear cache and reset metrics
-     def clear_cache
-       @cache.clear
-       @performance_metrics.clear
-     end
-
-     private
-
-     DEFAULT_CONFIG = {
-       cache_ttl: 3600, # 1 hour
-       parallel_threshold: 50,
-       batch_threshold: 100,
-       memory: {
-         max_memory: 1024 * 1024 * 1024, # 1GB
-         chunk_size: 1000,
-         gc_threshold: 0.8
-       },
-       parallel: {
-         max_workers: Concurrent.processor_count,
-         timeout: 300,
-         retry_attempts: 2
-       }
-     }.freeze
-
-     def setup_cache
-       CacheManager.new(
-         max_size: @config[:cache_size] || 1000,
-         ttl: @config[:cache_ttl]
-       )
-     end
-
-     def setup_parallel_executor
-       ParallelExecutor.new(
-         max_workers: @config[:parallel][:max_workers],
-         timeout: @config[:parallel][:timeout]
-       )
-     end
-
-     def generate_cache_key(analysis_type, data)
-       content = "#{analysis_type}:#{data.hash}:#{File.mtime(@project_dir).to_i}"
-       Digest::MD5.hexdigest(content)
-     end
-
-     def apply_optimization_strategies(analysis_type, data, options)
-       case analysis_type
-       when "repository_analysis"
-         optimize_repository_analysis(data, options)
-       when "architecture_analysis"
-         optimize_architecture_analysis(data, options)
-       when "static_analysis"
-         optimize_static_analysis(data, options)
-       else
-         data
-       end
-     end
-
-     def optimize_repository_analysis(data, options)
-       # Optimize Git log processing
-       if data[:git_log] && data[:git_log].length > @config[:parallel_threshold]
-         data[:git_log] = chunk_git_log(data[:git_log])
-       end
-
-       # Optimize file analysis
-       if data[:files] && data[:files].length > @config[:parallel_threshold]
-         data[:files] = chunk_files_for_analysis(data[:files])
-       end
-
-       data
-     end
-
-     def optimize_architecture_analysis(data, options)
-       # Optimize dependency analysis
-       if data[:dependencies] && data[:dependencies].length > @config[:parallel_threshold]
-         data[:dependencies] = chunk_dependencies(data[:dependencies])
-       end
-
-       # Optimize pattern detection
-       if data[:patterns] && data[:patterns].length > @config[:parallel_threshold]
-         data[:patterns] = chunk_patterns(data[:patterns])
-       end
-
-       data
-     end
-
-     def optimize_static_analysis(data, options)
-       # Optimize tool execution
-       data[:tools] = group_tools_for_parallel_execution(data[:tools]) if data[:tools] && data[:tools].length > 1
-
-       # Optimize file processing
-       if data[:files] && data[:files].length > @config[:parallel_threshold]
-         data[:files] = chunk_files_for_static_analysis(data[:files])
-       end
-
-       data
-     end
-
-     def execute_optimized_analysis(analysis_type, data, options)
-       case analysis_type
-       when "repository_analysis"
-         execute_repository_analysis_optimized(data, options)
-       when "architecture_analysis"
-         execute_architecture_analysis_optimized(data, options)
-       when "static_analysis"
-         execute_static_analysis_optimized(data, options)
-       else
-         execute_generic_analysis(data, options)
-       end
-     end
-
-     def execute_repository_analysis_optimized(data, options)
-       results = []
-
-       # Process Git log chunks in parallel
-       if data[:git_log_chunks]
-         git_results = @parallel_executor.execute(
-           data[:git_log_chunks],
-           method(:process_git_log_chunk)
-         )
-         results.concat(git_results)
-       end
-
-       # Process file chunks in parallel
-       if data[:file_chunks]
-         file_results = @parallel_executor.execute(
-           data[:file_chunks],
-           method(:process_file_chunk)
-         )
-         results.concat(file_results)
-       end
-
-       # Merge results
-       merge_repository_analysis_results(results)
-     end
-
-     def execute_architecture_analysis_optimized(data, options)
-       results = []
-
-       # Process dependency chunks in parallel
-       if data[:dependency_chunks]
-         dep_results = @parallel_executor.execute(
-           data[:dependency_chunks],
-           method(:process_dependency_chunk)
-         )
-         results.concat(dep_results)
-       end
-
-       # Process pattern chunks in parallel
-       if data[:pattern_chunks]
-         pattern_results = @parallel_executor.execute(
-           data[:pattern_chunks],
-           method(:process_pattern_chunk)
-         )
-         results.concat(pattern_results)
-       end
-
-       # Merge results
-       merge_architecture_analysis_results(results)
-     end
-
-     def execute_static_analysis_optimized(data, options)
-       results = []
-
-       # Execute tools in parallel
-       if data[:tool_groups]
-         tool_results = @parallel_executor.execute(
-           data[:tool_groups],
-           method(:execute_tool_group)
-         )
-         results.concat(tool_results)
-       end
-
-       # Process file chunks in parallel
-       if data[:file_chunks]
-         file_results = @parallel_executor.execute(
-           data[:file_chunks],
-           method(:process_static_analysis_chunk)
-         )
-         results.concat(file_results)
-       end
-
-       # Merge results
-       merge_static_analysis_results(results)
-     end
-
-     def execute_generic_analysis(data, options)
-       # Generic optimization for unknown analysis types
-       if data.length > @config[:parallel_threshold]
-         chunks = data.each_slice(@config[:parallel_threshold]).to_a
-         results = @parallel_executor.execute(chunks, method(:process_generic_chunk))
-         merge_generic_results(results)
-       else
-         process_generic_data(data)
-       end
-     end
-
-     def process_files_sequentially(files, processor)
-       files.map { |file| processor.call(file) }
-     end
-
-     def calculate_optimal_chunk_size(total_files)
-       workers = @config[:parallel][:max_workers]
-       optimal_size = (total_files.to_f / workers).ceil
-       [optimal_size, @config[:memory][:chunk_size]].min
-     end
-
-     def process_chunks_parallel(chunks, processor, options)
-       @parallel_executor.execute(chunks) do |chunk|
-         chunk.map { |item| processor.call(item) }
-       end
-     end
-
-     def merge_parallel_results(results)
-       results.flatten.compact
-     end
-
-     def execute_operations_sequentially(operations)
-       operations.map { |op| execute_database_operation(op) }
-     end
-
-     def group_operations_by_type(operations)
-       operations.group_by { |op| op[:type] }
-     end
-
-     def execute_batched_operations(grouped_operations, options)
-       results = {}
-
-       grouped_operations.each do |type, ops|
-         batches = ops.each_slice(@config[:batch_threshold]).to_a
-         batch_results = @parallel_executor.execute(batches) do |batch|
-           execute_batch_operation(type, batch)
-         end
-         results[type] = batch_results.flatten
-       end
-
-       results
-     end
-
-     def merge_database_results(results)
-       results.values.flatten
-     end
-
-     def chunk_git_log(git_log)
-       chunk_size = calculate_optimal_chunk_size(git_log.length)
-       git_log.each_slice(chunk_size).to_a
-     end
-
-     def chunk_files_for_analysis(files)
-       chunk_size = calculate_optimal_chunk_size(files.length)
-       files.each_slice(chunk_size).to_a
-     end
-
-     def chunk_dependencies(dependencies)
-       chunk_size = calculate_optimal_chunk_size(dependencies.length)
-       dependencies.each_slice(chunk_size).to_a
-     end
-
-     def chunk_patterns(patterns)
-       chunk_size = calculate_optimal_chunk_size(patterns.length)
-       patterns.each_slice(chunk_size).to_a
-     end
-
-     def group_tools_for_parallel_execution(tools)
-       # Group tools that can run in parallel
-       groups = []
-       current_group = []
-
-       tools.each do |tool|
-         if can_run_in_parallel?(current_group, tool)
-           current_group << tool
-         else
-           groups << current_group unless current_group.empty?
-           current_group = [tool]
-         end
-       end
-
-       groups << current_group unless current_group.empty?
-       groups
-     end
-
-     def chunk_files_for_static_analysis(files)
-       chunk_size = calculate_optimal_chunk_size(files.length)
-       files.each_slice(chunk_size).to_a
-     end
-
-     def can_run_in_parallel?(current_group, tool)
-       # Check if tool can run in parallel with current group
-       # This is a simplified check - in practice, you'd check for resource conflicts
-       current_group.length < @config[:parallel][:max_workers]
-     end
-
-     # Processing methods for parallel execution
-     def process_git_log_chunk(chunk)
-       # Process a chunk of Git log entries
-       chunk.map do |entry|
-         {
-           commit: entry[:hash],
-           author: entry[:author],
-           date: entry[:date],
-           files: entry[:files]
-         }
-       end
-     end
-
-     def process_file_chunk(chunk)
-       # Process a chunk of files
-       chunk.map do |file|
-         {
-           path: file[:path],
-           size: File.size(file[:path]),
-           modified: File.mtime(file[:path])
-         }
-       end
-     end
-
-     def process_dependency_chunk(chunk)
-       # Process a chunk of dependencies
-       chunk.map do |dep|
-         {
-           source: dep[:source],
-           target: dep[:target],
-           type: dep[:type]
-         }
-       end
-     end
-
-     def process_pattern_chunk(chunk)
-       # Process a chunk of patterns
-       chunk.map do |pattern|
-         {
-           name: pattern[:name],
-           files: pattern[:files],
-           confidence: pattern[:confidence]
-         }
-       end
-     end
-
-     def execute_tool_group(tool_group)
-       # Execute a group of tools
-       tool_group.map do |tool|
-         {
-           tool: tool[:name],
-           result: execute_single_tool(tool)
-         }
-       end
-     end
-
-     def process_static_analysis_chunk(chunk)
-       # Process a chunk for static analysis
-       chunk.map do |file|
-         {
-           file: file[:path],
-           analysis: analyze_single_file(file)
-         }
-       end
-     end
-
-     def process_generic_chunk(chunk)
-       # Process a generic chunk
-       chunk.map { |item| process_generic_item(item) }
-     end
-
-     # Result merging methods
-     def merge_repository_analysis_results(results)
-       {
-         commits: results.flat_map { |r| r[:commits] || [] },
-         files: results.flat_map { |r| r[:files] || [] },
-         statistics: aggregate_statistics(results.map { |r| r[:statistics] })
-       }
-     end
-
-     def merge_architecture_analysis_results(results)
-       {
-         dependencies: results.flat_map { |r| r[:dependencies] || [] },
-         patterns: results.flat_map { |r| r[:patterns] || [] },
-         components: results.flat_map { |r| r[:components] || [] }
-       }
-     end
-
-     def merge_static_analysis_results(results)
-       {
-         tool_results: results.flat_map { |r| r[:tool_results] || [] },
-         file_analysis: results.flat_map { |r| r[:file_analysis] || [] },
-         issues: results.flat_map { |r| r[:issues] || [] }
-       }
-     end
-
-     def merge_generic_results(results)
-       results.flatten.compact
-     end
-
-     # Database operation methods
-     def execute_database_operation(operation)
-       case operation[:type]
-       when "select"
-         execute_select_operation(operation)
-       when "insert"
-         execute_insert_operation(operation)
-       when "update"
-         execute_update_operation(operation)
-       when "delete"
-         execute_delete_operation(operation)
-       else
-         raise ArgumentError, "Unknown operation type: #{operation[:type]}"
-       end
-     end
-
-     def execute_batch_operation(type, batch)
-       batch.map { |op| execute_database_operation(op) }
-     end
-
-     def execute_select_operation(operation)
-       # Execute SELECT operation
-       {type: "select", result: "mock_result"}
-     end
-
-     def execute_insert_operation(operation)
-       # Execute INSERT operation
-       {type: "insert", result: "mock_result"}
-     end
-
-     def execute_update_operation(operation)
-       # Execute UPDATE operation
-       {type: "update", result: "mock_result"}
-     end
-
-     def execute_delete_operation(operation)
-       # Execute DELETE operation
-       {type: "delete", result: "mock_result"}
-     end
-
-     # Utility methods
-     def execute_single_tool(tool)
-       # Execute a single static analysis tool
-       {tool: tool[:name], status: "completed", issues: []}
-     end
-
-     def analyze_single_file(file)
-       # Analyze a single file
-       {file: file[:path], complexity: 5, issues: []}
-     end
-
-     def process_generic_item(item)
-       # Process a generic item
-       {processed: true, data: item}
-     end
-
-     def aggregate_statistics(statistics_list)
-       # Aggregate statistics from multiple results
-       {
-         total_files: statistics_list.sum { |s| s[:total_files] || 0 },
-         total_commits: statistics_list.sum { |s| s[:total_commits] || 0 },
-         total_lines: statistics_list.sum { |s| s[:total_lines] || 0 }
-       }
-     end
-
-     def log_performance_metric(analysis_type, metric, duration)
-       @performance_metrics[analysis_type] ||= {}
-       @performance_metrics[analysis_type][metric] = duration
-     end
-
-     def generate_performance_recommendations
-       recommendations = []
-
-       # Analyze cache performance
-       cache_stats = @cache.statistics
-       recommendations << "Consider increasing cache size or TTL for better performance" if cache_stats[:hit_rate] < 0.5
-
-       # Analyze memory usage
-       memory_stats = @memory_manager.get_memory_statistics
-       if memory_stats[:usage_percentage] > 80
-         recommendations << "Consider reducing chunk size or implementing streaming for large datasets"
-       end
-
-       # Analyze parallel performance
-       parallel_stats = @parallel_executor.statistics
-       if parallel_stats[:utilization] < 0.7
-         recommendations << "Consider adjusting parallel worker count for better resource utilization"
-       end
-
-       recommendations
-     end
-   end
-
-   # Cache manager for performance optimization
-   class CacheManager
-     def initialize(max_size: 1000, ttl: 3600)
-       @max_size = max_size
-       @ttl = ttl
-       @cache = {}
-       @statistics = {hits: 0, misses: 0, sets: 0}
-     end
-
-     def get(key)
-       entry = @cache[key]
-       return nil unless entry && !expired?(entry)
-
-       @statistics[:hits] += 1
-       entry[:value]
-     end
-
-     def set(key, value, ttl = nil)
-       cleanup_if_needed
-
-       @cache[key] = {
-         value: value,
-         timestamp: Time.current,
-         ttl: ttl || @ttl
-       }
-
-       @statistics[:sets] += 1
-     end
-
-     def clear
-       @cache.clear
-       @statistics = {hits: 0, misses: 0, sets: 0}
-     end
-
-     def statistics
-       total_requests = @statistics[:hits] + @statistics[:misses]
-       hit_rate = (total_requests > 0) ? @statistics[:hits].to_f / total_requests : 0
-
-       {
-         size: @cache.size,
-         max_size: @max_size,
-         hit_rate: hit_rate,
-         hits: @statistics[:hits],
-         misses: @statistics[:misses],
-         sets: @statistics[:sets]
-       }
-     end
-
-     private
-
-     def expired?(entry)
-       Time.current - entry[:timestamp] > entry[:ttl]
-     end
-
-     def cleanup_if_needed
-       return unless @cache.size >= @max_size
-
-       # Remove expired entries first
-       @cache.delete_if { |_, entry| expired?(entry) }
-
-       # If still over limit, remove oldest entries
-       return unless @cache.size >= @max_size
-
-       sorted_entries = @cache.sort_by { |_, entry| entry[:timestamp] }
-       entries_to_remove = @cache.size - @max_size + 1
-       entries_to_remove.times { |i| @cache.delete(sorted_entries[i][0]) }
-     end
-   end
-
-   # Parallel executor for performance optimization
-   class ParallelExecutor
-     def initialize(max_workers: Concurrent.processor_count, timeout: 300)
-       @max_workers = max_workers
-       @timeout = timeout
-       @statistics = {executions: 0, total_time: 0, errors: 0}
-     end
-
-     def execute(items, processor = nil)
-       start_time = Time.current
-       @statistics[:executions] += 1
-
-       processor = method(processor) if processor.is_a?(Symbol) || processor.is_a?(String)
-
-       futures = items.map do |item|
-         Concurrent::Future.execute do
-           processor ? processor.call(item) : item
-         end
-       end
-
-       results = futures.map do |future|
-         future.value(@timeout)
-       end
-
-       @statistics[:total_time] += Time.current - start_time
-       results
-     end
-
-     def statistics
-       avg_time = (@statistics[:executions] > 0) ? @statistics[:total_time] / @statistics[:executions] : 0
-       utilization = (@statistics[:executions] > 0) ? @statistics[:total_time] / (@statistics[:executions] * @timeout) : 0
-
-       {
-         max_workers: @max_workers,
-         executions: @statistics[:executions],
-         total_time: @statistics[:total_time],
-         average_time: avg_time,
-         errors: @statistics[:errors],
-         utilization: utilization
-       }
-     end
-   end
- end