aidp 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79)
  1. checksums.yaml +7 -0
  2. data/LICENSE +21 -0
  3. data/README.md +210 -0
  4. data/bin/aidp +5 -0
  5. data/lib/aidp/analyze/agent_personas.rb +71 -0
  6. data/lib/aidp/analyze/agent_tool_executor.rb +445 -0
  7. data/lib/aidp/analyze/data_retention_manager.rb +426 -0
  8. data/lib/aidp/analyze/database.rb +243 -0
  9. data/lib/aidp/analyze/dependencies.rb +335 -0
  10. data/lib/aidp/analyze/error_handler.rb +486 -0
  11. data/lib/aidp/analyze/export_manager.rb +425 -0
  12. data/lib/aidp/analyze/feature_analyzer.rb +397 -0
  13. data/lib/aidp/analyze/focus_guidance.rb +517 -0
  14. data/lib/aidp/analyze/incremental_analyzer.rb +543 -0
  15. data/lib/aidp/analyze/language_analysis_strategies.rb +897 -0
  16. data/lib/aidp/analyze/large_analysis_progress.rb +504 -0
  17. data/lib/aidp/analyze/memory_manager.rb +365 -0
  18. data/lib/aidp/analyze/parallel_processor.rb +460 -0
  19. data/lib/aidp/analyze/performance_optimizer.rb +694 -0
  20. data/lib/aidp/analyze/prioritizer.rb +402 -0
  21. data/lib/aidp/analyze/progress.rb +75 -0
  22. data/lib/aidp/analyze/progress_visualizer.rb +320 -0
  23. data/lib/aidp/analyze/report_generator.rb +582 -0
  24. data/lib/aidp/analyze/repository_chunker.rb +702 -0
  25. data/lib/aidp/analyze/ruby_maat_integration.rb +572 -0
  26. data/lib/aidp/analyze/runner.rb +245 -0
  27. data/lib/aidp/analyze/static_analysis_detector.rb +577 -0
  28. data/lib/aidp/analyze/steps.rb +53 -0
  29. data/lib/aidp/analyze/storage.rb +600 -0
  30. data/lib/aidp/analyze/tool_configuration.rb +456 -0
  31. data/lib/aidp/analyze/tool_modernization.rb +750 -0
  32. data/lib/aidp/execute/progress.rb +76 -0
  33. data/lib/aidp/execute/runner.rb +135 -0
  34. data/lib/aidp/execute/steps.rb +113 -0
  35. data/lib/aidp/shared/cli.rb +117 -0
  36. data/lib/aidp/shared/config.rb +35 -0
  37. data/lib/aidp/shared/project_detector.rb +119 -0
  38. data/lib/aidp/shared/providers/anthropic.rb +26 -0
  39. data/lib/aidp/shared/providers/base.rb +17 -0
  40. data/lib/aidp/shared/providers/cursor.rb +102 -0
  41. data/lib/aidp/shared/providers/gemini.rb +26 -0
  42. data/lib/aidp/shared/providers/macos_ui.rb +26 -0
  43. data/lib/aidp/shared/sync.rb +15 -0
  44. data/lib/aidp/shared/util.rb +41 -0
  45. data/lib/aidp/shared/version.rb +7 -0
  46. data/lib/aidp/shared/workspace.rb +21 -0
  47. data/lib/aidp.rb +53 -0
  48. data/templates/ANALYZE/01_REPOSITORY_ANALYSIS.md +100 -0
  49. data/templates/ANALYZE/02_ARCHITECTURE_ANALYSIS.md +151 -0
  50. data/templates/ANALYZE/03_TEST_ANALYSIS.md +182 -0
  51. data/templates/ANALYZE/04_FUNCTIONALITY_ANALYSIS.md +200 -0
  52. data/templates/ANALYZE/05_DOCUMENTATION_ANALYSIS.md +202 -0
  53. data/templates/ANALYZE/06_STATIC_ANALYSIS.md +233 -0
  54. data/templates/ANALYZE/07_REFACTORING_RECOMMENDATIONS.md +316 -0
  55. data/templates/COMMON/AGENT_BASE.md +129 -0
  56. data/templates/COMMON/CONVENTIONS.md +19 -0
  57. data/templates/COMMON/TEMPLATES/ADR_TEMPLATE.md +21 -0
  58. data/templates/COMMON/TEMPLATES/DOMAIN_CHARTER.md +27 -0
  59. data/templates/COMMON/TEMPLATES/EVENT_EXAMPLE.yaml +16 -0
  60. data/templates/COMMON/TEMPLATES/MERMAID_C4.md +46 -0
  61. data/templates/COMMON/TEMPLATES/OPENAPI_STUB.yaml +11 -0
  62. data/templates/EXECUTE/00_PRD.md +36 -0
  63. data/templates/EXECUTE/01_NFRS.md +27 -0
  64. data/templates/EXECUTE/02A_ARCH_GATE_QUESTIONS.md +13 -0
  65. data/templates/EXECUTE/02_ARCHITECTURE.md +42 -0
  66. data/templates/EXECUTE/03_ADR_FACTORY.md +22 -0
  67. data/templates/EXECUTE/04_DOMAIN_DECOMPOSITION.md +24 -0
  68. data/templates/EXECUTE/05_CONTRACTS.md +27 -0
  69. data/templates/EXECUTE/06_THREAT_MODEL.md +23 -0
  70. data/templates/EXECUTE/07_TEST_PLAN.md +24 -0
  71. data/templates/EXECUTE/08_TASKS.md +29 -0
  72. data/templates/EXECUTE/09_SCAFFOLDING_DEVEX.md +25 -0
  73. data/templates/EXECUTE/10_IMPLEMENTATION_AGENT.md +30 -0
  74. data/templates/EXECUTE/11_STATIC_ANALYSIS.md +22 -0
  75. data/templates/EXECUTE/12_OBSERVABILITY_SLOS.md +21 -0
  76. data/templates/EXECUTE/13_DELIVERY_ROLLOUT.md +21 -0
  77. data/templates/EXECUTE/14_DOCS_PORTAL.md +23 -0
  78. data/templates/EXECUTE/15_POST_RELEASE.md +25 -0
  79. metadata +301 -0
data/lib/aidp/analyze/memory_manager.rb
@@ -0,0 +1,365 @@
+ # frozen_string_literal: true
+
+ require "json"
+ require "yaml"
+ require "digest"
+
+ module Aidp
+   class MemoryManager
+     # Memory management strategies
+     MEMORY_STRATEGIES = %w[streaming chunking caching garbage_collection].freeze
+
+     # Default configuration
+     DEFAULT_CONFIG = {
+       max_memory_usage: 1024 * 1024 * 1024, # 1GB
+       chunk_size: 1000,
+       cache_size: 100,
+       gc_threshold: 0.8, # 80% memory usage triggers GC
+       streaming_enabled: true,
+       compression_enabled: false
+     }.freeze
+
+     def initialize(config = {})
+       @config = DEFAULT_CONFIG.merge(config)
+       @cache = {}
+       @memory_usage = 0
+       @peak_memory_usage = 0
+       @gc_count = 0
+       @streaming_data = []
+     end
+
+     # Process large dataset with memory management
+     def process_large_dataset(dataset, processor_method, options = {})
+       strategy = options[:strategy] || "streaming"
+
+       case strategy
+       when "streaming"
+         process_with_streaming(dataset, processor_method, options)
+       when "chunking"
+         process_with_chunking(dataset, processor_method, options)
+       when "caching"
+         process_with_caching(dataset, processor_method, options)
+       else
+         raise "Unknown memory management strategy: #{strategy}"
+       end
+     end
+
+     # Process data with streaming approach
+     def process_with_streaming(dataset, processor_method, options = {})
+       results = {
+         processed_items: 0,
+         memory_usage: [],
+         gc_count: 0,
+         results: [],
+         errors: []
+       }
+
+       begin
+         dataset.each_with_index do |item, index|
+           # Check memory usage
+           current_memory = get_memory_usage
+           results[:memory_usage] << current_memory
+
+           # Trigger garbage collection if needed
+           if should_trigger_gc?(current_memory)
+             trigger_garbage_collection
+             results[:gc_count] += 1
+           end
+
+           # Process item
+           begin
+             result = processor_method.call(item, options)
+             results[:results] << result
+             results[:processed_items] += 1
+           rescue => e
+             results[:errors] << {
+               item_index: index,
+               error: e.message
+             }
+           end
+
+           # Update memory tracking
+           update_memory_tracking(current_memory)
+         end
+       rescue => e
+         results[:errors] << {
+           type: "streaming_error",
+           message: e.message
+         }
+       end
+
+       results
+     end
+
+     # Process data with chunking approach
+     def process_with_chunking(dataset, processor_method, options = {})
+       chunk_size = options[:chunk_size] || @config[:chunk_size]
+       results = {
+         processed_chunks: 0,
+         processed_items: 0,
+         memory_usage: [],
+         gc_count: 0,
+         results: [],
+         errors: []
+       }
+
+       begin
+         dataset.each_slice(chunk_size) do |chunk|
+           # Check memory before processing chunk
+           pre_chunk_memory = get_memory_usage
+           results[:memory_usage] << pre_chunk_memory
+
+           # Process chunk
+           chunk_results = process_chunk(chunk, processor_method, options)
+           results[:results].concat(chunk_results[:results])
+           results[:errors].concat(chunk_results[:errors])
+           results[:processed_items] += chunk_results[:processed_items]
+
+           # Trigger garbage collection after chunk
+           if should_trigger_gc?(pre_chunk_memory)
+             trigger_garbage_collection
+             results[:gc_count] += 1
+           end
+
+           results[:processed_chunks] += 1
+           update_memory_tracking(pre_chunk_memory)
+         end
+       rescue => e
+         results[:errors] << {
+           type: "chunking_error",
+           message: e.message
+         }
+       end
+
+       results
+     end
+
+     # Process data with caching approach
+     def process_with_caching(dataset, processor_method, options = {})
+       cache_size = options[:cache_size] || @config[:cache_size]
+       results = {
+         processed_items: 0,
+         cache_hits: 0,
+         cache_misses: 0,
+         memory_usage: [],
+         gc_count: 0,
+         results: [],
+         errors: []
+       }
+
+       begin
+         dataset.each_with_index do |item, index|
+           # Check memory usage
+           current_memory = get_memory_usage
+           results[:memory_usage] << current_memory
+
+           # Check cache
+           cache_key = generate_cache_key(item)
+           if @cache.key?(cache_key)
+             results[:cache_hits] += 1
+             result = @cache[cache_key]
+           else
+             results[:cache_misses] += 1
+             begin
+               result = processor_method.call(item, options)
+               cache_result(cache_key, result, cache_size)
+             rescue => e
+               results[:errors] << {
+                 item_index: index,
+                 error: e.message
+               }
+               next
+             end
+           end
+
+           results[:results] << result
+           results[:processed_items] += 1
+
+           # Trigger garbage collection if needed
+           if should_trigger_gc?(current_memory)
+             trigger_garbage_collection
+             results[:gc_count] += 1
+           end
+
+           update_memory_tracking(current_memory)
+         end
+       rescue => e
+         results[:errors] << {
+           type: "caching_error",
+           message: e.message
+         }
+       end
+
+       results
+     end
+
+     # Optimize memory usage
+     def optimize_memory_usage(options = {})
+       optimizations = {
+         memory_before: get_memory_usage,
+         optimizations_applied: [],
+         memory_after: 0,
+         memory_saved: 0
+       }
+
+       # Clear cache if memory usage is high
+       if get_memory_usage > @config[:max_memory_usage] * 0.8
+         clear_cache
+         optimizations[:optimizations_applied] << "cache_cleared"
+       end
+
+       # Trigger garbage collection
+       trigger_garbage_collection
+       optimizations[:optimizations_applied] << "garbage_collection"
+
+       # Compress data if enabled
+       if @config[:compression_enabled]
+         compress_data
+         optimizations[:optimizations_applied] << "data_compression"
+       end
+
+       optimizations[:memory_after] = get_memory_usage
+       optimizations[:memory_saved] = optimizations[:memory_before] - optimizations[:memory_after]
+
+       optimizations
+     end
+
+     # Get memory statistics
+     def get_memory_statistics
+       {
+         current_memory: get_memory_usage,
+         peak_memory: @peak_memory_usage,
+         cache_size: @cache.length,
+         gc_count: @gc_count,
+         streaming_data_size: @streaming_data.length,
+         memory_limit: @config[:max_memory_usage],
+         memory_usage_percentage: (get_memory_usage.to_f / @config[:max_memory_usage] * 100).round(2)
+       }
+     end
+
+     # Clear memory
+     def clear_memory
+       clear_cache
+       @streaming_data.clear
+       trigger_garbage_collection
+
+       {
+         memory_cleared: true,
+         memory_after_clear: get_memory_usage
+       }
+     end
+
+     # Monitor memory usage
+     def monitor_memory_usage(duration = 60, interval = 1)
+       monitoring_data = {
+         start_time: Time.now,
+         duration: duration,
+         interval: interval,
+         measurements: [],
+         alerts: []
+       }
+
+       start_time = Time.now
+       end_time = start_time + duration
+
+       while Time.now < end_time
+         current_memory = get_memory_usage
+         current_time = Time.now
+
+         measurement = {
+           timestamp: current_time,
+           memory_usage: current_memory,
+           memory_percentage: (current_memory.to_f / @config[:max_memory_usage] * 100).round(2)
+         }
+
+         monitoring_data[:measurements] << measurement
+
+         # Check for memory alerts
+         if current_memory > @config[:max_memory_usage] * 0.9
+           monitoring_data[:alerts] << {
+             timestamp: current_time,
+             type: "high_memory_usage",
+             message: "Memory usage is at #{measurement[:memory_percentage]}%"
+           }
+         end
+
+         sleep(interval)
+       end
+
+       monitoring_data[:end_time] = Time.now
+       monitoring_data
+     end
+
+     private
+
+     def process_chunk(chunk, processor_method, options)
+       results = {
+         processed_items: 0,
+         results: [],
+         errors: []
+       }
+
+       chunk.each_with_index do |item, index|
+         result = processor_method.call(item, options)
+         results[:results] << result
+         results[:processed_items] += 1
+       rescue => e
+         results[:errors] << {
+           item_index: index,
+           error: e.message
+         }
+       end
+
+       results
+     end
+
+     def should_trigger_gc?(current_memory)
+       current_memory > @config[:max_memory_usage] * @config[:gc_threshold]
+     end
+
+     def trigger_garbage_collection
+       GC.start
+       @gc_count += 1
+     end
+
+     def get_memory_usage
+       # Get current resident set size in bytes. Core Ruby's Process module
+       # has no getrusage, so shell out to `ps` (which reports RSS in KB) as
+       # a POSIX-portable measurement.
+       `ps -o rss= -p #{Process.pid}`.to_i * 1024
+     end
+
+     def update_memory_tracking(current_memory)
+       @memory_usage = current_memory
+       @peak_memory_usage = [@peak_memory_usage, current_memory].max
+     end
+
+     def generate_cache_key(item)
+       # Generate a cache key for the item
+       Digest::MD5.hexdigest(item.to_json)
+     rescue
+       # Fallback to object_id if JSON serialization fails
+       "item_#{item.object_id}"
+     end
+
+     def cache_result(key, result, max_cache_size)
+       # Add result to cache
+       @cache[key] = result
+
+       # Remove oldest entries if cache is full
+       return unless @cache.length > max_cache_size
+
+       oldest_key = @cache.keys.first
+       @cache.delete(oldest_key)
+     end
+
+     def clear_cache
+       @cache.clear
+     end
+
+     def compress_data
+       # Compress streaming data if it's large
+       return unless @streaming_data.length > 1000
+
+       @streaming_data = @streaming_data.last(500) # Keep only recent data
+     end
+   end
+ end
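
For reference, a minimal usage sketch of the MemoryManager class above. This is not part of the package diff: the dataset, the word_counter lambda, and the assumption that requiring "aidp" loads the analyze components are all hypothetical.

require "aidp"

# Cap the manager at 512MB instead of the 1GB default (hypothetical setup).
manager = Aidp::MemoryManager.new(max_memory_usage: 512 * 1024 * 1024)

# Any callable that accepts (item, options) works as a processor.
word_counter = ->(item, _options) { item.split.length }

dataset = ["a short line", "another line of text"] * 10_000
report = manager.process_large_dataset(
  dataset,
  word_counter,
  strategy: "chunking",
  chunk_size: 500
)

report[:processed_items]  # 20000 items, processed in 40 chunks of 500
report[:errors]           # per-item failures, if any
manager.get_memory_statistics[:memory_usage_percentage]

The trailing hash collects into the method's options parameter, so the same call shape selects any of the three strategies ("streaming", "chunking", "caching").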