aidp 0.13.0 → 0.14.0
This diff shows the contents of publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only and reflects the changes between the two released versions.
- checksums.yaml +4 -4
- data/README.md +7 -0
- data/lib/aidp/cli/first_run_wizard.rb +28 -303
- data/lib/aidp/cli/issue_importer.rb +359 -0
- data/lib/aidp/cli.rb +151 -3
- data/lib/aidp/daemon/process_manager.rb +146 -0
- data/lib/aidp/daemon/runner.rb +232 -0
- data/lib/aidp/execute/async_work_loop_runner.rb +216 -0
- data/lib/aidp/execute/future_work_backlog.rb +411 -0
- data/lib/aidp/execute/guard_policy.rb +246 -0
- data/lib/aidp/execute/instruction_queue.rb +131 -0
- data/lib/aidp/execute/interactive_repl.rb +335 -0
- data/lib/aidp/execute/repl_macros.rb +651 -0
- data/lib/aidp/execute/steps.rb +8 -0
- data/lib/aidp/execute/work_loop_runner.rb +322 -36
- data/lib/aidp/execute/work_loop_state.rb +162 -0
- data/lib/aidp/harness/config_schema.rb +88 -0
- data/lib/aidp/harness/configuration.rb +48 -1
- data/lib/aidp/harness/ui/enhanced_workflow_selector.rb +2 -0
- data/lib/aidp/init/doc_generator.rb +256 -0
- data/lib/aidp/init/project_analyzer.rb +343 -0
- data/lib/aidp/init/runner.rb +83 -0
- data/lib/aidp/init.rb +5 -0
- data/lib/aidp/logger.rb +279 -0
- data/lib/aidp/setup/wizard.rb +777 -0
- data/lib/aidp/tooling_detector.rb +115 -0
- data/lib/aidp/version.rb +1 -1
- data/lib/aidp/watch/build_processor.rb +282 -0
- data/lib/aidp/watch/plan_generator.rb +166 -0
- data/lib/aidp/watch/plan_processor.rb +83 -0
- data/lib/aidp/watch/repository_client.rb +243 -0
- data/lib/aidp/watch/runner.rb +93 -0
- data/lib/aidp/watch/state_store.rb +105 -0
- data/lib/aidp/watch.rb +9 -0
- data/lib/aidp.rb +14 -0
- data/templates/implementation/simple_task.md +36 -0
- metadata +26 -1
data/lib/aidp/execute/future_work_backlog.rb (new file)

@@ -0,0 +1,411 @@
+# frozen_string_literal: true
+
+require "yaml"
+require "fileutils"
+
+module Aidp
+  module Execute
+    # Manages a backlog of future work items discovered during work loops
+    # Captures style violations, technical debt, and refactoring opportunities
+    # that are not directly related to the current feature being implemented
+    class FutureWorkBacklog
+      attr_reader :project_dir, :entries, :current_context
+
+      # Entry types
+      ENTRY_TYPES = {
+        style_violation: "Style Violation",
+        refactor_opportunity: "Refactor Opportunity",
+        technical_debt: "Technical Debt",
+        todo: "TODO",
+        performance: "Performance Issue",
+        security: "Security Concern",
+        documentation: "Documentation Needed"
+      }.freeze
+
+      # Priority levels
+      PRIORITIES = {
+        low: 1,
+        medium: 2,
+        high: 3,
+        critical: 4
+      }.freeze
+
+      def initialize(project_dir, options = {})
+        @project_dir = project_dir
+        @backlog_dir = File.join(project_dir, ".aidp")
+        @backlog_file = options[:backlog_file] || File.join(@backlog_dir, "future_work.yml")
+        @markdown_file = File.join(@backlog_dir, "future_work.md")
+        @entries = []
+        @current_context = {}
+        @options = options
+
+        ensure_backlog_directory
+        load_existing_backlog
+      end
+
+      # Add a new future work entry
+      # @param entry_hash [Hash] Entry details
+      # @option entry_hash [Symbol] :type Entry type (see ENTRY_TYPES)
+      # @option entry_hash [String] :file File path
+      # @option entry_hash [Integer,Range,String] :lines Line number(s)
+      # @option entry_hash [String] :reason Description of the issue
+      # @option entry_hash [String] :recommendation Recommended fix
+      # @option entry_hash [Symbol] :priority Priority level (see PRIORITIES)
+      # @option entry_hash [Hash] :metadata Additional metadata
+      def add_entry(entry_hash)
+        entry = normalize_entry(entry_hash)
+
+        # Avoid duplicates
+        return if duplicate?(entry)
+
+        entry[:id] = generate_entry_id
+        entry[:created_at] = Time.now.utc.iso8601
+        entry[:context] = @current_context.dup
+
+        @entries << entry
+        entry
+      end
+
+      # Set context for subsequent entries (e.g., current work loop, step)
+      def set_context(context_hash)
+        @current_context.merge!(context_hash)
+      end
+
+      # Clear context
+      def clear_context
+        @current_context.clear
+      end
+
+      # Save backlog to disk (both YAML and Markdown)
+      def save
+        save_yaml
+        save_markdown
+      end
+
+      # Get entries filtered by criteria
+      def filter(criteria = {})
+        filtered = @entries
+
+        filtered = filtered.select { |e| e[:type] == criteria[:type] } if criteria[:type]
+        filtered = filtered.select { |e| e[:file] == criteria[:file] } if criteria[:file]
+        filtered = filtered.select { |e| e[:priority] == criteria[:priority] } if criteria[:priority]
+        filtered = filtered.select { |e| e[:context][:work_loop] == criteria[:work_loop] } if criteria[:work_loop]
+
+        filtered
+      end
+
+      # Get entries grouped by type
+      def by_type
+        @entries.group_by { |e| e[:type] }
+      end
+
+      # Get entries grouped by file
+      def by_file
+        @entries.group_by { |e| e[:file] }
+      end
+
+      # Get entries grouped by priority
+      def by_priority
+        @entries.group_by { |e| e[:priority] }.sort_by { |priority, _| -PRIORITIES[priority] }.to_h
+      end
+
+      # Get summary statistics
+      def summary
+        {
+          total: @entries.size,
+          by_type: by_type.transform_values(&:size),
+          by_priority: by_priority.transform_values(&:size),
+          files_affected: @entries.map { |e| e[:file] }.uniq.size
+        }
+      end
+
+      # Mark entry as resolved
+      def resolve_entry(entry_id, resolution_note = nil)
+        entry = @entries.find { |e| e[:id] == entry_id }
+        return unless entry
+
+        entry[:resolved] = true
+        entry[:resolved_at] = Time.now.utc.iso8601
+        entry[:resolution_note] = resolution_note if resolution_note
+      end
+
+      # Remove resolved entries
+      def clear_resolved
+        @entries.reject! { |e| e[:resolved] }
+      end
+
+      # Convert entry to work loop PROMPT.md content
+      def entry_to_prompt(entry_id)
+        entry = @entries.find { |e| e[:id] == entry_id }
+        return unless entry
+
+        <<~PROMPT
+          # Work Loop: #{entry_type_display(entry[:type])}
+
+          ## Task Description
+
+          **File**: #{entry[:file]}
+          **Lines**: #{entry[:lines]}
+          **Priority**: #{entry[:priority].to_s.upcase}
+
+          ## Issue
+
+          #{entry[:reason]}
+
+          ## Recommended Fix
+
+          #{entry[:recommendation]}
+
+          ## Acceptance Criteria
+
+          - [ ] #{entry[:reason]} is resolved
+          - [ ] Code follows LLM_STYLE_GUIDE
+          - [ ] Tests pass
+          - [ ] No new style violations introduced
+
+          ## Original Context
+
+          - Work Loop: #{entry[:context][:work_loop] || "N/A"}
+          - Step: #{entry[:context][:step] || "N/A"}
+          - Created: #{entry[:created_at]}
+
+          ## Completion
+
+          Mark this complete by adding: STATUS: COMPLETE
+        PROMPT
+      end
+
+      # Display summary of backlog
+      def display_summary(output = $stdout)
+        return if @entries.empty?
+
+        output.puts "\n" + "=" * 80
+        output.puts "📝 Future Work Backlog Summary"
+        output.puts "=" * 80
+
+        sum = summary
+        output.puts "\nTotal Items: #{sum[:total]}"
+        output.puts "Files Affected: #{sum[:files_affected]}"
+
+        if sum[:by_type].any?
+          output.puts "\nBy Type:"
+          sum[:by_type].each do |type, count|
+            output.puts " #{entry_type_display(type)}: #{count}"
+          end
+        end
+
+        if sum[:by_priority].any?
+          output.puts "\nBy Priority:"
+          sum[:by_priority].each do |priority, count|
+            output.puts " #{priority.to_s.upcase}: #{count}"
+          end
+        end
+
+        output.puts "\n" + "-" * 80
+        output.puts "Review backlog: .aidp/future_work.md"
+        output.puts "Convert to work loop: aidp backlog convert <entry-id>"
+        output.puts "=" * 80 + "\n"
+      end
+
+      private
+
+      # Ensure backlog directory exists
+      def ensure_backlog_directory
+        FileUtils.mkdir_p(@backlog_dir) unless Dir.exist?(@backlog_dir)
+      end
+
+      # Load existing backlog from disk
+      def load_existing_backlog
+        return unless File.exist?(@backlog_file)
+
+        data = YAML.load_file(@backlog_file)
+        @entries = data["entries"] || [] if data.is_a?(Hash)
+        @entries = symbolize_keys_deep(@entries)
+      rescue => e
+        warn "Warning: Could not load existing backlog: #{e.message}"
+        @entries = []
+      end
+
+      # Save backlog to YAML
+      def save_yaml
+        data = {
+          "version" => "1.0",
+          "generated_at" => Time.now.utc.iso8601,
+          "project" => @project_dir,
+          "entries" => @entries.map { |e| stringify_keys_deep(e) }
+        }
+
+        File.write(@backlog_file, YAML.dump(data))
+      end
+
+      # Save backlog to Markdown (human-readable)
+      def save_markdown
+        content = generate_markdown
+
+        File.write(@markdown_file, content)
+      end
+
+      # Generate Markdown representation
+      def generate_markdown
+        lines = []
+        lines << "# Future Work Backlog"
+        lines << ""
+        lines << "Generated: #{Time.now.utc.iso8601}"
+        lines << "Project: #{@project_dir}"
+        lines << ""
+
+        sum = summary
+        lines << "## Summary"
+        lines << ""
+        lines << "- **Total Items**: #{sum[:total]}"
+        lines << "- **Files Affected**: #{sum[:files_affected]}"
+        lines << ""
+
+        # Group by priority
+        by_priority.each do |priority, entries|
+          lines << "## #{priority.to_s.upcase} Priority (#{entries.size})"
+          lines << ""
+
+          # Group by type within priority
+          entries.group_by { |e| e[:type] }.each do |type, type_entries|
+            lines << "### #{entry_type_display(type)}"
+            lines << ""
+
+            type_entries.each do |entry|
+              lines << format_entry_markdown(entry)
+              lines << ""
+            end
+          end
+        end
+
+        lines << "---"
+        lines << ""
+        lines << "## Usage"
+        lines << ""
+        lines << "Convert an entry to a work loop:"
+        lines << "```bash"
+        lines << "aidp backlog convert <entry-id>"
+        lines << "```"
+        lines << ""
+
+        lines.join("\n")
+      end
+
+      # Format single entry as Markdown
+      def format_entry_markdown(entry)
+        lines = []
+        lines << "#### #{entry[:id]} - #{entry[:file]}"
+        lines << ""
+        lines << "**Lines**: #{entry[:lines]}"
+        lines << ""
+        lines << "**Issue**: #{entry[:reason]}"
+        lines << ""
+        lines << "**Recommendation**: #{entry[:recommendation]}"
+        lines << ""
+
+        if entry[:context].any?
+          lines << "**Context**: Work Loop: #{entry[:context][:work_loop] || "N/A"}, Step: #{entry[:context][:step] || "N/A"}"
+          lines << ""
+        end
+
+        lines << "*Created: #{entry[:created_at]}*"
+
+        lines.join("\n")
+      end
+
+      # Normalize entry hash
+      def normalize_entry(entry_hash)
+        {
+          type: entry_hash[:type] || :technical_debt,
+          file: normalize_path(entry_hash[:file]),
+          lines: normalize_lines(entry_hash[:lines]),
+          reason: entry_hash[:reason] || "No reason provided",
+          recommendation: entry_hash[:recommendation] || "No recommendation provided",
+          priority: entry_hash[:priority] || :medium,
+          metadata: entry_hash[:metadata] || {},
+          resolved: false
+        }
+      end
+
+      # Check if entry is duplicate
+      def duplicate?(entry)
+        @entries.any? do |existing|
+          existing[:file] == entry[:file] &&
+            existing[:lines] == entry[:lines] &&
+            existing[:reason] == entry[:reason] &&
+            !existing[:resolved]
+        end
+      end
+
+      # Generate unique entry ID
+      def generate_entry_id
+        timestamp = Time.now.to_i
+        random = SecureRandom.hex(4)
+        "fw-#{timestamp}-#{random}"
+      end
+
+      # Normalize file path (relative to project)
+      def normalize_path(file_path)
+        return file_path unless file_path
+
+        path = Pathname.new(file_path)
+        project = Pathname.new(@project_dir)
+
+        if path.absolute?
+          path.relative_path_from(project).to_s
+        else
+          path.to_s
+        end
+      rescue ArgumentError
+        file_path
+      end
+
+      # Normalize line numbers
+      def normalize_lines(lines)
+        case lines
+        when Integer
+          lines.to_s
+        when Range
+          "#{lines.begin}-#{lines.end}"
+        when String
+          lines
+        else
+          "unknown"
+        end
+      end
+
+      # Get display name for entry type
+      def entry_type_display(type)
+        ENTRY_TYPES[type] || type.to_s.split("_").map(&:capitalize).join(" ")
+      end
+
+      # Recursively symbolize keys
+      def symbolize_keys_deep(obj)
+        case obj
+        when Hash
+          obj.each_with_object({}) do |(key, value), result|
+            result[key.to_sym] = symbolize_keys_deep(value)
+          end
+        when Array
+          obj.map { |item| symbolize_keys_deep(item) }
+        else
+          obj
+        end
+      end
+
+      # Recursively stringify keys
+      def stringify_keys_deep(obj)
+        case obj
+        when Hash
+          obj.each_with_object({}) do |(key, value), result|
+            result[key.to_s] = stringify_keys_deep(value)
+          end
+        when Array
+          obj.map { |item| stringify_keys_deep(item) }
+        else
+          obj
+        end
+      end
+    end
+  end
+end
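For orientation, here is a minimal usage sketch of the new backlog class. It exercises only the public methods visible in the hunk above; the require paths, the entry values, and the note that `securerandom`, `time`, and `pathname` are presumably loaded elsewhere in the gem are assumptions, not documented behavior.

```ruby
# Sketch only: exercises the FutureWorkBacklog API shown in the diff above.
# Require paths and entry values are illustrative assumptions.
require "securerandom" # the class calls SecureRandom.hex but does not require it itself
require "time"         # needed for Time#iso8601
require "pathname"     # used by normalize_path
require "aidp/execute/future_work_backlog"

backlog = Aidp::Execute::FutureWorkBacklog.new(Dir.pwd)
backlog.set_context(work_loop: "wl-001", step: "implement_feature")

entry = backlog.add_entry(
  type: :style_violation,
  file: "lib/example.rb",
  lines: 10..25, # normalized to "10-25"
  reason: "Method exceeds style guide length",
  recommendation: "Extract helper methods",
  priority: :medium
)

backlog.save                             # writes .aidp/future_work.yml and .aidp/future_work.md
puts backlog.summary[:total]             # => 1
puts backlog.entry_to_prompt(entry[:id]) # PROMPT.md body for a follow-up work loop
```

The YAML file written by `save` carries top-level `version`, `generated_at`, `project`, and `entries` keys, with entry keys stringified on write and symbolized again on load.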
data/lib/aidp/execute/guard_policy.rb (new file)

@@ -0,0 +1,246 @@
+# frozen_string_literal: true
+
+require "pathname"
+
+module Aidp
+  module Execute
+    # Enforces safety constraints during work loops
+    # Responsibilities:
+    # - Check file patterns (include/exclude globs)
+    # - Enforce max lines changed per commit
+    # - Track files requiring confirmation
+    # - Validate changes against policy before execution
+    class GuardPolicy
+      attr_reader :config, :project_dir
+
+      def initialize(project_dir, config)
+        @project_dir = project_dir
+        @config = config
+        @confirmed_files = Set.new
+      end
+
+      # Check if guards are enabled
+      def enabled?
+        config.dig(:enabled) == true
+      end
+
+      # Validate if a file can be modified
+      # Returns { allowed: true/false, reason: string }
+      def can_modify_file?(file_path)
+        return {allowed: true} unless enabled?
+
+        normalized_path = normalize_path(file_path)
+
+        # Check exclude patterns first
+        if excluded?(normalized_path)
+          return {
+            allowed: false,
+            reason: "File matches exclude pattern in guards configuration"
+          }
+        end
+
+        # Check include patterns (if specified, file must match at least one)
+        if has_include_patterns? && !included?(normalized_path)
+          return {
+            allowed: false,
+            reason: "File does not match any include pattern in guards configuration"
+          }
+        end
+
+        # Check if file requires confirmation
+        if requires_confirmation?(normalized_path) && !confirmed?(normalized_path)
+          return {
+            allowed: false,
+            reason: "File requires one-time confirmation before modification",
+            requires_confirmation: true,
+            file_path: normalized_path
+          }
+        end
+
+        {allowed: true}
+      end
+
+      # Confirm a file for modification (one-time confirmation)
+      def confirm_file(file_path)
+        normalized_path = normalize_path(file_path)
+        @confirmed_files.add(normalized_path)
+      end
+
+      # Check if total lines changed exceeds limit
+      # diff_stats: { file_path => { additions: n, deletions: n } }
+      def validate_changes(diff_stats)
+        return {valid: true} unless enabled?
+
+        errors = []
+
+        # Check max lines per commit
+        if (max_lines = config.dig(:max_lines_per_commit))
+          total_changes = calculate_total_changes(diff_stats)
+
+          if total_changes > max_lines
+            errors << "Total lines changed (#{total_changes}) exceeds limit (#{max_lines})"
+          end
+        end
+
+        # Check each file against policy
+        diff_stats.each do |file_path, stats|
+          result = can_modify_file?(file_path)
+          unless result[:allowed]
+            errors << "#{file_path}: #{result[:reason]}"
+          end
+        end
+
+        if errors.any?
+          {valid: false, errors: errors}
+        else
+          {valid: true}
+        end
+      end
+
+      # Get list of files requiring confirmation
+      def files_requiring_confirmation
+        return [] unless enabled?
+
+        patterns = config.dig(:confirm_files) || []
+        patterns.map { |pattern| expand_glob_pattern(pattern) }.flatten.compact
+      end
+
+      # Check if file requires confirmation
+      def requires_confirmation?(file_path)
+        return false unless enabled?
+
+        patterns = config.dig(:confirm_files) || []
+        normalized_path = normalize_path(file_path)
+
+        patterns.any? { |pattern| matches_pattern?(normalized_path, pattern) }
+      end
+
+      # Check if file has been confirmed
+      def confirmed?(file_path)
+        normalized_path = normalize_path(file_path)
+        @confirmed_files.include?(normalized_path)
+      end
+
+      # Get summary of guard policy configuration
+      def summary
+        return {enabled: false} unless enabled?
+
+        {
+          enabled: true,
+          include_patterns: config.dig(:include_files) || [],
+          exclude_patterns: config.dig(:exclude_files) || [],
+          confirm_patterns: config.dig(:confirm_files) || [],
+          max_lines_per_commit: config.dig(:max_lines_per_commit),
+          confirmed_files: @confirmed_files.to_a
+        }
+      end
+
+      # Bypass guards (for specific use cases like testing)
+      def bypass?
+        ENV["AIDP_BYPASS_GUARDS"] == "1" || config.dig(:bypass) == true
+      end
+
+      # Enable guards (override bypass)
+      def enable!
+        config[:enabled] = true
+      end
+
+      # Disable guards
+      def disable!
+        config[:enabled] = false
+      end
+
+      private

+      # Normalize file path relative to project directory
+      def normalize_path(file_path)
+        path = Pathname.new(file_path)
+        project = Pathname.new(@project_dir)
+
+        if path.absolute?
+          path.relative_path_from(project).to_s
+        else
+          path.to_s
+        end
+      rescue ArgumentError
+        # Path is outside project directory
+        file_path
+      end
+
+      # Check if file matches exclude patterns
+      def excluded?(file_path)
+        patterns = config.dig(:exclude_files) || []
+        patterns.any? { |pattern| matches_pattern?(file_path, pattern) }
+      end
+
+      # Check if file matches include patterns
+      def included?(file_path)
+        patterns = config.dig(:include_files) || []
+        patterns.any? { |pattern| matches_pattern?(file_path, pattern) }
+      end
+
+      # Check if include patterns are configured
+      def has_include_patterns?
+        patterns = config.dig(:include_files) || []
+        patterns.any?
+      end
+
+      # Match file path against glob pattern
+      # Uses File.fnmatch for safe, efficient pattern matching without ReDoS risk
+      def matches_pattern?(file_path, pattern)
+        # Ruby's File.fnmatch with FNM_EXTGLOB handles most patterns safely
+        # FNM_EXTGLOB enables {a,b} brace expansion
+        # For ** patterns, we need to handle them specially as fnmatch doesn't support ** natively
+
+        if pattern.include?("**")
+          # Convert ** to * for fnmatch compatibility and check if path contains the pattern parts
+          # Pattern like "lib/**/*.rb" should match "lib/foo/bar.rb"
+          pattern_parts = pattern.split("**").map(&:strip).reject(&:empty?)
+
+          if pattern_parts.empty?
+            # Pattern is just "**" - matches everything
+            true
+          elsif pattern_parts.size == 1
+            # Pattern like "**/file.rb" or "lib/**"
+            part = pattern_parts[0].sub(%r{^/}, "").sub(%r{/$}, "")
+            if pattern.start_with?("**")
+              # Matches if any part of the path matches
+              File.fnmatch(part, file_path, File::FNM_EXTGLOB) ||
+                File.fnmatch("**/#{part}", file_path, File::FNM_EXTGLOB) ||
+                file_path.end_with?(part) ||
+                file_path.include?("/#{part}")
+            else
+              # Pattern ends with **: match prefix
+              file_path.start_with?(part)
+            end
+          else
+            # Pattern like "lib/**/*.rb" - has prefix and suffix
+            prefix = pattern_parts[0].sub(%r{/$}, "")
+            suffix = pattern_parts[1].sub(%r{^/}, "")
+
+            file_path.start_with?(prefix) && File.fnmatch(suffix, file_path.sub(/^#{Regexp.escape(prefix)}\//, ""), File::FNM_EXTGLOB)
+          end
+        else
+          # Standard glob pattern - use File.fnmatch which is safe from ReDoS
+          # FNM_DOTMATCH allows * to match files starting with .
+          File.fnmatch(pattern, file_path, File::FNM_EXTGLOB | File::FNM_DOTMATCH)
+        end
+      end
+
+      # Expand glob pattern to actual files (for confirmation list)
+      def expand_glob_pattern(pattern)
+        Dir.glob(File.join(@project_dir, pattern), File::FNM_DOTMATCH).map do |file|
+          next if File.directory?(file)
+          normalize_path(file)
+        end
+      end
+
+      # Calculate total lines changed from diff stats
+      def calculate_total_changes(diff_stats)
+        diff_stats.values.sum do |stats|
+          (stats[:additions] || 0) + (stats[:deletions] || 0)
+        end
+      end
+    end
+  end
+end
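And a comparable sketch for the guard policy, assuming a plain symbol-keyed config hash. The real guards configuration is produced by `config_schema.rb`/`configuration.rb`, which this diff lists but does not show, so the keys below are taken from the `config.dig` calls in `guard_policy.rb` itself; the paths, patterns, and limits are made up.

```ruby
# Sketch only: config keys mirror the config.dig calls in GuardPolicy above;
# paths and limits are illustrative, not real project settings.
require "set"      # GuardPolicy builds a Set but does not require it itself
require "pathname"
require "aidp/execute/guard_policy" # assumed require path inside the gem

config = {
  enabled: true,
  include_files: ["lib/**/*.rb"],
  exclude_files: ["vendor/**"],
  confirm_files: ["lib/aidp/version.rb"],
  max_lines_per_commit: 400
}

policy = Aidp::Execute::GuardPolicy.new(Dir.pwd, config)

policy.can_modify_file?("lib/aidp/cli.rb")    # => {allowed: true}
policy.can_modify_file?("vendor/bundle/x.rb") # => {allowed: false, reason: "File matches exclude pattern..."}

result = policy.can_modify_file?("lib/aidp/version.rb")
policy.confirm_file(result[:file_path]) if result[:requires_confirmation]
policy.can_modify_file?("lib/aidp/version.rb") # => {allowed: true} after one-time confirmation

policy.validate_changes({"lib/aidp/cli.rb" => {additions: 120, deletions: 30}})
# => {valid: true} while total changed lines stay under max_lines_per_commit
```

Note that `can_modify_file?` checks exclude patterns, then include patterns, and only then the confirmation list, so a confirm-only file must also satisfy the include patterns before the confirmation prompt is ever reached.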