aidp 0.32.0 → 0.34.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +35 -0
- data/lib/aidp/analyze/feature_analyzer.rb +322 -320
- data/lib/aidp/analyze/tree_sitter_scan.rb +3 -0
- data/lib/aidp/auto_update/coordinator.rb +97 -7
- data/lib/aidp/auto_update.rb +0 -12
- data/lib/aidp/cli/devcontainer_commands.rb +0 -5
- data/lib/aidp/cli/eval_command.rb +399 -0
- data/lib/aidp/cli/harness_command.rb +1 -1
- data/lib/aidp/cli/security_command.rb +416 -0
- data/lib/aidp/cli/tools_command.rb +6 -4
- data/lib/aidp/cli.rb +172 -4
- data/lib/aidp/comment_consolidator.rb +78 -0
- data/lib/aidp/concurrency/exec.rb +3 -0
- data/lib/aidp/concurrency.rb +0 -3
- data/lib/aidp/config.rb +113 -1
- data/lib/aidp/config_paths.rb +91 -0
- data/lib/aidp/daemon/runner.rb +8 -4
- data/lib/aidp/errors.rb +134 -0
- data/lib/aidp/evaluations/context_capture.rb +205 -0
- data/lib/aidp/evaluations/evaluation_record.rb +114 -0
- data/lib/aidp/evaluations/evaluation_storage.rb +250 -0
- data/lib/aidp/evaluations.rb +23 -0
- data/lib/aidp/execute/async_work_loop_runner.rb +4 -1
- data/lib/aidp/execute/interactive_repl.rb +6 -2
- data/lib/aidp/execute/prompt_evaluator.rb +359 -0
- data/lib/aidp/execute/repl_macros.rb +100 -1
- data/lib/aidp/execute/work_loop_runner.rb +719 -58
- data/lib/aidp/execute/work_loop_state.rb +4 -1
- data/lib/aidp/execute/workflow_selector.rb +3 -0
- data/lib/aidp/harness/ai_decision_engine.rb +79 -0
- data/lib/aidp/harness/ai_filter_factory.rb +285 -0
- data/lib/aidp/harness/capability_registry.rb +2 -0
- data/lib/aidp/harness/condition_detector.rb +3 -0
- data/lib/aidp/harness/config_loader.rb +3 -0
- data/lib/aidp/harness/config_schema.rb +97 -1
- data/lib/aidp/harness/config_validator.rb +1 -1
- data/lib/aidp/harness/configuration.rb +61 -5
- data/lib/aidp/harness/enhanced_runner.rb +14 -11
- data/lib/aidp/harness/error_handler.rb +3 -0
- data/lib/aidp/harness/filter_definition.rb +212 -0
- data/lib/aidp/harness/generated_filter_strategy.rb +197 -0
- data/lib/aidp/harness/output_filter.rb +50 -25
- data/lib/aidp/harness/output_filter_config.rb +129 -0
- data/lib/aidp/harness/provider_factory.rb +3 -0
- data/lib/aidp/harness/provider_manager.rb +96 -2
- data/lib/aidp/harness/runner.rb +5 -12
- data/lib/aidp/harness/state/persistence.rb +3 -0
- data/lib/aidp/harness/state_manager.rb +3 -0
- data/lib/aidp/harness/status_display.rb +28 -20
- data/lib/aidp/harness/test_runner.rb +179 -41
- data/lib/aidp/harness/thinking_depth_manager.rb +44 -28
- data/lib/aidp/harness/ui/enhanced_tui.rb +4 -0
- data/lib/aidp/harness/ui/enhanced_workflow_selector.rb +4 -0
- data/lib/aidp/harness/ui/error_handler.rb +3 -0
- data/lib/aidp/harness/ui/job_monitor.rb +4 -0
- data/lib/aidp/harness/ui/navigation/submenu.rb +2 -2
- data/lib/aidp/harness/ui/navigation/workflow_selector.rb +6 -0
- data/lib/aidp/harness/ui/spinner_helper.rb +3 -0
- data/lib/aidp/harness/ui/workflow_controller.rb +3 -0
- data/lib/aidp/harness/user_interface.rb +3 -0
- data/lib/aidp/loader.rb +195 -0
- data/lib/aidp/logger.rb +3 -0
- data/lib/aidp/message_display.rb +31 -0
- data/lib/aidp/metadata/compiler.rb +29 -17
- data/lib/aidp/metadata/query.rb +1 -1
- data/lib/aidp/metadata/scanner.rb +8 -1
- data/lib/aidp/metadata/tool_metadata.rb +13 -13
- data/lib/aidp/metadata/validator.rb +10 -0
- data/lib/aidp/metadata.rb +16 -0
- data/lib/aidp/pr_worktree_manager.rb +20 -8
- data/lib/aidp/provider_manager.rb +4 -7
- data/lib/aidp/providers/base.rb +2 -0
- data/lib/aidp/security/rule_of_two_enforcer.rb +210 -0
- data/lib/aidp/security/secrets_proxy.rb +328 -0
- data/lib/aidp/security/secrets_registry.rb +227 -0
- data/lib/aidp/security/trifecta_state.rb +220 -0
- data/lib/aidp/security/watch_mode_handler.rb +306 -0
- data/lib/aidp/security/work_loop_adapter.rb +277 -0
- data/lib/aidp/security.rb +56 -0
- data/lib/aidp/setup/wizard.rb +283 -11
- data/lib/aidp/skills.rb +0 -5
- data/lib/aidp/storage/csv_storage.rb +3 -0
- data/lib/aidp/style_guide/selector.rb +360 -0
- data/lib/aidp/tooling_detector.rb +283 -16
- data/lib/aidp/version.rb +1 -1
- data/lib/aidp/watch/auto_merger.rb +274 -0
- data/lib/aidp/watch/auto_pr_processor.rb +125 -7
- data/lib/aidp/watch/build_processor.rb +16 -1
- data/lib/aidp/watch/change_request_processor.rb +682 -150
- data/lib/aidp/watch/ci_fix_processor.rb +262 -4
- data/lib/aidp/watch/feedback_collector.rb +191 -0
- data/lib/aidp/watch/hierarchical_pr_strategy.rb +256 -0
- data/lib/aidp/watch/implementation_verifier.rb +142 -1
- data/lib/aidp/watch/plan_generator.rb +70 -13
- data/lib/aidp/watch/plan_processor.rb +12 -5
- data/lib/aidp/watch/projects_processor.rb +286 -0
- data/lib/aidp/watch/repository_client.rb +871 -22
- data/lib/aidp/watch/review_processor.rb +33 -6
- data/lib/aidp/watch/runner.rb +80 -29
- data/lib/aidp/watch/state_store.rb +233 -0
- data/lib/aidp/watch/sub_issue_creator.rb +221 -0
- data/lib/aidp/watch.rb +5 -7
- data/lib/aidp/workflows/guided_agent.rb +4 -0
- data/lib/aidp/workstream_cleanup.rb +0 -2
- data/lib/aidp/workstream_executor.rb +3 -4
- data/lib/aidp/worktree.rb +61 -12
- data/lib/aidp/worktree_branch_manager.rb +347 -101
- data/lib/aidp.rb +21 -106
- data/templates/implementation/iterative_implementation.md +46 -3
- metadata +91 -36
- data/lib/aidp/config/paths.rb +0 -131
data/lib/aidp/watch/repository_client.rb

@@ -26,9 +26,9 @@ module Aidp
 def self.parse_issues_url(issues_url)
   case issues_url
   when %r{\Ahttps://github\.com/([^/]+)/([^/]+)(?:/issues)?/?\z}
-    [
+    [::Regexp.last_match(1), ::Regexp.last_match(2)]
   when %r{\A([^/]+)/([^/]+)\z}
-    [
+    [::Regexp.last_match(1), ::Regexp.last_match(2)]
   else
     raise ArgumentError, "Unsupported issues URL: #{issues_url}"
   end
@@ -50,7 +50,14 @@ module Aidp
 end

 def list_issues(labels: [], state: "open")
-  gh_available?
+  if gh_available?
+    list_issues_via_gh(labels: labels,
+      state: state)
+  else
+    list_issues_via_api(
+      labels: labels, state: state
+    )
+  end
 end

 def fetch_issue(number)
@@ -70,7 +77,10 @@ module Aidp
 end

 def create_pull_request(title:, body:, head:, base:, issue_number:, draft: false, assignee: nil)
-
+  raise("GitHub CLI not available - cannot create PR") unless gh_available?
+
+  create_pull_request_via_gh(title: title, body: body, head: head, base: base,
+    issue_number: issue_number, draft: draft, assignee: assignee)
 end

 def add_labels(number, *labels)
@@ -109,17 +119,200 @@ module Aidp
 end

 def post_review_comment(number, body, commit_id: nil, path: nil, line: nil)
-  gh_available?
+  if gh_available?
+    post_review_comment_via_gh(number, body, commit_id: commit_id, path: path,
+      line: line)
+  else
+    post_review_comment_via_api(number,
+      body, commit_id: commit_id, path: path, line: line)
+  end
 end

 def list_pull_requests(labels: [], state: "open")
-  gh_available?
+  if gh_available?
+    list_pull_requests_via_gh(labels: labels,
+      state: state)
+  else
+    list_pull_requests_via_api(
+      labels: labels, state: state
+    )
+  end
 end

 def fetch_pr_comments(number)
   gh_available? ? fetch_pr_comments_via_gh(number) : fetch_pr_comments_via_api(number)
 end

+# Convert a draft PR to ready for review
+# @param number [Integer] PR number
+# @return [Boolean] True if successful
+def mark_pr_ready_for_review(number)
+  raise("GitHub CLI not available - cannot mark PR ready") unless gh_available?
+  mark_pr_ready_for_review_via_gh(number)
+end
+
+# Request reviewers for a PR
+# @param number [Integer] PR number
+# @param reviewers [Array<String>] GitHub usernames to request as reviewers
+# @return [Boolean] True if successful
+def request_reviewers(number, reviewers:)
+  return true if reviewers.nil? || reviewers.empty?
+  raise("GitHub CLI not available - cannot request reviewers") unless gh_available?
+  request_reviewers_via_gh(number, reviewers: reviewers)
+end
+
+# Get the actor who most recently added a label to a PR
+# @param number [Integer] PR number
+# @return [String, nil] GitHub username or nil
+def most_recent_pr_label_actor(number)
+  gh_available? ? most_recent_pr_label_actor_via_gh(number) : nil
+end
+
+# Fetch reactions on a specific comment
+# Returns array of reactions with user and content (emoji type)
+def fetch_comment_reactions(comment_id)
+  gh_available? ? fetch_comment_reactions_via_gh(comment_id) : fetch_comment_reactions_via_api(comment_id)
+end
+
+# Create or update a categorized comment (e.g., under a header) on an issue.
+# If a comment with the category header exists, either append to it or
+# replace it while archiving the previous content inline.
+def consolidate_category_comment(number, category_header, content, append: false)
+  Aidp.log_debug(
+    "repository_client",
+    "consolidate_category_comment_started",
+    number: number,
+    category_header: category_header,
+    append: append,
+    content_length: content.length,
+    content_preview: content[0, 100]
+  )
+
+  existing_comment = find_comment(number, category_header)
+
+  if existing_comment
+    body = if append
+      Aidp.log_debug(
+        "repository_client",
+        "updating_category_comment_appending",
+        comment_id: existing_comment[:id],
+        existing_body_length: existing_comment[:body].length,
+        existing_body_preview: existing_comment[:body][0, 100],
+        appending_content_length: content.length,
+        appending_content_preview: content[0, 100]
+      )
+      "#{existing_comment[:body]}\n\n#{content}"
+    else
+      Aidp.log_debug(
+        "repository_client",
+        "updating_category_comment_replacing",
+        comment_id: existing_comment[:id],
+        existing_body_length: existing_comment[:body].length,
+        existing_body_preview: existing_comment[:body][0, 100],
+        replacement_content_length: content.length,
+        replacement_content_preview: content[0, 100]
+      )
+
+      archived_prefix = "<!-- ARCHIVED_PLAN_START "
+      archived_suffix = " ARCHIVED_PLAN_END -->"
+      archived_content = "#{archived_prefix}#{Time.now.utc.iso8601}#{archived_suffix}\n\n#{existing_comment[:body].gsub(
+        /^(#{Regexp.escape(category_header)}|#{Regexp.escape(archived_prefix)}.*?#{Regexp.escape(archived_suffix)})/m, ""
+      )}\n\n"
+
+      "#{category_header}\n\n#{archived_content}#{content}"
+    end
+
+    update_comment(existing_comment[:id], body)
+
+    Aidp.log_debug(
+      "repository_client",
+      "existing_category_comment_updated",
+      comment_id: existing_comment[:id],
+      updated_body_length: body.length,
+      updated_body_preview: body[0, 100],
+      update_method: append ? "append" : "replace"
+    )
+
+    {
+      id: existing_comment[:id],
+      body: body
+    }
+  else
+    body = "#{category_header}\n\n#{content}"
+
+    post_comment(number, body)
+
+    Aidp.log_debug(
+      "repository_client",
+      "new_category_comment_created",
+      issue_number: number,
+      body_length: body.length,
+      body_preview: body[0, 100],
+      category_header: category_header
+    )
+
+    {
+      id: 999,
+      body: body
+    }
+  end
+rescue => e
+  Aidp.log_error(
+    "repository_client",
+    "consolidate_category_comment_failed",
+    error: e.message,
+    error_class: e.class.name,
+    number: number,
+    category_header: category_header,
+    content_length: content.length,
+    content_preview: content[0, 100],
+    backtrace: e.backtrace&.first(5)
+  )
+
+  raise RuntimeError, "GitHub error", e.backtrace
+end
+
+# GitHub Projects V2 operations
+def fetch_project(project_id)
+  raise "GitHub CLI not available - Projects API requires gh CLI" unless gh_available?
+  fetch_project_via_gh(project_id)
+end
+
+def list_project_items(project_id)
+  raise "GitHub CLI not available - Projects API requires gh CLI" unless gh_available?
+  list_project_items_via_gh(project_id)
+end
+
+def link_issue_to_project(project_id, issue_number)
+  raise "GitHub CLI not available - Projects API requires gh CLI" unless gh_available?
+  link_issue_to_project_via_gh(project_id, issue_number)
+end
+
+def update_project_item_field(item_id, field_id, value)
+  raise "GitHub CLI not available - Projects API requires gh CLI" unless gh_available?
+  update_project_item_field_via_gh(item_id, field_id, value)
+end
+
+def fetch_project_fields(project_id)
+  raise "GitHub CLI not available - Projects API requires gh CLI" unless gh_available?
+  fetch_project_fields_via_gh(project_id)
+end
+
+def create_project_field(project_id, name, field_type, options: nil)
+  raise "GitHub CLI not available - Projects API requires gh CLI" unless gh_available?
+  create_project_field_via_gh(project_id, name, field_type, options: options)
+end
+
+def create_issue(title:, body:, labels: [], assignees: [])
+  raise "GitHub CLI not available - cannot create issue" unless gh_available?
+  create_issue_via_gh(title: title, body: body, labels: labels, assignees: assignees)
+end
+
+def merge_pull_request(number, merge_method: "squash")
+  raise "GitHub CLI not available - cannot merge PR" unless gh_available?
+  merge_pull_request_via_gh(number, merge_method: merge_method)
+end
+
 private

 # Retry a GitHub CLI operation with exponential backoff
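
The hunk above adds several public entry points (`mark_pr_ready_for_review`, `request_reviewers`, `consolidate_category_comment`, `merge_pull_request`), each of which raises unless the GitHub CLI is available. A minimal sketch of how a caller might drive them follows; the class name and constructor argument are assumptions (the initializer is not part of this diff), while the method names and keyword arguments come straight from the hunk:

```ruby
# Hypothetical usage of the new repository-client helpers added in 0.34.0.
# Assumed: Aidp::Watch::RepositoryClient and its `repo:` constructor argument.
client = Aidp::Watch::RepositoryClient.new(repo: "acme/widgets")

# Post (or update) a plan comment grouped under a category header on issue 42.
client.consolidate_category_comment(42, "## Implementation Plan", "Step 1: ...", append: false)

# Flip draft PR 17 to "ready" and request a reviewer; both require `gh`.
client.mark_pr_ready_for_review(17)
client.request_reviewers(17, reviewers: ["octocat"])

# Squash-merge once CI is green (delegates to `gh pr merge --squash --delete-branch`).
client.merge_pull_request(17, merge_method: "squash")
```
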
@@ -180,7 +373,10 @@ module Aidp
 def list_issues_via_api(labels:, state:)
   label_param = labels.join(",")
   uri = URI("https://api.github.com/repos/#{full_repo}/issues?state=#{state}")
-
+  unless label_param.empty?
+    uri.query = [uri.query,
+      "labels=#{URI.encode_www_form_component(label_param)}"].compact.join("&")
+  end

   response = Net::HTTP.get_response(uri)
   return [] unless response.code == "200"
@@ -218,11 +414,20 @@ module Aidp
 end

 def post_comment_via_gh(number, body)
-
-
-
+  # Use gh api to post comment and get structured response with comment ID
+  with_gh_retry("post_comment") do
+    cmd = ["gh", "api", "repos/#{full_repo}/issues/#{number}/comments",
+      "-X", "POST", "-f", "body=#{body}"]
+    stdout, stderr, status = Open3.capture3(*cmd)
+    raise "Failed to post comment via gh: #{stderr.strip}" unless status.success?

-
+    response = JSON.parse(stdout)
+    {
+      id: response["id"],
+      url: response["html_url"],
+      body: response["body"]
+    }
+  end
 end

 def post_comment_via_api(number, body)
@@ -236,7 +441,13 @@ module Aidp
   end

   raise "GitHub API comment failed (#{response.code})" unless response.code.start_with?("2")
-
+
+  data = JSON.parse(response.body)
+  {
+    id: data["id"],
+    url: data["html_url"],
+    body: data["body"]
+  }
 end

 def find_comment_via_gh(number, header_text)
@@ -274,9 +485,57 @@ module Aidp
   end

   raise "GitHub API update comment failed (#{response.code})" unless response.code.start_with?("2")
+
   response.body
 end

+def fetch_comment_reactions_via_gh(comment_id)
+  with_gh_retry("fetch_comment_reactions") do
+    cmd = ["gh", "api", "repos/#{full_repo}/issues/comments/#{comment_id}/reactions"]
+    stdout, stderr, status = Open3.capture3(*cmd)
+    raise "Failed to fetch reactions via gh: #{stderr.strip}" unless status.success?
+
+    reactions = JSON.parse(stdout)
+    reactions.map do |r|
+      {
+        id: r["id"],
+        user: r.dig("user", "login"),
+        content: r["content"],
+        created_at: r["created_at"]
+      }
+    end
+  end
+rescue => e
+  Aidp.log_error("repository_client", "fetch_reactions_failed", comment_id: comment_id, error: e.message)
+  []
+end
+
+def fetch_comment_reactions_via_api(comment_id)
+  uri = URI("https://api.github.com/repos/#{full_repo}/issues/comments/#{comment_id}/reactions")
+  request = Net::HTTP::Get.new(uri)
+  # Reactions API requires special Accept header
+  request["Accept"] = "application/vnd.github+json"
+
+  response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) do |http|
+    http.request(request)
+  end
+
+  return [] unless response.code.start_with?("2")
+
+  reactions = JSON.parse(response.body)
+  reactions.map do |r|
+    {
+      id: r["id"],
+      user: r.dig("user", "login"),
+      content: r["content"],
+      created_at: r["created_at"]
+    }
+  end
+rescue => e
+  Aidp.log_error("repository_client", "fetch_reactions_api_failed", comment_id: comment_id, error: e.message)
+  []
+end
+
 def create_pull_request_via_gh(title:, body:, head:, base:, issue_number:, draft: false, assignee: nil)
   Aidp.log_debug(
     "repository_client",
@@ -395,6 +654,7 @@ module Aidp
   end

   raise "Failed to add labels via API (#{response.code})" unless response.code.start_with?("2")
+
   response.body
 end

@@ -505,7 +765,10 @@ module Aidp
 def list_pull_requests_via_api(labels:, state:)
   label_param = labels.join(",")
   uri = URI("https://api.github.com/repos/#{full_repo}/pulls?state=#{state}")
-
+  unless label_param.empty?
+    uri.query = [uri.query,
+      "labels=#{URI.encode_www_form_component(label_param)}"].compact.join("&")
+  end

   response = Net::HTTP.get_response(uri)
   return [] unless response.code == "200"
@@ -517,7 +780,7 @@ module Aidp
 end

 def fetch_pull_request_via_gh(number)
-  fields = %w[number title body labels state url headRefName baseRefName commits author mergeable]
+  fields = %w[number title body labels state url headRefName baseRefName headRefOid commits author mergeable mergeStateStatus]
   cmd = ["gh", "pr", "view", number.to_s, "--repo", full_repo, "--json", fields.join(",")]

   stdout, stderr, status = Open3.capture3(*cmd)
@@ -556,6 +819,7 @@ module Aidp
   end

   raise "GitHub API diff failed (#{response.code})" unless response.code == "200"
+
   response.body
 end

@@ -623,7 +887,8 @@ module Aidp
   data = JSON.parse(response.body)
   data["check_runs"] || []
 else
-  Aidp.log_warn("repository_client", "Failed to fetch check runs via API", sha: head_sha,
+  Aidp.log_warn("repository_client", "Failed to fetch check runs via API", sha: head_sha,
+    code: response.code)
   []
 end

@@ -639,7 +904,7 @@ module Aidp

 def post_review_comment_via_gh(number, body, commit_id: nil, path: nil, line: nil)
   if path && line && commit_id
-    #
+    # NOTE: gh CLI doesn't support inline comments directly, so we use the API
     # For inline comments, we need to use the GitHub API
     post_review_comment_via_api(number, body, commit_id: commit_id, path: path, line: line)
   else
@@ -714,6 +979,98 @@ module Aidp
   []
 end

+def mark_pr_ready_for_review_via_gh(number)
+  cmd = ["gh", "pr", "ready", number.to_s, "--repo", full_repo]
+  _stdout, stderr, status = Open3.capture3(*cmd)
+
+  unless status.success?
+    Aidp.log_warn("repository_client", "mark_pr_ready_failed",
+      pr: number, stderr: stderr.strip)
+    return false
+  end
+
+  Aidp.log_info("repository_client", "pr_marked_ready", pr: number)
+  true
+rescue => e
+  Aidp.log_error("repository_client", "mark_pr_ready_exception",
+    pr: number, error: e.message)
+  false
+end
+
+def request_reviewers_via_gh(number, reviewers:)
+  reviewer_args = reviewers.flat_map { |r| ["--add-reviewer", r] }
+  cmd = ["gh", "pr", "edit", number.to_s, "--repo", full_repo] + reviewer_args
+  _stdout, stderr, status = Open3.capture3(*cmd)
+
+  unless status.success?
+    Aidp.log_warn("repository_client", "request_reviewers_failed",
+      pr: number, reviewers: reviewers, stderr: stderr.strip)
+    return false
+  end
+
+  Aidp.log_info("repository_client", "reviewers_requested",
+    pr: number, reviewers: reviewers)
+  true
+rescue => e
+  Aidp.log_error("repository_client", "request_reviewers_exception",
+    pr: number, reviewers: reviewers, error: e.message)
+  false
+end
+
+def most_recent_pr_label_actor_via_gh(number)
+  # Use GitHub GraphQL API to fetch the most recent label event actor for a PR
+  query = <<~GRAPHQL
+    query($owner: String!, $repo: String!, $number: Int!) {
+      repository(owner: $owner, name: $repo) {
+        pullRequest(number: $number) {
+          timelineItems(last: 100, itemTypes: [LABELED_EVENT]) {
+            nodes {
+              ... on LabeledEvent {
+                createdAt
+                actor {
+                  login
+                }
+              }
+            }
+          }
+        }
+      }
+    }
+  GRAPHQL
+
+  cmd = [
+    "gh", "api", "graphql",
+    "-f", "query=#{query}",
+    "-F", "owner=#{owner}",
+    "-F", "repo=#{repo}",
+    "-F", "number=#{number}"
+  ]
+
+  stdout, stderr, status = Open3.capture3(*cmd)
+  unless status.success?
+    Aidp.log_warn("repository_client", "pr_label_actor_query_failed",
+      pr: number, error: stderr.strip)
+    return nil
+  end
+
+  data = JSON.parse(stdout)
+  events = data.dig("data", "repository", "pullRequest", "timelineItems", "nodes") || []
+
+  valid_events = events.select { |event| event.dig("actor", "login") }
+  return nil if valid_events.empty?
+
+  most_recent = valid_events.max_by { |event| event["createdAt"] }
+  most_recent.dig("actor", "login")
+rescue JSON::ParserError => e
+  Aidp.log_warn("repository_client", "pr_label_actor_parse_failed",
+    pr: number, error: e.message)
+  nil
+rescue => e
+  Aidp.log_warn("repository_client", "pr_label_actor_exception",
+    pr: number, error: e.message)
+  nil
+end
+
 # Normalization methods for PRs
 def normalize_pull_request(raw)
   {
@@ -752,8 +1109,9 @@ module Aidp
     url: raw["url"],
     head_ref: raw["headRefName"],
     base_ref: raw["baseRefName"],
-    head_sha: raw.dig("commits", 0, "oid")
-    mergeable: raw["mergeable"]
+    head_sha: raw["headRefOid"] || raw.dig("commits", 0, "oid"),
+    mergeable: raw["mergeable"],
+    merge_state_status: raw["mergeStateStatus"]&.downcase
   }
 end

@@ -769,7 +1127,8 @@ module Aidp
     head_ref: raw.dig("head", "ref"),
     base_ref: raw.dig("base", "ref"),
     head_sha: raw.dig("head", "sha"),
-    mergeable: raw["mergeable"]
+    mergeable: raw["mergeable"],
+    merge_state_status: raw["merge_state_status"]&.downcase
   }
 end

@@ -822,6 +1181,13 @@ module Aidp
 end

 def normalize_ci_status_combined(check_runs, commit_statuses, head_sha)
+  # Log raw inputs for debugging
+  Aidp.log_debug("repository_client", "normalize_ci_status_combined_raw_inputs",
+    check_run_count: check_runs.length,
+    commit_status_count: commit_statuses.length,
+    check_runs_sample: check_runs.first(3).map { |cr| {name: cr["name"], status: cr["status"], conclusion: cr["conclusion"]} },
+    commit_statuses_sample: commit_statuses.first(3).map { |cs| {context: cs["context"], state: cs["state"]} })
+
   # Convert commit statuses to same format as check runs for unified processing
   # normalize_ci_status expects string keys, so we use string keys here
   checks_from_statuses = commit_statuses.map do |status|
@@ -841,7 +1207,8 @@ module Aidp
   Aidp.log_debug("repository_client", "combined_ci_checks",
     check_run_count: check_runs.length,
     commit_status_count: commit_statuses.length,
-    total_checks: all_checks.length
+    total_checks: all_checks.length,
+    combined_checks_sample: all_checks.first(5).map { |c| {name: c["name"], status: c["status"], conclusion: c["conclusion"]} })

   # Use existing normalize logic
   normalize_ci_status(all_checks, head_sha)
@@ -864,6 +1231,12 @@ module Aidp
 end

 def normalize_ci_status(check_runs, head_sha)
+  # Log raw input data for debugging
+  Aidp.log_debug("repository_client", "normalize_ci_status_raw_input",
+    sha: head_sha,
+    raw_check_run_count: check_runs.length,
+    raw_checks_detailed: check_runs.map { |r| {name: r["name"], status: r["status"], conclusion: r["conclusion"]} })
+
   checks = check_runs.map do |run|
     {
       name: run["name"],
@@ -874,7 +1247,7 @@ module Aidp
     }
   end

-  Aidp.log_debug("repository_client", "
+  Aidp.log_debug("repository_client", "normalize_ci_status_normalized",
     check_count: checks.length,
     checks: checks.map { |c| {name: c[:name], status: c[:status], conclusion: c[:conclusion]} })

@@ -904,7 +1277,9 @@ module Aidp
   non_success_checks = checks.reject { |c| c[:conclusion] == "success" }
   Aidp.log_debug("repository_client", "ci_status_unknown",
     non_success_count: non_success_checks.length,
-    non_success_checks: non_success_checks.map
+    non_success_checks: non_success_checks.map do |c|
+      {name: c[:name], conclusion: c[:conclusion]}
+    end)
   "unknown"
 end

@@ -992,6 +1367,480 @@ module Aidp
   updated_at: raw["updated_at"]
   }
 end
+
+# GitHub Projects V2 API implementations
+def fetch_project_via_gh(project_id)
+  Aidp.log_debug("repository_client", "fetch_project", project_id: project_id)
+
+  query = <<~GRAPHQL
+    query($projectId: ID!) {
+      node(id: $projectId) {
+        ... on ProjectV2 {
+          id
+          title
+          number
+          url
+          fields(first: 100) {
+            nodes {
+              ... on ProjectV2Field {
+                id
+                name
+                dataType
+              }
+              ... on ProjectV2SingleSelectField {
+                id
+                name
+                dataType
+                options {
+                  id
+                  name
+                }
+              }
+            }
+          }
+        }
+      }
+    }
+  GRAPHQL
+
+  result = execute_graphql_query(query, projectId: project_id)
+  project_data = result.dig("data", "node")
+
+  unless project_data
+    Aidp.log_warn("repository_client", "Project not found", project_id: project_id)
+    raise "Project not found: #{project_id}"
+  end
+
+  normalize_project(project_data)
+rescue => e
+  Aidp.log_error("repository_client", "Failed to fetch project", project_id: project_id, error: e.message)
+  raise
+end
+
+def list_project_items_via_gh(project_id)
+  Aidp.log_debug("repository_client", "list_project_items", project_id: project_id)
+
+  query = <<~GRAPHQL
+    query($projectId: ID!, $cursor: String) {
+      node(id: $projectId) {
+        ... on ProjectV2 {
+          items(first: 100, after: $cursor) {
+            pageInfo {
+              hasNextPage
+              endCursor
+            }
+            nodes {
+              id
+              type
+              content {
+                ... on Issue {
+                  number
+                  title
+                  state
+                  url
+                }
+                ... on PullRequest {
+                  number
+                  title
+                  state
+                  url
+                }
+              }
+              fieldValues(first: 100) {
+                nodes {
+                  ... on ProjectV2ItemFieldTextValue {
+                    text
+                    field {
+                      ... on ProjectV2Field {
+                        id
+                        name
+                      }
+                    }
+                  }
+                  ... on ProjectV2ItemFieldSingleSelectValue {
+                    name
+                    field {
+                      ... on ProjectV2SingleSelectField {
+                        id
+                        name
+                      }
+                    }
+                  }
+                }
+              }
+            }
+          }
+        }
+      }
+    }
+  GRAPHQL
+
+  all_items = []
+  cursor = nil
+  has_next_page = true
+
+  while has_next_page
+    variables = {projectId: project_id}
+    variables[:cursor] = cursor if cursor
+
+    result = execute_graphql_query(query, **variables)
+    items_data = result.dig("data", "node", "items")
+
+    break unless items_data
+
+    items = items_data["nodes"] || []
+    all_items.concat(items.map { |item| normalize_project_item(item) })
+
+    page_info = items_data["pageInfo"]
+    has_next_page = page_info["hasNextPage"]
+    cursor = page_info["endCursor"]
+  end
+
+  Aidp.log_debug("repository_client", "list_project_items_complete", project_id: project_id, count: all_items.size)
+  all_items
+rescue => e
+  Aidp.log_error("repository_client", "Failed to list project items", project_id: project_id, error: e.message)
+  raise
+end
+
+def link_issue_to_project_via_gh(project_id, issue_number)
+  Aidp.log_debug("repository_client", "link_issue_to_project", project_id: project_id, issue_number: issue_number)
+
+  # First, get the issue's node ID
+  issue_query = <<~GRAPHQL
+    query($owner: String!, $repo: String!, $number: Int!) {
+      repository(owner: $owner, name: $repo) {
+        issue(number: $number) {
+          id
+        }
+      }
+    }
+  GRAPHQL
+
+  issue_result = execute_graphql_query(issue_query, owner: owner, repo: repo, number: issue_number)
+  issue_id = issue_result.dig("data", "repository", "issue", "id")
+
+  unless issue_id
+    raise "Issue ##{issue_number} not found in #{full_repo}"
+  end
+
+  # Now add the issue to the project
+  mutation = <<~GRAPHQL
+    mutation($projectId: ID!, $contentId: ID!) {
+      addProjectV2ItemById(input: {projectId: $projectId, contentId: $contentId}) {
+        item {
+          id
+        }
+      }
+    }
+  GRAPHQL
+
+  result = execute_graphql_query(mutation, projectId: project_id, contentId: issue_id)
+  item_id = result.dig("data", "addProjectV2ItemById", "item", "id")
+
+  Aidp.log_debug("repository_client", "link_issue_to_project_complete", project_id: project_id, issue_number: issue_number, item_id: item_id)
+  item_id
+rescue => e
+  Aidp.log_error("repository_client", "Failed to link issue to project", project_id: project_id, issue_number: issue_number, error: e.message)
+  raise
+end
+
+def update_project_item_field_via_gh(item_id, field_id, value)
+  Aidp.log_debug("repository_client", "update_project_item_field", item_id: item_id, field_id: field_id, value: value)
+
+  # Determine the mutation based on value type
+  mutation = if value.is_a?(Hash) && value[:option_id]
+    # Single select field
+    <<~GRAPHQL
+      mutation($projectId: ID!, $itemId: ID!, $fieldId: ID!, $optionId: String!) {
+        updateProjectV2ItemFieldValue(input: {
+          projectId: $projectId
+          itemId: $itemId
+          fieldId: $fieldId
+          value: {singleSelectOptionId: $optionId}
+        }) {
+          projectV2Item {
+            id
+          }
+        }
+      }
+    GRAPHQL
+  else
+    # Text field
+    <<~GRAPHQL
+      mutation($projectId: ID!, $itemId: ID!, $fieldId: ID!, $text: String!) {
+        updateProjectV2ItemFieldValue(input: {
+          projectId: $projectId
+          itemId: $itemId
+          fieldId: $fieldId
+          value: {text: $text}
+        }) {
+          projectV2Item {
+            id
+          }
+        }
+      }
+    GRAPHQL
+  end
+
+  # Note: We need the project ID for the mutation
+  # For now, we'll require it to be passed in the value hash
+  project_id = value.is_a?(Hash) ? value[:project_id] : nil
+  raise "project_id required in value hash" unless project_id
+
+  variables = {
+    projectId: project_id,
+    itemId: item_id,
+    fieldId: field_id
+  }
+
+  if value.is_a?(Hash) && value[:option_id]
+    variables[:optionId] = value[:option_id]
+  else
+    variables[:text] = value.to_s
+  end
+
+  result = execute_graphql_query(mutation, **variables)
+  success = result.dig("data", "updateProjectV2ItemFieldValue", "projectV2Item", "id")
+
+  Aidp.log_debug("repository_client", "update_project_item_field_complete", item_id: item_id, field_id: field_id, success: !success.nil?)
+  success
+rescue => e
+  Aidp.log_error("repository_client", "Failed to update project item field", item_id: item_id, field_id: field_id, error: e.message)
+  raise
+end
+
+def fetch_project_fields_via_gh(project_id)
+  Aidp.log_debug("repository_client", "fetch_project_fields", project_id: project_id)
+
+  query = <<~GRAPHQL
+    query($projectId: ID!) {
+      node(id: $projectId) {
+        ... on ProjectV2 {
+          fields(first: 100) {
+            nodes {
+              ... on ProjectV2Field {
+                id
+                name
+                dataType
+              }
+              ... on ProjectV2SingleSelectField {
+                id
+                name
+                dataType
+                options {
+                  id
+                  name
+                }
+              }
+            }
+          }
+        }
+      }
+    }
+  GRAPHQL
+
+  result = execute_graphql_query(query, projectId: project_id)
+  fields_data = result.dig("data", "node", "fields", "nodes") || []
+
+  fields = fields_data.map { |field| normalize_project_field(field) }
+  Aidp.log_debug("repository_client", "fetch_project_fields_complete", project_id: project_id, count: fields.size)
+  fields
+rescue => e
+  Aidp.log_error("repository_client", "Failed to fetch project fields", project_id: project_id, error: e.message)
+  raise
+end
+
+def create_project_field_via_gh(project_id, name, field_type, options: nil)
+  Aidp.log_debug("repository_client", "create_project_field", project_id: project_id, name: name, field_type: field_type)
+
+  mutation = if field_type == "SINGLE_SELECT" && options
+    <<~GRAPHQL
+      mutation($projectId: ID!, $name: String!, $dataType: ProjectV2CustomFieldType!, $options: [ProjectV2SingleSelectFieldOptionInput!]!) {
+        createProjectV2Field(input: {
+          projectId: $projectId
+          dataType: $dataType
+          name: $name
+          singleSelectOptions: $options
+        }) {
+          projectV2Field {
+            ... on ProjectV2SingleSelectField {
+              id
+              name
+              dataType
+              options {
+                id
+                name
+              }
+            }
+          }
+        }
+      }
+    GRAPHQL
+  else
+    <<~GRAPHQL
+      mutation($projectId: ID!, $name: String!, $dataType: ProjectV2CustomFieldType!) {
+        createProjectV2Field(input: {
+          projectId: $projectId
+          dataType: $dataType
+          name: $name
+        }) {
+          projectV2Field {
+            ... on ProjectV2Field {
+              id
+              name
+              dataType
+            }
+          }
+        }
+      }
+    GRAPHQL
+  end
+
+  variables = {projectId: project_id, name: name, dataType: field_type}
+  variables[:options] = options if options
+
+  result = execute_graphql_query(mutation, **variables)
+  field_data = result.dig("data", "createProjectV2Field", "projectV2Field")
+
+  unless field_data
+    Aidp.log_warn("repository_client", "Failed to create project field", project_id: project_id, name: name)
+    raise "Failed to create project field: #{name}"
+  end
+
+  field = normalize_project_field(field_data)
+  Aidp.log_debug("repository_client", "create_project_field_complete", project_id: project_id, field_id: field[:id])
+  field
+rescue => e
+  Aidp.log_error("repository_client", "Failed to create project field", project_id: project_id, name: name, error: e.message)
+  raise
+end
+
+def create_issue_via_gh(title:, body:, labels: [], assignees: [])
+  Aidp.log_debug("repository_client", "create_issue", title: title, label_count: labels.size, assignee_count: assignees.size)
+
+  cmd = ["gh", "issue", "create", "--repo", full_repo, "--title", title, "--body", body]
+  labels.each { |label| cmd += ["--label", label] }
+  assignees.each { |assignee| cmd += ["--assignee", assignee] }
+
+  stdout, stderr, status = Open3.capture3(*cmd)
+  raise "Failed to create issue via gh: #{stderr.strip}" unless status.success?
+
+  # Parse the issue URL to get the number
+  issue_url = stdout.strip
+  issue_number = issue_url.split("/").last.to_i
+
+  Aidp.log_debug("repository_client", "create_issue_complete", issue_number: issue_number, url: issue_url)
+  {number: issue_number, url: issue_url}
+rescue => e
+  Aidp.log_error("repository_client", "Failed to create issue", title: title, error: e.message)
+  raise
+end
+
+def merge_pull_request_via_gh(number, merge_method: "squash")
+  Aidp.log_debug("repository_client", "merge_pull_request", number: number, merge_method: merge_method)
+
+  cmd = ["gh", "pr", "merge", number.to_s, "--repo", full_repo]
+  case merge_method
+  when "merge"
+    cmd << "--merge"
+  when "squash"
+    cmd << "--squash"
+  when "rebase"
+    cmd << "--rebase"
+  else
+    raise "Unknown merge method: #{merge_method}"
+  end
+
+  # Add auto-delete branch flag
+  cmd << "--delete-branch"
+
+  stdout, stderr, status = Open3.capture3(*cmd)
+  raise "Failed to merge PR via gh: #{stderr.strip}" unless status.success?
+
+  Aidp.log_debug("repository_client", "merge_pull_request_complete", number: number)
+  stdout.strip
+rescue => e
+  Aidp.log_error("repository_client", "Failed to merge PR", number: number, error: e.message)
+  raise
+end
+
+def execute_graphql_query(query, **variables)
+  cmd = ["gh", "api", "graphql", "-f", "query=#{query}"]
+  variables.each do |key, value|
+    flag = value.is_a?(Integer) ? "-F" : "-f"
+    cmd += [flag, "#{key}=#{value}"]
+  end
+
+  stdout, stderr, status = Open3.capture3(*cmd)
+  unless status.success?
+    Aidp.log_warn("repository_client", "GraphQL query failed", error: stderr.strip)
+    raise "GraphQL query failed: #{stderr.strip}"
+  end

+  JSON.parse(stdout)
+rescue JSON::ParserError => e
+  Aidp.log_error("repository_client", "Failed to parse GraphQL response", error: e.message)
+  raise "Failed to parse GraphQL response: #{e.message}"
+end
+
+def normalize_project(raw)
+  {
+    id: raw["id"],
+    title: raw["title"],
+    number: raw["number"],
+    url: raw["url"],
+    fields: Array(raw.dig("fields", "nodes")).map { |field| normalize_project_field(field) }
+  }
+end
+
+def normalize_project_field(raw)
+  field = {
+    id: raw["id"],
+    name: raw["name"],
+    data_type: raw["dataType"]
+  }
+
+  # Add options for single select fields
+  if raw["options"]
+    field[:options] = raw["options"].map { |opt| {id: opt["id"], name: opt["name"]} }
+  end
+
+  field
+end
+
+def normalize_project_item(raw)
+  item = {
+    id: raw["id"],
+    type: raw["type"]
+  }
+
+  # Add content (issue or PR)
+  if raw["content"]
+    content = raw["content"]
+    item[:content] = {
+      number: content["number"],
+      title: content["title"],
+      state: content["state"],
+      url: content["url"]
+    }
+  end
+
+  # Add field values
+  if raw["fieldValues"]
+    field_values = {}
+    Array(raw.dig("fieldValues", "nodes")).each do |fv|
+      next unless fv["field"]
+
+      field_name = fv.dig("field", "name")
+      field_value = fv["text"] || fv["name"]
+      field_values[field_name] = field_value
+    end
+    item[:field_values] = field_values
+  end
+
+  item
+end
 end
 end
 end