carson 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,253 @@
1
+ module Carson
2
+ class Runtime
3
+ module Review
4
+ module DataAccess
5
+ private
6
+
7
# Full, fully-paginated PR details: fetches the base GraphQL payload, walks
# every connection (threads/comments/reviews) to completion, then converts
# the raw node into the internal symbol-keyed structure.
def pull_request_details( owner:, repo:, pr_number: )
  node = pull_request_details_node( owner: owner, repo: repo, pr_number: pr_number )
  # Mutates node in place so normalisation below sees complete connections.
  paginate_pull_request_connections!( owner: owner, repo: repo, pr_number: pr_number, node: node )
  normalise_pull_request_details( node: node )
end
12
+
13
# Base PR payload with first page of each connection; remaining pages are fetched separately.
#
# Runs the shared GraphQL query through `gh api graphql` and returns the raw
# "pullRequest" hash. Raises a RuntimeError when the gh call fails or the
# payload carries no pullRequest node (unknown / invisible PR number).
def pull_request_details_node( owner:, repo:, pr_number: )
  stdout_text, stderr_text, success, = gh_run(
    "api", "graphql",
    "-f", "query=#{pull_request_details_query}",
    "-F", "owner=#{owner}",
    "-F", "repo=#{repo}",
    "-F", "number=#{pr_number}"
  )
  unless success
    error_text = gh_error_text( stdout_text: stdout_text, stderr_text: stderr_text, fallback: "unable to read pull request ##{pr_number}" )
    raise error_text
  end
  payload = JSON.parse( stdout_text )
  node = payload.dig( "data", "repository", "pullRequest" )
  # A null pullRequest in an otherwise-successful response means not found.
  raise "pull request ##{pr_number} not found" unless node.is_a?( Hash )
  node
end
31
+
32
# Paginates every relevant PR connection so gate/sweep decisions are based on complete data.
def paginate_pull_request_connections!( owner:, repo:, pr_number:, node: )
  %w[ reviewThreads comments reviews ].each do |connection_name|
    paginate_pull_request_connection!(
      owner: owner,
      repo: repo,
      pr_number: pr_number,
      node: node,
      connection_name: connection_name
    )
  end
end
38
+
39
# Fetches remaining connection pages using pageInfo; missing pageInfo defaults to one-page behaviour.
#
# Mutates node[connection_name] in place: "nodes" becomes the fully
# accumulated list and "pageInfo" is replaced with the last page's state.
# Returns nil without changes when the connection payload is not a Hash.
def paginate_pull_request_connection!( owner:, repo:, pr_number:, node:, connection_name: )
  connection = node[ connection_name ]
  return unless connection.is_a?( Hash )
  nodes = Array( connection[ "nodes" ] )
  page_info = connection[ "pageInfo" ].is_a?( Hash ) ? connection[ "pageInfo" ] : {}
  while page_info[ "hasNextPage" ] == true
    cursor = page_info[ "endCursor" ].to_s
    # Defensive stop: hasNextPage without a cursor would otherwise loop forever.
    break if cursor.empty?
    page_connection = pull_request_connection_page(
      owner: owner,
      repo: repo,
      pr_number: pr_number,
      connection_name: connection_name,
      after_cursor: cursor
    )
    nodes.concat( Array( page_connection[ "nodes" ] ) )
    page_info = page_connection[ "pageInfo" ].is_a?( Hash ) ? page_connection[ "pageInfo" ] : {}
  end
  connection[ "nodes" ] = nodes
  connection[ "pageInfo" ] = page_info
end
61
+
62
# Requests one additional page for the chosen PR connection.
#
# Returns the raw connection Hash ("nodes" + "pageInfo") for the page after
# after_cursor. Raises a RuntimeError when the gh call fails, the PR node is
# absent, or the connection payload is missing from the response.
def pull_request_connection_page( owner:, repo:, pr_number:, connection_name:, after_cursor: )
  query = pull_request_connection_page_query( connection_name: connection_name )
  stdout_text, stderr_text, success, = gh_run(
    "api", "graphql",
    "-f", "query=#{query}",
    "-F", "owner=#{owner}",
    "-F", "repo=#{repo}",
    "-F", "number=#{pr_number}",
    "-F", "after=#{after_cursor}"
  )
  unless success
    error_text = gh_error_text(
      stdout_text: stdout_text,
      stderr_text: stderr_text,
      fallback: "unable to paginate pull request ##{pr_number} #{connection_name}"
    )
    raise error_text
  end
  payload = JSON.parse( stdout_text )
  node = payload.dig( "data", "repository", "pullRequest" )
  raise "pull request ##{pr_number} not found during #{connection_name} pagination" unless node.is_a?( Hash )
  connection = node[ connection_name ]
  raise "missing #{connection_name} payload during pagination" unless connection.is_a?( Hash )
  connection
end
88
+
89
# Converts the raw GraphQL PR node into the internal symbol-keyed structure
# consumed by the review gate and sweep logic.
def normalise_pull_request_details( node: )
  {
    number: node.fetch( "number" ),
    title: node.fetch( "title" ).to_s,
    url: node.fetch( "url" ).to_s,
    state: node.fetch( "state" ).to_s.upcase,
    updated_at: node.fetch( "updatedAt" ).to_s,
    # mergedAt / closedAt may be null in the payload; to_s maps nil to "".
    merged_at: node[ "mergedAt" ].to_s,
    closed_at: node[ "closedAt" ].to_s,
    # author may be null for deleted users; dig + to_s yields "" then.
    author: { login: node.dig( "author", "login" ).to_s },
    comments: normalise_issue_comments( nodes: node.dig( "comments", "nodes" ) ),
    reviews: normalise_reviews( nodes: node.dig( "reviews", "nodes" ) ),
    review_threads: normalise_review_threads( nodes: node.dig( "reviewThreads", "nodes" ) )
  }
end
104
+
105
# Converts GraphQL issue comment nodes into a stable internal format.
# Missing fields (including a deleted author) normalise to empty strings.
def normalise_issue_comments( nodes: )
  Array( nodes ).map do |raw|
    author_login = raw.dig( "author", "login" ).to_s
    {
      author: author_login,
      body: raw[ "body" ].to_s,
      url: raw[ "url" ].to_s,
      created_at: raw[ "createdAt" ].to_s
    }
  end
end
116
+
117
# Converts GraphQL review nodes into a stable internal format.
# Note: created_at is sourced from the review's submittedAt timestamp.
def normalise_reviews( nodes: )
  Array( nodes ).map do |raw|
    review_state = raw[ "state" ].to_s.upcase
    {
      author: raw.dig( "author", "login" ).to_s,
      state: review_state,
      body: raw[ "body" ].to_s,
      url: raw[ "url" ].to_s,
      created_at: raw[ "submittedAt" ].to_s
    }
  end
end
129
+
130
# Converts GraphQL review-thread nodes into a stable internal format.
# Boolean flags are strict (== true) so null/absent values become false.
def normalise_review_threads( nodes: )
  Array( nodes ).map do |thread|
    comment_nodes = Array( thread.dig( "comments", "nodes" ) )
    normalised_comments = comment_nodes.map do |comment|
      {
        author: comment.dig( "author", "login" ).to_s,
        body: comment[ "body" ].to_s,
        url: comment[ "url" ].to_s,
        created_at: comment[ "createdAt" ].to_s
      }
    end
    {
      is_resolved: thread[ "isResolved" ] == true,
      is_outdated: thread[ "isOutdated" ] == true,
      comments: normalised_comments
    }
  end
end
147
+
148
# Lists PRs (via REST, newest-updated first) whose updated_at is at or after
# cutoff_time and whose state is enabled in config.review_sweep_states.
# Stops paging once a page contains an entry older than the cutoff; a hard
# 50-page safety cap is verified with a probe request before giving up.
# Raises a RuntimeError on gh failures or when the cap is genuinely exceeded.
def recent_pull_requests_for_sweep( owner:, repo:, cutoff_time: )
  results = []
  page = 1
  max_pages = 50
  loop do
    stdout_text, stderr_text, success, = gh_run(
      "api", "repos/#{owner}/#{repo}/pulls",
      "--method", "GET",
      "-f", "state=all",
      "-f", "sort=updated",
      "-f", "direction=desc",
      "-f", "per_page=100",
      "-f", "page=#{page}"
    )
    unless success
      error_text = gh_error_text( stdout_text: stdout_text, stderr_text: stderr_text, fallback: "unable to list pull requests for review sweep" )
      raise error_text
    end

    page_nodes = Array( JSON.parse( stdout_text ) )
    break if page_nodes.empty?
    stop_paging = false

    page_nodes.each do |entry|
      updated_time = parse_time_or_nil( text: entry[ "updated_at" ] )
      next if updated_time.nil?
      # Results are sorted by updated desc, so the first too-old entry means
      # no later page can contain fresh PRs; finish this page then stop.
      if updated_time < cutoff_time
        stop_paging = true
        next
      end
      state = normalise_rest_pull_request_state( entry: entry )
      next unless config.review_sweep_states.include?( sweep_state_for( pr_state: state ) )
      results << {
        number: entry[ "number" ],
        title: entry[ "title" ].to_s,
        url: entry[ "html_url" ].to_s,
        state: state,
        updated_at: updated_time.utc.iso8601,
        merged_at: entry[ "merged_at" ].to_s,
        closed_at: entry[ "closed_at" ].to_s,
        author: entry.dig( "user", "login" ).to_s
      }
    end

    break if stop_paging
    if page >= max_pages
      # Probe one page past the cap: an empty probe means we simply ran out
      # of data at the boundary; a non-empty probe means real truncation.
      probe_stdout_text, probe_stderr_text, probe_success, = gh_run(
        "api", "repos/#{owner}/#{repo}/pulls",
        "--method", "GET",
        "-f", "state=all",
        "-f", "sort=updated",
        "-f", "direction=desc",
        "-f", "per_page=100",
        "-f", "page=#{page + 1}"
      )
      unless probe_success
        error_text = gh_error_text( stdout_text: probe_stdout_text, stderr_text: probe_stderr_text, fallback: "unable to verify review sweep pagination limit" )
        raise error_text
      end
      probe_nodes = Array( JSON.parse( probe_stdout_text ) )
      raise "review sweep pull request pagination exceeded safety limit (#{max_pages} pages)" unless probe_nodes.empty?
      break
    end

    page += 1
  end
  results
end
216
+
217
# REST /pulls payload normaliser so merged PRs stay distinguishable from closed-unmerged PRs.
def normalise_rest_pull_request_state( entry: )
  state = entry[ "state" ].to_s.upcase
  merged = !entry[ "merged_at" ].to_s.strip.empty?
  ( state == "CLOSED" && merged ) ? "MERGED" : state
end
223
+
224
# Extracts owner/repository from configured git remote URL.
#
# Resolution order:
#   1. Parse the remote URL (ssh, https, or scp-like git@ forms).
#   2. Fall back to `gh repo view` (nameWithOwner).
#   3. Last resort: owner "local" with the basename of the remote URL.
# Raises a RuntimeError when none of the strategies yield a usable pair.
def repository_coordinates
  remote_url = git_capture!( "config", "--get", "remote.#{config.git_remote}.url" ).strip
  match = remote_url.match( %r{\A(?:git@|https?://|ssh://git@)?[^/:]+[:/](?<owner>[^/]+)/(?<repo>[^/]+?)(?:\.git)?\z} )
  return [ match[ :owner ], match[ :repo ] ] unless match.nil?

  stdout_text, = gh_capture_soft( "repo", "view", "--json", "nameWithOwner", "--jq", ".nameWithOwner" )
  name_with_owner = stdout_text.to_s.strip
  if name_with_owner.include?( "/" )
    owner, repo = name_with_owner.split( "/", 2 )
    return [ owner, repo ] unless owner.to_s.empty? || repo.to_s.empty?
  end

  repo_name = File.basename( remote_url ).sub( /\.git\z/, "" )
  return [ "local", repo_name ] unless repo_name.empty?
  raise "unable to parse owner/repo from remote URL #{remote_url}"
end
241
+
242
# Optional CI override for detached-HEAD contexts where branch-based PR lookup is not possible.
#
# Reads CARSON_PR_NUMBER from the environment.
# Returns nil when unset or blank, otherwise the Integer PR number.
# Raises a RuntimeError when the value is not a valid base-10 integer.
def carson_pr_number_override
  text = ENV.fetch( "CARSON_PR_NUMBER", "" ).to_s.strip
  return nil if text.empty?
  # Force base 10: bare Integer() honours radix prefixes, so "010" would be
  # read as octal 8 and "0x10" as hex 16 — wrong for a PR number.
  Integer( text, 10 )
rescue ArgumentError
  raise "invalid CARSON_PR_NUMBER value #{text.inspect}"
end
250
+ end
251
+ end
252
+ end
253
+ end
@@ -0,0 +1,224 @@
1
+ module Carson
2
+ class Runtime
3
+ module Review
4
+ module GateSupport
5
+ private
6
+
7
# One-off delay before the first review snapshot, giving reviewers/bots time
# to post. No-op when config.review_wait_seconds is zero or negative.
def wait_for_review_warmup
  return unless config.review_wait_seconds.positive?
  puts_line "warmup_wait_seconds: #{config.review_wait_seconds}"
  sleep config.review_wait_seconds
end
12
+
13
# Poll delay between consecutive snapshot reads during convergence checks.
# No-op when config.review_poll_seconds is zero or negative.
def wait_for_review_poll
  return unless config.review_poll_seconds.positive?
  puts_line "poll_wait_seconds: #{config.review_poll_seconds}"
  sleep config.review_poll_seconds
end
19
+
20
# Fetches live PR review state and derives unresolved-thread plus disposition-ack summary.
#
# Returns a Hash with :latest_activity, :unresolved_threads,
# :actionable_top_level, :unacknowledged_actionable, and :acknowledgements —
# the full input for gate signatures and reports.
def review_gate_snapshot( owner:, repo:, pr_number: )
  details = pull_request_details( owner: owner, repo: repo, pr_number: pr_number )
  pr_author = details.dig( :author, :login ).to_s
  unresolved_threads = unresolved_thread_entries( details: details )
  actionable_top_level = actionable_top_level_items( details: details, pr_author: pr_author )
  acknowledgements = disposition_acknowledgements( details: details, pr_author: pr_author )
  # A finding stays actionable until a PR-author disposition references its URL.
  unacknowledged_actionable = actionable_top_level.reject { |item| acknowledged_by_disposition?( item: item, acknowledgements: acknowledgements ) }
  {
    latest_activity: latest_review_activity( details: details ),
    unresolved_threads: unresolved_threads,
    actionable_top_level: actionable_top_level,
    unacknowledged_actionable: unacknowledged_actionable,
    acknowledgements: acknowledgements
  }
end
36
+
37
# Deterministic signature used to compare two review snapshots for convergence.
# URL lists are sorted so ordering differences between fetches do not register
# as activity.
def review_gate_signature( snapshot: )
  sorted_urls = ->( entries ) { entries.map { |entry| entry.fetch( :url ) }.sort }
  {
    latest_activity: snapshot.fetch( :latest_activity ).to_s,
    unresolved_urls: sorted_urls.call( snapshot.fetch( :unresolved_threads ) ),
    unacknowledged_urls: sorted_urls.call( snapshot.fetch( :unacknowledged_actionable ) )
  }
end
45
+
46
# Pull request selected by current branch; nil is returned when no PR exists.
#
# Any other gh failure raises a RuntimeError; the "no pull requests found"
# case is detected by message text and treated as a soft miss.
def current_pull_request_for_branch( branch_name: )
  stdout_text, stderr_text, success, = gh_run( "pr", "view", "--", branch_name, "--json", "number,title,url,state" )
  unless success
    error_text = gh_error_text( stdout_text: stdout_text, stderr_text: stderr_text, fallback: "unable to read PR for branch #{branch_name}" )
    return nil if error_text.downcase.include?( "no pull requests found" )
    raise error_text
  end
  data = JSON.parse( stdout_text )
  {
    number: data.fetch( "number" ),
    title: data.fetch( "title" ).to_s,
    url: data.fetch( "url" ).to_s,
    state: data.fetch( "state" ).to_s
  }
end
62
+
63
# Collects the review threads that still block the gate: unresolved and not
# outdated. Each entry carries the thread's first-comment URL (or a synthetic
# PR-anchored one), first author, and the newest comment timestamp.
def unresolved_thread_entries( details: )
  entries = []
  Array( details.fetch( :review_threads ) ).each_with_index do |thread, index|
    next if thread.fetch( :is_resolved )
    # Outdated threads belong to superseded diffs and should not block current merge readiness.
    next if thread.fetch( :is_outdated )
    comments = thread.fetch( :comments )
    lead_comment = comments.first || {}
    newest_time = comments.map { |comment| comment.fetch( :created_at ) }.max.to_s
    entries << {
      url: blank_to( value: lead_comment.fetch( :url, "" ), default: "#{details.fetch( :url )}#thread-#{index + 1}" ),
      author: lead_comment.fetch( :author, "" ),
      created_at: newest_time,
      outdated: thread.fetch( :is_outdated ),
      reason: "unresolved_thread"
    }
  end
  entries
end
80
+
81
# Actionable top-level findings include CHANGES_REQUESTED reviews or risk-keyword findings.
#
# Scans non-author issue comments and reviews, skipping any entry whose body
# already carries a disposition prefix. Returns URL-deduplicated item hashes
# with :kind, :url, :author, :created_at and a human-readable :reason.
def actionable_top_level_items( details:, pr_author: )
  items = []
  Array( details.fetch( :comments ) ).each do |comment|
    # The PR author's own comments are never findings against the PR.
    next if comment.fetch( :author ) == pr_author
    next if disposition_prefixed?( text: comment.fetch( :body ) )
    hits = matched_risk_keywords( text: comment.fetch( :body ) )
    next if hits.empty?
    items << {
      kind: "issue_comment",
      url: comment.fetch( :url ),
      author: comment.fetch( :author ),
      created_at: comment.fetch( :created_at ),
      reason: "risk_keywords: #{hits.join( ', ' )}"
    }
  end
  Array( details.fetch( :reviews ) ).each do |review|
    next if review.fetch( :author ) == pr_author
    next if disposition_prefixed?( text: review.fetch( :body ) )
    hits = matched_risk_keywords( text: review.fetch( :body ) )
    changes_requested = review.fetch( :state ) == "CHANGES_REQUESTED"
    # A review is actionable if it either requested changes or matched keywords.
    next if hits.empty? && !changes_requested
    reason = changes_requested ? "changes_requested_review" : "risk_keywords: #{hits.join( ', ' )}"
    items << {
      kind: "review",
      url: review.fetch( :url ),
      author: review.fetch( :author ),
      created_at: review.fetch( :created_at ),
      reason: reason
    }
  end
  deduplicate_findings_by_url( items: items )
end
114
+
115
# Parses acknowledgement messages and extracts referenced review URLs plus disposition.
#
# Only entries written by the PR author count. An acknowledgement needs a
# disposition prefix, a parseable disposition token, and at least one GitHub
# URL in its body; anything else is dropped.
def disposition_acknowledgements( details:, pr_author: )
  sources = []
  sources.concat( Array( details.fetch( :comments ) ) )
  sources.concat( Array( details.fetch( :reviews ) ) )
  sources.concat( Array( details.fetch( :review_threads ) ).flat_map { |thread| thread.fetch( :comments ) } )
  sources.map do |entry|
    next unless entry.fetch( :author, "" ) == pr_author
    body = entry.fetch( :body, "" ).to_s
    next unless disposition_prefixed?( text: body )
    disposition = disposition_token( text: body )
    next if disposition.nil?
    target_urls = extract_github_urls( text: body )
    next if target_urls.empty?
    {
      url: entry.fetch( :url, "" ),
      created_at: entry.fetch( :created_at, "" ),
      disposition: disposition,
      target_urls: target_urls
    }
  end.compact
end
137
+
138
# True when any disposition acknowledgement references the specific finding URL.
def acknowledged_by_disposition?( item:, acknowledgements: )
  finding_url = item.fetch( :url )
  acknowledgements.any? do |ack|
    Array( ack.fetch( :target_urls ) ).include?( finding_url )
  end
end
144
+
145
# Latest review activity marker used by convergence snapshots.
#
# Takes the max of the PR's updated_at and every comment/review/thread-comment
# timestamp, returning it as a UTC ISO8601 string — or nil when no timestamp
# parses (parse_time_or_nil drops unparseable values).
def latest_review_activity( details: )
  timestamps = []
  timestamps << details.fetch( :updated_at )
  timestamps.concat( Array( details.fetch( :comments ) ).map { |entry| entry.fetch( :created_at ) } )
  timestamps.concat( Array( details.fetch( :reviews ) ).map { |entry| entry.fetch( :created_at ) } )
  timestamps.concat( Array( details.fetch( :review_threads ) ).flat_map { |thread| thread.fetch( :comments ) }.map { |entry| entry.fetch( :created_at ) } )
  timestamps.map { |text| parse_time_or_nil( text: text ) }.compact.max&.utc&.iso8601
end
154
+
155
# Writes review gate artefacts using fixed report names in global report output.
#
# Best-effort: any StandardError during writing is reported as a SKIP line
# rather than aborting the gate itself.
def write_review_gate_report( report: )
  markdown_path, json_path = write_report(
    report: report,
    markdown_name: REVIEW_GATE_REPORT_MD,
    json_name: REVIEW_GATE_REPORT_JSON,
    renderer: method( :render_review_gate_markdown )
  )
  puts_line "review_gate_report_markdown: #{markdown_path}"
  puts_line "review_gate_report_json: #{json_path}"
rescue StandardError => e
  puts_line "review_gate_report_write: SKIP (#{e.message})"
end
168
+
169
# Human-readable review gate report for merge-readiness evidence.
# Produces a newline-joined markdown document with header metadata, the PR
# summary (or "- not available"), block reasons, unresolved threads, and
# unacknowledged actionable findings.
def render_review_gate_markdown( report: )
  out = []
  out << "# Carson Review Gate Report"
  out << ""
  out << "- Generated at: #{report.fetch( :generated_at )}"
  out << "- Branch: #{report.fetch( :branch )}"
  out << "- Status: #{report.fetch( :status )}"
  out << "- Converged: #{report.fetch( :converged )}"
  out << "- Poll attempts: #{report.fetch( :poll_attempts, 0 )}"
  out << "- Wait seconds: #{report.fetch( :wait_seconds )}"
  out << "- Poll seconds: #{report.fetch( :poll_seconds )}"
  out << "- Max polls: #{report.fetch( :max_polls )}"
  out << ""
  out << "## Pull Request"
  pr = report[ :pr ]
  if pr.nil?
    out << "- not available"
  else
    out << "- Number: ##{pr.fetch( :number )}"
    out << "- Title: #{pr.fetch( :title )}"
    out << "- URL: #{pr.fetch( :url )}"
    out << "- State: #{pr.fetch( :state )}"
  end
  out << ""
  out << "## Block Reasons"
  reasons = report.fetch( :block_reasons )
  if reasons.empty?
    out << "- none"
  else
    reasons.each { |reason| out << "- #{reason}" }
  end
  out << ""
  out << "## Unresolved Threads"
  threads = report.fetch( :unresolved_threads )
  if threads.empty?
    out << "- none"
  else
    threads.each do |entry|
      out << "- #{entry.fetch( :url )} (author: #{entry.fetch( :author )}, outdated: #{entry.fetch( :outdated )})"
    end
  end
  out << ""
  out << "## Unacknowledged Actionable Top-Level Findings"
  findings = report.fetch( :unacknowledged_actionable )
  if findings.empty?
    out << "- none"
  else
    findings.each do |entry|
      out << "- #{entry.fetch( :kind )}: #{entry.fetch( :url )} (author: #{entry.fetch( :author )}, reason: #{entry.fetch( :reason )})"
    end
  end
  out << ""
  out.join( "\n" )
end
221
+ end
222
+ end
223
+ end
224
+ end
@@ -0,0 +1,164 @@
1
+ module Carson
2
+ class Runtime
3
+ module Review
4
+ module QueryText
5
+ private
6
+
7
# Maps a PR connection name to the GraphQL query that fetches one more page
# of it; unknown names raise.
def pull_request_connection_page_query( connection_name: )
  query_methods = {
    "comments" => :pull_request_comments_page_query,
    "reviews" => :pull_request_reviews_page_query,
    "reviewThreads" => :pull_request_review_threads_page_query
  }
  query_method = query_methods[ connection_name ]
  raise "unsupported pull request connection #{connection_name}" if query_method.nil?
  send( query_method )
end
19
+
20
# GraphQL query kept in one place so gate/sweep consume the same PR payload schema.
# Each connection requests its first 100 items plus pageInfo so the caller
# can paginate the remainder with the *_page_query variants below.
def pull_request_details_query
  <<~GRAPHQL
    query($owner:String!, $repo:String!, $number:Int!) {
      repository(owner:$owner, name:$repo) {
        pullRequest(number:$number) {
          number
          title
          url
          state
          updatedAt
          mergedAt
          closedAt
          author { login }
          reviewThreads(first:100) {
            pageInfo {
              hasNextPage
              endCursor
            }
            nodes {
              isResolved
              isOutdated
              comments(first:100) {
                nodes {
                  author { login }
                  body
                  url
                  createdAt
                }
              }
            }
          }
          comments(first:100) {
            pageInfo {
              hasNextPage
              endCursor
            }
            nodes {
              author { login }
              body
              url
              createdAt
            }
          }
          reviews(first:100) {
            pageInfo {
              hasNextPage
              endCursor
            }
            nodes {
              author { login }
              state
              body
              url
              submittedAt
            }
          }
        }
      }
    }
  GRAPHQL
end
82
+
83
# Additional page query for top-level issue comments.
# Field selection mirrors the comments connection in pull_request_details_query.
def pull_request_comments_page_query
  <<~GRAPHQL
    query($owner:String!, $repo:String!, $number:Int!, $after:String!) {
      repository(owner:$owner, name:$repo) {
        pullRequest(number:$number) {
          comments(first:100, after:$after) {
            pageInfo {
              hasNextPage
              endCursor
            }
            nodes {
              author { login }
              body
              url
              createdAt
            }
          }
        }
      }
    }
  GRAPHQL
end
106
+
107
# Additional page query for top-level reviews.
# Field selection mirrors the reviews connection in pull_request_details_query.
def pull_request_reviews_page_query
  <<~GRAPHQL
    query($owner:String!, $repo:String!, $number:Int!, $after:String!) {
      repository(owner:$owner, name:$repo) {
        pullRequest(number:$number) {
          reviews(first:100, after:$after) {
            pageInfo {
              hasNextPage
              endCursor
            }
            nodes {
              author { login }
              state
              body
              url
              submittedAt
            }
          }
        }
      }
    }
  GRAPHQL
end
131
+
132
# Additional page query for review threads.
# Field selection mirrors the reviewThreads connection in
# pull_request_details_query; nested thread comments are capped at the first
# 100 per thread (no nested pagination here).
def pull_request_review_threads_page_query
  <<~GRAPHQL
    query($owner:String!, $repo:String!, $number:Int!, $after:String!) {
      repository(owner:$owner, name:$repo) {
        pullRequest(number:$number) {
          reviewThreads(first:100, after:$after) {
            pageInfo {
              hasNextPage
              endCursor
            }
            nodes {
              isResolved
              isOutdated
              comments(first:100) {
                nodes {
                  author { login }
                  body
                  url
                  createdAt
                }
              }
            }
          }
        }
      }
    }
  GRAPHQL
end
161
+ end
162
+ end
163
+ end
164
+ end