ultralytics-actions 0.0.100__py3-none-any.whl → 0.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of ultralytics-actions might be problematic; consult the registry's advisory listing for more details.

actions/review_pr.py ADDED
@@ -0,0 +1,323 @@
1
+ # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
2
+
3
+ from __future__ import annotations
4
+
5
+ import json
6
+ import re
7
+
8
+ from .utils import GITHUB_API_URL, Action, get_completion, remove_html_comments
9
+
10
# Title marker embedded in every bot review body; dismiss_previous_reviews matches
# on it to recognize (and supersede) reviews previously posted by this action.
REVIEW_MARKER = "🔍 PR Review"
# Severity level -> emoji prefix shown before each inline review comment.
EMOJI_MAP = {"CRITICAL": "❗", "HIGH": "⚠️", "MEDIUM": "💡", "LOW": "📝", "SUGGESTION": "💭"}
# Regex patterns (applied with re.search against each changed file path) for
# generated, vendored, or binary files that are excluded from AI review.
SKIP_PATTERNS = [
    r"\.lock$",  # Lock files
    r"-lock\.(json|yaml|yml)$",
    r"\.min\.(js|css)$",  # Minified
    r"\.bundle\.(js|css)$",
    r"(^|/)dist/",  # Generated/vendored directories
    r"(^|/)build/",
    r"(^|/)vendor/",
    r"(^|/)node_modules/",
    r"\.pb\.py$",  # Proto generated
    r"_pb2\.py$",
    r"_pb2_grpc\.py$",
    r"^package-lock\.json$",  # Package locks
    r"^yarn\.lock$",
    r"^poetry\.lock$",
    r"^Pipfile\.lock$",
    r"\.(svg|png|jpe?g|gif)$",  # Images
]
30
+
31
+
32
def parse_diff_files(diff_text: str) -> dict:
    """Parse a unified diff into ``{file_path: {new_line_number: added_line_content}}``.

    Only added lines (prefixed ``+``) are recorded, keyed by their line number in
    the NEW version of the file, so review comments can later be validated against
    positions GitHub accepts.

    Args:
        diff_text: Unified diff text (``diff --git`` / ``@@`` hunk format).

    Returns:
        Mapping of new-file path to a dict of {new line number: added line content}.
    """
    files, current_file, current_line = {}, None, 0

    for line in diff_text.split("\n"):
        if line.startswith("diff --git"):
            # New file section: take the "b/" (post-change) path.
            match = re.search(r" b/(.+)$", line)
            current_file = match.group(1) if match else None
            current_line = 0  # Invalid until the first @@ hunk header is seen
            if current_file:
                files[current_file] = {}
        elif line.startswith("@@") and current_file:
            # Hunk header "@@ -a,b +c,d @@": c is the starting NEW-file line number.
            match = re.search(r"@@.*\+(\d+)(?:,\d+)?", line)
            current_line = int(match.group(1)) if match else 0
        elif current_file and current_line > 0:
            if line.startswith("\\"):
                # "\ No newline at end of file" marker occupies no file line;
                # counting it would shift every subsequent line number.
                continue
            if line.startswith("+") and not line.startswith("+++"):
                files[current_file][current_line] = line[1:]
                current_line += 1
            elif not line.startswith("-"):
                # Context line exists in both versions: advances the new-file counter
                current_line += 1

    return files
54
+
55
+
56
def generate_pr_review(repository: str, diff_text: str, pr_title: str, pr_description: str) -> dict:
    """Generate comprehensive PR review with line-specific comments and overall assessment.

    Args:
        repository: "owner/repo" slug, used only for context in the prompt.
        pr_title: PR title forwarded to the model.
        pr_description: PR body; HTML comments are stripped and it is capped at 1000 chars.
        diff_text: Unified diff of the PR (truncated to roughly half the context window).

    Returns:
        Dict with "comments" and "summary"; on successful generation it also carries
        "diff_files", "diff_truncated", and "skipped_files" for the posting step.
    """
    if not diff_text:
        return {"comments": [], "summary": "No changes detected in diff"}

    diff_files = parse_diff_files(diff_text)
    if not diff_files:
        return {"comments": [], "summary": "No files with changes detected in diff"}

    # Filter out generated/vendored files
    filtered_files = {
        path: lines
        for path, lines in diff_files.items()
        if not any(re.search(pattern, path) for pattern in SKIP_PATTERNS)
    }
    skipped_count = len(diff_files) - len(filtered_files)
    diff_files = filtered_files

    if not diff_files:
        return {"comments": [], "summary": f"All {skipped_count} changed files are generated/vendored (skipped review)"}

    file_list = list(diff_files.keys())
    limit = round(128000 * 3.3 * 0.5)  # 3.3 characters per token for half a 256k context window
    diff_truncated = len(diff_text) > limit
    lines_changed = sum(len(lines) for lines in diff_files.values())

    # Scale the comment budget with PR size so small PRs aren't over-commented
    comment_guidance = (
        "Provide up to 1-3 comments only if critical issues exist"
        if lines_changed < 50
        else "Provide up to 3-5 comments only if high-impact issues exist"
        if lines_changed < 200
        else "Provide up to 5-10 comments only for the most critical issues"
    )

    content = (
        "You are an expert code reviewer for Ultralytics. Provide detailed inline comments on specific code changes.\n\n"
        "Focus on: Code quality, style, best practices, bugs, edge cases, error handling, performance, security, documentation, test coverage\n\n"
        "FORMATTING: Use backticks for code, file names, branch names, function names, variable names, packages\n\n"
        "CRITICAL RULES:\n"
        "1. Quality over quantity: Zero comments is fine for clean code - only flag truly important issues\n"
        f"2. {comment_guidance} - these are maximums, not targets\n"
        "3. CRITICAL: Do not post separate comments on adjacent/nearby lines (within 10 lines). Combine all related issues into ONE comment\n"
        "4. When combining issues from multiple lines, use 'start_line' (first line) and 'line' (last line) to highlight the entire range\n"
        "5. Each comment must reference separate areas - no overlapping line ranges\n"
        "6. Prioritize: CRITICAL bugs/security > HIGH impact issues > code quality\n"
        "7. Keep comments concise, friendly, and easy to understand - avoid jargon when possible\n"
        "8. DO not comment on routine changes: adding imports, adding dependencies, updating version numbers, standard refactoring\n"
        "9. Trust the developer - only flag issues with clear evidence of problems, not hypothetical concerns\n\n"
        "SUMMARY GUIDELINES:\n"
        "- Keep summary brief, clear, and actionable - avoid overly detailed explanations\n"
        "- Highlight only the most important findings\n"
        "- Do NOT include file names or line numbers in the summary - inline comments already show exact locations\n"
        "- Focus on what needs to be fixed, not where\n\n"
        "CODE SUGGESTIONS:\n"
        "- ONLY provide 'suggestion' field when you have high certainty the code is problematic AND sufficient context for a confident fix\n"
        "- If uncertain about the correct fix, omit 'suggestion' field and explain the concern in 'message' only\n"
        "- Suggestions must be ready-to-merge code with NO comments, placeholders, or explanations\n"
        "- Suggestions replace ONLY the single line at 'line' - for multi-line fixes, describe the change in 'message' instead\n"
        "- Do NOT provide 'start_line' when including a 'suggestion' - suggestions are always single-line only\n"
        "- Suggestion content must match the exact indentation of the original line\n"
        "- Never include triple backticks (```) in suggestions as they break markdown formatting\n"
        "- It's better to flag an issue without a suggestion than provide a wrong or uncertain fix\n\n"
        "Return JSON: "
        '{"comments": [{"file": "exact/path", "line": N, "severity": "HIGH", "message": "...", "suggestion": "..."}], "summary": "..."}\n\n'
        "Rules:\n"
        "- Only comment on NEW lines (starting with + in diff)\n"
        "- Use exact file paths from diff (no ./ prefix)\n"
        "- Line numbers must match NEW file line numbers from @@ hunks\n"
        "- When '- old' then '+ new', new line keeps SAME line number\n"
        "- Severity: CRITICAL, HIGH, MEDIUM, LOW, SUGGESTION\n"
        f"- Files changed: {len(file_list)} ({', '.join(file_list[:10])}{'...' if len(file_list) > 10 else ''})\n"
        f"- Lines changed: {lines_changed}\n"
    )

    messages = [
        {"role": "system", "content": content},
        {
            "role": "user",
            "content": (
                f"Review this PR in https://github.com/{repository}:\n"
                f"Title: {pr_title}\n"
                f"Description: {remove_html_comments(pr_description or '')[:1000]}\n\n"
                f"Diff:\n{diff_text[:limit]}\n\n"
                "Now review this diff according to the rules above. Return JSON with comments array and summary."
            ),
        },
    ]

    try:
        response = get_completion(messages, reasoning_effort="medium")
        print("\n" + "=" * 80 + f"\nFULL AI RESPONSE:\n{response}\n" + "=" * 80 + "\n")

        # Model may wrap the JSON in a fenced code block; extract it if so.
        json_str = re.search(r"```(?:json)?\s*(\{.*?\})\s*```", response, re.DOTALL)
        review_data = json.loads(json_str.group(1) if json_str else response)

        print(f"AI generated {len(review_data.get('comments', []))} comments")

        # Validate, filter, and deduplicate comments
        unique_comments = {}
        for c in review_data.get("comments", []):
            file_path, line_num = c.get("file"), c.get("line", 0)
            start_line = c.get("start_line")

            # Validate line numbers are in diff (GitHub rejects comments on unchanged lines)
            if file_path not in diff_files or line_num not in diff_files[file_path]:
                print(f"Filtered out {file_path}:{line_num} (available: {list(diff_files.get(file_path, {}))[:10]}...)")
                continue

            # Validate start_line if provided - drop start_line for suggestions (single-line only)
            if start_line:
                if c.get("suggestion"):
                    print(f"Dropping start_line for {file_path}:{line_num} - suggestions must be single-line only")
                    c.pop("start_line", None)
                elif start_line >= line_num:
                    print(f"Invalid start_line {start_line} >= line {line_num} for {file_path}, dropping start_line")
                    c.pop("start_line", None)
                elif start_line not in diff_files[file_path]:
                    print(f"start_line {start_line} not in diff for {file_path}, dropping start_line")
                    c.pop("start_line", None)

            # Deduplicate by line number (first comment for a file:line wins)
            key = f"{file_path}:{line_num}"
            if key not in unique_comments:
                unique_comments[key] = c
            else:
                # "or ''" guards against a missing/None message crashing the whole review
                print(f"⚠️ AI duplicate for {key}: {c.get('severity')} - {(c.get('message') or '')[:60]}...")

        review_data.update(
            {
                "comments": list(unique_comments.values()),
                "diff_files": diff_files,
                "diff_truncated": diff_truncated,
                "skipped_files": skipped_count,
            }
        )
        print(f"Valid comments after filtering: {len(review_data['comments'])}")
        return review_data

    except json.JSONDecodeError as e:
        print(f"JSON parsing failed... {e}")
        return {"comments": [], "summary": "Review generation encountered a JSON parsing error"}
    except Exception as e:
        print(f"Review generation failed: {e}")
        import traceback

        traceback.print_exc()
        return {"comments": [], "summary": "Review generation encountered an error"}
203
+
204
+
205
def dismiss_previous_reviews(event: Action) -> int:
    """Dismiss earlier bot reviews, delete their inline comments, and return the next review number."""
    pr_number = event.pr.get("number")
    bot_username = event.get_username()
    if not pr_number or not bot_username:
        return 1  # Can't identify our own reviews; start numbering at 1

    pulls_base = f"{GITHUB_API_URL}/repos/{event.repository}/pulls"
    reviews_url = f"{pulls_base}/{pr_number}/reviews"
    previous = 0

    # Count prior bot reviews (by marker) and dismiss any that gate merging
    reviews_response = event.get(reviews_url)
    if reviews_response.status_code == 200:
        for review in reviews_response.json():
            is_ours = review.get("user", {}).get("login") == bot_username
            if not is_ours or REVIEW_MARKER not in (review.get("body") or ""):
                continue
            previous += 1
            if review.get("state") in ["APPROVED", "CHANGES_REQUESTED"] and (review_id := review.get("id")):
                event.put(f"{reviews_url}/{review_id}/dismissals", json={"message": "Superseded by new review"})

    # Remove stale inline comments left by the bot on earlier reviews
    comments_response = event.get(f"{pulls_base}/{pr_number}/comments")
    if comments_response.status_code == 200:
        for comment in comments_response.json():
            if comment.get("user", {}).get("login") != bot_username:
                continue
            if comment_id := comment.get("id"):
                # 404 tolerated: the comment may already be gone
                event.delete(
                    f"{pulls_base}/comments/{comment_id}",
                    expected_status=[200, 204, 404],
                )

    return previous + 1
230
+
231
+
232
def post_review_summary(event: Action, review_data: dict, review_number: int) -> None:
    """Post overall review summary and inline comments as a single PR review.

    Args:
        event: Action wrapper providing PR payload and authenticated HTTP helpers.
        review_data: Output of generate_pr_review (comments, summary, diff_files, flags).
        review_number: Sequence number from dismiss_previous_reviews, shown in the title.
    """
    # Both the PR number and head commit SHA are required to submit a review
    if not (pr_number := event.pr.get("number")) or not (commit_sha := event.pr.get("head", {}).get("sha")):
        return

    review_title = f"{REVIEW_MARKER} {review_number}" if review_number > 1 else REVIEW_MARKER
    comments = review_data.get("comments", [])
    # Any CRITICAL/HIGH/MEDIUM comment requests changes; otherwise approve
    event_type = (
        "REQUEST_CHANGES" if any(c.get("severity") not in ["LOW", "SUGGESTION", None] for c in comments) else "APPROVE"
    )

    body = (
        f"## {review_title}\n\n"
        "<sub>Made with ❤️ by [Ultralytics Actions](https://github.com/ultralytics/actions)</sub>\n\n"
        f"{review_data.get('summary', 'Review completed')}\n\n"
    )

    if comments:
        # At most 10 inline comments are posted (see the loop below)
        shown = min(len(comments), 10)
        body += f"💬 Posted {shown} inline comment{'s' if shown != 1 else ''}{' (10 shown, more available)' if len(comments) > 10 else ''}\n"

    if review_data.get("diff_truncated"):
        body += "\n⚠️ **Large PR**: Review focused on critical issues. Some details may not be covered.\n"

    if skipped := review_data.get("skipped_files"):
        body += f"\n📋 **Skipped {skipped} file{'s' if skipped != 1 else ''}** (lock files, minified, images, etc.)\n"

    # Build inline comments for the review
    review_comments = []
    for comment in comments[:10]:
        # Skip malformed entries missing a path or line number
        if not (file_path := comment.get("file")) or not (line := comment.get("line", 0)):
            continue

        severity = comment.get("severity", "SUGGESTION")
        comment_body = f"{EMOJI_MAP.get(severity, '💭')} **{severity}**: {comment.get('message', '')}"

        if suggestion := comment.get("suggestion"):
            # Triple backticks inside a suggestion would break the fenced block
            if "```" not in suggestion:
                # Extract original line indentation and apply to suggestion
                if original_line := review_data.get("diff_files", {}).get(file_path, {}).get(line):
                    indent = len(original_line) - len(original_line.lstrip())
                    suggestion = " " * indent + suggestion.strip()
                comment_body += f"\n\n**Suggested change:**\n```suggestion\n{suggestion}\n```"

        # Build comment with optional start_line for multi-line context
        review_comment = {"path": file_path, "line": line, "body": comment_body, "side": "RIGHT"}
        if start_line := comment.get("start_line"):
            # GitHub requires start_line strictly before line for a range comment
            if start_line < line:
                review_comment["start_line"] = start_line
                review_comment["start_side"] = "RIGHT"
                print(f"Multi-line comment: {file_path}:{start_line}-{line}")

        review_comments.append(review_comment)

    # Submit review with inline comments
    payload = {"commit_id": commit_sha, "body": body, "event": event_type}
    if review_comments:
        payload["comments"] = review_comments
        print(f"Posting review with {len(review_comments)} inline comments")

    event.post(
        f"{GITHUB_API_URL}/repos/{event.repository}/pulls/{pr_number}/reviews",
        json=payload,
    )
296
+
297
+
298
def main(*args, **kwargs):
    """Main entry point for PR review action."""
    event = Action(*args, **kwargs)

    # Handle review requests: only proceed when the bot itself was requested
    is_review_request = event.event_name == "pull_request" and event.event_data.get("action") == "review_requested"
    if is_review_request:
        requested = event.event_data.get("requested_reviewer", {}).get("login")
        if requested != event.get_username():
            return
        print(f"Review requested from {event.get_username()}")

    pr = event.pr
    state = pr.get("state") if pr else "None"
    if state != "open":
        print(f"Skipping: PR state is {state}")
        return

    print(f"Starting PR review for #{pr['number']}")
    review_number = dismiss_previous_reviews(event)

    review = generate_pr_review(event.repository, event.get_pr_diff(), pr.get("title", ""), pr.get("body", ""))

    post_review_summary(event, review, review_number)
    print("PR review completed")
320
+
321
+
322
# Allow direct execution as a script in addition to import as an action module
if __name__ == "__main__":
    main()
actions/summarize_pr.py CHANGED
@@ -2,17 +2,14 @@
2
2
 
3
3
  from __future__ import annotations
4
4
 
5
- import time
5
+ from .utils import GITHUB_API_URL, Action, get_completion, get_pr_summary_prompt
6
6
 
7
- from .utils import GITHUB_API_URL, GITHUB_GRAPHQL_URL, Action, get_completion
8
-
9
- # Constants
10
7
  SUMMARY_START = (
11
8
  "## 🛠️ PR Summary\n\n<sub>Made with ❤️ by [Ultralytics Actions](https://github.com/ultralytics/actions)<sub>\n\n"
12
9
  )
13
10
 
14
11
 
15
- def generate_merge_message(pr_summary=None, pr_credit=None, pr_url=None):
12
+ def generate_merge_message(pr_summary, pr_credit, pr_url):
16
13
  """Generates a motivating thank-you message for merged PR contributors."""
17
14
  messages = [
18
15
  {
@@ -32,17 +29,8 @@ def generate_merge_message(pr_summary=None, pr_credit=None, pr_url=None):
32
29
  return get_completion(messages)
33
30
 
34
31
 
35
- def post_merge_message(event, summary, pr_credit):
36
- """Posts thank you message on PR after merge."""
37
- pr_url = f"{GITHUB_API_URL}/repos/{event.repository}/pulls/{event.pr['number']}"
38
- comment_url = f"{GITHUB_API_URL}/repos/{event.repository}/issues/{event.pr['number']}/comments"
39
- message = generate_merge_message(summary, pr_credit, pr_url)
40
- event.post(comment_url, json={"body": message})
41
-
42
-
43
32
  def generate_issue_comment(pr_url, pr_summary, pr_credit, pr_title=""):
44
33
  """Generates personalized issue comment based on PR context."""
45
- # Extract repo info from PR URL (format: api.github.com/repos/owner/repo/pulls/number)
46
34
  repo_parts = pr_url.split("/repos/")[1].split("/pulls/")[0] if "/repos/" in pr_url else ""
47
35
  owner_repo = repo_parts.split("/")
48
36
  repo_name = owner_repo[-1] if len(owner_repo) > 1 else "package"
@@ -72,137 +60,46 @@ def generate_issue_comment(pr_url, pr_summary, pr_credit, pr_title=""):
72
60
 
73
61
 
74
62
  def generate_pr_summary(repository, diff_text):
75
- """Generates a concise, professional summary of a PR using OpenAI's API for Ultralytics repositories."""
76
- if not diff_text:
77
- diff_text = "**ERROR: DIFF IS EMPTY, THERE ARE ZERO CODE CHANGES IN THIS PR."
78
- ratio = 3.3 # about 3.3 characters per token
79
- limit = round(128000 * ratio * 0.5) # use up to 50% of the 128k context window for prompt
63
+ """Generates a concise, professional summary of a PR using OpenAI's API."""
64
+ prompt, is_large = get_pr_summary_prompt(repository, diff_text)
65
+
80
66
  messages = [
81
67
  {
82
68
  "role": "system",
83
69
  "content": "You are an Ultralytics AI assistant skilled in software development and technical communication. Your task is to summarize GitHub PRs from Ultralytics in a way that is accurate, concise, and understandable to both expert developers and non-expert users. Focus on highlighting the key changes and their impact in simple, concise terms.",
84
70
  },
85
- {
86
- "role": "user",
87
- "content": f"Summarize this '{repository}' PR, focusing on major changes, their purpose, and potential impact. Keep the summary clear and concise, suitable for a broad audience. Add emojis to enliven the summary. Reply directly with a summary along these example guidelines, though feel free to adjust as appropriate:\n\n"
88
- f"### 🌟 Summary (single-line synopsis)\n"
89
- f"### 📊 Key Changes (bullet points highlighting any major changes)\n"
90
- f"### 🎯 Purpose & Impact (bullet points explaining any benefits and potential impact to users)\n"
91
- f"\n\nHere's the PR diff:\n\n{diff_text[:limit]}",
92
- },
71
+ {"role": "user", "content": prompt},
93
72
  ]
94
73
  reply = get_completion(messages, temperature=1.0)
95
- if len(diff_text) > limit:
74
+ if is_large:
96
75
  reply = "**WARNING ⚠️** this PR is very large, summary may not cover all changes.\n\n" + reply
97
76
  return SUMMARY_START + reply
98
77
 
99
78
 
100
- def update_pr_description(event, new_summary, max_retries=2):
101
- """Updates PR description with new summary, retrying if description is None."""
102
- description = ""
103
- url = f"{GITHUB_API_URL}/repos/{event.repository}/pulls/{event.pr['number']}"
104
- for i in range(max_retries + 1):
105
- description = event.get(url).json().get("body") or ""
106
- if description:
107
- break
108
- if i < max_retries:
109
- print("No current PR description found, retrying...")
110
- time.sleep(1)
111
-
112
- # Check if existing summary is present and update accordingly
113
- start = "## 🛠️ PR Summary"
114
- if start in description:
115
- print("Existing PR Summary found, replacing.")
116
- updated_description = description.split(start)[0] + new_summary
117
- else:
118
- print("PR Summary not found, appending.")
119
- updated_description = description + "\n\n" + new_summary
120
-
121
- # Update the PR description
122
- event.patch(url, json={"body": updated_description})
123
-
124
-
125
79
  def label_fixed_issues(event, pr_summary):
126
80
  """Labels issues closed by PR when merged, notifies users, and returns PR contributors."""
127
- query = """
128
- query($owner: String!, $repo: String!, $pr_number: Int!) {
129
- repository(owner: $owner, name: $repo) {
130
- pullRequest(number: $pr_number) {
131
- closingIssuesReferences(first: 50) { nodes { number } }
132
- url
133
- title
134
- body
135
- author { login, __typename }
136
- reviews(first: 50) { nodes { author { login, __typename } } }
137
- comments(first: 50) { nodes { author { login, __typename } } }
138
- commits(first: 100) { nodes { commit { author { user { login } }, committer { user { login } } } } }
139
- }
140
- }
141
- }
142
- """
143
- owner, repo = event.repository.split("/")
144
- variables = {"owner": owner, "repo": repo, "pr_number": event.pr["number"]}
145
- response = event.post(GITHUB_GRAPHQL_URL, json={"query": query, "variables": variables})
146
- if response.status_code != 200:
147
- return None # no linked issues
148
-
149
- try:
150
- data = response.json()["data"]["repository"]["pullRequest"]
151
- comments = data["reviews"]["nodes"] + data["comments"]["nodes"]
152
- token_username = event.get_username() # get GITHUB_TOKEN username
153
- author = data["author"]["login"] if data["author"]["__typename"] != "Bot" else None
154
- pr_title = data.get("title", "")
155
-
156
- # Get unique contributors from reviews and comments
157
- contributors = {x["author"]["login"] for x in comments if x["author"]["__typename"] != "Bot"}
158
-
159
- # Add commit authors and committers that have GitHub accounts linked
160
- for commit in data["commits"]["nodes"]:
161
- commit_data = commit["commit"]
162
- for user_type in ["author", "committer"]:
163
- if user := commit_data[user_type].get("user"):
164
- if login := user.get("login"):
165
- contributors.add(login)
166
-
167
- contributors.discard(author)
168
- contributors.discard(token_username)
169
-
170
- # Write credit string
171
- pr_credit = "" # i.e. "@user1 with contributions from @user2, @user3"
172
- if author and author != token_username:
173
- pr_credit += f"@{author}"
174
- if contributors:
175
- pr_credit += (" with contributions from " if pr_credit else "") + ", ".join(f"@{c}" for c in contributors)
176
-
177
- # Generate personalized comment
178
- comment = generate_issue_comment(
179
- pr_url=data["url"], pr_summary=pr_summary, pr_credit=pr_credit, pr_title=pr_title
180
- )
181
-
182
- # Update linked issues
183
- for issue in data["closingIssuesReferences"]["nodes"]:
184
- number = issue["number"]
185
- # Add fixed label
186
- event.post(f"{GITHUB_API_URL}/repos/{event.repository}/issues/{number}/labels", json={"labels": ["fixed"]})
187
-
188
- # Add comment
189
- event.post(f"{GITHUB_API_URL}/repos/{event.repository}/issues/{number}/comments", json={"body": comment})
190
-
191
- return pr_credit
192
- except KeyError as e:
193
- print(f"Error parsing GraphQL response: {e}")
81
+ pr_credit, data = event.get_pr_contributors()
82
+ if not pr_credit:
194
83
  return None
195
84
 
85
+ comment = generate_issue_comment(data["url"], pr_summary, pr_credit, data.get("title", ""))
86
+
87
+ for issue in data["closingIssuesReferences"]["nodes"]:
88
+ number = issue["number"]
89
+ event.post(f"{GITHUB_API_URL}/repos/{event.repository}/issues/{number}/labels", json={"labels": ["fixed"]})
90
+ event.post(f"{GITHUB_API_URL}/repos/{event.repository}/issues/{number}/comments", json={"body": comment})
196
91
 
197
- def remove_pr_labels(event, labels=()):
198
- """Removes specified labels from PR."""
199
- for label in labels: # Can be extended with more labels in the future
200
- event.delete(f"{GITHUB_API_URL}/repos/{event.repository}/issues/{event.pr['number']}/labels/{label}")
92
+ return pr_credit
201
93
 
202
94
 
203
95
  def main(*args, **kwargs):
204
96
  """Summarize a pull request and update its description with a summary."""
205
97
  event = Action(*args, **kwargs)
98
+ action = event.event_data.get("action")
99
+
100
+ if action == "opened":
101
+ print("Skipping PR open - handled by first_interaction.py with unified API call")
102
+ return
206
103
 
207
104
  print(f"Retrieving diff for PR {event.pr['number']}")
208
105
  diff = event.get_pr_diff()
@@ -213,17 +110,18 @@ def main(*args, **kwargs):
213
110
 
214
111
  # Update PR description
215
112
  print("Updating PR description...")
216
- update_pr_description(event, summary)
113
+ event.update_pr_description(event.pr["number"], summary)
217
114
 
218
- # Update linked issues and post thank you message if merged
219
115
  if event.pr.get("merged"):
220
116
  print("PR is merged, labeling fixed issues...")
221
117
  pr_credit = label_fixed_issues(event, summary)
222
118
  print("Removing TODO label from PR...")
223
- remove_pr_labels(event, labels=["TODO"])
119
+ event.remove_labels(event.pr["number"], labels=("TODO",))
224
120
  if pr_credit:
225
121
  print("Posting PR author thank you message...")
226
- post_merge_message(event, summary, pr_credit)
122
+ pr_url = f"{GITHUB_API_URL}/repos/{event.repository}/pulls/{event.pr['number']}"
123
+ message = generate_merge_message(summary, pr_credit, pr_url)
124
+ event.add_comment(event.pr["number"], None, message, "pull request")
227
125
 
228
126
 
229
127
  if __name__ == "__main__":
@@ -42,7 +42,7 @@ def get_prs_between_tags(event, previous_tag: str, latest_tag: str) -> list:
42
42
  pr_numbers.update(pr_matches)
43
43
 
44
44
  prs = []
45
- time.sleep(10) # sleep 10 seconds to allow final PR summary to update on merge
45
+ time.sleep(10) # Allow final PR summary to update on merge
46
46
  for pr_number in sorted(pr_numbers): # earliest to latest
47
47
  pr_url = f"{GITHUB_API_URL}/repos/{event.repository}/pulls/{pr_number}"
48
48
  pr_response = event.get(pr_url)
@@ -68,9 +68,15 @@ def get_prs_between_tags(event, previous_tag: str, latest_tag: str) -> list:
68
68
  def get_new_contributors(event, prs: list) -> set:
69
69
  """Identify new contributors who made their first merged PR in the current release."""
70
70
  new_contributors = set()
71
+ checked_authors = set()
71
72
  for pr in prs:
72
73
  author = pr["author"]
73
- # Check if this is the author's first contribution
74
+ if author in checked_authors:
75
+ print(f"Skipping duplicate author: {author}")
76
+ continue
77
+ checked_authors.add(author)
78
+
79
+ time.sleep(2) # Rate limit: GitHub search API has strict limits
74
80
  url = f"{GITHUB_API_URL}/search/issues?q=repo:{event.repository}+author:{author}+is:pr+is:merged&sort=created&order=asc"
75
81
  r = event.get(url)
76
82
  if r.status_code == 200:
@@ -79,6 +85,11 @@ def get_new_contributors(event, prs: list) -> set:
79
85
  first_pr = data["items"][0]
80
86
  if first_pr["number"] == pr["number"]:
81
87
  new_contributors.add(author)
88
+ elif r.status_code == 403:
89
+ print(f"⚠️ Rate limit hit checking {author}, stopping contributor check")
90
+ break
91
+ else:
92
+ print(f"Failed to check {author}: {r.status_code}")
82
93
  return new_contributors
83
94
 
84
95
 
@@ -39,6 +39,9 @@ def add_indentation(code_block, num_spaces):
39
39
 
40
40
  def format_code_with_ruff(temp_dir):
41
41
  """Formats Python code files in the specified directory using ruff linter and docformatter tools."""
42
+ if not next(Path(temp_dir).rglob("*.py"), None):
43
+ return
44
+
42
45
  try:
43
46
  # Run ruff format
44
47
  subprocess.run(
actions/utils/__init__.py CHANGED
@@ -9,7 +9,13 @@ from .common_utils import (
9
9
  remove_html_comments,
10
10
  )
11
11
  from .github_utils import GITHUB_API_URL, GITHUB_GRAPHQL_URL, Action, ultralytics_actions_info
12
- from .openai_utils import get_completion
12
+ from .openai_utils import (
13
+ filter_labels,
14
+ get_completion,
15
+ get_pr_open_response,
16
+ get_pr_summary_guidelines,
17
+ get_pr_summary_prompt,
18
+ )
13
19
  from .version_utils import check_pubdev_version, check_pypi_version
14
20
 
15
21
  __all__ = (
@@ -23,7 +29,11 @@ __all__ = (
23
29
  "allow_redirect",
24
30
  "check_pubdev_version",
25
31
  "check_pypi_version",
32
+ "filter_labels",
26
33
  "get_completion",
34
+ "get_pr_open_response",
35
+ "get_pr_summary_guidelines",
36
+ "get_pr_summary_prompt",
27
37
  "remove_html_comments",
28
38
  "ultralytics_actions_info",
29
39
  )