ultralytics-actions 0.0.33__tar.gz → 0.0.35__tar.gz

This diff shows the content of publicly available package versions released to a supported public registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.
Files changed (22)
  1. {ultralytics_actions-0.0.33 → ultralytics_actions-0.0.35}/PKG-INFO +3 -3
  2. {ultralytics_actions-0.0.33 → ultralytics_actions-0.0.35}/actions/__init__.py +1 -1
  3. {ultralytics_actions-0.0.33 → ultralytics_actions-0.0.35}/actions/first_interaction.py +70 -73
  4. {ultralytics_actions-0.0.33 → ultralytics_actions-0.0.35}/actions/summarize_pr.py +32 -35
  5. {ultralytics_actions-0.0.33 → ultralytics_actions-0.0.35}/actions/summarize_release.py +23 -21
  6. ultralytics_actions-0.0.35/actions/utils/__init__.py +19 -0
  7. {ultralytics_actions-0.0.33 → ultralytics_actions-0.0.35}/actions/utils/common_utils.py +17 -14
  8. ultralytics_actions-0.0.35/actions/utils/github_utils.py +163 -0
  9. {ultralytics_actions-0.0.33 → ultralytics_actions-0.0.35}/pyproject.toml +15 -2
  10. {ultralytics_actions-0.0.33 → ultralytics_actions-0.0.35}/ultralytics_actions.egg-info/PKG-INFO +3 -3
  11. {ultralytics_actions-0.0.33 → ultralytics_actions-0.0.35}/ultralytics_actions.egg-info/requires.txt +2 -2
  12. ultralytics_actions-0.0.33/actions/utils/__init__.py +0 -45
  13. ultralytics_actions-0.0.33/actions/utils/github_utils.py +0 -157
  14. {ultralytics_actions-0.0.33 → ultralytics_actions-0.0.35}/LICENSE +0 -0
  15. {ultralytics_actions-0.0.33 → ultralytics_actions-0.0.35}/README.md +0 -0
  16. {ultralytics_actions-0.0.33 → ultralytics_actions-0.0.35}/actions/update_markdown_code_blocks.py +0 -0
  17. {ultralytics_actions-0.0.33 → ultralytics_actions-0.0.35}/actions/utils/openai_utils.py +0 -0
  18. {ultralytics_actions-0.0.33 → ultralytics_actions-0.0.35}/setup.cfg +0 -0
  19. {ultralytics_actions-0.0.33 → ultralytics_actions-0.0.35}/ultralytics_actions.egg-info/SOURCES.txt +0 -0
  20. {ultralytics_actions-0.0.33 → ultralytics_actions-0.0.35}/ultralytics_actions.egg-info/dependency_links.txt +0 -0
  21. {ultralytics_actions-0.0.33 → ultralytics_actions-0.0.35}/ultralytics_actions.egg-info/entry_points.txt +0 -0
  22. {ultralytics_actions-0.0.33 → ultralytics_actions-0.0.35}/ultralytics_actions.egg-info/top_level.txt +0 -0
1. {ultralytics_actions-0.0.33 → ultralytics_actions-0.0.35}/PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: ultralytics-actions
- Version: 0.0.33
+ Version: 0.0.35
  Summary: Ultralytics Actions for GitHub automation and PR management.
  Author-email: Glenn Jocher <glenn.jocher@ultralytics.com>
  Maintainer-email: Ultralytics <hello@ultralytics.com>
@@ -28,8 +28,8 @@ Classifier: Operating System :: OS Independent
  Requires-Python: >=3.8
  Description-Content-Type: text/markdown
  License-File: LICENSE
- Requires-Dist: requests>=2.26.0
- Requires-Dist: ruff>=0.1.6
+ Requires-Dist: requests>=2.32.3
+ Requires-Dist: ruff>=0.8.4
  Requires-Dist: docformatter>=1.7.5
  Provides-Extra: dev
  Requires-Dist: pytest; extra == "dev"
2. {ultralytics_actions-0.0.33 → ultralytics_actions-0.0.35}/actions/__init__.py

@@ -22,4 +22,4 @@
  # ├── test_summarize_pr.py
  # └── ...

- __version__ = "0.0.33"
+ __version__ = "0.0.35"
3. {ultralytics_actions-0.0.33 → ultralytics_actions-0.0.35}/actions/first_interaction.py

@@ -6,15 +6,9 @@ from typing import Dict, List, Tuple
  import requests

  from .utils import (
- EVENT_DATA,
  GITHUB_API_URL,
- GITHUB_EVENT_NAME,
- GITHUB_HEADERS,
- GITHUB_REPOSITORY,
+ Action,
  get_completion,
- get_github_data,
- get_pr_diff,
- graphql_request,
  remove_html_comments,
  )

@@ -22,21 +16,23 @@ from .utils import (
  BLOCK_USER = os.getenv("BLOCK_USER", "false").lower() == "true"


- def get_event_content() -> Tuple[int, str, str, str, str, str, str]:
+ def get_event_content(event) -> Tuple[int, str, str, str, str, str, str]:
  """Extracts key information from GitHub event data for issues, pull requests, or discussions."""
- action = EVENT_DATA["action"] # 'opened', 'closed', 'created' (discussion), etc.
- if GITHUB_EVENT_NAME == "issues":
- item = EVENT_DATA["issue"]
+ data = event.event_data
+ name = event.event_name
+ action = data["action"] # 'opened', 'closed', 'created' (discussion), etc.
+ if name == "issues":
+ item = data["issue"]
  issue_type = "issue"
- elif GITHUB_EVENT_NAME in ["pull_request", "pull_request_target"]:
- pr_number = EVENT_DATA["pull_request"]["number"]
- item = get_github_data(f"pulls/{pr_number}")
+ elif name in ["pull_request", "pull_request_target"]:
+ pr_number = data["pull_request"]["number"]
+ item = event.get_repo_data(f"pulls/{pr_number}")
  issue_type = "pull request"
- elif GITHUB_EVENT_NAME == "discussion":
- item = EVENT_DATA["discussion"]
+ elif name == "discussion":
+ item = data["discussion"]
  issue_type = "discussion"
  else:
- raise ValueError(f"Unsupported event type: {GITHUB_EVENT_NAME}")
+ raise ValueError(f"Unsupported event type: {name}")

  number = item["number"]
  node_id = item.get("node_id") or item.get("id")
@@ -46,7 +42,7 @@ def get_event_content() -> Tuple[int, str, str, str, str, str, str]:
  return number, node_id, title, body, username, issue_type, action


- def update_issue_pr_content(number: int, node_id: str, issue_type: str):
+ def update_issue_pr_content(event, number: int, node_id: str, issue_type: str):
  """Updates the title and body of an issue, pull request, or discussion with predefined content."""
  new_title = "Content Under Review"
  new_body = """This post has been flagged for review by [Ultralytics Actions](https://ultralytics.com/actions) due to possible spam, abuse, or off-topic content. For more information please see our:
@@ -68,14 +64,14 @@ mutation($discussionId: ID!, $title: String!, $body: String!) {
  }
  }
  """
- graphql_request(mutation, variables={"discussionId": node_id, "title": new_title, "body": new_body})
+ event.graphql_request(mutation, variables={"discussionId": node_id, "title": new_title, "body": new_body})
  else:
- url = f"{GITHUB_API_URL}/repos/{GITHUB_REPOSITORY}/issues/{number}"
- r = requests.patch(url, json={"title": new_title, "body": new_body}, headers=GITHUB_HEADERS)
+ url = f"{GITHUB_API_URL}/repos/{event.repository}/issues/{number}"
+ r = requests.patch(url, json={"title": new_title, "body": new_body}, headers=event.headers)
  print(f"{'Successful' if r.status_code == 200 else 'Fail'} issue/PR #{number} update: {r.status_code}")


- def close_issue_pr(number: int, node_id: str, issue_type: str):
+ def close_issue_pr(event, number: int, node_id: str, issue_type: str):
  """Closes the specified issue, pull request, or discussion using the GitHub API."""
  if issue_type == "discussion":
  mutation = """
@@ -87,14 +83,14 @@ mutation($discussionId: ID!) {
  }
  }
  """
- graphql_request(mutation, variables={"discussionId": node_id})
+ event.graphql_request(mutation, variables={"discussionId": node_id})
  else:
- url = f"{GITHUB_API_URL}/repos/{GITHUB_REPOSITORY}/issues/{number}"
- r = requests.patch(url, json={"state": "closed"}, headers=GITHUB_HEADERS)
+ url = f"{GITHUB_API_URL}/repos/{event.repository}/issues/{number}"
+ r = requests.patch(url, json={"state": "closed"}, headers=event.headers)
  print(f"{'Successful' if r.status_code == 200 else 'Fail'} issue/PR #{number} close: {r.status_code}")


- def lock_issue_pr(number: int, node_id: str, issue_type: str):
+ def lock_issue_pr(event, number: int, node_id: str, issue_type: str):
  """Locks an issue, pull request, or discussion to prevent further interactions."""
  if issue_type == "discussion":
  mutation = """
@@ -108,17 +104,17 @@ mutation($lockableId: ID!, $lockReason: LockReason) {
  }
  }
  """
- graphql_request(mutation, variables={"lockableId": node_id, "lockReason": "OFF_TOPIC"})
+ event.graphql_request(mutation, variables={"lockableId": node_id, "lockReason": "OFF_TOPIC"})
  else:
- url = f"{GITHUB_API_URL}/repos/{GITHUB_REPOSITORY}/issues/{number}/lock"
- r = requests.put(url, json={"lock_reason": "off-topic"}, headers=GITHUB_HEADERS)
+ url = f"{GITHUB_API_URL}/repos/{event.repository}/issues/{number}/lock"
+ r = requests.put(url, json={"lock_reason": "off-topic"}, headers=event.headers)
  print(f"{'Successful' if r.status_code in {200, 204} else 'Fail'} issue/PR #{number} lock: {r.status_code}")


- def block_user(username: str):
+ def block_user(event, username: str):
  """Blocks a user from the organization using the GitHub API."""
- url = f"{GITHUB_API_URL}/orgs/{GITHUB_REPOSITORY.split('/')[0]}/blocks/{username}"
- r = requests.put(url, headers=GITHUB_HEADERS)
+ url = f"{GITHUB_API_URL}/orgs/{event.repository.split('/')[0]}/blocks/{username}"
+ r = requests.put(url, headers=event.headers)
  print(f"{'Successful' if r.status_code == 204 else 'Fail'} user block for {username}: {r.status_code}")


@@ -167,7 +163,7 @@ YOUR RESPONSE (label names only):
  messages = [
  {
  "role": "system",
- "content": "You are a helpful assistant that labels GitHub issues, pull requests, and discussions.",
+ "content": "You are an Ultralytics AI assistant that labels GitHub issues, PRs, and discussions.",
  },
  {"role": "user", "content": prompt},
  ]
@@ -183,7 +179,7 @@ YOUR RESPONSE (label names only):
  ]


- def get_label_ids(labels: List[str]) -> List[str]:
+ def get_label_ids(event, labels: List[str]) -> List[str]:
  """Retrieves GitHub label IDs for a list of label names using the GraphQL API."""
  query = """
  query($owner: String!, $name: String!) {
@@ -197,8 +193,8 @@ query($owner: String!, $name: String!) {
  }
  }
  """
- owner, repo = GITHUB_REPOSITORY.split("/")
- result = graphql_request(query, variables={"owner": owner, "name": repo})
+ owner, repo = event.repository.split("/")
+ result = event.graphql_request(query, variables={"owner": owner, "name": repo})
  if "data" in result and "repository" in result["data"]:
  all_labels = result["data"]["repository"]["labels"]["nodes"]
  label_map = {label["name"].lower(): label["id"] for label in all_labels}
@@ -208,14 +204,14 @@ query($owner: String!, $name: String!) {
  return []


- def apply_labels(number: int, node_id: str, labels: List[str], issue_type: str):
+ def apply_labels(event, number: int, node_id: str, labels: List[str], issue_type: str):
  """Applies specified labels to a GitHub issue, pull request, or discussion using the appropriate API."""
  if "Alert" in labels:
- create_alert_label()
+ create_alert_label(event)

  if issue_type == "discussion":
  print(f"Using node_id: {node_id}") # Debug print
- label_ids = get_label_ids(labels)
+ label_ids = get_label_ids(event, labels)
  if not label_ids:
  print("No valid labels to apply.")
  return
@@ -231,29 +227,29 @@ mutation($labelableId: ID!, $labelIds: [ID!]!) {
  }
  }
  """
- graphql_request(mutation, {"labelableId": node_id, "labelIds": label_ids})
+ event.graphql_request(mutation, {"labelableId": node_id, "labelIds": label_ids})
  print(f"Successfully applied labels: {', '.join(labels)}")
  else:
- url = f"{GITHUB_API_URL}/repos/{GITHUB_REPOSITORY}/issues/{number}/labels"
- r = requests.post(url, json={"labels": labels}, headers=GITHUB_HEADERS)
+ url = f"{GITHUB_API_URL}/repos/{event.repository}/issues/{number}/labels"
+ r = requests.post(url, json={"labels": labels}, headers=event.headers)
  print(f"{'Successful' if r.status_code == 200 else 'Fail'} apply labels {', '.join(labels)}: {r.status_code}")


- def create_alert_label():
+ def create_alert_label(event):
  """Creates the 'Alert' label in the repository if it doesn't exist, with a red color and description."""
  alert_label = {"name": "Alert", "color": "FF0000", "description": "Potential spam, abuse, or off-topic."}
- requests.post(f"{GITHUB_API_URL}/repos/{GITHUB_REPOSITORY}/labels", json=alert_label, headers=GITHUB_HEADERS)
+ requests.post(f"{GITHUB_API_URL}/repos/{event.repository}/labels", json=alert_label, headers=event.headers)


- def is_org_member(username: str) -> bool:
+ def is_org_member(event, username: str) -> bool:
  """Checks if a user is a member of the organization using the GitHub API."""
- org_name = GITHUB_REPOSITORY.split("/")[0]
+ org_name = event.repository.split("/")[0]
  url = f"{GITHUB_API_URL}/orgs/{org_name}/members/{username}"
- r = requests.get(url, headers=GITHUB_HEADERS)
+ r = requests.get(url, headers=event.headers)
  return r.status_code == 204 # 204 means the user is a member


- def add_comment(number: int, node_id: str, comment: str, issue_type: str):
+ def add_comment(event, number: int, node_id: str, comment: str, issue_type: str):
  """Adds a comment to the specified issue, pull request, or discussion using the GitHub API."""
  if issue_type == "discussion":
  mutation = """
@@ -265,17 +261,17 @@ mutation($discussionId: ID!, $body: String!) {
  }
  }
  """
- graphql_request(mutation, variables={"discussionId": node_id, "body": comment})
+ event.graphql_request(mutation, variables={"discussionId": node_id, "body": comment})
  else:
- url = f"{GITHUB_API_URL}/repos/{GITHUB_REPOSITORY}/issues/{number}/comments"
- r = requests.post(url, json={"body": comment}, headers=GITHUB_HEADERS)
+ url = f"{GITHUB_API_URL}/repos/{event.repository}/issues/{number}/comments"
+ r = requests.post(url, json={"body": comment}, headers=event.headers)
  print(f"{'Successful' if r.status_code in {200, 201} else 'Fail'} issue/PR #{number} comment: {r.status_code}")


- def get_first_interaction_response(issue_type: str, title: str, body: str, username: str, number: int) -> str:
+ def get_first_interaction_response(event, issue_type: str, title: str, body: str, username: str) -> str:
  """Generates a custom LLM response for GitHub issues, PRs, or discussions based on content."""
  issue_discussion_response = f"""
- 👋 Hello @{username}, thank you for submitting a `{GITHUB_REPOSITORY}` 🚀 {issue_type.capitalize()}. To help us address your concern efficiently, please ensure you've provided the following information:
+ 👋 Hello @{username}, thank you for submitting a `{event.repository}` 🚀 {issue_type.capitalize()}. To help us address your concern efficiently, please ensure you've provided the following information:

  1. For bug reports:
  - A clear and concise description of the bug
@@ -300,10 +296,10 @@ Thank you for your contribution to improving our project!
  """

  pr_response = f"""
- 👋 Hello @{username}, thank you for submitting an `{GITHUB_REPOSITORY}` 🚀 PR! To ensure a seamless integration of your work, please review the following checklist:
+ 👋 Hello @{username}, thank you for submitting an `{event.repository}` 🚀 PR! To ensure a seamless integration of your work, please review the following checklist:

- - ✅ **Define a Purpose**: Clearly explain the purpose of your fix or feature in your PR description, and link to any [relevant issues](https://github.com/{GITHUB_REPOSITORY}/issues). Ensure your commit messages are clear, concise, and adhere to the project's conventions.
- - ✅ **Synchronize with Source**: Confirm your PR is synchronized with the `{GITHUB_REPOSITORY}` `main` branch. If it's behind, update it by clicking the 'Update branch' button or by running `git pull` and `git merge main` locally.
+ - ✅ **Define a Purpose**: Clearly explain the purpose of your fix or feature in your PR description, and link to any [relevant issues](https://github.com/{event.repository}/issues). Ensure your commit messages are clear, concise, and adhere to the project's conventions.
+ - ✅ **Synchronize with Source**: Confirm your PR is synchronized with the `{event.repository}` `main` branch. If it's behind, update it by clicking the 'Update branch' button or by running `git pull` and `git merge main` locally.
  - ✅ **Ensure CI Checks Pass**: Verify all Ultralytics [Continuous Integration (CI)](https://docs.ultralytics.com/help/CI/) checks are passing. If any checks fail, please address the issues.
  - ✅ **Update Documentation**: Update the relevant [documentation](https://docs.ultralytics.com) for any new or modified features.
  - ✅ **Add Tests**: If applicable, include or update tests to cover your changes, and confirm that all tests are passing.
@@ -318,9 +314,9 @@ For more guidance, please refer to our [Contributing Guide](https://docs.ultraly
  else:
  example = os.getenv("FIRST_ISSUE_RESPONSE") or issue_discussion_response

- org_name, repo_name = GITHUB_REPOSITORY.split("/")
- repo_url = f"https://github.com/{GITHUB_REPOSITORY}"
- diff = get_pr_diff(number)[:32000] if issue_type == "pull request" else ""
+ org_name, repo_name = event.repository.split("/")
+ repo_url = f"https://github.com/{event.repository}"
+ diff = event.get_pr_diff()[:32000] if issue_type == "pull request" else ""

  prompt = f"""Generate a customized response to the new GitHub {issue_type} below:

@@ -359,39 +355,40 @@ YOUR {issue_type.upper()} RESPONSE:
  messages = [
  {
  "role": "system",
- "content": f"You are a helpful assistant responding to GitHub {issue_type}s for {org_name}.",
+ "content": f"You are an Ultralytics AI assistant responding to GitHub {issue_type}s for {org_name}.",
  },
  {"role": "user", "content": prompt},
  ]
  return get_completion(messages)


- def main():
- """Executes autolabeling and custom response generation for new GitHub issues, PRs, and discussions."""
- number, node_id, title, body, username, issue_type, action = get_event_content()
- available_labels = get_github_data("labels")
+ def main(*args, **kwargs):
+ """Executes auto-labeling and custom response generation for new GitHub issues, PRs, and discussions."""
+ event = Action(*args, **kwargs)
+ number, node_id, title, body, username, issue_type, action = get_event_content(event)
+ available_labels = event.get_repo_data("labels")
  label_descriptions = {label["name"]: label.get("description", "") for label in available_labels}
  if issue_type == "discussion":
  current_labels = [] # For discussions, labels may need to be fetched differently or adjusted
  else:
- current_labels = [label["name"].lower() for label in get_github_data(f"issues/{number}/labels")]
+ current_labels = [label["name"].lower() for label in event.get_repo_data(f"issues/{number}/labels")]
  relevant_labels = get_relevant_labels(issue_type, title, body, label_descriptions, current_labels)

  if relevant_labels:
- apply_labels(number, node_id, relevant_labels, issue_type)
- if "Alert" in relevant_labels and not is_org_member(username):
- update_issue_pr_content(number, node_id, issue_type)
+ apply_labels(event, number, node_id, relevant_labels, issue_type)
+ if "Alert" in relevant_labels and not is_org_member(event, username):
+ update_issue_pr_content(event, number, node_id, issue_type)
  if issue_type != "pull request":
- close_issue_pr(number, node_id, issue_type)
- lock_issue_pr(number, node_id, issue_type)
+ close_issue_pr(event, number, node_id, issue_type)
+ lock_issue_pr(event, number, node_id, issue_type)
  if BLOCK_USER:
- block_user(username=username)
+ block_user(event, username=username)
  else:
  print("No relevant labels found or applied.")

  if action in {"opened", "created"}:
- custom_response = get_first_interaction_response(issue_type, title, body, username, number)
- add_comment(number, node_id, custom_response, issue_type)
+ custom_response = get_first_interaction_response(event, issue_type, title, body, username)
+ add_comment(event, number, node_id, custom_response, issue_type)


  if __name__ == "__main__":
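Note: the common thread in the first_interaction.py changes above is that helpers which previously read module-level globals (EVENT_DATA, GITHUB_EVENT_NAME, GITHUB_REPOSITORY, GITHUB_HEADERS) now take an explicit event argument, an instance of the new Action class added in actions/utils/github_utils.py (file 8 below). A minimal sketch of the resulting call style, for illustration only and not part of the packaged code (import paths are assumed from the package layout):

    from actions.first_interaction import add_comment, get_event_content
    from actions.utils import Action

    event = Action()  # with no arguments, falls back to the GITHUB_TOKEN, GITHUB_EVENT_NAME and GITHUB_EVENT_PATH env vars
    number, node_id, title, body, username, issue_type, action = get_event_content(event)
    add_comment(event, number, node_id, "Thanks for opening this!", issue_type)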
4. {ultralytics_actions-0.0.33 → ultralytics_actions-0.0.35}/actions/summarize_pr.py

@@ -6,12 +6,8 @@ import requests

  from .utils import (
  GITHUB_API_URL,
- GITHUB_HEADERS,
- GITHUB_REPOSITORY,
- PR,
+ Action,
  get_completion,
- get_github_username,
- get_pr_diff,
  )

  # Constants
@@ -40,11 +36,11 @@ def generate_merge_message(pr_summary=None, pr_credit=None):
  return get_completion(messages)


- def post_merge_message(pr_number, summary, pr_credit):
+ def post_merge_message(pr_number, repository, summary, pr_credit, headers):
  """Posts thank you message on PR after merge."""
  message = generate_merge_message(summary, pr_credit)
- comment_url = f"{GITHUB_API_URL}/repos/{GITHUB_REPOSITORY}/issues/{pr_number}/comments"
- response = requests.post(comment_url, json={"body": message}, headers=GITHUB_HEADERS)
+ comment_url = f"{GITHUB_API_URL}/repos/{repository}/issues/{pr_number}/comments"
+ response = requests.post(comment_url, json={"body": message}, headers=headers)
  return response.status_code == 201


@@ -72,7 +68,7 @@ def generate_issue_comment(pr_url, pr_summary, pr_credit):
  return get_completion(messages)


- def generate_pr_summary(repo_name, diff_text):
+ def generate_pr_summary(repository, diff_text):
  """Generates a concise, professional summary of a PR using OpenAI's API for Ultralytics repositories."""
  if not diff_text:
  diff_text = "**ERROR: DIFF IS EMPTY, THERE ARE ZERO CODE CHANGES IN THIS PR."
@@ -85,7 +81,7 @@ def generate_pr_summary(repo_name, diff_text):
  },
  {
  "role": "user",
- "content": f"Summarize this '{repo_name}' PR, focusing on major changes, their purpose, and potential impact. Keep the summary clear and concise, suitable for a broad audience. Add emojis to enliven the summary. Reply directly with a summary along these example guidelines, though feel free to adjust as appropriate:\n\n"
+ "content": f"Summarize this '{repository}' PR, focusing on major changes, their purpose, and potential impact. Keep the summary clear and concise, suitable for a broad audience. Add emojis to enliven the summary. Reply directly with a summary along these example guidelines, though feel free to adjust as appropriate:\n\n"
  f"### 🌟 Summary (single-line synopsis)\n"
  f"### 📊 Key Changes (bullet points highlighting any major changes)\n"
  f"### 🎯 Purpose & Impact (bullet points explaining any benefits and potential impact to users)\n"
@@ -98,12 +94,12 @@ def generate_pr_summary(repo_name, diff_text):
  return SUMMARY_START + reply


- def update_pr_description(repo_name, pr_number, new_summary, max_retries=2):
+ def update_pr_description(repository, pr_number, new_summary, headers, max_retries=2):
  """Updates PR description with new summary, retrying if description is None."""
- pr_url = f"{GITHUB_API_URL}/repos/{repo_name}/pulls/{pr_number}"
+ pr_url = f"{GITHUB_API_URL}/repos/{repository}/pulls/{pr_number}"
  description = ""
  for i in range(max_retries + 1):
- description = requests.get(pr_url, headers=GITHUB_HEADERS).json().get("body") or ""
+ description = requests.get(pr_url, headers=headers).json().get("body") or ""
  if description:
  break
  if i < max_retries:
@@ -120,11 +116,11 @@ def update_pr_description(repo_name, pr_number, new_summary, max_retries=2):
  updated_description = description + "\n\n" + new_summary

  # Update the PR description
- update_response = requests.patch(pr_url, json={"body": updated_description}, headers=GITHUB_HEADERS)
+ update_response = requests.patch(pr_url, json={"body": updated_description}, headers=headers)
  return update_response.status_code


- def label_fixed_issues(pr_number, pr_summary):
+ def label_fixed_issues(repository, pr_number, pr_summary, headers, action):
  """Labels issues closed by PR when merged, notifies users, returns PR contributors."""
  query = """
  query($owner: String!, $repo: String!, $pr_number: Int!) {
@@ -141,10 +137,10 @@ query($owner: String!, $repo: String!, $pr_number: Int!) {
  }
  }
  """
- owner, repo = GITHUB_REPOSITORY.split("/")
+ owner, repo = repository.split("/")
  variables = {"owner": owner, "repo": repo, "pr_number": pr_number}
  graphql_url = "https://api.github.com/graphql"
- response = requests.post(graphql_url, json={"query": query, "variables": variables}, headers=GITHUB_HEADERS)
+ response = requests.post(graphql_url, json={"query": query, "variables": variables}, headers=headers)

  if response.status_code != 200:
  print(f"Failed to fetch linked issues. Status code: {response.status_code}")
@@ -153,7 +149,7 @@ query($owner: String!, $repo: String!, $pr_number: Int!) {
  try:
  data = response.json()["data"]["repository"]["pullRequest"]
  comments = data["reviews"]["nodes"] + data["comments"]["nodes"]
- token_username = get_github_username() # get GITHUB_TOKEN username
+ token_username = action.get_username() # get GITHUB_TOKEN username
  author = data["author"]["login"] if data["author"]["__typename"] != "Bot" else None

  # Get unique contributors from reviews and comments
@@ -184,12 +180,12 @@ query($owner: String!, $repo: String!, $pr_number: Int!) {
  for issue in data["closingIssuesReferences"]["nodes"]:
  issue_number = issue["number"]
  # Add fixed label
- label_url = f"{GITHUB_API_URL}/repos/{GITHUB_REPOSITORY}/issues/{issue_number}/labels"
- label_response = requests.post(label_url, json={"labels": ["fixed"]}, headers=GITHUB_HEADERS)
+ label_url = f"{GITHUB_API_URL}/repos/{repository}/issues/{issue_number}/labels"
+ label_response = requests.post(label_url, json={"labels": ["fixed"]}, headers=headers)

  # Add comment
- comment_url = f"{GITHUB_API_URL}/repos/{GITHUB_REPOSITORY}/issues/{issue_number}/comments"
- comment_response = requests.post(comment_url, json={"body": comment}, headers=GITHUB_HEADERS)
+ comment_url = f"{GITHUB_API_URL}/repos/{repository}/issues/{issue_number}/comments"
+ comment_response = requests.post(comment_url, json={"body": comment}, headers=headers)

  if label_response.status_code == 200 and comment_response.status_code == 201:
  print(f"Added 'fixed' label and comment to issue #{issue_number}")
@@ -205,42 +201,43 @@ query($owner: String!, $repo: String!, $pr_number: Int!) {
  return [], None


- def remove_todos_on_merge(pr_number):
+ def remove_todos_on_merge(pr_number, repository, headers):
  """Removes specified labels from PR."""
  for label in ["TODO"]: # Can be extended with more labels in the future
- requests.delete(
- f"{GITHUB_API_URL}/repos/{GITHUB_REPOSITORY}/issues/{pr_number}/labels/{label}", headers=GITHUB_HEADERS
- )
+ requests.delete(f"{GITHUB_API_URL}/repos/{repository}/issues/{pr_number}/labels/{label}", headers=headers)


- def main():
+ def main(*args, **kwargs):
  """Summarize a pull request and update its description with a summary."""
- pr_number = PR["number"]
+ action = Action(*args, **kwargs)
+ pr_number = action.pr["number"]
+ headers = action.headers
+ repository = action.repository

  print(f"Retrieving diff for PR {pr_number}")
- diff = get_pr_diff(pr_number)
+ diff = action.get_pr_diff()

  # Generate PR summary
  print("Generating PR summary...")
- summary = generate_pr_summary(GITHUB_REPOSITORY, diff)
+ summary = generate_pr_summary(repository, diff)

  # Update PR description
  print("Updating PR description...")
- status_code = update_pr_description(GITHUB_REPOSITORY, pr_number, summary)
+ status_code = update_pr_description(repository, pr_number, summary, headers)
  if status_code == 200:
  print("PR description updated successfully.")
  else:
  print(f"Failed to update PR description. Status code: {status_code}")

  # Update linked issues and post thank you message if merged
- if PR.get("merged"):
+ if action.pr.get("merged"):
  print("PR is merged, labeling fixed issues...")
- pr_credit = label_fixed_issues(pr_number, summary)
+ pr_credit = label_fixed_issues(repository, pr_number, summary, headers, action)
  print("Removing TODO label from PR...")
- remove_todos_on_merge(pr_number)
+ remove_todos_on_merge(pr_number, repository, headers)
  if pr_credit:
  print("Posting PR author thank you message...")
- post_merge_message(pr_number, summary, pr_credit)
+ post_merge_message(pr_number, repository, summary, pr_credit, headers)


  if __name__ == "__main__":
5. {ultralytics_actions-0.0.33 → ultralytics_actions-0.0.35}/actions/summarize_release.py

@@ -10,10 +10,7 @@ import requests

  from .utils import (
  GITHUB_API_URL,
- GITHUB_HEADERS,
- GITHUB_HEADERS_DIFF,
- GITHUB_REPOSITORY,
- GITHUB_TOKEN,
+ Action,
  get_completion,
  remove_html_comments,
  )
@@ -23,17 +20,17 @@ CURRENT_TAG = os.getenv("CURRENT_TAG")
  PREVIOUS_TAG = os.getenv("PREVIOUS_TAG")


- def get_release_diff(repo_name: str, previous_tag: str, latest_tag: str) -> str:
+ def get_release_diff(repo_name: str, previous_tag: str, latest_tag: str, headers: dict) -> str:
  """Retrieves the differences between two specified Git tags in a GitHub repository."""
  url = f"{GITHUB_API_URL}/repos/{repo_name}/compare/{previous_tag}...{latest_tag}"
- r = requests.get(url, headers=GITHUB_HEADERS_DIFF)
+ r = requests.get(url, headers=headers)
  return r.text if r.status_code == 200 else f"Failed to get diff: {r.content}"


- def get_prs_between_tags(repo_name: str, previous_tag: str, latest_tag: str) -> list:
+ def get_prs_between_tags(repo_name: str, previous_tag: str, latest_tag: str, headers: dict) -> list:
  """Retrieves and processes pull requests merged between two specified tags in a GitHub repository."""
  url = f"{GITHUB_API_URL}/repos/{repo_name}/compare/{previous_tag}...{latest_tag}"
- r = requests.get(url, headers=GITHUB_HEADERS)
+ r = requests.get(url, headers=headers)
  r.raise_for_status()

  data = r.json()
@@ -47,7 +44,7 @@ def get_prs_between_tags(repo_name: str, previous_tag: str, latest_tag: str) ->
  time.sleep(10) # sleep 10 seconds to allow final PR summary to update on merge
  for pr_number in sorted(pr_numbers): # earliest to latest
  pr_url = f"{GITHUB_API_URL}/repos/{repo_name}/pulls/{pr_number}"
- pr_response = requests.get(pr_url, headers=GITHUB_HEADERS)
+ pr_response = requests.get(pr_url, headers=headers)
  if pr_response.status_code == 200:
  pr_data = pr_response.json()
  prs.append(
@@ -67,14 +64,14 @@ def get_prs_between_tags(repo_name: str, previous_tag: str, latest_tag: str) ->
  return prs


- def get_new_contributors(repo: str, prs: list) -> set:
+ def get_new_contributors(repo: str, prs: list, headers: dict) -> set:
  """Identify new contributors who made their first merged PR in the current release."""
  new_contributors = set()
  for pr in prs:
  author = pr["author"]
  # Check if this is the author's first contribution
  url = f"{GITHUB_API_URL}/search/issues?q=repo:{repo}+author:{author}+is:pr+is:merged&sort=created&order=asc"
- r = requests.get(url, headers=GITHUB_HEADERS)
+ r = requests.get(url, headers=headers)
  if r.status_code == 200:
  data = r.json()
  if data["total_count"] > 0:
@@ -84,7 +81,9 @@ def get_new_contributors(repo: str, prs: list) -> set:
  return new_contributors


- def generate_release_summary(diff: str, prs: list, latest_tag: str, previous_tag: str, repo_name: str) -> str:
+ def generate_release_summary(
+ diff: str, prs: list, latest_tag: str, previous_tag: str, repo_name: str, headers: dict
+ ) -> str:
  """Generate a concise release summary with key changes, purpose, and impact for a new Ultralytics version."""
  pr_summaries = "\n\n".join(
  [f"PR #{pr['number']}: {pr['title']} by @{pr['author']}\n{pr['body'][:1000]}" for pr in prs]
@@ -100,7 +99,7 @@ def generate_release_summary(diff: str, prs: list, latest_tag: str, previous_tag
  whats_changed = "\n".join([f"* {pr['title']} by @{pr['author']} in {pr['html_url']}" for pr in prs])

  # Generate New Contributors section
- new_contributors = get_new_contributors(repo_name, prs)
+ new_contributors = get_new_contributors(repo_name, prs, headers)
  new_contributors_section = (
  "\n## New Contributors\n"
  + "\n".join(
@@ -138,11 +137,11 @@ def generate_release_summary(diff: str, prs: list, latest_tag: str, previous_tag
  return get_completion(messages) + release_suffix


- def create_github_release(repo_name: str, tag_name: str, name: str, body: str) -> int:
+ def create_github_release(repo_name: str, tag_name: str, name: str, body: str, headers: dict) -> int:
  """Creates a GitHub release with specified tag, name, and body content for the given repository."""
  url = f"{GITHUB_API_URL}/repos/{repo_name}/releases"
  data = {"tag_name": tag_name, "name": name, "body": body, "draft": False, "prerelease": False}
- r = requests.post(url, headers=GITHUB_HEADERS, json=data)
+ r = requests.post(url, headers=headers, json=data)
  return r.status_code


@@ -156,22 +155,24 @@ def get_previous_tag() -> str:
  return "HEAD~1"


- def main():
+ def main(*args, **kwargs):
  """Automates generating and publishing a GitHub release summary from PRs and commit differences."""
- if not all([GITHUB_TOKEN, CURRENT_TAG]):
+ action = Action(*args, **kwargs)
+
+ if not all([action.token, CURRENT_TAG]):
  raise ValueError("One or more required environment variables are missing.")

  previous_tag = PREVIOUS_TAG or get_previous_tag()

  # Get the diff between the tags
- diff = get_release_diff(GITHUB_REPOSITORY, previous_tag, CURRENT_TAG)
+ diff = get_release_diff(action.repository, previous_tag, CURRENT_TAG, action.headers_diff)

  # Get PRs merged between the tags
- prs = get_prs_between_tags(GITHUB_REPOSITORY, previous_tag, CURRENT_TAG)
+ prs = get_prs_between_tags(action.repository, previous_tag, CURRENT_TAG, action.headers)

  # Generate release summary
  try:
- summary = generate_release_summary(diff, prs, CURRENT_TAG, previous_tag, GITHUB_REPOSITORY)
+ summary = generate_release_summary(diff, prs, CURRENT_TAG, previous_tag, action.repository, action.headers)
  except Exception as e:
  print(f"Failed to generate summary: {str(e)}")
  summary = "Failed to generate summary."
@@ -181,7 +182,8 @@ def main():
  commit_message = subprocess.run(cmd, check=True, text=True, capture_output=True).stdout.split("\n")[0].strip()

  # Create the release on GitHub
- status_code = create_github_release(GITHUB_REPOSITORY, CURRENT_TAG, f"{CURRENT_TAG} - {commit_message}", summary)
+ msg = f"{CURRENT_TAG} - {commit_message}"
+ status_code = create_github_release(action.repository, CURRENT_TAG, msg, summary, action.headers)
  if status_code == 201:
  print(f"Successfully created release {CURRENT_TAG}")
  else:
6. ultralytics_actions-0.0.35/actions/utils/__init__.py (new file)

@@ -0,0 +1,19 @@
+ # Ultralytics Actions 🚀, AGPL-3.0 license https://ultralytics.com/license
+
+ from .common_utils import remove_html_comments
+ from .github_utils import (
+ GITHUB_API_URL,
+ Action,
+ check_pypi_version,
+ ultralytics_actions_info,
+ )
+ from .openai_utils import get_completion
+
+ __all__ = (
+ "GITHUB_API_URL",
+ "Action",
+ "check_pypi_version",
+ "get_completion",
+ "remove_html_comments",
+ "ultralytics_actions_info",
+ )
7. {ultralytics_actions-0.0.33 → ultralytics_actions-0.0.35}/actions/utils/common_utils.py

@@ -1,10 +1,9 @@
  # Ultralytics Actions 🚀, AGPL-3.0 license https://ultralytics.com/license

  import re
- import socket
  import time
- import urllib
  from concurrent.futures import ThreadPoolExecutor
+ from urllib import parse


  def remove_html_comments(body: str) -> str:
@@ -37,6 +36,10 @@ def is_url(url, check=True, max_attempts=3, timeout=2):
  "github.com", # ignore GitHub links that may be private repos
  "kaggle.com", # blocks automated header requests
  "reddit.com", # blocks automated header requests
+ "linkedin.com",
+ "twitter.com",
+ "x.com",
+ "storage.googleapis.com", # private GCS buckets
  )
  try:
  # Check allow list
@@ -44,7 +47,7 @@ def is_url(url, check=True, max_attempts=3, timeout=2):
  return True

  # Check structure
- result = urllib.parse.urlparse(url)
+ result = parse.urlparse(url)
  if not all([result.scheme, result.netloc]):
  return False

@@ -52,16 +55,14 @@ def is_url(url, check=True, max_attempts=3, timeout=2):
  if check:
  for attempt in range(max_attempts):
  try:
- req = urllib.request.Request(
- url,
- method="HEAD",
- headers={
- "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
- },
- )
- with urllib.request.urlopen(req, timeout=timeout) as response:
- return response.getcode() < 400
- except (urllib.error.URLError, socket.timeout):
+ headers = {
+ "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) Chrome/120.0.0.0",
+ "Accept": "*",
+ "Accept-Language": "*",
+ "Accept-Encoding": "*",
+ }
+ return requests.head(url, headers=headers, timeout=timeout, allow_redirects=True).status_code < 400
+ except Exception:
  if attempt == max_attempts - 1: # last attempt
  return False
  time.sleep(2**attempt) # exponential backoff
@@ -84,13 +85,15 @@ def check_links_in_string(text, verbose=True, return_bad=False):
  r"(?:/[^\s\"')\]]*)?" # Optional path
  r")"
  )
+ # all_urls.extend([url for url in match if url and parse.urlparse(url).scheme])
  all_urls = []
  for md_text, md_url, plain_url in re.findall(pattern, text):
  url = md_url or plain_url
- if url and urllib.parse.urlparse(url).scheme:
+ if url and parse.urlparse(url).scheme:
  all_urls.append(url)

  urls = set(map(clean_url, all_urls)) # remove extra characters and make unique
+ # bad_urls = [x for x in urls if not is_url(x, check=True)] # single-thread
  with ThreadPoolExecutor(max_workers=16) as executor: # multi-thread
  bad_urls = [url for url, valid in zip(urls, executor.map(lambda x: not is_url(x, check=True), urls)) if valid]
8. ultralytics_actions-0.0.35/actions/utils/github_utils.py (new file)

@@ -0,0 +1,163 @@
+ # Ultralytics Actions 🚀, AGPL-3.0 license https://ultralytics.com/license
+ import json
+ import os
+ from pathlib import Path
+
+ import requests
+
+ from actions import __version__
+
+ GITHUB_API_URL = "https://api.github.com"
+
+
+ class Action:
+ """Handles GitHub Actions API interactions and event processing."""
+
+ def __init__(
+ self,
+ token: str = None,
+ event_name: str = None,
+ event_data: dict = None,
+ ):
+ self.token = token or os.getenv("GITHUB_TOKEN")
+ self.event_name = event_name or os.getenv("GITHUB_EVENT_NAME")
+ self.event_data = event_data or self._load_event_data(os.getenv("GITHUB_EVENT_PATH"))
+
+ self.pr = self.event_data.get("pull_request", {})
+ self.repository = self.event_data.get("repository", {}).get("full_name")
+ self.headers = {"Authorization": f"token {self.token}", "Accept": "application/vnd.github.v3+json"}
+ self.headers_diff = {"Authorization": f"token {self.token}", "Accept": "application/vnd.github.v3.diff"}
+
+ @staticmethod
+ def _load_event_data(event_path: str) -> dict:
+ """Loads GitHub event data from path if it exists."""
+ if event_path and Path(event_path).exists():
+ return json.loads(Path(event_path).read_text())
+ return {}
+
+ def get_username(self) -> str | None:
+ """Gets username associated with the GitHub token."""
+ query = "query { viewer { login } }"
+ response = requests.post(f"{GITHUB_API_URL}/graphql", json={"query": query}, headers=self.headers)
+ if response.status_code != 200:
+ print(f"Failed to fetch authenticated user. Status code: {response.status_code}")
+ return None
+ try:
+ return response.json()["data"]["viewer"]["login"]
+ except KeyError as e:
+ print(f"Error parsing authenticated user response: {e}")
+ return None
+
+ def get_pr_diff(self) -> str:
+ """Retrieves the diff content for a specified pull request."""
+ url = f"{GITHUB_API_URL}/repos/{self.repository}/pulls/{self.pr.get('number')}"
+ r = requests.get(url, headers=self.headers_diff)
+ return r.text if r.status_code == 200 else ""
+
+ def get_repo_data(self, endpoint: str) -> dict:
+ """Fetches repository data from a specified endpoint."""
+ r = requests.get(f"{GITHUB_API_URL}/repos/{self.repository}/{endpoint}", headers=self.headers)
+ r.raise_for_status()
+ return r.json()
+
+ def graphql_request(self, query: str, variables: dict = None) -> dict:
+ """Executes a GraphQL query against the GitHub API."""
+ headers = {
+ "Authorization": f"Bearer {self.token}",
+ "Content-Type": "application/json",
+ "Accept": "application/vnd.github.v4+json",
+ }
+ r = requests.post(f"{GITHUB_API_URL}/graphql", json={"query": query, "variables": variables}, headers=headers)
+ r.raise_for_status()
+ result = r.json()
+ success = "data" in result and not result.get("errors")
+ print(
+ f"{'Successful' if success else 'Failed'} discussion GraphQL request: {result.get('errors', 'No errors')}"
+ )
+ return result
+
+ def print_info(self):
+ """Print GitHub Actions information."""
+ info = {
+ "github.event_name": self.event_name,
+ "github.event.action": self.event_data.get("action"),
+ "github.repository": self.repository,
+ "github.event.pull_request.number": self.pr.get("number"),
+ "github.event.pull_request.head.repo.full_name": self.pr.get("head", {}).get("repo", {}).get("full_name"),
+ "github.actor": os.environ.get("GITHUB_ACTOR"),
+ "github.event.pull_request.head.ref": self.pr.get("head", {}).get("ref"),
+ "github.ref": os.environ.get("GITHUB_REF"),
+ "github.head_ref": os.environ.get("GITHUB_HEAD_REF"),
+ "github.base_ref": os.environ.get("GITHUB_BASE_REF"),
+ "github.base_sha": self.pr.get("base", {}).get("sha"),
+ }
+
+ if self.event_name == "discussion":
+ discussion = self.event_data.get("discussion", {})
+ info.update(
+ {
+ "github.event.discussion.node_id": discussion.get("node_id"),
+ "github.event.discussion.number": discussion.get("number"),
+ }
+ )
+
+ max_key_length = max(len(key) for key in info)
+ header = f"Ultralytics Actions {__version__} Information " + "-" * 40
+ print(header)
+ for key, value in info.items():
+ print(f"{key:<{max_key_length + 5}}{value}")
+ print("-" * len(header))
+
+
+ def ultralytics_actions_info():
+ Action().print_info()
+
+
+ def check_pypi_version(pyproject_toml="pyproject.toml"):
+ """Compares local and PyPI package versions to determine if a new version should be published."""
+ import re
+
+ import tomllib # requires Python>=3.11
+
+ version_pattern = re.compile(r"^\d+\.\d+\.\d+$") # e.g. 0.0.0
+
+ with open(pyproject_toml, "rb") as f:
+ pyproject = tomllib.load(f)
+
+ package_name = pyproject["project"]["name"]
+ local_version = pyproject["project"].get("version", "dynamic")
+
+ # If version is dynamic, extract it from the specified file
+ if local_version == "dynamic":
+ version_attr = pyproject["tool"]["setuptools"]["dynamic"]["version"]["attr"]
+ module_path, attr_name = version_attr.rsplit(".", 1)
+ with open(f"{module_path.replace('.', '/')}/__init__.py") as f:
+ local_version = next(line.split("=")[1].strip().strip("'\"") for line in f if line.startswith(attr_name))
+
+ print(f"Local Version: {local_version}")
+ if not bool(version_pattern.match(local_version)):
+ print("WARNING: Incorrect local version pattern")
+ return "0.0.0", "0.0.0", False
+
+ # Get online version from PyPI
+ response = requests.get(f"https://pypi.org/pypi/{package_name}/json")
+ online_version = response.json()["info"]["version"] if response.status_code == 200 else None
+ print(f"Online Version: {online_version or 'Not Found'}")
+
+ # Determine if a new version should be published
+ if online_version:
+ local_ver = tuple(map(int, local_version.split(".")))
+ online_ver = tuple(map(int, online_version.split(".")))
+ major_diff = local_ver[0] - online_ver[0]
+ minor_diff = local_ver[1] - online_ver[1]
+ patch_diff = local_ver[2] - online_ver[2]
+
+ publish = (
+ (major_diff == 0 and minor_diff == 0 and 0 < patch_diff <= 2)
+ or (major_diff == 0 and minor_diff == 1 and local_ver[2] == 0)
+ or (major_diff == 1 and local_ver[1] == 0 and local_ver[2] == 0)
+ ) # should publish an update
+ else:
+ publish = True # publish as this is likely a first release
+
+ return local_version, online_version, publish
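Note: the new Action class above bundles the token, event name, event payload, repository and both Accept-header dicts that were previously module-level constants (compare the deleted github_utils.py in file 13 below); check_pypi_version() is carried over unchanged, while ultralytics_actions_info() now simply calls Action().print_info(). A small sketch of exercising these helpers locally, for illustration only and not part of the packaged code (the token value is a placeholder):

    from actions.utils import Action, check_pypi_version

    Action(token="<GITHUB_TOKEN>").print_info()  # prints the "Ultralytics Actions x.y.z Information" table
    local, online, publish = check_pypi_version("pyproject.toml")  # tomllib requires Python >= 3.11
    print(local, online, publish)  # e.g. ('0.0.35', '0.0.33', True), since a patch bump of <= 2 is publishable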
9. {ultralytics_actions-0.0.33 → ultralytics_actions-0.0.35}/pyproject.toml

@@ -65,8 +65,8 @@ classifiers = [
  ]

  dependencies = [
- "requests>=2.26.0",
- "ruff>=0.1.6",
+ "requests>=2.32.3",
+ "ruff>=0.8.4",
  "docformatter>=1.7.5",
  ]

@@ -95,3 +95,16 @@ packages = { find = { where = ["."], include = ["actions", "actions.*"] } }

  [tool.setuptools.dynamic]
  version = { attr = "actions.__version__" }
+
+ [tool.ruff]
+ line-length = 120
+
+ [tool.ruff.format]
+ docstring-code-format = true
+
+ [tool.docformatter]
+ wrap-summaries = 120
+ wrap-descriptions = 120
+ pre-summary-newline = true
+ close-quotes-on-newline = true
+ in-place = true
10. {ultralytics_actions-0.0.33 → ultralytics_actions-0.0.35}/ultralytics_actions.egg-info/PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: ultralytics-actions
- Version: 0.0.33
+ Version: 0.0.35
  Summary: Ultralytics Actions for GitHub automation and PR management.
  Author-email: Glenn Jocher <glenn.jocher@ultralytics.com>
  Maintainer-email: Ultralytics <hello@ultralytics.com>
@@ -28,8 +28,8 @@ Classifier: Operating System :: OS Independent
  Requires-Python: >=3.8
  Description-Content-Type: text/markdown
  License-File: LICENSE
- Requires-Dist: requests>=2.26.0
- Requires-Dist: ruff>=0.1.6
+ Requires-Dist: requests>=2.32.3
+ Requires-Dist: ruff>=0.8.4
  Requires-Dist: docformatter>=1.7.5
  Provides-Extra: dev
  Requires-Dist: pytest; extra == "dev"
11. {ultralytics_actions-0.0.33 → ultralytics_actions-0.0.35}/ultralytics_actions.egg-info/requires.txt

@@ -1,5 +1,5 @@
- requests>=2.26.0
- ruff>=0.1.6
+ requests>=2.32.3
+ ruff>=0.8.4
  docformatter>=1.7.5

  [dev]
12. ultralytics_actions-0.0.33/actions/utils/__init__.py (deleted)

@@ -1,45 +0,0 @@
- # Ultralytics Actions 🚀, AGPL-3.0 license https://ultralytics.com/license
-
- from .common_utils import remove_html_comments
- from .github_utils import (
- DISCUSSION,
- EVENT_DATA,
- GITHUB_API_URL,
- GITHUB_EVENT_NAME,
- GITHUB_EVENT_PATH,
- GITHUB_HEADERS,
- GITHUB_HEADERS_DIFF,
- GITHUB_REPOSITORY,
- GITHUB_TOKEN,
- PR,
- check_pypi_version,
- get_github_data,
- get_github_username,
- get_pr_diff,
- graphql_request,
- ultralytics_actions_info,
- )
- from .openai_utils import OPENAI_API_KEY, OPENAI_MODEL, get_completion
-
- __all__ = (
- "remove_html_comments",
- "EVENT_DATA",
- "GITHUB_API_URL",
- "GITHUB_HEADERS",
- "GITHUB_HEADERS_DIFF",
- "GITHUB_TOKEN",
- "GITHUB_REPOSITORY",
- "PR",
- "DISCUSSION",
- "GITHUB_EVENT_NAME",
- "GITHUB_EVENT_PATH",
- "get_github_data",
- "get_pr_diff",
- "graphql_request",
- "OPENAI_API_KEY",
- "OPENAI_MODEL",
- "get_completion",
- "get_github_username",
- "check_pypi_version",
- "ultralytics_actions_info",
- )
13. ultralytics_actions-0.0.33/actions/utils/github_utils.py (deleted)

@@ -1,157 +0,0 @@
- # Ultralytics Actions 🚀, AGPL-3.0 license https://ultralytics.com/license
- import json
- import os
- from pathlib import Path
-
- import requests
-
- from actions import __version__
-
- GITHUB_TOKEN = os.getenv("GITHUB_TOKEN")
- GITHUB_REPOSITORY = os.getenv("GITHUB_REPOSITORY")
- GITHUB_EVENT_NAME = os.getenv("GITHUB_EVENT_NAME")
- GITHUB_EVENT_PATH = os.getenv("GITHUB_EVENT_PATH")
- GITHUB_API_URL = "https://api.github.com"
- GITHUB_HEADERS = {"Authorization": f"token {GITHUB_TOKEN}", "Accept": "application/vnd.github.v3+json"}
- GITHUB_HEADERS_DIFF = {"Authorization": f"token {GITHUB_TOKEN}", "Accept": "application/vnd.github.v3.diff"}
-
- EVENT_DATA = {}
- if GITHUB_EVENT_PATH:
- event_path = Path(GITHUB_EVENT_PATH)
- if event_path.exists():
- EVENT_DATA = json.loads(event_path.read_text())
- PR = EVENT_DATA.get("pull_request", {})
- DISCUSSION = EVENT_DATA.get("discussion", {})
-
-
- def get_github_username():
- """Gets username associated with the GitHub token in GITHUB_HEADERS."""
- query = """
- query {
- viewer {
- login
- }
- }
- """
- response = requests.post("https://api.github.com/graphql", json={"query": query}, headers=GITHUB_HEADERS)
- if response.status_code != 200:
- print(f"Failed to fetch authenticated user. Status code: {response.status_code}")
- return None
-
- try:
- return response.json()["data"]["viewer"]["login"]
- except KeyError as e:
- print(f"Error parsing authenticated user response: {e}")
- return None
-
-
- def get_pr_diff(pr_number: int) -> str:
- """Retrieves the diff content for a specified pull request in a GitHub repository."""
- url = f"{GITHUB_API_URL}/repos/{GITHUB_REPOSITORY}/pulls/{pr_number}"
- r = requests.get(url, headers=GITHUB_HEADERS_DIFF)
- return r.text if r.status_code == 200 else ""
-
-
- def get_github_data(endpoint: str) -> dict:
- """Fetches GitHub repository data from a specified endpoint using the GitHub API."""
- r = requests.get(f"{GITHUB_API_URL}/repos/{GITHUB_REPOSITORY}/{endpoint}", headers=GITHUB_HEADERS)
- r.raise_for_status()
- return r.json()
-
-
- def graphql_request(query: str, variables: dict = None) -> dict:
- """Executes a GraphQL query against the GitHub API and returns the response as a dictionary."""
- headers = {
- "Authorization": f"Bearer {GITHUB_TOKEN}",
- "Content-Type": "application/json",
- "Accept": "application/vnd.github.v4+json",
- }
- r = requests.post(f"{GITHUB_API_URL}/graphql", json={"query": query, "variables": variables}, headers=headers)
- r.raise_for_status()
- result = r.json()
- success = "data" in result and not result.get("errors")
- print(f"{'Successful' if success else 'Fail'} discussion GraphQL request: {result.get('errors', 'No errors')}")
- return result
-
-
- def check_pypi_version(pyproject_toml="pyproject.toml"):
- """Compares local and PyPI package versions to determine if a new version should be published."""
- import re
-
- import tomllib # requires Python>=3.11
-
- version_pattern = re.compile(r"^\d+\.\d+\.\d+$") # e.g. 0.0.0
-
- with open(pyproject_toml, "rb") as f:
- pyproject = tomllib.load(f)
-
- package_name = pyproject["project"]["name"]
- local_version = pyproject["project"].get("version", "dynamic")
-
- # If version is dynamic, extract it from the specified file
- if local_version == "dynamic":
- version_attr = pyproject["tool"]["setuptools"]["dynamic"]["version"]["attr"]
- module_path, attr_name = version_attr.rsplit(".", 1)
- with open(f"{module_path.replace('.', '/')}/__init__.py") as f:
- local_version = next(line.split("=")[1].strip().strip("'\"") for line in f if line.startswith(attr_name))
-
- print(f"Local Version: {local_version}")
- if not bool(version_pattern.match(local_version)):
- print("WARNING: Incorrect local version pattern")
- return "0.0.0", "0.0.0", False
-
- # Get online version from PyPI
- response = requests.get(f"https://pypi.org/pypi/{package_name}/json")
- online_version = response.json()["info"]["version"] if response.status_code == 200 else None
- print(f"Online Version: {online_version or 'Not Found'}")
-
- # Determine if a new version should be published
- if online_version:
- local_ver = tuple(map(int, local_version.split(".")))
- online_ver = tuple(map(int, online_version.split(".")))
- major_diff = local_ver[0] - online_ver[0]
- minor_diff = local_ver[1] - online_ver[1]
- patch_diff = local_ver[2] - online_ver[2]
-
- publish = (
- (major_diff == 0 and minor_diff == 0 and 0 < patch_diff <= 2)
- or (major_diff == 0 and minor_diff == 1 and local_ver[2] == 0)
- or (major_diff == 1 and local_ver[1] == 0 and local_ver[2] == 0)
- ) # should publish an update
- else:
- publish = True # publish as this is likely a first release
-
- return local_version, online_version, publish
-
-
- def ultralytics_actions_info():
- """Print Ultralytics Actions information."""
- info = {
- "github.event_name": GITHUB_EVENT_NAME,
- "github.event.action": EVENT_DATA.get("action"),
- "github.repository": GITHUB_REPOSITORY,
- "github.event.pull_request.number": PR.get("number"),
- "github.event.pull_request.head.repo.full_name": PR.get("head", {}).get("repo", {}).get("full_name"),
- "github.actor": os.environ.get("GITHUB_ACTOR"),
- "github.event.pull_request.head.ref": PR.get("head", {}).get("ref"),
- "github.ref": os.environ.get("GITHUB_REF"),
- "github.head_ref": os.environ.get("GITHUB_HEAD_REF"),
- "github.base_ref": os.environ.get("GITHUB_BASE_REF"),
- "github.base_sha": PR.get("base", {}).get("sha"),
- }
-
- if GITHUB_EVENT_NAME == "discussion":
- info.update(
- {
- "github.event.discussion.node_id": DISCUSSION.get("node_id"),
- "github.event.discussion.number": DISCUSSION.get("number"),
- }
- )
-
- # Print information
- max_key_length = max(len(key) for key in info)
- header = f"Ultralytics Actions {__version__} Information " + "-" * 40
- print(header)
- for key, value in info.items():
- print(f"{key:<{max_key_length + 5}}{value}")
- print("-" * len(header)) # footer