ultralytics-actions 0.0.30__tar.gz → 0.0.34__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ultralytics_actions-0.0.30 → ultralytics_actions-0.0.34}/PKG-INFO +1 -1
- {ultralytics_actions-0.0.30 → ultralytics_actions-0.0.34}/actions/__init__.py +1 -1
- {ultralytics_actions-0.0.30 → ultralytics_actions-0.0.34}/actions/first_interaction.py +70 -73
- {ultralytics_actions-0.0.30 → ultralytics_actions-0.0.34}/actions/summarize_pr.py +72 -70
- {ultralytics_actions-0.0.30 → ultralytics_actions-0.0.34}/actions/summarize_release.py +23 -21
- ultralytics_actions-0.0.34/actions/utils/__init__.py +19 -0
- ultralytics_actions-0.0.34/actions/utils/github_utils.py +163 -0
- {ultralytics_actions-0.0.30 → ultralytics_actions-0.0.34}/actions/utils/openai_utils.py +2 -2
- {ultralytics_actions-0.0.30 → ultralytics_actions-0.0.34}/pyproject.toml +13 -0
- {ultralytics_actions-0.0.30 → ultralytics_actions-0.0.34}/ultralytics_actions.egg-info/PKG-INFO +1 -1
- ultralytics_actions-0.0.30/actions/utils/__init__.py +0 -45
- ultralytics_actions-0.0.30/actions/utils/github_utils.py +0 -157
- {ultralytics_actions-0.0.30 → ultralytics_actions-0.0.34}/LICENSE +0 -0
- {ultralytics_actions-0.0.30 → ultralytics_actions-0.0.34}/README.md +0 -0
- {ultralytics_actions-0.0.30 → ultralytics_actions-0.0.34}/actions/update_markdown_code_blocks.py +0 -0
- {ultralytics_actions-0.0.30 → ultralytics_actions-0.0.34}/actions/utils/common_utils.py +0 -0
- {ultralytics_actions-0.0.30 → ultralytics_actions-0.0.34}/setup.cfg +0 -0
- {ultralytics_actions-0.0.30 → ultralytics_actions-0.0.34}/ultralytics_actions.egg-info/SOURCES.txt +0 -0
- {ultralytics_actions-0.0.30 → ultralytics_actions-0.0.34}/ultralytics_actions.egg-info/dependency_links.txt +0 -0
- {ultralytics_actions-0.0.30 → ultralytics_actions-0.0.34}/ultralytics_actions.egg-info/entry_points.txt +0 -0
- {ultralytics_actions-0.0.30 → ultralytics_actions-0.0.34}/ultralytics_actions.egg-info/requires.txt +0 -0
- {ultralytics_actions-0.0.30 → ultralytics_actions-0.0.34}/ultralytics_actions.egg-info/top_level.txt +0 -0
{ultralytics_actions-0.0.30 → ultralytics_actions-0.0.34}/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ultralytics-actions
-Version: 0.0.30
+Version: 0.0.34
 Summary: Ultralytics Actions for GitHub automation and PR management.
 Author-email: Glenn Jocher <glenn.jocher@ultralytics.com>
 Maintainer-email: Ultralytics <hello@ultralytics.com>
{ultralytics_actions-0.0.30 → ultralytics_actions-0.0.34}/actions/first_interaction.py
RENAMED
(removed old-code lines whose text was not captured in this diff view are marked with …)
@@ -6,15 +6,9 @@ from typing import Dict, List, Tuple
 import requests

 from .utils import (
-    EVENT_DATA,
     GITHUB_API_URL,
-    …
-    GITHUB_HEADERS,
-    GITHUB_REPOSITORY,
+    Action,
     get_completion,
-    get_github_data,
-    get_pr_diff,
-    graphql_request,
     remove_html_comments,
 )

@@ -22,21 +16,23 @@ from .utils import (
 BLOCK_USER = os.getenv("BLOCK_USER", "false").lower() == "true"


-def get_event_content() -> Tuple[int, str, str, str, str, str, str]:
+def get_event_content(event) -> Tuple[int, str, str, str, str, str, str]:
     """Extracts key information from GitHub event data for issues, pull requests, or discussions."""
-    …
-    …
-    …
+    data = event.event_data
+    name = event.event_name
+    action = data["action"]  # 'opened', 'closed', 'created' (discussion), etc.
+    if name == "issues":
+        item = data["issue"]
         issue_type = "issue"
-    elif …
-        pr_number = …
-        item = …
+    elif name in ["pull_request", "pull_request_target"]:
+        pr_number = data["pull_request"]["number"]
+        item = event.get_repo_data(f"pulls/{pr_number}")
         issue_type = "pull request"
-    elif …
-        item = …
+    elif name == "discussion":
+        item = data["discussion"]
         issue_type = "discussion"
     else:
-        raise ValueError(f"Unsupported event type: {…
+        raise ValueError(f"Unsupported event type: {name}")

     number = item["number"]
     node_id = item.get("node_id") or item.get("id")
@@ -46,7 +42,7 @@ def get_event_content() -> Tuple[int, str, str, str, str, str, str]:
     return number, node_id, title, body, username, issue_type, action


-def update_issue_pr_content(number: int, node_id: str, issue_type: str):
+def update_issue_pr_content(event, number: int, node_id: str, issue_type: str):
     """Updates the title and body of an issue, pull request, or discussion with predefined content."""
     new_title = "Content Under Review"
     new_body = """This post has been flagged for review by [Ultralytics Actions](https://ultralytics.com/actions) due to possible spam, abuse, or off-topic content. For more information please see our:
@@ -68,14 +64,14 @@ mutation($discussionId: ID!, $title: String!, $body: String!) {
 }
 }
 """
-        graphql_request(mutation, variables={"discussionId": node_id, "title": new_title, "body": new_body})
+        event.graphql_request(mutation, variables={"discussionId": node_id, "title": new_title, "body": new_body})
     else:
-        url = f"{GITHUB_API_URL}/repos/{…
-        r = requests.patch(url, json={"title": new_title, "body": new_body}, headers=…
+        url = f"{GITHUB_API_URL}/repos/{event.repository}/issues/{number}"
+        r = requests.patch(url, json={"title": new_title, "body": new_body}, headers=event.headers)
     print(f"{'Successful' if r.status_code == 200 else 'Fail'} issue/PR #{number} update: {r.status_code}")


-def close_issue_pr(number: int, node_id: str, issue_type: str):
+def close_issue_pr(event, number: int, node_id: str, issue_type: str):
     """Closes the specified issue, pull request, or discussion using the GitHub API."""
     if issue_type == "discussion":
         mutation = """
@@ -87,14 +83,14 @@ mutation($discussionId: ID!) {
 }
 }
 """
-        graphql_request(mutation, variables={"discussionId": node_id})
+        event.graphql_request(mutation, variables={"discussionId": node_id})
     else:
-        url = f"{GITHUB_API_URL}/repos/{…
-        r = requests.patch(url, json={"state": "closed"}, headers=…
+        url = f"{GITHUB_API_URL}/repos/{event.repository}/issues/{number}"
+        r = requests.patch(url, json={"state": "closed"}, headers=event.headers)
     print(f"{'Successful' if r.status_code == 200 else 'Fail'} issue/PR #{number} close: {r.status_code}")


-def lock_issue_pr(number: int, node_id: str, issue_type: str):
+def lock_issue_pr(event, number: int, node_id: str, issue_type: str):
     """Locks an issue, pull request, or discussion to prevent further interactions."""
     if issue_type == "discussion":
         mutation = """
@@ -108,17 +104,17 @@ mutation($lockableId: ID!, $lockReason: LockReason) {
 }
 }
 """
-        graphql_request(mutation, variables={"lockableId": node_id, "lockReason": "OFF_TOPIC"})
+        event.graphql_request(mutation, variables={"lockableId": node_id, "lockReason": "OFF_TOPIC"})
     else:
-        url = f"{GITHUB_API_URL}/repos/{…
-        r = requests.put(url, json={"lock_reason": "off-topic"}, headers=…
+        url = f"{GITHUB_API_URL}/repos/{event.repository}/issues/{number}/lock"
+        r = requests.put(url, json={"lock_reason": "off-topic"}, headers=event.headers)
     print(f"{'Successful' if r.status_code in {200, 204} else 'Fail'} issue/PR #{number} lock: {r.status_code}")


-def block_user(username: str):
+def block_user(event, username: str):
     """Blocks a user from the organization using the GitHub API."""
-    url = f"{GITHUB_API_URL}/orgs/{…
-    r = requests.put(url, headers=…
+    url = f"{GITHUB_API_URL}/orgs/{event.repository.split('/')[0]}/blocks/{username}"
+    r = requests.put(url, headers=event.headers)
     print(f"{'Successful' if r.status_code == 204 else 'Fail'} user block for {username}: {r.status_code}")


@@ -167,7 +163,7 @@ YOUR RESPONSE (label names only):
     messages = [
         {
             "role": "system",
-            "content": "You are …
+            "content": "You are an Ultralytics AI assistant that labels GitHub issues, PRs, and discussions.",
         },
         {"role": "user", "content": prompt},
     ]
@@ -183,7 +179,7 @@ YOUR RESPONSE (label names only):
     ]


-def get_label_ids(labels: List[str]) -> List[str]:
+def get_label_ids(event, labels: List[str]) -> List[str]:
     """Retrieves GitHub label IDs for a list of label names using the GraphQL API."""
     query = """
 query($owner: String!, $name: String!) {
@@ -197,8 +193,8 @@ query($owner: String!, $name: String!) {
 }
 }
 """
-    owner, repo = …
-    result = graphql_request(query, variables={"owner": owner, "name": repo})
+    owner, repo = event.repository.split("/")
+    result = event.graphql_request(query, variables={"owner": owner, "name": repo})
     if "data" in result and "repository" in result["data"]:
         all_labels = result["data"]["repository"]["labels"]["nodes"]
         label_map = {label["name"].lower(): label["id"] for label in all_labels}
@@ -208,14 +204,14 @@ query($owner: String!, $name: String!) {
         return []


-def apply_labels(number: int, node_id: str, labels: List[str], issue_type: str):
+def apply_labels(event, number: int, node_id: str, labels: List[str], issue_type: str):
     """Applies specified labels to a GitHub issue, pull request, or discussion using the appropriate API."""
     if "Alert" in labels:
-        create_alert_label()
+        create_alert_label(event)

     if issue_type == "discussion":
         print(f"Using node_id: {node_id}")  # Debug print
-        label_ids = get_label_ids(labels)
+        label_ids = get_label_ids(event, labels)
         if not label_ids:
             print("No valid labels to apply.")
             return
@@ -231,29 +227,29 @@ mutation($labelableId: ID!, $labelIds: [ID!]!) {
 }
 }
 """
-        graphql_request(mutation, {"labelableId": node_id, "labelIds": label_ids})
+        event.graphql_request(mutation, {"labelableId": node_id, "labelIds": label_ids})
         print(f"Successfully applied labels: {', '.join(labels)}")
     else:
-        url = f"{GITHUB_API_URL}/repos/{…
-        r = requests.post(url, json={"labels": labels}, headers=…
+        url = f"{GITHUB_API_URL}/repos/{event.repository}/issues/{number}/labels"
+        r = requests.post(url, json={"labels": labels}, headers=event.headers)
         print(f"{'Successful' if r.status_code == 200 else 'Fail'} apply labels {', '.join(labels)}: {r.status_code}")


-def create_alert_label():
+def create_alert_label(event):
     """Creates the 'Alert' label in the repository if it doesn't exist, with a red color and description."""
     alert_label = {"name": "Alert", "color": "FF0000", "description": "Potential spam, abuse, or off-topic."}
-    requests.post(f"{GITHUB_API_URL}/repos/{…
+    requests.post(f"{GITHUB_API_URL}/repos/{event.repository}/labels", json=alert_label, headers=event.headers)


-def is_org_member(username: str) -> bool:
+def is_org_member(event, username: str) -> bool:
     """Checks if a user is a member of the organization using the GitHub API."""
-    org_name = …
+    org_name = event.repository.split("/")[0]
     url = f"{GITHUB_API_URL}/orgs/{org_name}/members/{username}"
-    r = requests.get(url, headers=…
+    r = requests.get(url, headers=event.headers)
     return r.status_code == 204  # 204 means the user is a member


-def add_comment(number: int, node_id: str, comment: str, issue_type: str):
+def add_comment(event, number: int, node_id: str, comment: str, issue_type: str):
     """Adds a comment to the specified issue, pull request, or discussion using the GitHub API."""
     if issue_type == "discussion":
         mutation = """
@@ -265,17 +261,17 @@ mutation($discussionId: ID!, $body: String!) {
 }
 }
 """
-        graphql_request(mutation, variables={"discussionId": node_id, "body": comment})
+        event.graphql_request(mutation, variables={"discussionId": node_id, "body": comment})
     else:
-        url = f"{GITHUB_API_URL}/repos/{…
-        r = requests.post(url, json={"body": comment}, headers=…
+        url = f"{GITHUB_API_URL}/repos/{event.repository}/issues/{number}/comments"
+        r = requests.post(url, json={"body": comment}, headers=event.headers)
     print(f"{'Successful' if r.status_code in {200, 201} else 'Fail'} issue/PR #{number} comment: {r.status_code}")


-def get_first_interaction_response(issue_type: str, title: str, body: str, username: str…
+def get_first_interaction_response(event, issue_type: str, title: str, body: str, username: str) -> str:
     """Generates a custom LLM response for GitHub issues, PRs, or discussions based on content."""
     issue_discussion_response = f"""
-👋 Hello @{username}, thank you for submitting a `{…
+👋 Hello @{username}, thank you for submitting a `{event.repository}` 🚀 {issue_type.capitalize()}. To help us address your concern efficiently, please ensure you've provided the following information:

 1. For bug reports:
   - A clear and concise description of the bug
@@ -300,10 +296,10 @@ Thank you for your contribution to improving our project!
 """

     pr_response = f"""
-👋 Hello @{username}, thank you for submitting an `{…
+👋 Hello @{username}, thank you for submitting an `{event.repository}` 🚀 PR! To ensure a seamless integration of your work, please review the following checklist:

-- ✅ **Define a Purpose**: Clearly explain the purpose of your fix or feature in your PR description, and link to any [relevant issues](https://github.com/{…
-- ✅ **Synchronize with Source**: Confirm your PR is synchronized with the `{…
+- ✅ **Define a Purpose**: Clearly explain the purpose of your fix or feature in your PR description, and link to any [relevant issues](https://github.com/{event.repository}/issues). Ensure your commit messages are clear, concise, and adhere to the project's conventions.
+- ✅ **Synchronize with Source**: Confirm your PR is synchronized with the `{event.repository}` `main` branch. If it's behind, update it by clicking the 'Update branch' button or by running `git pull` and `git merge main` locally.
 - ✅ **Ensure CI Checks Pass**: Verify all Ultralytics [Continuous Integration (CI)](https://docs.ultralytics.com/help/CI/) checks are passing. If any checks fail, please address the issues.
 - ✅ **Update Documentation**: Update the relevant [documentation](https://docs.ultralytics.com) for any new or modified features.
 - ✅ **Add Tests**: If applicable, include or update tests to cover your changes, and confirm that all tests are passing.
@@ -318,9 +314,9 @@ For more guidance, please refer to our [Contributing Guide](https://docs.ultraly
     else:
         example = os.getenv("FIRST_ISSUE_RESPONSE") or issue_discussion_response

-    org_name, repo_name = …
-    repo_url = f"https://github.com/{…
-    diff = get_pr_diff(…
+    org_name, repo_name = event.repository.split("/")
+    repo_url = f"https://github.com/{event.repository}"
+    diff = event.get_pr_diff()[:32000] if issue_type == "pull request" else ""

     prompt = f"""Generate a customized response to the new GitHub {issue_type} below:

@@ -359,39 +355,40 @@ YOUR {issue_type.upper()} RESPONSE:
     messages = [
         {
             "role": "system",
-            "content": f"You are …
+            "content": f"You are an Ultralytics AI assistant responding to GitHub {issue_type}s for {org_name}.",
         },
         {"role": "user", "content": prompt},
     ]
     return get_completion(messages)


-def main():
-    """Executes …
-    …
-    …
+def main(*args, **kwargs):
+    """Executes auto-labeling and custom response generation for new GitHub issues, PRs, and discussions."""
+    event = Action(*args, **kwargs)
+    number, node_id, title, body, username, issue_type, action = get_event_content(event)
+    available_labels = event.get_repo_data("labels")
     label_descriptions = {label["name"]: label.get("description", "") for label in available_labels}
     if issue_type == "discussion":
         current_labels = []  # For discussions, labels may need to be fetched differently or adjusted
     else:
-        current_labels = [label["name"].lower() for label in …
+        current_labels = [label["name"].lower() for label in event.get_repo_data(f"issues/{number}/labels")]
     relevant_labels = get_relevant_labels(issue_type, title, body, label_descriptions, current_labels)

     if relevant_labels:
-        apply_labels(number, node_id, relevant_labels, issue_type)
-        if "Alert" in relevant_labels and not is_org_member(username):
-            update_issue_pr_content(number, node_id, issue_type)
+        apply_labels(event, number, node_id, relevant_labels, issue_type)
+        if "Alert" in relevant_labels and not is_org_member(event, username):
+            update_issue_pr_content(event, number, node_id, issue_type)
             if issue_type != "pull request":
-                close_issue_pr(number, node_id, issue_type)
-                lock_issue_pr(number, node_id, issue_type)
+                close_issue_pr(event, number, node_id, issue_type)
+                lock_issue_pr(event, number, node_id, issue_type)
             if BLOCK_USER:
-                block_user(username=username)
+                block_user(event, username=username)
     else:
         print("No relevant labels found or applied.")

     if action in {"opened", "created"}:
-        custom_response = get_first_interaction_response(issue_type, title, body, username…
-        add_comment(number, node_id, custom_response, issue_type)
+        custom_response = get_first_interaction_response(event, issue_type, title, body, username)
+        add_comment(event, number, node_id, custom_response, issue_type)


 if __name__ == "__main__":
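Taken together, the actions/first_interaction.py changes replace module-level GITHUB_* globals with an Action event object created once in main() and passed to every helper. A minimal sketch of the new calling pattern, using a placeholder token and a hand-built issues payload (field names follow the standard GitHub issues webhook; none of these values appear in the diff):

from actions.first_interaction import get_event_content
from actions.utils import Action

# Hypothetical event payload for illustration; in CI this JSON is read from GITHUB_EVENT_PATH.
event_data = {
    "action": "opened",
    "issue": {"number": 1, "node_id": "I_abc123", "title": "Example bug", "body": "Steps to reproduce...", "user": {"login": "octocat"}},
    "repository": {"full_name": "ultralytics/ultralytics"},
}

event = Action(token="ghp_example", event_name="issues", event_data=event_data)  # placeholder token
number, node_id, title, body, username, issue_type, action = get_event_content(event)
print(number, issue_type, action)  # 1 issue opened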
{ultralytics_actions-0.0.30 → ultralytics_actions-0.0.34}/actions/summarize_pr.py
RENAMED
(removed old-code lines whose text was not captured in this diff view are marked with …)
@@ -6,12 +6,8 @@ import requests

 from .utils import (
     GITHUB_API_URL,
-    …
-    GITHUB_REPOSITORY,
-    PR,
+    Action,
     get_completion,
-    get_github_username,
-    get_pr_diff,
 )

 # Constants
@@ -20,13 +16,8 @@ SUMMARY_START = (
 )


-def generate_merge_message(…
+def generate_merge_message(pr_summary=None, pr_credit=None):
     """Generates a thank-you message for merged PR contributors."""
-    contributors_str = ", ".join(f"@{c}" for c in contributors if c != pr_author)
-    mention_str = f"@{pr_author}"
-    if contributors_str:
-        mention_str += f" and {contributors_str}"
-
     messages = [
         {
             "role": "system",
@@ -34,10 +25,10 @@ def generate_merge_message(pr_author, contributors, pr_summary=None):
         },
         {
             "role": "user",
-            "content": f"Write a friendly thank you for a merged PR by …
+            "content": f"Write a friendly thank you for a merged GitHub PR by {pr_credit}. "
             f"Context from PR:\n{pr_summary}\n\n"
-            f"Start with the exciting message that this PR is now merged, and weave in an inspiring quote "
-            f"from a …
+            f"Start with the exciting message that this PR is now merged, and weave in an inspiring but obscure quote "
+            f"from a historical figure in science, art, stoicism and philosophy. "
             f"Keep the message concise yet relevant to the specific contributions in this PR. "
             f"We want the contributors to feel their effort is appreciated and will make a difference in the world.",
         },
@@ -45,15 +36,15 @@ def generate_merge_message(pr_author, contributors, pr_summary=None):
     return get_completion(messages)


-def post_merge_message(pr_number, …
+def post_merge_message(pr_number, repository, summary, pr_credit, headers):
     """Posts thank you message on PR after merge."""
-    message = generate_merge_message(…
-    comment_url = f"{GITHUB_API_URL}/repos/{…
-    response = requests.post(comment_url, json={"body": message}, headers=…
+    message = generate_merge_message(summary, pr_credit)
+    comment_url = f"{GITHUB_API_URL}/repos/{repository}/issues/{pr_number}/comments"
+    response = requests.post(comment_url, json={"body": message}, headers=headers)
     return response.status_code == 201


-def generate_issue_comment(pr_url, pr_summary):
+def generate_issue_comment(pr_url, pr_summary, pr_credit):
     """Generates a personalized issue comment using based on the PR context."""
     messages = [
         {
@@ -62,21 +53,22 @@ def generate_issue_comment(pr_url, pr_summary):
         },
         {
             "role": "user",
-            "content": f"Write a GitHub issue comment announcing a potential fix …
+            "content": f"Write a GitHub issue comment announcing a potential fix for this issue is now merged in linked PR {pr_url} by {pr_credit}\n\n"
             f"Context from PR:\n{pr_summary}\n\n"
             f"Include:\n"
             f"1. An explanation of key changes from the PR that may resolve this issue\n"
-            f"2. …
+            f"2. Credit to the PR author and contributors\n"
+            f"3. Options for testing if PR changes have resolved this issue:\n"
             f"   - pip install git+https://github.com/ultralytics/ultralytics.git@main # test latest changes\n"
             f"   - or await next official PyPI release\n"
-            f"…
-            f"…
+            f"4. Request feedback on whether the PR changes resolve the issue\n"
+            f"5. Thank 🙏 for reporting the issue and welcome any further feedback if the issue persists\n\n",
         },
     ]
     return get_completion(messages)


-def generate_pr_summary(…
+def generate_pr_summary(repository, diff_text):
     """Generates a concise, professional summary of a PR using OpenAI's API for Ultralytics repositories."""
     if not diff_text:
         diff_text = "**ERROR: DIFF IS EMPTY, THERE ARE ZERO CODE CHANGES IN THIS PR."
@@ -89,7 +81,7 @@ def generate_pr_summary(repo_name, diff_text):
         },
         {
             "role": "user",
-            "content": f"Summarize this '{…
+            "content": f"Summarize this '{repository}' PR, focusing on major changes, their purpose, and potential impact. Keep the summary clear and concise, suitable for a broad audience. Add emojis to enliven the summary. Reply directly with a summary along these example guidelines, though feel free to adjust as appropriate:\n\n"
             f"### 🌟 Summary (single-line synopsis)\n"
             f"### 📊 Key Changes (bullet points highlighting any major changes)\n"
             f"### 🎯 Purpose & Impact (bullet points explaining any benefits and potential impact to users)\n"
@@ -102,12 +94,12 @@ def generate_pr_summary(repo_name, diff_text):
     return SUMMARY_START + reply


-def update_pr_description(…
+def update_pr_description(repository, pr_number, new_summary, headers, max_retries=2):
     """Updates PR description with new summary, retrying if description is None."""
-    pr_url = f"{GITHUB_API_URL}/repos/{…
+    pr_url = f"{GITHUB_API_URL}/repos/{repository}/pulls/{pr_number}"
     description = ""
     for i in range(max_retries + 1):
-        description = requests.get(pr_url, headers=…
+        description = requests.get(pr_url, headers=headers).json().get("body") or ""
         if description:
             break
         if i < max_retries:
@@ -124,65 +116,76 @@ def update_pr_description(repo_name, pr_number, new_summary, max_retries=2):
     updated_description = description + "\n\n" + new_summary

     # Update the PR description
-    update_response = requests.patch(pr_url, json={"body": updated_description}, headers=…
+    update_response = requests.patch(pr_url, json={"body": updated_description}, headers=headers)
     return update_response.status_code


-def label_fixed_issues(pr_number, pr_summary):
-    """Labels issues closed by …
+def label_fixed_issues(repository, pr_number, pr_summary, headers, action):
+    """Labels issues closed by PR when merged, notifies users, returns PR contributors."""
     query = """
 query($owner: String!, $repo: String!, $pr_number: Int!) {
     repository(owner: $owner, name: $repo) {
         pullRequest(number: $pr_number) {
-            closingIssuesReferences(first: 50) {
-                nodes {
-                    number
-                }
-            }
+            closingIssuesReferences(first: 50) { nodes { number } }
             url
             body
             author { login, __typename }
-            reviews(first: 50) {
-                …
-            }
-            comments(first: 50) {
-                nodes { author { login, __typename } }
-            }
+            reviews(first: 50) { nodes { author { login, __typename } } }
+            comments(first: 50) { nodes { author { login, __typename } } }
+            commits(first: 100) { nodes { commit { author { user { login } }, committer { user { login } } } } }
         }
     }
 }
 """
-    …
-    owner, repo = GITHUB_REPOSITORY.split("/")
+    owner, repo = repository.split("/")
     variables = {"owner": owner, "repo": repo, "pr_number": pr_number}
     graphql_url = "https://api.github.com/graphql"
-    response = requests.post(graphql_url, json={"query": query, "variables": variables}, headers=…
+    response = requests.post(graphql_url, json={"query": query, "variables": variables}, headers=headers)
+
     if response.status_code != 200:
         print(f"Failed to fetch linked issues. Status code: {response.status_code}")
         return [], None

     try:
         data = response.json()["data"]["repository"]["pullRequest"]
-        comments = data["reviews"]["nodes"] + data["comments"]["nodes"]
-        …
+        comments = data["reviews"]["nodes"] + data["comments"]["nodes"]
+        token_username = action.get_username()  # get GITHUB_TOKEN username
+        author = data["author"]["login"] if data["author"]["__typename"] != "Bot" else None

         # Get unique contributors from reviews and comments
         contributors = {x["author"]["login"] for x in comments if x["author"]["__typename"] != "Bot"}
-        …
+
+        # Add commit authors and committers that have GitHub accounts linked
+        for commit in data["commits"]["nodes"]:
+            commit_data = commit["commit"]
+            for user_type in ["author", "committer"]:
+                if user := commit_data[user_type].get("user"):
+                    if login := user.get("login"):
+                        contributors.add(login)
+
+        contributors.discard(author)
+        contributors.discard(token_username)
+
+        # Write credit string
+        pr_credit = ""  # i.e. "@user1 with contributions from @user2, @user3"
+        if author and author != token_username:
+            pr_credit += f"@{author}"
+        if contributors:
+            pr_credit += (" with contributions from " if pr_credit else "") + ", ".join(f"@{c}" for c in contributors)

         # Generate personalized comment
-        comment = generate_issue_comment(pr_url=data["url"], pr_summary=pr_summary)
+        comment = generate_issue_comment(pr_url=data["url"], pr_summary=pr_summary, pr_credit=pr_credit)

         # Update linked issues
         for issue in data["closingIssuesReferences"]["nodes"]:
             issue_number = issue["number"]
             # Add fixed label
-            label_url = f"{GITHUB_API_URL}/repos/{…
-            label_response = requests.post(label_url, json={"labels": ["fixed"]}, headers=…
+            label_url = f"{GITHUB_API_URL}/repos/{repository}/issues/{issue_number}/labels"
+            label_response = requests.post(label_url, json={"labels": ["fixed"]}, headers=headers)

             # Add comment
-            comment_url = f"{GITHUB_API_URL}/repos/{…
-            comment_response = requests.post(comment_url, json={"body": comment}, headers=…
+            comment_url = f"{GITHUB_API_URL}/repos/{repository}/issues/{issue_number}/comments"
+            comment_response = requests.post(comment_url, json={"body": comment}, headers=headers)

             if label_response.status_code == 200 and comment_response.status_code == 201:
                 print(f"Added 'fixed' label and comment to issue #{issue_number}")
@@ -192,50 +195,49 @@ query($owner: String!, $repo: String!, $pr_number: Int!) {
                     f"Comment status: {comment_response.status_code}"
                 )

-        return …
+        return pr_credit
     except KeyError as e:
         print(f"Error parsing GraphQL response: {e}")
         return [], None


-def remove_todos_on_merge(pr_number):
+def remove_todos_on_merge(pr_number, repository, headers):
     """Removes specified labels from PR."""
     for label in ["TODO"]:  # Can be extended with more labels in the future
-        requests.delete(
-            f"{GITHUB_API_URL}/repos/{GITHUB_REPOSITORY}/issues/{pr_number}/labels/{label}", headers=GITHUB_HEADERS
-        )
+        requests.delete(f"{GITHUB_API_URL}/repos/{repository}/issues/{pr_number}/labels/{label}", headers=headers)


-def main():
+def main(*args, **kwargs):
     """Summarize a pull request and update its description with a summary."""
-    …
+    action = Action(*args, **kwargs)
+    pr_number = action.pr["number"]
+    headers = action.headers
+    repository = action.repository

     print(f"Retrieving diff for PR {pr_number}")
-    diff = get_pr_diff(…
+    diff = action.get_pr_diff()

     # Generate PR summary
     print("Generating PR summary...")
-    summary = generate_pr_summary(…
+    summary = generate_pr_summary(repository, diff)

     # Update PR description
     print("Updating PR description...")
-    status_code = update_pr_description(…
+    status_code = update_pr_description(repository, pr_number, summary, headers)
     if status_code == 200:
         print("PR description updated successfully.")
     else:
         print(f"Failed to update PR description. Status code: {status_code}")

     # Update linked issues and post thank you message if merged
-    if …
+    if action.pr.get("merged"):
         print("PR is merged, labeling fixed issues...")
-        …
+        pr_credit = label_fixed_issues(repository, pr_number, summary, headers, action)
         print("Removing TODO label from PR...")
-        remove_todos_on_merge(pr_number)
-
-        if author and author != username:
+        remove_todos_on_merge(pr_number, repository, headers)
+        if pr_credit:
             print("Posting PR author thank you message...")
-            …
-            post_merge_message(pr_number, author, contributors, summary)
+            post_merge_message(pr_number, repository, summary, pr_credit, headers)


 if __name__ == "__main__":
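The most visible behavioral change in actions/summarize_pr.py is the pr_credit string that label_fixed_issues now returns and that both the merge message and the linked-issue comments consume. Its assembly behaves roughly like this standalone sketch (the usernames are invented sample data, not taken from the diff):

def build_pr_credit(author, contributors, token_username):
    """Illustrative mirror of the pr_credit assembly added in label_fixed_issues."""
    contributors = {c for c in contributors if c not in {author, token_username}}
    credit = ""
    if author and author != token_username:
        credit += f"@{author}"
    if contributors:
        credit += (" with contributions from " if credit else "") + ", ".join(f"@{c}" for c in sorted(contributors))
    return credit

print(build_pr_credit("user1", {"user2", "bot-account"}, "bot-account"))  # @user1 with contributions from user2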
{ultralytics_actions-0.0.30 → ultralytics_actions-0.0.34}/actions/summarize_release.py
RENAMED
(removed old-code lines whose text was not captured in this diff view are marked with …)
@@ -10,10 +10,7 @@ import requests

 from .utils import (
     GITHUB_API_URL,
-    …
-    GITHUB_HEADERS_DIFF,
-    GITHUB_REPOSITORY,
-    GITHUB_TOKEN,
+    Action,
     get_completion,
     remove_html_comments,
 )
@@ -23,17 +20,17 @@ CURRENT_TAG = os.getenv("CURRENT_TAG")
 PREVIOUS_TAG = os.getenv("PREVIOUS_TAG")


-def get_release_diff(repo_name: str, previous_tag: str, latest_tag: str) -> str:
+def get_release_diff(repo_name: str, previous_tag: str, latest_tag: str, headers: dict) -> str:
     """Retrieves the differences between two specified Git tags in a GitHub repository."""
     url = f"{GITHUB_API_URL}/repos/{repo_name}/compare/{previous_tag}...{latest_tag}"
-    r = requests.get(url, headers=…
+    r = requests.get(url, headers=headers)
     return r.text if r.status_code == 200 else f"Failed to get diff: {r.content}"


-def get_prs_between_tags(repo_name: str, previous_tag: str, latest_tag: str) -> list:
+def get_prs_between_tags(repo_name: str, previous_tag: str, latest_tag: str, headers: dict) -> list:
     """Retrieves and processes pull requests merged between two specified tags in a GitHub repository."""
     url = f"{GITHUB_API_URL}/repos/{repo_name}/compare/{previous_tag}...{latest_tag}"
-    r = requests.get(url, headers=…
+    r = requests.get(url, headers=headers)
     r.raise_for_status()

     data = r.json()
@@ -47,7 +44,7 @@ def get_prs_between_tags(repo_name: str, previous_tag: str, latest_tag: str) ->
     time.sleep(10)  # sleep 10 seconds to allow final PR summary to update on merge
     for pr_number in sorted(pr_numbers):  # earliest to latest
         pr_url = f"{GITHUB_API_URL}/repos/{repo_name}/pulls/{pr_number}"
-        pr_response = requests.get(pr_url, headers=…
+        pr_response = requests.get(pr_url, headers=headers)
         if pr_response.status_code == 200:
             pr_data = pr_response.json()
             prs.append(
@@ -67,14 +64,14 @@ def get_prs_between_tags(repo_name: str, previous_tag: str, latest_tag: str) ->
     return prs


-def get_new_contributors(repo: str, prs: list) -> set:
+def get_new_contributors(repo: str, prs: list, headers: dict) -> set:
     """Identify new contributors who made their first merged PR in the current release."""
     new_contributors = set()
     for pr in prs:
         author = pr["author"]
         # Check if this is the author's first contribution
         url = f"{GITHUB_API_URL}/search/issues?q=repo:{repo}+author:{author}+is:pr+is:merged&sort=created&order=asc"
-        r = requests.get(url, headers=…
+        r = requests.get(url, headers=headers)
         if r.status_code == 200:
             data = r.json()
             if data["total_count"] > 0:
@@ -84,7 +81,9 @@ def get_new_contributors(repo: str, prs: list) -> set:
     return new_contributors


-def generate_release_summary(…
+def generate_release_summary(
+    diff: str, prs: list, latest_tag: str, previous_tag: str, repo_name: str, headers: dict
+) -> str:
     """Generate a concise release summary with key changes, purpose, and impact for a new Ultralytics version."""
     pr_summaries = "\n\n".join(
         [f"PR #{pr['number']}: {pr['title']} by @{pr['author']}\n{pr['body'][:1000]}" for pr in prs]
@@ -100,7 +99,7 @@ def generate_release_summary(diff: str, prs: list, latest_tag: str, previous_tag
     whats_changed = "\n".join([f"* {pr['title']} by @{pr['author']} in {pr['html_url']}" for pr in prs])

     # Generate New Contributors section
-    new_contributors = get_new_contributors(repo_name, prs)
+    new_contributors = get_new_contributors(repo_name, prs, headers)
     new_contributors_section = (
         "\n## New Contributors\n"
         + "\n".join(
@@ -138,11 +137,11 @@ def generate_release_summary(diff: str, prs: list, latest_tag: str, previous_tag
     return get_completion(messages) + release_suffix


-def create_github_release(repo_name: str, tag_name: str, name: str, body: str) -> int:
+def create_github_release(repo_name: str, tag_name: str, name: str, body: str, headers: dict) -> int:
     """Creates a GitHub release with specified tag, name, and body content for the given repository."""
     url = f"{GITHUB_API_URL}/repos/{repo_name}/releases"
     data = {"tag_name": tag_name, "name": name, "body": body, "draft": False, "prerelease": False}
-    r = requests.post(url, headers=…
+    r = requests.post(url, headers=headers, json=data)
     return r.status_code


@@ -156,22 +155,24 @@ def get_previous_tag() -> str:
     return "HEAD~1"


-def main():
+def main(*args, **kwargs):
     """Automates generating and publishing a GitHub release summary from PRs and commit differences."""
-    …
+    action = Action(*args, **kwargs)
+
+    if not all([action.token, CURRENT_TAG]):
         raise ValueError("One or more required environment variables are missing.")

     previous_tag = PREVIOUS_TAG or get_previous_tag()

     # Get the diff between the tags
-    diff = get_release_diff(…
+    diff = get_release_diff(action.repository, previous_tag, CURRENT_TAG, action.headers_diff)

     # Get PRs merged between the tags
-    prs = get_prs_between_tags(…
+    prs = get_prs_between_tags(action.repository, previous_tag, CURRENT_TAG, action.headers)

     # Generate release summary
     try:
-        summary = generate_release_summary(diff, prs, CURRENT_TAG, previous_tag, …
+        summary = generate_release_summary(diff, prs, CURRENT_TAG, previous_tag, action.repository, action.headers)
     except Exception as e:
         print(f"Failed to generate summary: {str(e)}")
         summary = "Failed to generate summary."
@@ -181,7 +182,8 @@ def main():
     commit_message = subprocess.run(cmd, check=True, text=True, capture_output=True).stdout.split("\n")[0].strip()

     # Create the release on GitHub
-    …
+    msg = f"{CURRENT_TAG} - {commit_message}"
+    status_code = create_github_release(action.repository, CURRENT_TAG, msg, summary, action.headers)
     if status_code == 201:
         print(f"Successfully created release {CURRENT_TAG}")
     else:
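Because the release helpers now receive headers explicitly instead of reading GITHUB_HEADERS at import time, they can also be exercised outside a workflow run. A minimal sketch, assuming hypothetical tags and a placeholder token (none of these values come from the diff):

from actions.summarize_release import get_prs_between_tags
from actions.utils import Action

action = Action(token="ghp_example")  # event_name and event_data fall back to the environment
prs = get_prs_between_tags("ultralytics/actions", "v0.0.30", "v0.0.34", action.headers)  # hypothetical tags
for pr in prs:
    print(pr["number"], pr["title"], pr["author"])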
ultralytics_actions-0.0.34/actions/utils/__init__.py
@@ -0,0 +1,19 @@
+# Ultralytics Actions 🚀, AGPL-3.0 license https://ultralytics.com/license
+
+from .common_utils import remove_html_comments
+from .github_utils import (
+    GITHUB_API_URL,
+    Action,
+    check_pypi_version,
+    ultralytics_actions_info,
+)
+from .openai_utils import get_completion
+
+__all__ = (
+    "GITHUB_API_URL",
+    "Action",
+    "check_pypi_version",
+    "get_completion",
+    "remove_html_comments",
+    "ultralytics_actions_info",
+)
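The new actions/utils/__init__.py trims the public surface from roughly twenty names to six; code that previously imported constants such as GITHUB_REPOSITORY or helpers such as get_pr_diff now goes through an Action instance. A hedged sketch of the equivalents, assuming it runs inside a GitHub Actions job where the usual environment variables are set:

from actions.utils import GITHUB_API_URL, Action

event = Action()                        # reads GITHUB_TOKEN, GITHUB_EVENT_NAME, GITHUB_EVENT_PATH from the environment
repo = event.repository                 # replaces the old GITHUB_REPOSITORY constant
headers = event.headers                 # replaces the old GITHUB_HEADERS dict
diff = event.get_pr_diff()              # replaces the old module-level get_pr_diff(pr_number)
labels = event.get_repo_data("labels")  # replaces the old get_github_data("labels")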
ultralytics_actions-0.0.34/actions/utils/github_utils.py
@@ -0,0 +1,163 @@
+# Ultralytics Actions 🚀, AGPL-3.0 license https://ultralytics.com/license
+import json
+import os
+from pathlib import Path
+
+import requests
+
+from actions import __version__
+
+GITHUB_API_URL = "https://api.github.com"
+
+
+class Action:
+    """Handles GitHub Actions API interactions and event processing."""
+
+    def __init__(
+        self,
+        token: str = None,
+        event_name: str = None,
+        event_data: dict = None,
+    ):
+        self.token = token or os.getenv("GITHUB_TOKEN")
+        self.event_name = event_name or os.getenv("GITHUB_EVENT_NAME")
+        self.event_data = event_data or self._load_event_data(os.getenv("GITHUB_EVENT_PATH"))
+
+        self.pr = self.event_data.get("pull_request", {})
+        self.repository = self.event_data.get("repository", {}).get("full_name")
+        self.headers = {"Authorization": f"token {self.token}", "Accept": "application/vnd.github.v3+json"}
+        self.headers_diff = {"Authorization": f"token {self.token}", "Accept": "application/vnd.github.v3.diff"}
+
+    @staticmethod
+    def _load_event_data(event_path: str) -> dict:
+        """Loads GitHub event data from path if it exists."""
+        if event_path and Path(event_path).exists():
+            return json.loads(Path(event_path).read_text())
+        return {}
+
+    def get_username(self) -> str | None:
+        """Gets username associated with the GitHub token."""
+        query = "query { viewer { login } }"
+        response = requests.post(f"{GITHUB_API_URL}/graphql", json={"query": query}, headers=self.headers)
+        if response.status_code != 200:
+            print(f"Failed to fetch authenticated user. Status code: {response.status_code}")
+            return None
+        try:
+            return response.json()["data"]["viewer"]["login"]
+        except KeyError as e:
+            print(f"Error parsing authenticated user response: {e}")
+            return None
+
+    def get_pr_diff(self) -> str:
+        """Retrieves the diff content for a specified pull request."""
+        url = f"{GITHUB_API_URL}/repos/{self.repository}/pulls/{self.pr.get('number')}"
+        r = requests.get(url, headers=self.headers_diff)
+        return r.text if r.status_code == 200 else ""
+
+    def get_repo_data(self, endpoint: str) -> dict:
+        """Fetches repository data from a specified endpoint."""
+        r = requests.get(f"{GITHUB_API_URL}/repos/{self.repository}/{endpoint}", headers=self.headers)
+        r.raise_for_status()
+        return r.json()
+
+    def graphql_request(self, query: str, variables: dict = None) -> dict:
+        """Executes a GraphQL query against the GitHub API."""
+        headers = {
+            "Authorization": f"Bearer {self.token}",
+            "Content-Type": "application/json",
+            "Accept": "application/vnd.github.v4+json",
+        }
+        r = requests.post(f"{GITHUB_API_URL}/graphql", json={"query": query, "variables": variables}, headers=headers)
+        r.raise_for_status()
+        result = r.json()
+        success = "data" in result and not result.get("errors")
+        print(
+            f"{'Successful' if success else 'Failed'} discussion GraphQL request: {result.get('errors', 'No errors')}"
+        )
+        return result
+
+    def print_info(self):
+        """Print GitHub Actions information."""
+        info = {
+            "github.event_name": self.event_name,
+            "github.event.action": self.event_data.get("action"),
+            "github.repository": self.repository,
+            "github.event.pull_request.number": self.pr.get("number"),
+            "github.event.pull_request.head.repo.full_name": self.pr.get("head", {}).get("repo", {}).get("full_name"),
+            "github.actor": os.environ.get("GITHUB_ACTOR"),
+            "github.event.pull_request.head.ref": self.pr.get("head", {}).get("ref"),
+            "github.ref": os.environ.get("GITHUB_REF"),
+            "github.head_ref": os.environ.get("GITHUB_HEAD_REF"),
+            "github.base_ref": os.environ.get("GITHUB_BASE_REF"),
+            "github.base_sha": self.pr.get("base", {}).get("sha"),
+        }
+
+        if self.event_name == "discussion":
+            discussion = self.event_data.get("discussion", {})
+            info.update(
+                {
+                    "github.event.discussion.node_id": discussion.get("node_id"),
+                    "github.event.discussion.number": discussion.get("number"),
+                }
+            )
+
+        max_key_length = max(len(key) for key in info)
+        header = f"Ultralytics Actions {__version__} Information " + "-" * 40
+        print(header)
+        for key, value in info.items():
+            print(f"{key:<{max_key_length + 5}}{value}")
+        print("-" * len(header))
+
+
+def ultralytics_actions_info():
+    Action().print_info()
+
+
+def check_pypi_version(pyproject_toml="pyproject.toml"):
+    """Compares local and PyPI package versions to determine if a new version should be published."""
+    import re
+
+    import tomllib  # requires Python>=3.11
+
+    version_pattern = re.compile(r"^\d+\.\d+\.\d+$")  # e.g. 0.0.0
+
+    with open(pyproject_toml, "rb") as f:
+        pyproject = tomllib.load(f)
+
+    package_name = pyproject["project"]["name"]
+    local_version = pyproject["project"].get("version", "dynamic")
+
+    # If version is dynamic, extract it from the specified file
+    if local_version == "dynamic":
+        version_attr = pyproject["tool"]["setuptools"]["dynamic"]["version"]["attr"]
+        module_path, attr_name = version_attr.rsplit(".", 1)
+        with open(f"{module_path.replace('.', '/')}/__init__.py") as f:
+            local_version = next(line.split("=")[1].strip().strip("'\"") for line in f if line.startswith(attr_name))
+
+    print(f"Local Version: {local_version}")
+    if not bool(version_pattern.match(local_version)):
+        print("WARNING: Incorrect local version pattern")
+        return "0.0.0", "0.0.0", False
+
+    # Get online version from PyPI
+    response = requests.get(f"https://pypi.org/pypi/{package_name}/json")
+    online_version = response.json()["info"]["version"] if response.status_code == 200 else None
+    print(f"Online Version: {online_version or 'Not Found'}")
+
+    # Determine if a new version should be published
+    if online_version:
+        local_ver = tuple(map(int, local_version.split(".")))
+        online_ver = tuple(map(int, online_version.split(".")))
+        major_diff = local_ver[0] - online_ver[0]
+        minor_diff = local_ver[1] - online_ver[1]
+        patch_diff = local_ver[2] - online_ver[2]
+
+        publish = (
+            (major_diff == 0 and minor_diff == 0 and 0 < patch_diff <= 2)
+            or (major_diff == 0 and minor_diff == 1 and local_ver[2] == 0)
+            or (major_diff == 1 and local_ver[1] == 0 and local_ver[2] == 0)
+        )  # should publish an update
+    else:
+        publish = True  # publish as this is likely a first release
+
+    return local_version, online_version, publish
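check_pypi_version and ultralytics_actions_info keep their previous behavior but now ship alongside the Action class. A short usage sketch (the version values in the comment are illustrative, not real query results):

from actions.utils import check_pypi_version, ultralytics_actions_info

local, online, publish = check_pypi_version("pyproject.toml")  # parses [project] locally and queries the PyPI JSON API
print(f"local={local} online={online} publish={publish}")      # e.g. local=0.0.34 online=0.0.33 publish=True

ultralytics_actions_info()  # prints the github.* context table via Action().print_info()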
{ultralytics_actions-0.0.30 → ultralytics_actions-0.0.34}/actions/utils/openai_utils.py
RENAMED
@@ -1,7 +1,7 @@
 # Ultralytics Actions 🚀, AGPL-3.0 license https://ultralytics.com/license

 import os
-import …
+import time
 from typing import Dict, List

 import requests
@@ -25,7 +25,7 @@ def get_completion(
     content = ""
     max_retries = 2
     for attempt in range(max_retries + 2):  # attempt = [0, 1, 2, 3], 2 random retries before asking for no links
-        data = {"model": OPENAI_MODEL, "messages": messages, "seed": …
+        data = {"model": OPENAI_MODEL, "messages": messages, "seed": int(time.time() * 1000)}

         r = requests.post(url, headers=headers, json=data)
         r.raise_for_status()
{ultralytics_actions-0.0.30 → ultralytics_actions-0.0.34}/pyproject.toml
RENAMED
@@ -95,3 +95,16 @@ packages = { find = { where = ["."], include = ["actions", "actions.*"] } }

 [tool.setuptools.dynamic]
 version = { attr = "actions.__version__" }
+
+[tool.ruff]
+line-length = 120
+
+[tool.ruff.format]
+docstring-code-format = true
+
+[tool.docformatter]
+wrap-summaries = 120
+wrap-descriptions = 120
+pre-summary-newline = true
+close-quotes-on-newline = true
+in-place = true
{ultralytics_actions-0.0.30 → ultralytics_actions-0.0.34}/ultralytics_actions.egg-info/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ultralytics-actions
-Version: 0.0.30
+Version: 0.0.34
 Summary: Ultralytics Actions for GitHub automation and PR management.
 Author-email: Glenn Jocher <glenn.jocher@ultralytics.com>
 Maintainer-email: Ultralytics <hello@ultralytics.com>
|
@@ -1,45 +0,0 @@
|
|
1
|
-
# Ultralytics Actions 🚀, AGPL-3.0 license https://ultralytics.com/license
|
2
|
-
|
3
|
-
from .common_utils import remove_html_comments
|
4
|
-
from .github_utils import (
|
5
|
-
DISCUSSION,
|
6
|
-
EVENT_DATA,
|
7
|
-
GITHUB_API_URL,
|
8
|
-
GITHUB_EVENT_NAME,
|
9
|
-
GITHUB_EVENT_PATH,
|
10
|
-
GITHUB_HEADERS,
|
11
|
-
GITHUB_HEADERS_DIFF,
|
12
|
-
GITHUB_REPOSITORY,
|
13
|
-
GITHUB_TOKEN,
|
14
|
-
PR,
|
15
|
-
check_pypi_version,
|
16
|
-
get_github_data,
|
17
|
-
get_github_username,
|
18
|
-
get_pr_diff,
|
19
|
-
graphql_request,
|
20
|
-
ultralytics_actions_info,
|
21
|
-
)
|
22
|
-
from .openai_utils import OPENAI_API_KEY, OPENAI_MODEL, get_completion
|
23
|
-
|
24
|
-
__all__ = (
|
25
|
-
"remove_html_comments",
|
26
|
-
"EVENT_DATA",
|
27
|
-
"GITHUB_API_URL",
|
28
|
-
"GITHUB_HEADERS",
|
29
|
-
"GITHUB_HEADERS_DIFF",
|
30
|
-
"GITHUB_TOKEN",
|
31
|
-
"GITHUB_REPOSITORY",
|
32
|
-
"PR",
|
33
|
-
"DISCUSSION",
|
34
|
-
"GITHUB_EVENT_NAME",
|
35
|
-
"GITHUB_EVENT_PATH",
|
36
|
-
"get_github_data",
|
37
|
-
"get_pr_diff",
|
38
|
-
"graphql_request",
|
39
|
-
"OPENAI_API_KEY",
|
40
|
-
"OPENAI_MODEL",
|
41
|
-
"get_completion",
|
42
|
-
"get_github_username",
|
43
|
-
"check_pypi_version",
|
44
|
-
"ultralytics_actions_info",
|
45
|
-
)
|
ultralytics_actions-0.0.30/actions/utils/github_utils.py
@@ -1,157 +0,0 @@
-# Ultralytics Actions 🚀, AGPL-3.0 license https://ultralytics.com/license
-import json
-import os
-from pathlib import Path
-
-import requests
-
-from actions import __version__
-
-GITHUB_TOKEN = os.getenv("GITHUB_TOKEN")
-GITHUB_REPOSITORY = os.getenv("GITHUB_REPOSITORY")
-GITHUB_EVENT_NAME = os.getenv("GITHUB_EVENT_NAME")
-GITHUB_EVENT_PATH = os.getenv("GITHUB_EVENT_PATH")
-GITHUB_API_URL = "https://api.github.com"
-GITHUB_HEADERS = {"Authorization": f"token {GITHUB_TOKEN}", "Accept": "application/vnd.github.v3+json"}
-GITHUB_HEADERS_DIFF = {"Authorization": f"token {GITHUB_TOKEN}", "Accept": "application/vnd.github.v3.diff"}
-
-EVENT_DATA = {}
-if GITHUB_EVENT_PATH:
-    event_path = Path(GITHUB_EVENT_PATH)
-    if event_path.exists():
-        EVENT_DATA = json.loads(event_path.read_text())
-PR = EVENT_DATA.get("pull_request", {})
-DISCUSSION = EVENT_DATA.get("discussion", {})
-
-
-def get_github_username():
-    """Gets username associated with the GitHub token in GITHUB_HEADERS."""
-    query = """
-query {
-    viewer {
-        login
-    }
-}
-"""
-    response = requests.post("https://api.github.com/graphql", json={"query": query}, headers=GITHUB_HEADERS)
-    if response.status_code != 200:
-        print(f"Failed to fetch authenticated user. Status code: {response.status_code}")
-        return None
-
-    try:
-        return response.json()["data"]["viewer"]["login"]
-    except KeyError as e:
-        print(f"Error parsing authenticated user response: {e}")
-        return None
-
-
-def get_pr_diff(pr_number: int) -> str:
-    """Retrieves the diff content for a specified pull request in a GitHub repository."""
-    url = f"{GITHUB_API_URL}/repos/{GITHUB_REPOSITORY}/pulls/{pr_number}"
-    r = requests.get(url, headers=GITHUB_HEADERS_DIFF)
-    return r.text if r.status_code == 200 else ""
-
-
-def get_github_data(endpoint: str) -> dict:
-    """Fetches GitHub repository data from a specified endpoint using the GitHub API."""
-    r = requests.get(f"{GITHUB_API_URL}/repos/{GITHUB_REPOSITORY}/{endpoint}", headers=GITHUB_HEADERS)
-    r.raise_for_status()
-    return r.json()
-
-
-def graphql_request(query: str, variables: dict = None) -> dict:
-    """Executes a GraphQL query against the GitHub API and returns the response as a dictionary."""
-    headers = {
-        "Authorization": f"Bearer {GITHUB_TOKEN}",
-        "Content-Type": "application/json",
-        "Accept": "application/vnd.github.v4+json",
-    }
-    r = requests.post(f"{GITHUB_API_URL}/graphql", json={"query": query, "variables": variables}, headers=headers)
-    r.raise_for_status()
-    result = r.json()
-    success = "data" in result and not result.get("errors")
-    print(f"{'Successful' if success else 'Fail'} discussion GraphQL request: {result.get('errors', 'No errors')}")
-    return result
-
-
-def check_pypi_version(pyproject_toml="pyproject.toml"):
-    """Compares local and PyPI package versions to determine if a new version should be published."""
-    import re
-
-    import tomllib  # requires Python>=3.11
-
-    version_pattern = re.compile(r"^\d+\.\d+\.\d+$")  # e.g. 0.0.0
-
-    with open(pyproject_toml, "rb") as f:
-        pyproject = tomllib.load(f)
-
-    package_name = pyproject["project"]["name"]
-    local_version = pyproject["project"].get("version", "dynamic")
-
-    # If version is dynamic, extract it from the specified file
-    if local_version == "dynamic":
-        version_attr = pyproject["tool"]["setuptools"]["dynamic"]["version"]["attr"]
-        module_path, attr_name = version_attr.rsplit(".", 1)
-        with open(f"{module_path.replace('.', '/')}/__init__.py") as f:
-            local_version = next(line.split("=")[1].strip().strip("'\"") for line in f if line.startswith(attr_name))
-
-    print(f"Local Version: {local_version}")
-    if not bool(version_pattern.match(local_version)):
-        print("WARNING: Incorrect local version pattern")
-        return "0.0.0", "0.0.0", False
-
-    # Get online version from PyPI
-    response = requests.get(f"https://pypi.org/pypi/{package_name}/json")
-    online_version = response.json()["info"]["version"] if response.status_code == 200 else None
-    print(f"Online Version: {online_version or 'Not Found'}")
-
-    # Determine if a new version should be published
-    if online_version:
-        local_ver = tuple(map(int, local_version.split(".")))
-        online_ver = tuple(map(int, online_version.split(".")))
-        major_diff = local_ver[0] - online_ver[0]
-        minor_diff = local_ver[1] - online_ver[1]
-        patch_diff = local_ver[2] - online_ver[2]
-
-        publish = (
-            (major_diff == 0 and minor_diff == 0 and 0 < patch_diff <= 2)
-            or (major_diff == 0 and minor_diff == 1 and local_ver[2] == 0)
-            or (major_diff == 1 and local_ver[1] == 0 and local_ver[2] == 0)
-        )  # should publish an update
-    else:
-        publish = True  # publish as this is likely a first release
-
-    return local_version, online_version, publish
-
-
-def ultralytics_actions_info():
-    """Print Ultralytics Actions information."""
-    info = {
-        "github.event_name": GITHUB_EVENT_NAME,
-        "github.event.action": EVENT_DATA.get("action"),
-        "github.repository": GITHUB_REPOSITORY,
-        "github.event.pull_request.number": PR.get("number"),
-        "github.event.pull_request.head.repo.full_name": PR.get("head", {}).get("repo", {}).get("full_name"),
-        "github.actor": os.environ.get("GITHUB_ACTOR"),
-        "github.event.pull_request.head.ref": PR.get("head", {}).get("ref"),
-        "github.ref": os.environ.get("GITHUB_REF"),
-        "github.head_ref": os.environ.get("GITHUB_HEAD_REF"),
-        "github.base_ref": os.environ.get("GITHUB_BASE_REF"),
-        "github.base_sha": PR.get("base", {}).get("sha"),
-    }
-
-    if GITHUB_EVENT_NAME == "discussion":
-        info.update(
-            {
-                "github.event.discussion.node_id": DISCUSSION.get("node_id"),
-                "github.event.discussion.number": DISCUSSION.get("number"),
-            }
-        )
-
-    # Print information
-    max_key_length = max(len(key) for key in info)
-    header = f"Ultralytics Actions {__version__} Information " + "-" * 40
-    print(header)
-    for key, value in info.items():
-        print(f"{key:<{max_key_length + 5}}{value}")
-    print("-" * len(header))  # footer
{ultralytics_actions-0.0.30 → ultralytics_actions-0.0.34}/LICENSE
RENAMED
File without changes
{ultralytics_actions-0.0.30 → ultralytics_actions-0.0.34}/README.md
RENAMED
File without changes
{ultralytics_actions-0.0.30 → ultralytics_actions-0.0.34}/actions/update_markdown_code_blocks.py
RENAMED
File without changes
{ultralytics_actions-0.0.30 → ultralytics_actions-0.0.34}/actions/utils/common_utils.py
RENAMED
File without changes
{ultralytics_actions-0.0.30 → ultralytics_actions-0.0.34}/setup.cfg
RENAMED
File without changes
{ultralytics_actions-0.0.30 → ultralytics_actions-0.0.34}/ultralytics_actions.egg-info/SOURCES.txt
RENAMED
File without changes
{ultralytics_actions-0.0.30 → ultralytics_actions-0.0.34}/ultralytics_actions.egg-info/dependency_links.txt
RENAMED
File without changes
{ultralytics_actions-0.0.30 → ultralytics_actions-0.0.34}/ultralytics_actions.egg-info/entry_points.txt
RENAMED
File without changes
{ultralytics_actions-0.0.30 → ultralytics_actions-0.0.34}/ultralytics_actions.egg-info/requires.txt
RENAMED
File without changes
{ultralytics_actions-0.0.30 → ultralytics_actions-0.0.34}/ultralytics_actions.egg-info/top_level.txt
RENAMED
File without changes