ultralytics-actions 0.0.5__tar.gz → 0.0.7__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ultralytics_actions-0.0.5/ultralytics_actions.egg-info → ultralytics_actions-0.0.7}/PKG-INFO +3 -1
- {ultralytics_actions-0.0.5 → ultralytics_actions-0.0.7}/README.md +2 -0
- {ultralytics_actions-0.0.5 → ultralytics_actions-0.0.7}/actions/__init__.py +1 -7
- {ultralytics_actions-0.0.5 → ultralytics_actions-0.0.7}/actions/first_interaction.py +12 -12
- {ultralytics_actions-0.0.5 → ultralytics_actions-0.0.7}/actions/summarize_pr.py +3 -3
- {ultralytics_actions-0.0.5 → ultralytics_actions-0.0.7}/actions/summarize_release.py +6 -6
- {ultralytics_actions-0.0.5 → ultralytics_actions-0.0.7}/actions/update_markdown_code_blocks.py +8 -8
- {ultralytics_actions-0.0.5 → ultralytics_actions-0.0.7}/actions/utils/__init__.py +2 -0
- ultralytics_actions-0.0.7/actions/utils/common_utils.py +92 -0
- ultralytics_actions-0.0.7/actions/utils/github_utils.py +87 -0
- ultralytics_actions-0.0.7/actions/utils/openai_utils.py +45 -0
- {ultralytics_actions-0.0.5 → ultralytics_actions-0.0.7/ultralytics_actions.egg-info}/PKG-INFO +3 -1
- ultralytics_actions-0.0.5/actions/utils/common_utils.py +0 -8
- ultralytics_actions-0.0.5/actions/utils/github_utils.py +0 -44
- ultralytics_actions-0.0.5/actions/utils/openai_utils.py +0 -24
- {ultralytics_actions-0.0.5 → ultralytics_actions-0.0.7}/LICENSE +0 -0
- {ultralytics_actions-0.0.5 → ultralytics_actions-0.0.7}/pyproject.toml +0 -0
- {ultralytics_actions-0.0.5 → ultralytics_actions-0.0.7}/setup.cfg +0 -0
- {ultralytics_actions-0.0.5 → ultralytics_actions-0.0.7}/ultralytics_actions.egg-info/SOURCES.txt +0 -0
- {ultralytics_actions-0.0.5 → ultralytics_actions-0.0.7}/ultralytics_actions.egg-info/dependency_links.txt +0 -0
- {ultralytics_actions-0.0.5 → ultralytics_actions-0.0.7}/ultralytics_actions.egg-info/entry_points.txt +0 -0
- {ultralytics_actions-0.0.5 → ultralytics_actions-0.0.7}/ultralytics_actions.egg-info/requires.txt +0 -0
- {ultralytics_actions-0.0.5 → ultralytics_actions-0.0.7}/ultralytics_actions.egg-info/top_level.txt +0 -0
{ultralytics_actions-0.0.5/ultralytics_actions.egg-info → ultralytics_actions-0.0.7}/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ultralytics-actions
-Version: 0.0.5
+Version: 0.0.7
 Summary: Ultralytics Actions for GitHub automation and PR management.
 Author-email: Glenn Jocher <glenn.jocher@ultralytics.com>
 Maintainer-email: Ultralytics <hello@ultralytics.com>
@@ -42,6 +42,8 @@ Welcome to the [Ultralytics Actions](https://github.com/ultralytics/actions) rep
 
 [](https://github.com/marketplace/actions/ultralytics-actions) [](https://github.com/ultralytics/actions/actions/workflows/format.yml) <a href="https://discord.com/invite/ultralytics"><img alt="Discord" src="https://img.shields.io/discord/1089800235347353640?logo=discord&logoColor=white&label=Discord&color=blue"></a> <a href="https://community.ultralytics.com/"><img alt="Ultralytics Forums" src="https://img.shields.io/discourse/users?server=https%3A%2F%2Fcommunity.ultralytics.com&logo=discourse&label=Forums&color=blue"></a> <a href="https://reddit.com/r/ultralytics"><img alt="Ultralytics Reddit" src="https://img.shields.io/reddit/subreddit-subscribers/ultralytics?style=flat&logo=reddit&logoColor=white&label=Reddit&color=blue"></a>
 
+[](https://badge.fury.io/py/ultralytics-actions) [](https://pepy.tech/project/ultralytics-actions)
+
 ## 📄 Actions Description
 
 Ultralytics Actions automatically applies formats, updates, and enhancements:
{ultralytics_actions-0.0.5 → ultralytics_actions-0.0.7}/README.md
RENAMED
@@ -6,6 +6,8 @@ Welcome to the [Ultralytics Actions](https://github.com/ultralytics/actions) rep
 
 [](https://github.com/marketplace/actions/ultralytics-actions) [](https://github.com/ultralytics/actions/actions/workflows/format.yml) <a href="https://discord.com/invite/ultralytics"><img alt="Discord" src="https://img.shields.io/discord/1089800235347353640?logo=discord&logoColor=white&label=Discord&color=blue"></a> <a href="https://community.ultralytics.com/"><img alt="Ultralytics Forums" src="https://img.shields.io/discourse/users?server=https%3A%2F%2Fcommunity.ultralytics.com&logo=discourse&label=Forums&color=blue"></a> <a href="https://reddit.com/r/ultralytics"><img alt="Ultralytics Reddit" src="https://img.shields.io/reddit/subreddit-subscribers/ultralytics?style=flat&logo=reddit&logoColor=white&label=Reddit&color=blue"></a>
 
+[](https://badge.fury.io/py/ultralytics-actions) [](https://pepy.tech/project/ultralytics-actions)
+
 ## 📄 Actions Description
 
 Ultralytics Actions automatically applies formats, updates, and enhancements:
{ultralytics_actions-0.0.5 → ultralytics_actions-0.0.7}/actions/__init__.py
RENAMED
@@ -22,10 +22,4 @@
 # ├── test_summarize_pr.py
 # └── ...
 
-
-from .summarize_pr import main as summarize_pr_main
-from .summarize_release import main as summarize_release_main
-from .update_markdown_code_blocks import main as update_markdown_code_blocks_main
-
-__all__ = ["first_interaction_main", "summarize_pr_main", "summarize_release_main", "update_markdown_code_blocks_main"]
-__version__ = "0.0.5"
+__version__ = "0.0.7"
{ultralytics_actions-0.0.5 → ultralytics_actions-0.0.7}/actions/first_interaction.py
RENAMED
@@ -24,7 +24,7 @@ BLOCK_USER = os.getenv("BLOCK_USER", "false").lower() == "true"
 
 
 def get_event_content() -> Tuple[int, str, str, str, str, str, str]:
-    """Extracts
+    """Extracts key information from GitHub event data for issues, pull requests, or discussions."""
     with open(GITHUB_EVENT_PATH) as f:
         data = json.load(f)
     action = data["action"]  # 'opened', 'closed', 'created' (discussion), etc.
@@ -50,7 +50,7 @@ def get_event_content() -> Tuple[int, str, str, str, str, str, str]:
 
 
 def update_issue_pr_content(number: int, node_id: str, issue_type: str):
-    """Updates the title and body of
+    """Updates the title and body of an issue, pull request, or discussion with predefined content."""
     new_title = "Content Under Review"
     new_body = """This post has been flagged for review by [Ultralytics Actions](https://ultralytics.com/actions) due to possible spam, abuse, or off-topic content. For more information please see our:
 
@@ -79,7 +79,7 @@ mutation($discussionId: ID!, $title: String!, $body: String!) {
 
 
 def close_issue_pr(number: int, node_id: str, issue_type: str):
-    """Closes the issue, pull request, or discussion."""
+    """Closes the specified issue, pull request, or discussion using the GitHub API."""
     if issue_type == "discussion":
         mutation = """
 mutation($discussionId: ID!) {
@@ -98,7 +98,7 @@ mutation($discussionId: ID!) {
 
 
 def lock_issue_pr(number: int, node_id: str, issue_type: str):
-    """Locks
+    """Locks an issue, pull request, or discussion to prevent further interactions."""
     if issue_type == "discussion":
         mutation = """
 mutation($lockableId: ID!, $lockReason: LockReason) {
@@ -119,7 +119,7 @@ mutation($lockableId: ID!, $lockReason: LockReason) {
 
 
 def block_user(username: str):
-    """Blocks a user from the organization."""
+    """Blocks a user from the organization using the GitHub API."""
     url = f"{GITHUB_API_URL}/orgs/{REPO_NAME.split('/')[0]}/blocks/{username}"
     r = requests.put(url, headers=GITHUB_HEADERS)
     print(f"{'Successful' if r.status_code == 204 else 'Fail'} user block for {username}: {r.status_code}")
@@ -128,7 +128,7 @@ def block_user(username: str):
 def get_relevant_labels(
     issue_type: str, title: str, body: str, available_labels: Dict, current_labels: List
 ) -> List[str]:
-    """
+    """Determines relevant labels for GitHub issues/PRs using OpenAI, considering title, body, and existing labels."""
     # Remove mutually exclusive labels like both 'bug' and 'question' or inappropriate labels like 'help wanted'
     for label in ["help wanted", "TODO"]:  # normal case
         available_labels.pop(label, None)  # remove as should only be manually added
@@ -212,7 +212,7 @@ query($owner: String!, $name: String!) {
 
 
 def apply_labels(number: int, node_id: str, labels: List[str], issue_type: str):
-    """Applies
+    """Applies specified labels to a GitHub issue, pull request, or discussion using the appropriate API."""
     if "Alert" in labels:
         create_alert_label()
 
@@ -243,13 +243,13 @@ mutation($labelableId: ID!, $labelIds: [ID!]!) {
 
 
 def create_alert_label():
-    """Creates the 'Alert' label in the repository if it doesn't exist."""
+    """Creates the 'Alert' label in the repository if it doesn't exist, with a red color and description."""
     alert_label = {"name": "Alert", "color": "FF0000", "description": "Potential spam, abuse, or off-topic."}
     requests.post(f"{GITHUB_API_URL}/repos/{REPO_NAME}/labels", json=alert_label, headers=GITHUB_HEADERS)
 
 
 def is_org_member(username: str) -> bool:
-    """Checks if a user is a member of the organization."""
+    """Checks if a user is a member of the organization using the GitHub API."""
     org_name = REPO_NAME.split("/")[0]
     url = f"{GITHUB_API_URL}/orgs/{org_name}/members/{username}"
     r = requests.get(url, headers=GITHUB_HEADERS)
@@ -257,7 +257,7 @@ def is_org_member(username: str) -> bool:
 
 
 def add_comment(number: int, node_id: str, comment: str, issue_type: str):
-    """Adds a comment to the issue, pull request, or discussion."""
+    """Adds a comment to the specified issue, pull request, or discussion using the GitHub API."""
     if issue_type == "discussion":
         mutation = """
 mutation($discussionId: ID!, $body: String!) {
@@ -276,7 +276,7 @@ mutation($discussionId: ID!, $body: String!) {
 
 
 def get_first_interaction_response(issue_type: str, title: str, body: str, username: str, number: int) -> str:
-    """Generates a custom response
+    """Generates a custom LLM response for GitHub issues, PRs, or discussions based on content."""
     issue_discussion_response = f"""
 👋 Hello @{username}, thank you for submitting a `{REPO_NAME}` 🚀 {issue_type.capitalize()}. To help us address your concern efficiently, please ensure you've provided the following information:
 
@@ -370,7 +370,7 @@ YOUR {issue_type.upper()} RESPONSE:
 
 
 def main():
-    """
+    """Executes autolabeling and custom response generation for new GitHub issues, PRs, and discussions."""
     number, node_id, title, body, username, issue_type, action = get_event_content()
     available_labels = get_github_data("labels")
     label_descriptions = {label["name"]: label.get("description", "") for label in available_labels}
{ultralytics_actions-0.0.5 → ultralytics_actions-0.0.7}/actions/summarize_pr.py
RENAMED
@@ -18,7 +18,7 @@ SUMMARY_START = (
 
 
 def generate_pr_summary(repo_name, diff_text):
-    """Generates a
+    """Generates a concise, professional summary of a PR using OpenAI's API for Ultralytics repositories."""
     if not diff_text:
         diff_text = "**ERROR: DIFF IS EMPTY, THERE ARE ZERO CODE CHANGES IN THIS PR."
     ratio = 3.3  # about 3.3 characters per token
@@ -45,7 +45,7 @@ def generate_pr_summary(repo_name, diff_text):
 
 
 def update_pr_description(repo_name, pr_number, new_summary):
-    """Updates the
+    """Updates the PR description with a new summary, replacing existing summary if present."""
     # Fetch the current PR description
     pr_url = f"{GITHUB_API_URL}/repos/{repo_name}/pulls/{pr_number}"
     pr_response = requests.get(pr_url, headers=GITHUB_HEADERS)
@@ -64,7 +64,7 @@ def update_pr_description(repo_name, pr_number, new_summary):
 
 
 def main():
-    """Summarize
+    """Summarize a pull request and update its description with an AI-generated summary."""
     diff = get_pr_diff(PR_NUMBER)
 
     # Generate PR summary
{ultralytics_actions-0.0.5 → ultralytics_actions-0.0.7}/actions/summarize_release.py
RENAMED
@@ -24,14 +24,14 @@ PREVIOUS_TAG = os.getenv("PREVIOUS_TAG")
 
 
 def get_release_diff(repo_name: str, previous_tag: str, latest_tag: str) -> str:
-    """
+    """Retrieves the differences between two specified Git tags in a GitHub repository."""
     url = f"{GITHUB_API_URL}/repos/{repo_name}/compare/{previous_tag}...{latest_tag}"
     r = requests.get(url, headers=GITHUB_HEADERS_DIFF)
     return r.text if r.status_code == 200 else f"Failed to get diff: {r.content}"
 
 
 def get_prs_between_tags(repo_name: str, previous_tag: str, latest_tag: str) -> list:
-    """
+    """Retrieves and processes pull requests merged between two specified tags in a GitHub repository."""
     url = f"{GITHUB_API_URL}/repos/{repo_name}/compare/{previous_tag}...{latest_tag}"
     r = requests.get(url, headers=GITHUB_HEADERS)
     r.raise_for_status()
@@ -68,7 +68,7 @@ def get_prs_between_tags(repo_name: str, previous_tag: str, latest_tag: str) ->
 
 
 def get_new_contributors(repo: str, prs: list) -> set:
-    """Identify
+    """Identify new contributors who made their first merged PR in the current release."""
     new_contributors = set()
     for pr in prs:
         author = pr["author"]
@@ -85,7 +85,7 @@ def get_new_contributors(repo: str, prs: list) -> set:
 
 
 def generate_release_summary(diff: str, prs: list, latest_tag: str, previous_tag: str, repo_name: str) -> str:
-    """Generate a summary for
+    """Generate a concise release summary with key changes, purpose, and impact for a new Ultralytics version."""
     pr_summaries = "\n\n".join(
         [f"PR #{pr['number']}: {pr['title']} by @{pr['author']}\n{pr['body'][:1000]}" for pr in prs]
     )
@@ -139,7 +139,7 @@ def generate_release_summary(diff: str, prs: list, latest_tag: str, previous_tag
 
 
 def create_github_release(repo_name: str, tag_name: str, name: str, body: str) -> int:
-    """
+    """Creates a GitHub release with specified tag, name, and body content for the given repository."""
     url = f"{GITHUB_API_URL}/repos/{repo_name}/releases"
     data = {"tag_name": tag_name, "name": name, "body": body, "draft": False, "prerelease": False}
     r = requests.post(url, headers=GITHUB_HEADERS, json=data)
@@ -147,7 +147,7 @@ def create_github_release(repo_name: str, tag_name: str, name: str, body: str) -
 
 
 def get_previous_tag() -> str:
-    """
+    """Retrieves the previous Git tag, excluding the current tag, using the git describe command."""
     cmd = ["git", "describe", "--tags", "--abbrev=0", "--exclude", CURRENT_TAG]
     try:
         return subprocess.run(cmd, check=True, text=True, capture_output=True).stdout.strip()
{ultralytics_actions-0.0.5 → ultralytics_actions-0.0.7}/actions/update_markdown_code_blocks.py
RENAMED
@@ -8,21 +8,21 @@ from pathlib import Path
 
 
 def extract_code_blocks(markdown_content):
-    """
+    """Extracts Python code blocks from markdown content using regex pattern matching."""
     pattern = r"^( *)```(?:python|py|\{[ ]*\.py[ ]*\.annotate[ ]*\})\n(.*?)\n\1```"
     code_block_pattern = re.compile(pattern, re.DOTALL | re.MULTILINE)
     return code_block_pattern.findall(markdown_content)
 
 
 def remove_indentation(code_block, num_spaces):
-    """Removes
+    """Removes specified leading spaces from each line in a code block to adjust indentation."""
     lines = code_block.split("\n")
     stripped_lines = [line[num_spaces:] if len(line) >= num_spaces else line for line in lines]
     return "\n".join(stripped_lines)
 
 
 def add_indentation(code_block, num_spaces):
-    """Adds
+    """Adds specified number of leading spaces to non-empty lines in a code block."""
     indent = " " * num_spaces
     lines = code_block.split("\n")
     indented_lines = [indent + line if line.strip() != "" else line for line in lines]
@@ -30,7 +30,7 @@ def add_indentation(code_block, num_spaces):
 
 
 def format_code_with_ruff(temp_dir):
-    """Formats
+    """Formats Python code files in the specified directory using ruff linter and docformatter tools."""
     try:
         # Run ruff format
         subprocess.run(
@@ -86,14 +86,14 @@ def format_code_with_ruff(temp_dir):
 
 
 def generate_temp_filename(file_path, index):
-    """Generates a unique temporary filename
+    """Generates a unique temporary filename using a hash of the file path and index."""
     unique_string = f"{file_path.parent}_{file_path.stem}_{index}"
     unique_hash = hashlib.md5(unique_string.encode()).hexdigest()
     return f"temp_{unique_hash}.py"
 
 
 def process_markdown_file(file_path, temp_dir, verbose=False):
-    """
+    """Processes a markdown file, extracting Python code blocks for formatting and updating the original file."""
     try:
         markdown_content = Path(file_path).read_text()
         code_blocks = extract_code_blocks(markdown_content)
@@ -119,7 +119,7 @@ def process_markdown_file(file_path, temp_dir, verbose=False):
 
 
 def update_markdown_file(file_path, markdown_content, temp_files):
-    """Updates
+    """Updates a markdown file with formatted Python code blocks extracted and processed externally."""
     for num_spaces, original_code_block, temp_file_path in temp_files:
         try:
             with open(temp_file_path) as temp_file:
@@ -143,7 +143,7 @@ def update_markdown_file(file_path, markdown_content, temp_files):
 
 
 def main(root_dir=Path.cwd(), verbose=False):
-    """Processes
+    """Processes markdown files, extracts and formats Python code blocks, and updates the original files."""
     root_path = Path(root_dir)
     markdown_files = list(root_path.rglob("*.md"))
     temp_dir = Path("temp_code_blocks")
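For reference, a minimal sketch of what the `extract_code_blocks()` regex above captures: the pattern and `re.compile()` call are taken from the diff, while the sample markdown string and the loop are illustrative only.

```python
import re

# Pattern from extract_code_blocks(): captures the leading indent and the block body.
pattern = r"^( *)```(?:python|py|\{[ ]*\.py[ ]*\.annotate[ ]*\})\n(.*?)\n\1```"
code_block_pattern = re.compile(pattern, re.DOTALL | re.MULTILINE)

fence = "`" * 3  # build the fence so this sketch can itself live inside a code block
sample = f"Intro text.\n\n    {fence}python\n    x = 1\n    print(x)\n    {fence}\n"

for indent, code in code_block_pattern.findall(sample):
    print(f"indent={len(indent)} spaces")  # 4
    print(code)                            # the indented block body
```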
{ultralytics_actions-0.0.5 → ultralytics_actions-0.0.7}/actions/utils/__init__.py
RENAMED
@@ -10,6 +10,7 @@ from .github_utils import (
     GITHUB_TOKEN,
     PR_NUMBER,
     REPO_NAME,
+    check_pypi_version,
     get_github_data,
     get_pr_diff,
     graphql_request,
@@ -32,4 +33,5 @@ __all__ = (
     "OPENAI_API_KEY",
     "OPENAI_MODEL",
     "get_completion",
+    "check_pypi_version",
 )
ultralytics_actions-0.0.7/actions/utils/common_utils.py
ADDED
@@ -0,0 +1,92 @@
+# Ultralytics Actions 🚀, AGPL-3.0 license https://ultralytics.com/license
+
+import re
+import socket
+import time
+import urllib
+from concurrent.futures import ThreadPoolExecutor
+
+
+def remove_html_comments(body: str) -> str:
+    """Removes HTML comments from a string using regex pattern matching."""
+    return re.sub(r"<!--.*?-->", "", body, flags=re.DOTALL).strip()
+
+
+def clean_url(url):
+    """Remove extra characters from URL strings."""
+    for _ in range(3):
+        url = str(url).strip('"').strip("'").rstrip(".,:;!?`\\").replace(".git@main", "").replace("git+", "")
+    return url
+
+
+def is_url(url, check=True, max_attempts=3, timeout=2):
+    """Check if string is URL and check if URL exists."""
+    allow_list = (
+        "localhost",
+        "127.0.0",
+        ":5000",
+        ":3000",
+        ":8000",
+        ":8080",
+        ":6006",
+        "MODEL_ID",
+        "API_KEY",
+        "url",
+        "example",
+        "mailto:",
+    )
+    try:
+        # Check allow list
+        if any(x in url for x in allow_list):
+            return True
+
+        # Check structure
+        result = urllib.parse.urlparse(url)
+        if not all([result.scheme, result.netloc]):
+            return False
+
+        # Check response
+        if check:
+            for attempt in range(max_attempts):
+                try:
+                    req = urllib.request.Request(url, method="HEAD", headers={"User-Agent": "Chrome/120.0.0.0"})
+                    with urllib.request.urlopen(req, timeout=timeout) as response:
+                        return response.getcode() < 400
+                except (urllib.error.URLError, socket.timeout):
+                    if attempt == max_attempts - 1:  # last attempt
+                        return False
+                    time.sleep(2**attempt)  # exponential backoff
+            return False
+        return True
+    except Exception:
+        return False
+
+
+def check_links_in_string(text, verbose=True, return_bad=False):
+    """Process a given text, find unique URLs within it, and check for any 404 errors."""
+    pattern = (
+        r"\[([^\]]+)\]\(([^)]+)\)"  # Matches Markdown links [text](url)
+        r"|"
+        r"("  # Start capturing group for plaintext URLs
+        r"(?:https?://)?"  # Optional http:// or https://
+        r"(?:www\.)?"  # Optional www.
+        r"[\w.-]+"  # Domain name and subdomains
+        r"\.[a-zA-Z]{2,}"  # TLD
+        r"(?:/[^\s\"')\]]*)?"  # Optional path
+        r")"
+    )
+    all_urls = []
+    for md_text, md_url, plain_url in re.findall(pattern, text):
+        url = md_url or plain_url
+        if url and urllib.parse.urlparse(url).scheme:
+            all_urls.append(url)
+
+    urls = set(map(clean_url, all_urls))  # remove extra characters and make unique
+    with ThreadPoolExecutor(max_workers=16) as executor:  # multi-thread
+        bad_urls = [url for url, valid in zip(urls, executor.map(lambda x: not is_url(x, check=True), urls)) if valid]
+
+    passing = not bad_urls
+    if verbose and not passing:
+        print(f"WARNING ⚠️ errors found in URLs {bad_urls}")
+
+    return (passing, bad_urls) if return_bad else passing
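A minimal sketch of how the new link checker might be called: `check_links_in_string()` and its signature come from the file above, while the sample text is hypothetical, and the call performs live HEAD requests, so it assumes network access at runtime.

```python
from actions.utils.common_utils import check_links_in_string

# Hypothetical response text containing one Markdown link and one plaintext URL.
text = "See [the docs](https://docs.ultralytics.com) or visit https://github.com/ultralytics/actions."

passing, bad_urls = check_links_in_string(text, return_bad=True)
print(passing, bad_urls)  # True, [] when every unique URL resolves with a status code < 400
```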
ultralytics_actions-0.0.7/actions/utils/github_utils.py
ADDED
@@ -0,0 +1,87 @@
+# Ultralytics Actions 🚀, AGPL-3.0 license https://ultralytics.com/license
+
+import os
+
+import requests
+
+GITHUB_TOKEN = os.getenv("GITHUB_TOKEN")
+GITHUB_API_URL = "https://api.github.com"
+GITHUB_HEADERS = {"Authorization": f"token {GITHUB_TOKEN}", "Accept": "application/vnd.github.v3+json"}
+GITHUB_HEADERS_DIFF = {"Authorization": f"token {GITHUB_TOKEN}", "Accept": "application/vnd.github.v3.diff"}
+
+PR_NUMBER = os.getenv("PR_NUMBER")
+REPO_NAME = os.getenv("GITHUB_REPOSITORY")
+GITHUB_EVENT_NAME = os.getenv("GITHUB_EVENT_NAME")
+GITHUB_EVENT_PATH = os.getenv("GITHUB_EVENT_PATH")
+
+
+def get_pr_diff(pr_number: int) -> str:
+    """Retrieves the diff content for a specified pull request in a GitHub repository."""
+    url = f"{GITHUB_API_URL}/repos/{REPO_NAME}/pulls/{pr_number}"
+    r = requests.get(url, headers=GITHUB_HEADERS_DIFF)
+    return r.text if r.status_code == 200 else ""
+
+
+def get_github_data(endpoint: str) -> dict:
+    """Fetches GitHub repository data from a specified endpoint using the GitHub API."""
+    r = requests.get(f"{GITHUB_API_URL}/repos/{REPO_NAME}/{endpoint}", headers=GITHUB_HEADERS)
+    r.raise_for_status()
+    return r.json()
+
+
+def graphql_request(query: str, variables: dict = None) -> dict:
+    """Executes a GraphQL query against the GitHub API and returns the response as a dictionary."""
+    headers = {
+        "Authorization": f"Bearer {GITHUB_TOKEN}",
+        "Content-Type": "application/json",
+        "Accept": "application/vnd.github.v4+json",
+    }
+    r = requests.post(f"{GITHUB_API_URL}/graphql", json={"query": query, "variables": variables}, headers=headers)
+    r.raise_for_status()
+    result = r.json()
+    success = "data" in result and not result.get("errors")
+    print(f"{'Successful' if success else 'Fail'} discussion GraphQL request: {result.get('errors', 'No errors')}")
+    return result
+
+
+def check_pypi_version(pyproject_toml="pyproject.toml"):
+    """Compares local and PyPI package versions to determine if a new version should be published."""
+    import tomllib  # requires Python>=3.11
+
+    with open(pyproject_toml, "rb") as f:
+        pyproject = tomllib.load(f)
+
+    package_name = pyproject["project"]["name"]
+    local_version = pyproject["project"].get("version", "dynamic")
+
+    # If version is dynamic, extract it from the specified file
+    if local_version == "dynamic":
+        version_attr = pyproject["tool"]["setuptools"]["dynamic"]["version"]["attr"]
+        module_path, attr_name = version_attr.rsplit(".", 1)
+        with open(f"{module_path.replace('.', '/')}/__init__.py") as f:
+            local_version = next(line.split("=")[1].strip().strip("'\"") for line in f if line.startswith(attr_name))
+
+    print(f"Local Version: {local_version}")
+
+    # Get online version from PyPI
+    response = requests.get(f"https://pypi.org/pypi/{package_name}/json")
+    online_version = response.json()["info"]["version"] if response.status_code == 200 else None
+    print(f"Online Version: {online_version or 'Not Found'}")
+
+    # Determine if a new version should be published
+    if online_version:
+        local_ver = tuple(map(int, local_version.split(".")))
+        online_ver = tuple(map(int, online_version.split(".")))
+        major_diff = local_ver[0] - online_ver[0]
+        minor_diff = local_ver[1] - online_ver[1]
+        patch_diff = local_ver[2] - online_ver[2]
+
+        publish = (
+            (major_diff == 0 and minor_diff == 0 and 0 < patch_diff <= 2)
+            or (major_diff == 0 and minor_diff == 1 and local_ver[2] == 0)
+            or (major_diff == 1 and local_ver[1] == 0 and local_ver[2] == 0)
+        )  # should publish an update
+    else:
+        publish = True  # publish as this is likely a first release
+
+    return local_version, online_version, publish
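The new `check_pypi_version()` helper only approves a publish when the local version is a patch bump of at most 2 over PyPI, a minor bump with the patch reset to 0, or a major bump with minor and patch reset to 0 (or when the package is not on PyPI yet). A minimal sketch of how a release step might consume it; the import is from `actions.utils` as exported in the `__init__.py` hunk above, while the surrounding CI wiring and the build/upload step are assumptions.

```python
from actions.utils import check_pypi_version

# Compare pyproject.toml (or its dynamic version attr) against the version on PyPI.
local_version, online_version, publish = check_pypi_version("pyproject.toml")
print(f"Local {local_version} vs PyPI {online_version} -> publish={publish}")

if publish:
    # Hypothetical follow-up: the workflow would hand off to its build/upload step here.
    pass
```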
ultralytics_actions-0.0.7/actions/utils/openai_utils.py
ADDED
@@ -0,0 +1,45 @@
+# Ultralytics Actions 🚀, AGPL-3.0 license https://ultralytics.com/license
+
+import os
+import random
+from typing import Dict, List
+
+import requests
+
+from actions.utils.common_utils import check_links_in_string
+
+OPENAI_MODEL = os.getenv("OPENAI_MODEL", "gpt-4o")
+OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
+
+
+def get_completion(
+    messages: List[Dict[str, str]],
+    check_links: bool = True,
+    remove: List[str] = (" @giscus[bot]",),  # strings to remove from response
+) -> str:
+    """Generates a completion using OpenAI's API based on input messages."""
+    assert OPENAI_API_KEY, "OpenAI API key is required."
+    url = "https://api.openai.com/v1/chat/completions"
+    headers = {"Authorization": f"Bearer {OPENAI_API_KEY}", "Content-Type": "application/json"}
+
+    content = ""
+    max_retries = 2
+    for attempt in range(max_retries + 2):  # attempt = [0, 1, 2, 3], 2 random retries before asking for no links
+        data = {"model": OPENAI_MODEL, "messages": messages, "seed": random.randint(1, 1000000)}
+
+        r = requests.post(url, headers=headers, json=data)
+        r.raise_for_status()
+        content = r.json()["choices"][0]["message"]["content"].strip()
+        for x in remove:
+            content = content.replace(x, "")
+        if not check_links or check_links_in_string(content):  # if no checks or checks are passing return response
+            return content
+
+        if attempt < max_retries:
+            print(f"Attempt {attempt + 1}: Found bad URLs. Retrying with a new random seed.")
+        else:
+            print("Max retries reached. Updating prompt to exclude links.")
+            messages.append({"role": "user", "content": "Please provide a response without any URLs or links in it."})
+            check_links = False  # automatically accept the last message
+
+    return content
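Compared with the 0.0.5 version removed below, `get_completion()` now validates URLs in the model output via `check_links_in_string()` and retries with new random seeds before asking the model to drop links entirely. A minimal usage sketch, assuming `OPENAI_API_KEY` (and optionally `OPENAI_MODEL`) are exported in the environment before the package is imported; the message contents are illustrative only.

```python
from actions.utils import get_completion

messages = [
    {"role": "system", "content": "You are a helpful assistant for Ultralytics repositories."},
    {"role": "user", "content": "Summarize this change in one sentence."},
]

# Retries with a new seed if the response contains URLs that fail the link check.
reply = get_completion(messages, check_links=True)
print(reply)
```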
{ultralytics_actions-0.0.5 → ultralytics_actions-0.0.7/ultralytics_actions.egg-info}/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ultralytics-actions
-Version: 0.0.5
+Version: 0.0.7
 Summary: Ultralytics Actions for GitHub automation and PR management.
 Author-email: Glenn Jocher <glenn.jocher@ultralytics.com>
 Maintainer-email: Ultralytics <hello@ultralytics.com>
@@ -42,6 +42,8 @@ Welcome to the [Ultralytics Actions](https://github.com/ultralytics/actions) rep
 
 [](https://github.com/marketplace/actions/ultralytics-actions) [](https://github.com/ultralytics/actions/actions/workflows/format.yml) <a href="https://discord.com/invite/ultralytics"><img alt="Discord" src="https://img.shields.io/discord/1089800235347353640?logo=discord&logoColor=white&label=Discord&color=blue"></a> <a href="https://community.ultralytics.com/"><img alt="Ultralytics Forums" src="https://img.shields.io/discourse/users?server=https%3A%2F%2Fcommunity.ultralytics.com&logo=discourse&label=Forums&color=blue"></a> <a href="https://reddit.com/r/ultralytics"><img alt="Ultralytics Reddit" src="https://img.shields.io/reddit/subreddit-subscribers/ultralytics?style=flat&logo=reddit&logoColor=white&label=Reddit&color=blue"></a>
 
+[](https://badge.fury.io/py/ultralytics-actions) [](https://pepy.tech/project/ultralytics-actions)
+
 ## 📄 Actions Description
 
 Ultralytics Actions automatically applies formats, updates, and enhancements:
ultralytics_actions-0.0.5/actions/utils/common_utils.py
REMOVED
@@ -1,8 +0,0 @@
-# Ultralytics Actions 🚀, AGPL-3.0 license https://ultralytics.com/license
-
-import re
-
-
-def remove_html_comments(body: str) -> str:
-    """Removes HTML comments from a string using regex pattern matching."""
-    return re.sub(r"<!--.*?-->", "", body, flags=re.DOTALL).strip()
ultralytics_actions-0.0.5/actions/utils/github_utils.py
REMOVED
@@ -1,44 +0,0 @@
-# Ultralytics Actions 🚀, AGPL-3.0 license https://ultralytics.com/license
-
-import os
-
-import requests
-
-GITHUB_TOKEN = os.getenv("GITHUB_TOKEN")
-GITHUB_API_URL = "https://api.github.com"
-GITHUB_HEADERS = {"Authorization": f"token {GITHUB_TOKEN}", "Accept": "application/vnd.github.v3+json"}
-GITHUB_HEADERS_DIFF = {"Authorization": f"token {GITHUB_TOKEN}", "Accept": "application/vnd.github.v3.diff"}
-
-PR_NUMBER = os.getenv("PR_NUMBER")
-REPO_NAME = os.getenv("GITHUB_REPOSITORY")
-GITHUB_EVENT_NAME = os.getenv("GITHUB_EVENT_NAME")
-GITHUB_EVENT_PATH = os.getenv("GITHUB_EVENT_PATH")
-
-
-def get_pr_diff(pr_number: int) -> str:
-    """Retrieves the diff content for a specified pull request in a GitHub repository."""
-    url = f"{GITHUB_API_URL}/repos/{REPO_NAME}/pulls/{pr_number}"
-    r = requests.get(url, headers=GITHUB_HEADERS_DIFF)
-    return r.text if r.status_code == 200 else ""
-
-
-def get_github_data(endpoint: str) -> dict:
-    """Fetches GitHub repository data from a specified endpoint using the GitHub API."""
-    r = requests.get(f"{GITHUB_API_URL}/repos/{REPO_NAME}/{endpoint}", headers=GITHUB_HEADERS)
-    r.raise_for_status()
-    return r.json()
-
-
-def graphql_request(query: str, variables: dict = None) -> dict:
-    """Executes a GraphQL query against the GitHub API and returns the response as a dictionary."""
-    headers = {
-        "Authorization": f"Bearer {GITHUB_TOKEN}",
-        "Content-Type": "application/json",
-        "Accept": "application/vnd.github.v4+json",
-    }
-    r = requests.post(f"{GITHUB_API_URL}/graphql", json={"query": query, "variables": variables}, headers=headers)
-    r.raise_for_status()
-    result = r.json()
-    success = "data" in result and not result.get("errors")
-    print(f"{'Successful' if success else 'Fail'} discussion GraphQL request: {result.get('errors', 'No errors')}")
-    return result
ultralytics_actions-0.0.5/actions/utils/openai_utils.py
REMOVED
@@ -1,24 +0,0 @@
-# Ultralytics Actions 🚀, AGPL-3.0 license https://ultralytics.com/license
-
-import os
-
-import requests
-
-OPENAI_MODEL = os.getenv("OPENAI_MODEL", "gpt-4o")
-OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
-
-
-def get_completion(messages: list) -> str:
-    """Generates a completion using OpenAI's API based on input messages."""
-    assert OPENAI_API_KEY, "OpenAI API key is required."
-    url = "https://api.openai.com/v1/chat/completions"
-    headers = {"Authorization": f"Bearer {OPENAI_API_KEY}", "Content-Type": "application/json"}
-    data = {"model": OPENAI_MODEL, "messages": messages}
-
-    r = requests.post(url, headers=headers, json=data)
-    r.raise_for_status()
-    content = r.json()["choices"][0]["message"]["content"].strip()
-    remove = [" @giscus[bot]"]
-    for x in remove:
-        content = content.replace(x, "")
-    return content
{ultralytics_actions-0.0.5 → ultralytics_actions-0.0.7}/LICENSE
RENAMED
File without changes
{ultralytics_actions-0.0.5 → ultralytics_actions-0.0.7}/pyproject.toml
RENAMED
File without changes
{ultralytics_actions-0.0.5 → ultralytics_actions-0.0.7}/setup.cfg
RENAMED
File without changes
{ultralytics_actions-0.0.5 → ultralytics_actions-0.0.7}/ultralytics_actions.egg-info/SOURCES.txt
RENAMED
File without changes
{ultralytics_actions-0.0.5 → ultralytics_actions-0.0.7}/ultralytics_actions.egg-info/dependency_links.txt
RENAMED
File without changes
{ultralytics_actions-0.0.5 → ultralytics_actions-0.0.7}/ultralytics_actions.egg-info/entry_points.txt
RENAMED
File without changes
{ultralytics_actions-0.0.5 → ultralytics_actions-0.0.7}/ultralytics_actions.egg-info/requires.txt
RENAMED
File without changes
{ultralytics_actions-0.0.5 → ultralytics_actions-0.0.7}/ultralytics_actions.egg-info/top_level.txt
RENAMED
File without changes