ultralytics-actions 0.0.6__tar.gz → 0.0.8__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (22) hide show
  1. {ultralytics_actions-0.0.6/ultralytics_actions.egg-info → ultralytics_actions-0.0.8}/PKG-INFO +1 -1
  2. {ultralytics_actions-0.0.6 → ultralytics_actions-0.0.8}/actions/__init__.py +1 -1
  3. {ultralytics_actions-0.0.6 → ultralytics_actions-0.0.8}/actions/utils/__init__.py +2 -0
  4. ultralytics_actions-0.0.8/actions/utils/common_utils.py +92 -0
  5. {ultralytics_actions-0.0.6 → ultralytics_actions-0.0.8}/actions/utils/github_utils.py +43 -1
  6. ultralytics_actions-0.0.8/actions/utils/openai_utils.py +45 -0
  7. {ultralytics_actions-0.0.6 → ultralytics_actions-0.0.8}/pyproject.toml +1 -0
  8. {ultralytics_actions-0.0.6 → ultralytics_actions-0.0.8/ultralytics_actions.egg-info}/PKG-INFO +1 -1
  9. {ultralytics_actions-0.0.6 → ultralytics_actions-0.0.8}/ultralytics_actions.egg-info/entry_points.txt +1 -0
  10. ultralytics_actions-0.0.6/actions/utils/common_utils.py +0 -8
  11. ultralytics_actions-0.0.6/actions/utils/openai_utils.py +0 -24
  12. {ultralytics_actions-0.0.6 → ultralytics_actions-0.0.8}/LICENSE +0 -0
  13. {ultralytics_actions-0.0.6 → ultralytics_actions-0.0.8}/README.md +0 -0
  14. {ultralytics_actions-0.0.6 → ultralytics_actions-0.0.8}/actions/first_interaction.py +0 -0
  15. {ultralytics_actions-0.0.6 → ultralytics_actions-0.0.8}/actions/summarize_pr.py +0 -0
  16. {ultralytics_actions-0.0.6 → ultralytics_actions-0.0.8}/actions/summarize_release.py +0 -0
  17. {ultralytics_actions-0.0.6 → ultralytics_actions-0.0.8}/actions/update_markdown_code_blocks.py +0 -0
  18. {ultralytics_actions-0.0.6 → ultralytics_actions-0.0.8}/setup.cfg +0 -0
  19. {ultralytics_actions-0.0.6 → ultralytics_actions-0.0.8}/ultralytics_actions.egg-info/SOURCES.txt +0 -0
  20. {ultralytics_actions-0.0.6 → ultralytics_actions-0.0.8}/ultralytics_actions.egg-info/dependency_links.txt +0 -0
  21. {ultralytics_actions-0.0.6 → ultralytics_actions-0.0.8}/ultralytics_actions.egg-info/requires.txt +0 -0
  22. {ultralytics_actions-0.0.6 → ultralytics_actions-0.0.8}/ultralytics_actions.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: ultralytics-actions
3
- Version: 0.0.6
3
+ Version: 0.0.8
4
4
  Summary: Ultralytics Actions for GitHub automation and PR management.
5
5
  Author-email: Glenn Jocher <glenn.jocher@ultralytics.com>
6
6
  Maintainer-email: Ultralytics <hello@ultralytics.com>
@@ -22,4 +22,4 @@
22
22
  # ├── test_summarize_pr.py
23
23
  # └── ...
24
24
 
25
- __version__ = "0.0.6"
25
+ __version__ = "0.0.8"
@@ -14,6 +14,7 @@ from .github_utils import (
14
14
  get_github_data,
15
15
  get_pr_diff,
16
16
  graphql_request,
17
+ ultralytics_actions_info,
17
18
  )
18
19
  from .openai_utils import OPENAI_API_KEY, OPENAI_MODEL, get_completion
19
20
 
@@ -34,4 +35,5 @@ __all__ = (
34
35
  "OPENAI_MODEL",
35
36
  "get_completion",
36
37
  "check_pypi_version",
38
+ "ultralytics_actions_info",
37
39
  )
@@ -0,0 +1,92 @@
1
+ # Ultralytics Actions 🚀, AGPL-3.0 license https://ultralytics.com/license
2
+
3
+ import re
4
+ import socket
5
+ import time
6
+ import urllib
7
+ from concurrent.futures import ThreadPoolExecutor
8
+
9
+
10
def remove_html_comments(body: str) -> str:
    """Strip all HTML comments (<!-- ... -->) from *body*, then trim surrounding whitespace."""
    without_comments = re.sub(r"<!--.*?-->", "", body, flags=re.DOTALL)  # DOTALL: comments may span lines
    return without_comments.strip()
13
+
14
+
15
def clean_url(url):
    """Normalize a URL string: drop wrapping quotes, trailing punctuation, and pip/git install artifacts."""
    cleaned = str(url)
    # Three passes so characters exposed by an earlier strip (e.g. '".') are also removed.
    for _ in range(3):
        cleaned = cleaned.strip('"').strip("'").rstrip(".,:;!?`\\")
        cleaned = cleaned.replace(".git@main", "").replace("git+", "")
    return cleaned
20
+
21
+
22
def is_url(url, check=True, max_attempts=3, timeout=2):
    """Check whether a string looks like a URL and (optionally) that it responds with HTTP < 400.

    Args:
        url (str): Candidate URL string.
        check (bool): When True, issue a HEAD request to verify the URL is reachable.
        max_attempts (int): Number of HEAD attempts before giving up (exponential backoff between tries).
        timeout (int): Per-request timeout in seconds.

    Returns:
        bool: True if the URL is allow-listed, or structurally valid (and reachable when check=True).
    """
    # BUGFIX: a bare `import urllib` at module level does NOT bind urllib.parse/request/error;
    # without these explicit submodule imports the AttributeError raised below would be
    # swallowed by the blanket `except Exception`, silently returning False for every URL.
    import urllib.error
    import urllib.parse
    import urllib.request

    # Substrings that short-circuit to True: local/dev hosts, placeholders, mailto, etc.
    allow_list = (
        "localhost",
        "127.0.0",
        ":5000",
        ":3000",
        ":8000",
        ":8080",
        ":6006",
        "MODEL_ID",
        "API_KEY",
        "url",
        "example",
        "mailto:",
    )
    try:
        # Check allow list
        if any(x in url for x in allow_list):
            return True

        # Check structure: require both a scheme (http/https) and a network location
        result = urllib.parse.urlparse(url)
        if not all([result.scheme, result.netloc]):
            return False

        # Check response with a lightweight HEAD request
        if check:
            for attempt in range(max_attempts):
                try:
                    req = urllib.request.Request(url, method="HEAD", headers={"User-Agent": "Chrome/120.0.0.0"})
                    with urllib.request.urlopen(req, timeout=timeout) as response:
                        return response.getcode() < 400
                except (urllib.error.URLError, socket.timeout):
                    if attempt == max_attempts - 1:  # last attempt
                        return False
                    time.sleep(2**attempt)  # exponential backoff
            return False
        return True
    except Exception:
        # Deliberate best-effort: any unexpected failure means "not a valid URL"
        return False
63
+
64
+
65
def check_links_in_string(text, verbose=True, return_bad=False):
    """Find unique URLs in *text* (Markdown and plaintext) and verify each one responds without error."""
    pattern = (
        r"\[([^\]]+)\]\(([^)]+)\)"  # Matches Markdown links [text](url)
        r"|"
        r"("  # Start capturing group for plaintext URLs
        r"(?:https?://)?"  # Optional http:// or https://
        r"(?:www\.)?"  # Optional www.
        r"[\w.-]+"  # Domain name and subdomains
        r"\.[a-zA-Z]{2,}"  # TLD
        r"(?:/[^\s\"')\]]*)?"  # Optional path
        r")"
    )
    candidates = []
    for _md_text, md_url, plain_url in re.findall(pattern, text):
        candidate = md_url if md_url else plain_url
        # Only keep URLs that carry an explicit scheme (drops bare "domain.com" matches)
        if candidate and urllib.parse.urlparse(candidate).scheme:
            candidates.append(candidate)

    unique_urls = {clean_url(u) for u in candidates}  # strip stray punctuation and deduplicate
    with ThreadPoolExecutor(max_workers=16) as pool:  # validate concurrently
        is_bad = pool.map(lambda u: not is_url(u, check=True), unique_urls)
        failures = [u for u, bad in zip(unique_urls, is_bad) if bad]

    passing = len(failures) == 0
    if verbose and not passing:
        print(f"WARNING ⚠️ errors found in URLs {failures}")

    return (passing, failures) if return_bad else passing
@@ -1,6 +1,7 @@
1
1
  # Ultralytics Actions 🚀, AGPL-3.0 license https://ultralytics.com/license
2
-
2
+ import json
3
3
  import os
4
+ from pathlib import Path
4
5
 
5
6
  import requests
6
7
 
@@ -85,3 +86,44 @@ def check_pypi_version(pyproject_toml="pyproject.toml"):
85
86
  publish = True # publish as this is likely a first release
86
87
 
87
88
  return local_version, online_version, publish
89
+
90
+
91
def ultralytics_actions_info():
    """Print a formatted summary of the current GitHub Actions event context."""
    event_data = {}
    if GITHUB_EVENT_PATH:
        path = Path(GITHUB_EVENT_PATH)
        if path.exists():  # file may be absent when run outside a real Actions job
            event_data = json.loads(path.read_text())

    pull_request = event_data.get("pull_request", {})
    head = pull_request.get("head", {})

    info = {
        "github.event_name": GITHUB_EVENT_NAME,
        "github.event.action": event_data.get("action"),
        "github.repository": REPO_NAME,
        "github.event.pull_request.number": pull_request.get("number"),
        "github.event.pull_request.head.repo.full_name": head.get("repo", {}).get("full_name"),
        "github.actor": os.environ.get("GITHUB_ACTOR"),
        "github.event.pull_request.head.ref": head.get("ref"),
        "github.ref": os.environ.get("GITHUB_REF"),
        "github.head_ref": os.environ.get("GITHUB_HEAD_REF"),
        "github.base_ref": os.environ.get("GITHUB_BASE_REF"),
    }

    # Discussion events carry extra identifiers needed by downstream GraphQL calls
    if GITHUB_EVENT_NAME == "discussion":
        discussion = event_data.get("discussion", {})
        info["github.event.discussion.node_id"] = discussion.get("node_id")
        info["github.event.discussion.number"] = discussion.get("number")

    # Print information: values aligned in one column, 5 spaces past the longest key
    width = max(len(k) for k in info) + 5
    print("Ultralytics Actions Information " + "-" * 40)  # header (72 long)
    for key, value in info.items():
        print(f"{key:<{width}}{value}")
    print("-" * 72)  # footer
@@ -0,0 +1,45 @@
1
+ # Ultralytics Actions 🚀, AGPL-3.0 license https://ultralytics.com/license
2
+
3
+ import os
4
+ import random
5
+ from typing import Dict, List
6
+
7
+ import requests
8
+
9
+ from actions.utils.common_utils import check_links_in_string
10
+
11
+ OPENAI_MODEL = os.getenv("OPENAI_MODEL", "gpt-4o")
12
+ OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
13
+
14
+
15
def get_completion(
    messages: List[Dict[str, str]],
    check_links: bool = True,
    remove: List[str] = (" @giscus[bot]",),  # strings to remove from response
) -> str:
    """Generate a chat completion via the OpenAI API, retrying when the response contains broken links.

    Args:
        messages: Chat messages to send. NOTE: this list is mutated in place (a "no links"
            instruction is appended) if link checks still fail after the retry budget is spent.
        check_links: When True, validate URLs in the response and retry with a new seed on failures.
        remove: Substrings stripped from the raw response before link checking.

    Returns:
        str: The completion content (link-free on the final fallback attempt).

    Raises:
        AssertionError: If OPENAI_API_KEY is not set.
        requests.HTTPError: If the API responds with a non-2xx status.
    """
    assert OPENAI_API_KEY, "OpenAI API key is required."
    url = "https://api.openai.com/v1/chat/completions"
    headers = {"Authorization": f"Bearer {OPENAI_API_KEY}", "Content-Type": "application/json"}

    content = ""
    max_retries = 2
    for attempt in range(max_retries + 2):  # attempt = [0, 1, 2, 3], 2 random retries before asking for no links
        # Random seed so a retry can produce a different (hopefully link-free) response
        data = {"model": OPENAI_MODEL, "messages": messages, "seed": random.randint(1, 1000000)}

        # BUGFIX: add a timeout — without one, a hung API connection stalls the CI job indefinitely
        r = requests.post(url, headers=headers, json=data, timeout=60)
        r.raise_for_status()
        content = r.json()["choices"][0]["message"]["content"].strip()
        for x in remove:
            content = content.replace(x, "")
        if not check_links or check_links_in_string(content):  # if no checks or checks are passing return response
            return content

        if attempt < max_retries:
            print(f"Attempt {attempt + 1}: Found bad URLs. Retrying with a new random seed.")
        else:
            print("Max retries reached. Updating prompt to exclude links.")
            messages.append({"role": "user", "content": "Please provide a response without any URLs or links in it."})
            check_links = False  # automatically accept the last message

    return content
@@ -87,6 +87,7 @@ ultralytics-actions-first-interaction = "actions.first_interaction:main"
87
87
  ultralytics-actions-summarize-pr = "actions.summarize_pr:main"
88
88
  ultralytics-actions-summarize-release = "actions.summarize_release:main"
89
89
  ultralytics-actions-update-markdown-code-blocks = "actions.update_markdown_code_blocks:main"
90
+ ultralytics-actions-info = "actions.utils:ultralytics_actions_info"
90
91
 
91
92
  [tool.setuptools]
92
93
  packages = { find = { where = ["."], include = ["actions", "actions.*"] } }
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: ultralytics-actions
3
- Version: 0.0.6
3
+ Version: 0.0.8
4
4
  Summary: Ultralytics Actions for GitHub automation and PR management.
5
5
  Author-email: Glenn Jocher <glenn.jocher@ultralytics.com>
6
6
  Maintainer-email: Ultralytics <hello@ultralytics.com>
@@ -1,5 +1,6 @@
1
1
  [console_scripts]
2
2
  ultralytics-actions-first-interaction = actions.first_interaction:main
3
+ ultralytics-actions-info = actions.utils:ultralytics_actions_info
3
4
  ultralytics-actions-summarize-pr = actions.summarize_pr:main
4
5
  ultralytics-actions-summarize-release = actions.summarize_release:main
5
6
  ultralytics-actions-update-markdown-code-blocks = actions.update_markdown_code_blocks:main
@@ -1,8 +0,0 @@
1
- # Ultralytics Actions 🚀, AGPL-3.0 license https://ultralytics.com/license
2
-
3
- import re
4
-
5
-
6
- def remove_html_comments(body: str) -> str:
7
- """Removes HTML comments from a string using regex pattern matching."""
8
- return re.sub(r"<!--.*?-->", "", body, flags=re.DOTALL).strip()
@@ -1,24 +0,0 @@
1
- # Ultralytics Actions 🚀, AGPL-3.0 license https://ultralytics.com/license
2
-
3
- import os
4
-
5
- import requests
6
-
7
- OPENAI_MODEL = os.getenv("OPENAI_MODEL", "gpt-4o")
8
- OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
9
-
10
-
11
- def get_completion(messages: list) -> str:
12
- """Generates a completion using OpenAI's API based on input messages."""
13
- assert OPENAI_API_KEY, "OpenAI API key is required."
14
- url = "https://api.openai.com/v1/chat/completions"
15
- headers = {"Authorization": f"Bearer {OPENAI_API_KEY}", "Content-Type": "application/json"}
16
- data = {"model": OPENAI_MODEL, "messages": messages}
17
-
18
- r = requests.post(url, headers=headers, json=data)
19
- r.raise_for_status()
20
- content = r.json()["choices"][0]["message"]["content"].strip()
21
- remove = [" @giscus[bot]"]
22
- for x in remove:
23
- content = content.replace(x, "")
24
- return content