ultralytics-actions 0.0.5__py3-none-any.whl → 0.0.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
actions/__init__.py CHANGED
@@ -22,10 +22,4 @@
22
22
  # ├── test_summarize_pr.py
23
23
  # └── ...
24
24
 
25
- from .first_interaction import main as first_interaction_main
26
- from .summarize_pr import main as summarize_pr_main
27
- from .summarize_release import main as summarize_release_main
28
- from .update_markdown_code_blocks import main as update_markdown_code_blocks_main
29
-
30
- __all__ = ["first_interaction_main", "summarize_pr_main", "summarize_release_main", "update_markdown_code_blocks_main"]
31
- __version__ = "0.0.5"
25
+ __version__ = "0.0.10"
@@ -1,17 +1,16 @@
1
1
  # Ultralytics Actions 🚀, AGPL-3.0 license https://ultralytics.com/license
2
2
 
3
- import json
4
3
  import os
5
4
  from typing import Dict, List, Tuple
6
5
 
7
6
  import requests
8
7
 
9
8
  from .utils import (
9
+ EVENT_DATA,
10
10
  GITHUB_API_URL,
11
11
  GITHUB_EVENT_NAME,
12
- GITHUB_EVENT_PATH,
13
12
  GITHUB_HEADERS,
14
- REPO_NAME,
13
+ GITHUB_REPOSITORY,
15
14
  get_completion,
16
15
  get_github_data,
17
16
  get_pr_diff,
@@ -24,19 +23,17 @@ BLOCK_USER = os.getenv("BLOCK_USER", "false").lower() == "true"
24
23
 
25
24
 
26
25
  def get_event_content() -> Tuple[int, str, str, str, str, str, str]:
27
- """Extracts the number, node_id, title, body, username, and issue_type."""
28
- with open(GITHUB_EVENT_PATH) as f:
29
- data = json.load(f)
30
- action = data["action"] # 'opened', 'closed', 'created' (discussion), etc.
26
+ """Extracts key information from GitHub event data for issues, pull requests, or discussions."""
27
+ action = EVENT_DATA["action"] # 'opened', 'closed', 'created' (discussion), etc.
31
28
  if GITHUB_EVENT_NAME == "issues":
32
- item = data["issue"]
29
+ item = EVENT_DATA["issue"]
33
30
  issue_type = "issue"
34
31
  elif GITHUB_EVENT_NAME in ["pull_request", "pull_request_target"]:
35
- pr_number = data["pull_request"]["number"]
32
+ pr_number = EVENT_DATA["pull_request"]["number"]
36
33
  item = get_github_data(f"pulls/{pr_number}")
37
34
  issue_type = "pull request"
38
35
  elif GITHUB_EVENT_NAME == "discussion":
39
- item = data["discussion"]
36
+ item = EVENT_DATA["discussion"]
40
37
  issue_type = "discussion"
41
38
  else:
42
39
  raise ValueError(f"Unsupported event type: {GITHUB_EVENT_NAME}")
@@ -50,7 +47,7 @@ def get_event_content() -> Tuple[int, str, str, str, str, str, str]:
50
47
 
51
48
 
52
49
  def update_issue_pr_content(number: int, node_id: str, issue_type: str):
53
- """Updates the title and body of the issue, pull request, or discussion."""
50
+ """Updates the title and body of an issue, pull request, or discussion with predefined content."""
54
51
  new_title = "Content Under Review"
55
52
  new_body = """This post has been flagged for review by [Ultralytics Actions](https://ultralytics.com/actions) due to possible spam, abuse, or off-topic content. For more information please see our:
56
53
 
@@ -73,13 +70,13 @@ mutation($discussionId: ID!, $title: String!, $body: String!) {
73
70
  """
74
71
  graphql_request(mutation, variables={"discussionId": node_id, "title": new_title, "body": new_body})
75
72
  else:
76
- url = f"{GITHUB_API_URL}/repos/{REPO_NAME}/issues/{number}"
73
+ url = f"{GITHUB_API_URL}/repos/{GITHUB_REPOSITORY}/issues/{number}"
77
74
  r = requests.patch(url, json={"title": new_title, "body": new_body}, headers=GITHUB_HEADERS)
78
75
  print(f"{'Successful' if r.status_code == 200 else 'Fail'} issue/PR #{number} update: {r.status_code}")
79
76
 
80
77
 
81
78
  def close_issue_pr(number: int, node_id: str, issue_type: str):
82
- """Closes the issue, pull request, or discussion."""
79
+ """Closes the specified issue, pull request, or discussion using the GitHub API."""
83
80
  if issue_type == "discussion":
84
81
  mutation = """
85
82
  mutation($discussionId: ID!) {
@@ -92,13 +89,13 @@ mutation($discussionId: ID!) {
92
89
  """
93
90
  graphql_request(mutation, variables={"discussionId": node_id})
94
91
  else:
95
- url = f"{GITHUB_API_URL}/repos/{REPO_NAME}/issues/{number}"
92
+ url = f"{GITHUB_API_URL}/repos/{GITHUB_REPOSITORY}/issues/{number}"
96
93
  r = requests.patch(url, json={"state": "closed"}, headers=GITHUB_HEADERS)
97
94
  print(f"{'Successful' if r.status_code == 200 else 'Fail'} issue/PR #{number} close: {r.status_code}")
98
95
 
99
96
 
100
97
  def lock_issue_pr(number: int, node_id: str, issue_type: str):
101
- """Locks the issue, pull request, or discussion."""
98
+ """Locks an issue, pull request, or discussion to prevent further interactions."""
102
99
  if issue_type == "discussion":
103
100
  mutation = """
104
101
  mutation($lockableId: ID!, $lockReason: LockReason) {
@@ -113,14 +110,14 @@ mutation($lockableId: ID!, $lockReason: LockReason) {
113
110
  """
114
111
  graphql_request(mutation, variables={"lockableId": node_id, "lockReason": "OFF_TOPIC"})
115
112
  else:
116
- url = f"{GITHUB_API_URL}/repos/{REPO_NAME}/issues/{number}/lock"
113
+ url = f"{GITHUB_API_URL}/repos/{GITHUB_REPOSITORY}/issues/{number}/lock"
117
114
  r = requests.put(url, json={"lock_reason": "off-topic"}, headers=GITHUB_HEADERS)
118
115
  print(f"{'Successful' if r.status_code in {200, 204} else 'Fail'} issue/PR #{number} lock: {r.status_code}")
119
116
 
120
117
 
121
118
  def block_user(username: str):
122
- """Blocks a user from the organization."""
123
- url = f"{GITHUB_API_URL}/orgs/{REPO_NAME.split('/')[0]}/blocks/{username}"
119
+ """Blocks a user from the organization using the GitHub API."""
120
+ url = f"{GITHUB_API_URL}/orgs/{GITHUB_REPOSITORY.split('/')[0]}/blocks/{username}"
124
121
  r = requests.put(url, headers=GITHUB_HEADERS)
125
122
  print(f"{'Successful' if r.status_code == 204 else 'Fail'} user block for {username}: {r.status_code}")
126
123
 
@@ -128,7 +125,7 @@ def block_user(username: str):
128
125
  def get_relevant_labels(
129
126
  issue_type: str, title: str, body: str, available_labels: Dict, current_labels: List
130
127
  ) -> List[str]:
131
- """Uses OpenAI to determine the most relevant labels."""
128
+ """Determines relevant labels for GitHub issues/PRs using OpenAI, considering title, body, and existing labels."""
132
129
  # Remove mutually exclusive labels like both 'bug' and 'question' or inappropriate labels like 'help wanted'
133
130
  for label in ["help wanted", "TODO"]: # normal case
134
131
  available_labels.pop(label, None) # remove as should only be manually added
@@ -200,7 +197,7 @@ query($owner: String!, $name: String!) {
200
197
  }
201
198
  }
202
199
  """
203
- owner, repo = REPO_NAME.split("/")
200
+ owner, repo = GITHUB_REPOSITORY.split("/")
204
201
  result = graphql_request(query, variables={"owner": owner, "name": repo})
205
202
  if "data" in result and "repository" in result["data"]:
206
203
  all_labels = result["data"]["repository"]["labels"]["nodes"]
@@ -212,7 +209,7 @@ query($owner: String!, $name: String!) {
212
209
 
213
210
 
214
211
  def apply_labels(number: int, node_id: str, labels: List[str], issue_type: str):
215
- """Applies the given labels to the issue, pull request, or discussion."""
212
+ """Applies specified labels to a GitHub issue, pull request, or discussion using the appropriate API."""
216
213
  if "Alert" in labels:
217
214
  create_alert_label()
218
215
 
@@ -237,27 +234,27 @@ mutation($labelableId: ID!, $labelIds: [ID!]!) {
237
234
  graphql_request(mutation, {"labelableId": node_id, "labelIds": label_ids})
238
235
  print(f"Successfully applied labels: {', '.join(labels)}")
239
236
  else:
240
- url = f"{GITHUB_API_URL}/repos/{REPO_NAME}/issues/{number}/labels"
237
+ url = f"{GITHUB_API_URL}/repos/{GITHUB_REPOSITORY}/issues/{number}/labels"
241
238
  r = requests.post(url, json={"labels": labels}, headers=GITHUB_HEADERS)
242
239
  print(f"{'Successful' if r.status_code == 200 else 'Fail'} apply labels {', '.join(labels)}: {r.status_code}")
243
240
 
244
241
 
245
242
  def create_alert_label():
246
- """Creates the 'Alert' label in the repository if it doesn't exist."""
243
+ """Creates the 'Alert' label in the repository if it doesn't exist, with a red color and description."""
247
244
  alert_label = {"name": "Alert", "color": "FF0000", "description": "Potential spam, abuse, or off-topic."}
248
- requests.post(f"{GITHUB_API_URL}/repos/{REPO_NAME}/labels", json=alert_label, headers=GITHUB_HEADERS)
245
+ requests.post(f"{GITHUB_API_URL}/repos/{GITHUB_REPOSITORY}/labels", json=alert_label, headers=GITHUB_HEADERS)
249
246
 
250
247
 
251
248
  def is_org_member(username: str) -> bool:
252
- """Checks if a user is a member of the organization."""
253
- org_name = REPO_NAME.split("/")[0]
249
+ """Checks if a user is a member of the organization using the GitHub API."""
250
+ org_name = GITHUB_REPOSITORY.split("/")[0]
254
251
  url = f"{GITHUB_API_URL}/orgs/{org_name}/members/{username}"
255
252
  r = requests.get(url, headers=GITHUB_HEADERS)
256
253
  return r.status_code == 204 # 204 means the user is a member
257
254
 
258
255
 
259
256
  def add_comment(number: int, node_id: str, comment: str, issue_type: str):
260
- """Adds a comment to the issue, pull request, or discussion."""
257
+ """Adds a comment to the specified issue, pull request, or discussion using the GitHub API."""
261
258
  if issue_type == "discussion":
262
259
  mutation = """
263
260
  mutation($discussionId: ID!, $body: String!) {
@@ -270,15 +267,15 @@ mutation($discussionId: ID!, $body: String!) {
270
267
  """
271
268
  graphql_request(mutation, variables={"discussionId": node_id, "body": comment})
272
269
  else:
273
- url = f"{GITHUB_API_URL}/repos/{REPO_NAME}/issues/{number}/comments"
270
+ url = f"{GITHUB_API_URL}/repos/{GITHUB_REPOSITORY}/issues/{number}/comments"
274
271
  r = requests.post(url, json={"body": comment}, headers=GITHUB_HEADERS)
275
272
  print(f"{'Successful' if r.status_code in {200, 201} else 'Fail'} issue/PR #{number} comment: {r.status_code}")
276
273
 
277
274
 
278
275
  def get_first_interaction_response(issue_type: str, title: str, body: str, username: str, number: int) -> str:
279
- """Generates a custom response using LLM based on the issue/PR content and instructions."""
276
+ """Generates a custom LLM response for GitHub issues, PRs, or discussions based on content."""
280
277
  issue_discussion_response = f"""
281
- 👋 Hello @{username}, thank you for submitting a `{REPO_NAME}` 🚀 {issue_type.capitalize()}. To help us address your concern efficiently, please ensure you've provided the following information:
278
+ 👋 Hello @{username}, thank you for submitting a `{GITHUB_REPOSITORY}` 🚀 {issue_type.capitalize()}. To help us address your concern efficiently, please ensure you've provided the following information:
282
279
 
283
280
  1. For bug reports:
284
281
  - A clear and concise description of the bug
@@ -303,10 +300,10 @@ Thank you for your contribution to improving our project!
303
300
  """
304
301
 
305
302
  pr_response = f"""
306
- 👋 Hello @{username}, thank you for submitting an `{REPO_NAME}` 🚀 PR! To ensure a seamless integration of your work, please review the following checklist:
303
+ 👋 Hello @{username}, thank you for submitting an `{GITHUB_REPOSITORY}` 🚀 PR! To ensure a seamless integration of your work, please review the following checklist:
307
304
 
308
- - ✅ **Define a Purpose**: Clearly explain the purpose of your fix or feature in your PR description, and link to any [relevant issues](https://github.com/{REPO_NAME}/issues). Ensure your commit messages are clear, concise, and adhere to the project's conventions.
309
- - ✅ **Synchronize with Source**: Confirm your PR is synchronized with the `{REPO_NAME}` `main` branch. If it's behind, update it by clicking the 'Update branch' button or by running `git pull` and `git merge main` locally.
305
+ - ✅ **Define a Purpose**: Clearly explain the purpose of your fix or feature in your PR description, and link to any [relevant issues](https://github.com/{GITHUB_REPOSITORY}/issues). Ensure your commit messages are clear, concise, and adhere to the project's conventions.
306
+ - ✅ **Synchronize with Source**: Confirm your PR is synchronized with the `{GITHUB_REPOSITORY}` `main` branch. If it's behind, update it by clicking the 'Update branch' button or by running `git pull` and `git merge main` locally.
310
307
  - ✅ **Ensure CI Checks Pass**: Verify all Ultralytics [Continuous Integration (CI)](https://docs.ultralytics.com/help/CI/) checks are passing. If any checks fail, please address the issues.
311
308
  - ✅ **Update Documentation**: Update the relevant [documentation](https://docs.ultralytics.com) for any new or modified features.
312
309
  - ✅ **Add Tests**: If applicable, include or update tests to cover your changes, and confirm that all tests are passing.
@@ -321,8 +318,8 @@ For more guidance, please refer to our [Contributing Guide](https://docs.ultraly
321
318
  else:
322
319
  example = os.getenv("FIRST_ISSUE_RESPONSE") or issue_discussion_response
323
320
 
324
- org_name, repo_name = REPO_NAME.split("/")
325
- repo_url = f"https://github.com/{REPO_NAME}"
321
+ org_name, repo_name = GITHUB_REPOSITORY.split("/")
322
+ repo_url = f"https://github.com/{GITHUB_REPOSITORY}"
326
323
  diff = get_pr_diff(number)[:32000] if issue_type == "pull request" else ""
327
324
 
328
325
  prompt = f"""Generate a customized response to the new GitHub {issue_type} below:
@@ -370,7 +367,7 @@ YOUR {issue_type.upper()} RESPONSE:
370
367
 
371
368
 
372
369
  def main():
373
- """Runs autolabel action and adds custom response for new issues/PRs/Discussions."""
370
+ """Executes autolabeling and custom response generation for new GitHub issues, PRs, and discussions."""
374
371
  number, node_id, title, body, username, issue_type, action = get_event_content()
375
372
  available_labels = get_github_data("labels")
376
373
  label_descriptions = {label["name"]: label.get("description", "") for label in available_labels}
actions/summarize_pr.py CHANGED
@@ -5,8 +5,8 @@ import requests
5
5
  from .utils import (
6
6
  GITHUB_API_URL,
7
7
  GITHUB_HEADERS,
8
- PR_NUMBER,
9
- REPO_NAME,
8
+ GITHUB_REPOSITORY,
9
+ PR,
10
10
  get_completion,
11
11
  get_pr_diff,
12
12
  )
@@ -18,7 +18,7 @@ SUMMARY_START = (
18
18
 
19
19
 
20
20
  def generate_pr_summary(repo_name, diff_text):
21
- """Generates a professionally written yet accessible summary of a PR using OpenAI's API."""
21
+ """Generates a concise, professional summary of a PR using OpenAI's API for Ultralytics repositories."""
22
22
  if not diff_text:
23
23
  diff_text = "**ERROR: DIFF IS EMPTY, THERE ARE ZERO CODE CHANGES IN THIS PR."
24
24
  ratio = 3.3 # about 3.3 characters per token
@@ -45,7 +45,7 @@ def generate_pr_summary(repo_name, diff_text):
45
45
 
46
46
 
47
47
  def update_pr_description(repo_name, pr_number, new_summary):
48
- """Updates the original PR description with a new summary, replacing an existing summary if found."""
48
+ """Updates the PR description with a new summary, replacing existing summary if present."""
49
49
  # Fetch the current PR description
50
50
  pr_url = f"{GITHUB_API_URL}/repos/{repo_name}/pulls/{pr_number}"
51
51
  pr_response = requests.get(pr_url, headers=GITHUB_HEADERS)
@@ -64,14 +64,19 @@ def update_pr_description(repo_name, pr_number, new_summary):
64
64
 
65
65
 
66
66
  def main():
67
- """Summarize PR."""
68
- diff = get_pr_diff(PR_NUMBER)
67
+ """Summarize a pull request and update its description with an AI-generated summary."""
68
+ pr_number = PR["number"]
69
+
70
+ print(f"Retrieving diff for PR {pr_number}")
71
+ diff = get_pr_diff(PR["number"])
69
72
 
70
73
  # Generate PR summary
71
- summary = generate_pr_summary(REPO_NAME, diff)
74
+ print("Generating PR summary...")
75
+ summary = generate_pr_summary(GITHUB_REPOSITORY, diff)
72
76
 
73
77
  # Update PR description
74
- status_code = update_pr_description(REPO_NAME, PR_NUMBER, summary)
78
+ print("Updating PR description...")
79
+ status_code = update_pr_description(GITHUB_REPOSITORY, pr_number, summary)
75
80
  if status_code == 200:
76
81
  print("PR description updated successfully.")
77
82
  else:
@@ -12,8 +12,8 @@ from .utils import (
12
12
  GITHUB_API_URL,
13
13
  GITHUB_HEADERS,
14
14
  GITHUB_HEADERS_DIFF,
15
+ GITHUB_REPOSITORY,
15
16
  GITHUB_TOKEN,
16
- REPO_NAME,
17
17
  get_completion,
18
18
  remove_html_comments,
19
19
  )
@@ -24,14 +24,14 @@ PREVIOUS_TAG = os.getenv("PREVIOUS_TAG")
24
24
 
25
25
 
26
26
  def get_release_diff(repo_name: str, previous_tag: str, latest_tag: str) -> str:
27
- """Get the diff between two tags."""
27
+ """Retrieves the differences between two specified Git tags in a GitHub repository."""
28
28
  url = f"{GITHUB_API_URL}/repos/{repo_name}/compare/{previous_tag}...{latest_tag}"
29
29
  r = requests.get(url, headers=GITHUB_HEADERS_DIFF)
30
30
  return r.text if r.status_code == 200 else f"Failed to get diff: {r.content}"
31
31
 
32
32
 
33
33
  def get_prs_between_tags(repo_name: str, previous_tag: str, latest_tag: str) -> list:
34
- """Get PRs merged between two tags using the compare API."""
34
+ """Retrieves and processes pull requests merged between two specified tags in a GitHub repository."""
35
35
  url = f"{GITHUB_API_URL}/repos/{repo_name}/compare/{previous_tag}...{latest_tag}"
36
36
  r = requests.get(url, headers=GITHUB_HEADERS)
37
37
  r.raise_for_status()
@@ -68,7 +68,7 @@ def get_prs_between_tags(repo_name: str, previous_tag: str, latest_tag: str) ->
68
68
 
69
69
 
70
70
  def get_new_contributors(repo: str, prs: list) -> set:
71
- """Identify genuinely new contributors in the current release."""
71
+ """Identify new contributors who made their first merged PR in the current release."""
72
72
  new_contributors = set()
73
73
  for pr in prs:
74
74
  author = pr["author"]
@@ -85,7 +85,7 @@ def get_new_contributors(repo: str, prs: list) -> set:
85
85
 
86
86
 
87
87
  def generate_release_summary(diff: str, prs: list, latest_tag: str, previous_tag: str, repo_name: str) -> str:
88
- """Generate a summary for the release."""
88
+ """Generate a concise release summary with key changes, purpose, and impact for a new Ultralytics version."""
89
89
  pr_summaries = "\n\n".join(
90
90
  [f"PR #{pr['number']}: {pr['title']} by @{pr['author']}\n{pr['body'][:1000]}" for pr in prs]
91
91
  )
@@ -139,7 +139,7 @@ def generate_release_summary(diff: str, prs: list, latest_tag: str, previous_tag
139
139
 
140
140
 
141
141
  def create_github_release(repo_name: str, tag_name: str, name: str, body: str) -> int:
142
- """Create a release on GitHub."""
142
+ """Creates a GitHub release with specified tag, name, and body content for the given repository."""
143
143
  url = f"{GITHUB_API_URL}/repos/{repo_name}/releases"
144
144
  data = {"tag_name": tag_name, "name": name, "body": body, "draft": False, "prerelease": False}
145
145
  r = requests.post(url, headers=GITHUB_HEADERS, json=data)
@@ -147,7 +147,7 @@ def create_github_release(repo_name: str, tag_name: str, name: str, body: str) -
147
147
 
148
148
 
149
149
  def get_previous_tag() -> str:
150
- """Get the previous tag from git tags."""
150
+ """Retrieves the previous Git tag, excluding the current tag, using the git describe command."""
151
151
  cmd = ["git", "describe", "--tags", "--abbrev=0", "--exclude", CURRENT_TAG]
152
152
  try:
153
153
  return subprocess.run(cmd, check=True, text=True, capture_output=True).stdout.strip()
@@ -164,14 +164,14 @@ def main():
164
164
  previous_tag = PREVIOUS_TAG or get_previous_tag()
165
165
 
166
166
  # Get the diff between the tags
167
- diff = get_release_diff(REPO_NAME, previous_tag, CURRENT_TAG)
167
+ diff = get_release_diff(GITHUB_REPOSITORY, previous_tag, CURRENT_TAG)
168
168
 
169
169
  # Get PRs merged between the tags
170
- prs = get_prs_between_tags(REPO_NAME, previous_tag, CURRENT_TAG)
170
+ prs = get_prs_between_tags(GITHUB_REPOSITORY, previous_tag, CURRENT_TAG)
171
171
 
172
172
  # Generate release summary
173
173
  try:
174
- summary = generate_release_summary(diff, prs, CURRENT_TAG, previous_tag, REPO_NAME)
174
+ summary = generate_release_summary(diff, prs, CURRENT_TAG, previous_tag, GITHUB_REPOSITORY)
175
175
  except Exception as e:
176
176
  print(f"Failed to generate summary: {str(e)}")
177
177
  summary = "Failed to generate summary."
@@ -181,7 +181,7 @@ def main():
181
181
  commit_message = subprocess.run(cmd, check=True, text=True, capture_output=True).stdout.split("\n")[0].strip()
182
182
 
183
183
  # Create the release on GitHub
184
- status_code = create_github_release(REPO_NAME, CURRENT_TAG, f"{CURRENT_TAG} - {commit_message}", summary)
184
+ status_code = create_github_release(GITHUB_REPOSITORY, CURRENT_TAG, f"{CURRENT_TAG} - {commit_message}", summary)
185
185
  if status_code == 201:
186
186
  print(f"Successfully created release {CURRENT_TAG}")
187
187
  else:
@@ -8,21 +8,21 @@ from pathlib import Path
8
8
 
9
9
 
10
10
  def extract_code_blocks(markdown_content):
11
- """Extract Python code blocks with ``` followed by "python", "py", or "{ .py .annotate }"."""
11
+ """Extracts Python code blocks from markdown content using regex pattern matching."""
12
12
  pattern = r"^( *)```(?:python|py|\{[ ]*\.py[ ]*\.annotate[ ]*\})\n(.*?)\n\1```"
13
13
  code_block_pattern = re.compile(pattern, re.DOTALL | re.MULTILINE)
14
14
  return code_block_pattern.findall(markdown_content)
15
15
 
16
16
 
17
17
  def remove_indentation(code_block, num_spaces):
18
- """Removes `num_spaces` leading spaces from each line in `code_block` and returns the modified string."""
18
+ """Removes specified leading spaces from each line in a code block to adjust indentation."""
19
19
  lines = code_block.split("\n")
20
20
  stripped_lines = [line[num_spaces:] if len(line) >= num_spaces else line for line in lines]
21
21
  return "\n".join(stripped_lines)
22
22
 
23
23
 
24
24
  def add_indentation(code_block, num_spaces):
25
- """Adds `num_spaces` leading spaces to each non-empty line in `code_block`."""
25
+ """Adds specified number of leading spaces to non-empty lines in a code block."""
26
26
  indent = " " * num_spaces
27
27
  lines = code_block.split("\n")
28
28
  indented_lines = [indent + line if line.strip() != "" else line for line in lines]
@@ -30,7 +30,7 @@ def add_indentation(code_block, num_spaces):
30
30
 
31
31
 
32
32
  def format_code_with_ruff(temp_dir):
33
- """Formats all Python code files in the `temp_dir` directory using the 'ruff' linter tool."""
33
+ """Formats Python code files in the specified directory using ruff linter and docformatter tools."""
34
34
  try:
35
35
  # Run ruff format
36
36
  subprocess.run(
@@ -86,14 +86,14 @@ def format_code_with_ruff(temp_dir):
86
86
 
87
87
 
88
88
  def generate_temp_filename(file_path, index):
89
- """Generates a unique temporary filename based on the file path and index."""
89
+ """Generates a unique temporary filename using a hash of the file path and index."""
90
90
  unique_string = f"{file_path.parent}_{file_path.stem}_{index}"
91
91
  unique_hash = hashlib.md5(unique_string.encode()).hexdigest()
92
92
  return f"temp_{unique_hash}.py"
93
93
 
94
94
 
95
95
  def process_markdown_file(file_path, temp_dir, verbose=False):
96
- """Reads a markdown file, extracts Python code blocks, saves them to temp files, and updates the file."""
96
+ """Processes a markdown file, extracting Python code blocks for formatting and updating the original file."""
97
97
  try:
98
98
  markdown_content = Path(file_path).read_text()
99
99
  code_blocks = extract_code_blocks(markdown_content)
@@ -119,7 +119,7 @@ def process_markdown_file(file_path, temp_dir, verbose=False):
119
119
 
120
120
 
121
121
  def update_markdown_file(file_path, markdown_content, temp_files):
122
- """Updates the markdown file with formatted code blocks."""
122
+ """Updates a markdown file with formatted Python code blocks extracted and processed externally."""
123
123
  for num_spaces, original_code_block, temp_file_path in temp_files:
124
124
  try:
125
125
  with open(temp_file_path) as temp_file:
@@ -143,7 +143,7 @@ def update_markdown_file(file_path, markdown_content, temp_files):
143
143
 
144
144
 
145
145
  def main(root_dir=Path.cwd(), verbose=False):
146
- """Processes all markdown files in a specified directory and its subdirectories."""
146
+ """Processes markdown files, extracts and formats Python code blocks, and updates the original files."""
147
147
  root_path = Path(root_dir)
148
148
  markdown_files = list(root_path.rglob("*.md"))
149
149
  temp_dir = Path("temp_code_blocks")
actions/utils/__init__.py CHANGED
@@ -2,28 +2,34 @@
2
2
 
3
3
  from .common_utils import remove_html_comments
4
4
  from .github_utils import (
5
+ DISCUSSION,
6
+ EVENT_DATA,
5
7
  GITHUB_API_URL,
6
8
  GITHUB_EVENT_NAME,
7
9
  GITHUB_EVENT_PATH,
8
10
  GITHUB_HEADERS,
9
11
  GITHUB_HEADERS_DIFF,
12
+ GITHUB_REPOSITORY,
10
13
  GITHUB_TOKEN,
11
- PR_NUMBER,
12
- REPO_NAME,
14
+ PR,
15
+ check_pypi_version,
13
16
  get_github_data,
14
17
  get_pr_diff,
15
18
  graphql_request,
19
+ ultralytics_actions_info,
16
20
  )
17
21
  from .openai_utils import OPENAI_API_KEY, OPENAI_MODEL, get_completion
18
22
 
19
23
  __all__ = (
20
24
  "remove_html_comments",
25
+ "EVENT_DATA",
21
26
  "GITHUB_API_URL",
22
27
  "GITHUB_HEADERS",
23
28
  "GITHUB_HEADERS_DIFF",
24
29
  "GITHUB_TOKEN",
25
- "REPO_NAME",
26
- "PR_NUMBER",
30
+ "GITHUB_REPOSITORY",
31
+ "PR",
32
+ "DISCUSSION",
27
33
  "GITHUB_EVENT_NAME",
28
34
  "GITHUB_EVENT_PATH",
29
35
  "get_github_data",
@@ -32,4 +38,6 @@ __all__ = (
32
38
  "OPENAI_API_KEY",
33
39
  "OPENAI_MODEL",
34
40
  "get_completion",
41
+ "check_pypi_version",
42
+ "ultralytics_actions_info",
35
43
  )
@@ -1,8 +1,100 @@
1
1
  # Ultralytics Actions 🚀, AGPL-3.0 license https://ultralytics.com/license
2
2
 
3
3
  import re
4
+ import socket
5
+ import time
6
+ import urllib
7
+ from concurrent.futures import ThreadPoolExecutor
4
8
 
5
9
 
6
10
  def remove_html_comments(body: str) -> str:
7
11
  """Removes HTML comments from a string using regex pattern matching."""
8
12
  return re.sub(r"<!--.*?-->", "", body, flags=re.DOTALL).strip()
13
+
14
+
15
+ def clean_url(url):
16
+ """Remove extra characters from URL strings."""
17
+ for _ in range(3):
18
+ url = str(url).strip('"').strip("'").rstrip(".,:;!?`\\").replace(".git@main", "").replace("git+", "")
19
+ return url
20
+
21
+
22
+ def is_url(url, check=True, max_attempts=3, timeout=2):
23
+ """Check if string is URL and check if URL exists."""
24
+ allow_list = (
25
+ "localhost",
26
+ "127.0.0",
27
+ ":5000",
28
+ ":3000",
29
+ ":8000",
30
+ ":8080",
31
+ ":6006",
32
+ "MODEL_ID",
33
+ "API_KEY",
34
+ "url",
35
+ "example",
36
+ "mailto:",
37
+ "github.com", # ignore GitHub links that may be private repos
38
+ "kaggle.com", # blocks automated header requests
39
+ )
40
+ try:
41
+ # Check allow list
42
+ if any(x in url for x in allow_list):
43
+ return True
44
+
45
+ # Check structure
46
+ result = urllib.parse.urlparse(url)
47
+ if not all([result.scheme, result.netloc]):
48
+ return False
49
+
50
+ # Check response
51
+ if check:
52
+ for attempt in range(max_attempts):
53
+ try:
54
+ req = urllib.request.Request(
55
+ url,
56
+ method="HEAD",
57
+ headers={
58
+ "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
59
+ },
60
+ )
61
+ with urllib.request.urlopen(req, timeout=timeout) as response:
62
+ return response.getcode() < 400
63
+ except (urllib.error.URLError, socket.timeout):
64
+ if attempt == max_attempts - 1: # last attempt
65
+ return False
66
+ time.sleep(2**attempt) # exponential backoff
67
+ return False
68
+ return True
69
+ except Exception:
70
+ return False
71
+
72
+
73
+ def check_links_in_string(text, verbose=True, return_bad=False):
74
+ """Process a given text, find unique URLs within it, and check for any 404 errors."""
75
+ pattern = (
76
+ r"\[([^\]]+)\]\(([^)]+)\)" # Matches Markdown links [text](url)
77
+ r"|"
78
+ r"(" # Start capturing group for plaintext URLs
79
+ r"(?:https?://)?" # Optional http:// or https://
80
+ r"(?:www\.)?" # Optional www.
81
+ r"[\w.-]+" # Domain name and subdomains
82
+ r"\.[a-zA-Z]{2,}" # TLD
83
+ r"(?:/[^\s\"')\]]*)?" # Optional path
84
+ r")"
85
+ )
86
+ all_urls = []
87
+ for md_text, md_url, plain_url in re.findall(pattern, text):
88
+ url = md_url or plain_url
89
+ if url and urllib.parse.urlparse(url).scheme:
90
+ all_urls.append(url)
91
+
92
+ urls = set(map(clean_url, all_urls)) # remove extra characters and make unique
93
+ with ThreadPoolExecutor(max_workers=16) as executor: # multi-thread
94
+ bad_urls = [url for url, valid in zip(urls, executor.map(lambda x: not is_url(x, check=True), urls)) if valid]
95
+
96
+ passing = not bad_urls
97
+ if verbose and not passing:
98
+ print(f"WARNING ⚠️ errors found in URLs {bad_urls}")
99
+
100
+ return (passing, bad_urls) if return_bad else passing
@@ -1,30 +1,39 @@
1
1
  # Ultralytics Actions 🚀, AGPL-3.0 license https://ultralytics.com/license
2
-
2
+ import json
3
3
  import os
4
+ from pathlib import Path
4
5
 
5
6
  import requests
6
7
 
7
8
  GITHUB_TOKEN = os.getenv("GITHUB_TOKEN")
9
+ GITHUB_REPOSITORY = os.getenv("GITHUB_REPOSITORY")
10
+ GITHUB_EVENT_NAME = os.getenv("GITHUB_EVENT_NAME")
11
+ GITHUB_EVENT_PATH = os.getenv("GITHUB_EVENT_PATH")
8
12
  GITHUB_API_URL = "https://api.github.com"
9
13
  GITHUB_HEADERS = {"Authorization": f"token {GITHUB_TOKEN}", "Accept": "application/vnd.github.v3+json"}
10
14
  GITHUB_HEADERS_DIFF = {"Authorization": f"token {GITHUB_TOKEN}", "Accept": "application/vnd.github.v3.diff"}
11
15
 
12
- PR_NUMBER = os.getenv("PR_NUMBER")
13
- REPO_NAME = os.getenv("GITHUB_REPOSITORY")
14
- GITHUB_EVENT_NAME = os.getenv("GITHUB_EVENT_NAME")
15
- GITHUB_EVENT_PATH = os.getenv("GITHUB_EVENT_PATH")
16
+ EVENT_DATA = {}
17
+ if GITHUB_EVENT_PATH:
18
+ event_path = Path(GITHUB_EVENT_PATH)
19
+ if event_path.exists():
20
+ EVENT_DATA = json.loads(event_path.read_text())
21
+ PR = EVENT_DATA.get("pull_request", {})
22
+ DISCUSSION = EVENT_DATA.get("discussion", {})
23
+
24
+ INPUTS = {k[6:].lower(): v for k, v in os.environ.items() if k.startswith("INPUT_")} # actions inputs dictionary
16
25
 
17
26
 
18
27
  def get_pr_diff(pr_number: int) -> str:
19
28
  """Retrieves the diff content for a specified pull request in a GitHub repository."""
20
- url = f"{GITHUB_API_URL}/repos/{REPO_NAME}/pulls/{pr_number}"
29
+ url = f"{GITHUB_API_URL}/repos/{GITHUB_REPOSITORY}/pulls/{pr_number}"
21
30
  r = requests.get(url, headers=GITHUB_HEADERS_DIFF)
22
31
  return r.text if r.status_code == 200 else ""
23
32
 
24
33
 
25
34
  def get_github_data(endpoint: str) -> dict:
26
35
  """Fetches GitHub repository data from a specified endpoint using the GitHub API."""
27
- r = requests.get(f"{GITHUB_API_URL}/repos/{REPO_NAME}/{endpoint}", headers=GITHUB_HEADERS)
36
+ r = requests.get(f"{GITHUB_API_URL}/repos/{GITHUB_REPOSITORY}/{endpoint}", headers=GITHUB_HEADERS)
28
37
  r.raise_for_status()
29
38
  return r.json()
30
39
 
@@ -42,3 +51,77 @@ def graphql_request(query: str, variables: dict = None) -> dict:
42
51
  success = "data" in result and not result.get("errors")
43
52
  print(f"{'Successful' if success else 'Fail'} discussion GraphQL request: {result.get('errors', 'No errors')}")
44
53
  return result
54
+
55
+
56
def check_pypi_version(pyproject_toml="pyproject.toml"):
    """Compares local and PyPI package versions to determine if a new version should be published.

    Reads the package name and version from `pyproject_toml` (resolving a setuptools "dynamic"
    version from the target module's `__init__.py`), fetches the latest release from PyPI, and
    decides whether the local version is a sensible increment worth publishing.

    Args:
        pyproject_toml (str): Path to the pyproject.toml file to inspect.

    Returns:
        (tuple): (local_version str, online_version str | None, publish bool) where publish is
            True for a patch bump of at most 2, a clean minor bump (patch reset to 0), a clean
            major bump (minor and patch reset to 0), or when no PyPI release exists yet.
    """
    import tomllib  # requires Python>=3.11

    with open(pyproject_toml, "rb") as f:
        pyproject = tomllib.load(f)

    package_name = pyproject["project"]["name"]
    local_version = pyproject["project"].get("version", "dynamic")

    # If version is dynamic, extract it from the file named by the setuptools attr spec,
    # e.g. attr "actions.__version__" -> read actions/__init__.py for a __version__ = "..." line
    if local_version == "dynamic":
        version_attr = pyproject["tool"]["setuptools"]["dynamic"]["version"]["attr"]
        module_path, attr_name = version_attr.rsplit(".", 1)
        with open(f"{module_path.replace('.', '/')}/__init__.py") as f:
            local_version = next(line.split("=")[1].strip().strip("'\"") for line in f if line.startswith(attr_name))

    print(f"Local Version: {local_version}")

    # Get online version from PyPI; timeout so CI does not hang indefinitely on a PyPI outage
    response = requests.get(f"https://pypi.org/pypi/{package_name}/json", timeout=10)
    online_version = response.json()["info"]["version"] if response.status_code == 200 else None
    print(f"Online Version: {online_version or 'Not Found'}")

    # Determine if a new version should be published
    if online_version:
        # Pad to 3 components so short versions like "1.0" cannot raise IndexError below
        local_ver = _version_tuple(local_version)
        online_ver = _version_tuple(online_version)
        major_diff = local_ver[0] - online_ver[0]
        minor_diff = local_ver[1] - online_ver[1]
        patch_diff = local_ver[2] - online_ver[2]

        publish = (
            (major_diff == 0 and minor_diff == 0 and 0 < patch_diff <= 2)  # small patch bump
            or (major_diff == 0 and minor_diff == 1 and local_ver[2] == 0)  # minor bump, patch reset
            or (major_diff == 1 and local_ver[1] == 0 and local_ver[2] == 0)  # major bump, rest reset
        )  # should publish an update
    else:
        publish = True  # publish as this is likely a first release

    return local_version, online_version, publish


def _version_tuple(version: str) -> tuple:
    """Converts an 'X.Y.Z' version string into a 3-int tuple, zero-padding short versions (e.g. '1.0' -> (1, 0, 0))."""
    parts = [int(p) for p in version.split(".")[:3]]
    return tuple(parts + [0] * (3 - len(parts)))
97
+
98
+
99
def ultralytics_actions_info():
    """Print Ultralytics Actions information."""
    # Core event/repo context assembled from module globals and the process environment
    info = {
        "github.event_name": GITHUB_EVENT_NAME,
        "github.event.action": EVENT_DATA.get("action"),
        "github.repository": GITHUB_REPOSITORY,
        "github.event.pull_request.number": PR.get("number"),
        "github.event.pull_request.head.repo.full_name": PR.get("head", {}).get("repo", {}).get("full_name"),
        "github.actor": os.environ.get("GITHUB_ACTOR"),
        "github.event.pull_request.head.ref": PR.get("head", {}).get("ref"),
        "github.ref": os.environ.get("GITHUB_REF"),
        "github.head_ref": os.environ.get("GITHUB_HEAD_REF"),
        "github.base_ref": os.environ.get("GITHUB_BASE_REF"),
    }

    # Discussion events carry extra identifiers needed for GraphQL operations
    if GITHUB_EVENT_NAME == "discussion":
        info["github.event.discussion.node_id"] = DISCUSSION.get("node_id")
        info["github.event.discussion.number"] = DISCUSSION.get("number")

    # Print information, aligning values in a single padded column
    pad = max(map(len, info)) + 5
    print("Ultralytics Actions Information " + "-" * 40)  # header (72 long)
    for key, value in info.items():
        print(f"{key:<{pad}}{value}")
    print("-" * 72)  # footer
@@ -1,24 +1,45 @@
1
1
  # Ultralytics Actions 🚀, AGPL-3.0 license https://ultralytics.com/license
2
2
 
3
3
  import os
4
+ import random
5
+ from typing import Dict, List
4
6
 
5
7
  import requests
6
8
 
7
- OPENAI_MODEL = os.getenv("OPENAI_MODEL", "gpt-4o")
9
+ from actions.utils.common_utils import check_links_in_string
10
+
8
11
  OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
12
+ OPENAI_MODEL = os.getenv("OPENAI_MODEL", "gpt-4o")
9
13
 
10
14
 
11
def get_completion(
    messages: List[Dict[str, str]],
    check_links: bool = True,
    remove: List[str] = (" @giscus[bot]",),  # strings to remove from response
) -> str:
    """Generates a completion using OpenAI's API based on input messages.

    Retries with a fresh random seed when the response contains bad URLs; after the retries are
    exhausted, appends a no-links instruction to `messages` (mutating the caller's list) and
    accepts the final response unconditionally.

    Args:
        messages (List[Dict[str, str]]): Chat messages in OpenAI role/content format.
        check_links (bool): Whether to validate URLs in the response before accepting it.
        remove (List[str]): Substrings stripped from the response text.

    Returns:
        (str): The completion content with `remove` substrings stripped.

    Raises:
        requests.HTTPError: If the OpenAI API responds with a non-2xx status.
    """
    assert OPENAI_API_KEY, "OpenAI API key is required."
    endpoint = "https://api.openai.com/v1/chat/completions"
    headers = {"Authorization": f"Bearer {OPENAI_API_KEY}", "Content-Type": "application/json"}

    max_retries = 2
    content = ""
    for attempt in range(max_retries + 2):  # attempt = [0, 1, 2, 3], 2 random retries before asking for no links
        payload = {"model": OPENAI_MODEL, "messages": messages, "seed": random.randint(1, 1000000)}
        response = requests.post(endpoint, headers=headers, json=payload)
        response.raise_for_status()

        content = response.json()["choices"][0]["message"]["content"].strip()
        for fragment in remove:
            content = content.replace(fragment, "")

        if not check_links or check_links_in_string(content):  # if no checks or checks are passing return response
            return content

        if attempt >= max_retries:
            print("Max retries reached. Updating prompt to exclude links.")
            messages.append({"role": "user", "content": "Please provide a response without any URLs or links in it."})
            check_links = False  # automatically accept the last message
        else:
            print(f"Attempt {attempt + 1}: Found bad URLs. Retrying with a new random seed.")

    return content
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: ultralytics-actions
3
- Version: 0.0.5
3
+ Version: 0.0.10
4
4
  Summary: Ultralytics Actions for GitHub automation and PR management.
5
5
  Author-email: Glenn Jocher <glenn.jocher@ultralytics.com>
6
6
  Maintainer-email: Ultralytics <hello@ultralytics.com>
@@ -42,6 +42,8 @@ Welcome to the [Ultralytics Actions](https://github.com/ultralytics/actions) rep
42
42
 
43
43
  [![GitHub Actions Marketplace](https://img.shields.io/badge/Marketplace-Ultralytics_Actions-blue?style=flat&logo=github)](https://github.com/marketplace/actions/ultralytics-actions) [![Ultralytics Actions](https://github.com/ultralytics/actions/actions/workflows/format.yml/badge.svg)](https://github.com/ultralytics/actions/actions/workflows/format.yml) <a href="https://discord.com/invite/ultralytics"><img alt="Discord" src="https://img.shields.io/discord/1089800235347353640?logo=discord&logoColor=white&label=Discord&color=blue"></a> <a href="https://community.ultralytics.com/"><img alt="Ultralytics Forums" src="https://img.shields.io/discourse/users?server=https%3A%2F%2Fcommunity.ultralytics.com&logo=discourse&label=Forums&color=blue"></a> <a href="https://reddit.com/r/ultralytics"><img alt="Ultralytics Reddit" src="https://img.shields.io/reddit/subreddit-subscribers/ultralytics?style=flat&logo=reddit&logoColor=white&label=Reddit&color=blue"></a>
44
44
 
45
+ [![PyPI version](https://badge.fury.io/py/ultralytics-actions.svg)](https://badge.fury.io/py/ultralytics-actions) [![Downloads](https://static.pepy.tech/badge/ultralytics-actions)](https://pepy.tech/project/ultralytics-actions)
46
+
45
47
  ## 📄 Actions Description
46
48
 
47
49
  Ultralytics Actions automatically applies formats, updates, and enhancements:
@@ -0,0 +1,15 @@
1
+ actions/__init__.py,sha256=Eko4c5hZ6l7Dg7GO0v0C5Lp72mdL4RDxAjkLO2mjqsI,749
2
+ actions/first_interaction.py,sha256=cLXo5hmhOgTfk7F9LbGJeRdg6vvrKUe-0taCa_TkpAg,17683
3
+ actions/summarize_pr.py,sha256=ysbGgomPXMXBZQQROWTv06syA59gJmGMvpWxDO66cLQ,3779
4
+ actions/summarize_release.py,sha256=l8NBdTAXLysfNKl1Kf_1tyuBRmeEBLyzTDXS6s5_eQg,8350
5
+ actions/update_markdown_code_blocks.py,sha256=WBNcMD_KKsZS-qSPBn6O1G0ggQ_VrT-jTQffbg7xH_M,6369
6
+ actions/utils/__init__.py,sha256=0vRjFc7i2WOlphuxdUxQo5BuNipgwGw2Bs-fdUBDeUw,973
7
+ actions/utils/common_utils.py,sha256=PaXyUAf675PEjzXY7ZFCcyfes1mkibB1OuvKU06N-ug,3490
8
+ actions/utils/github_utils.py,sha256=2H4pRum8tLSbLKGoO85Ea4E8psxZICyvFO_1hwQPuoU,5431
9
+ actions/utils/openai_utils.py,sha256=SQWOjU3hdfI_0LKb3uqM5pNsoMyE7W0hGxXy7ISk97M,1823
10
+ ultralytics_actions-0.0.10.dist-info/LICENSE,sha256=hIahDEOTzuHCU5J2nd07LWwkLW7Hko4UFO__ffsvB-8,34523
11
+ ultralytics_actions-0.0.10.dist-info/METADATA,sha256=KhVf3iB6wqE6AuGSi0NSJBCAXFj-cq1Knn2xZRn9r3I,10535
12
+ ultralytics_actions-0.0.10.dist-info/WHEEL,sha256=OVMc5UfuAQiSplgO0_WdW7vXVGAt9Hdd6qtN4HotdyA,91
13
+ ultralytics_actions-0.0.10.dist-info/entry_points.txt,sha256=GowvOFplj0C7JmsjbKcbpgLpdf2r921pcaOQkAHWZRA,378
14
+ ultralytics_actions-0.0.10.dist-info/top_level.txt,sha256=5apM5x80QlJcGbACn1v3fkmIuL1-XQCKcItJre7w7Tw,8
15
+ ultralytics_actions-0.0.10.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (75.1.0)
2
+ Generator: setuptools (75.2.0)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5
 
@@ -1,5 +1,6 @@
1
1
  [console_scripts]
2
2
  ultralytics-actions-first-interaction = actions.first_interaction:main
3
+ ultralytics-actions-info = actions.utils:ultralytics_actions_info
3
4
  ultralytics-actions-summarize-pr = actions.summarize_pr:main
4
5
  ultralytics-actions-summarize-release = actions.summarize_release:main
5
6
  ultralytics-actions-update-markdown-code-blocks = actions.update_markdown_code_blocks:main
@@ -1,15 +0,0 @@
1
- actions/__init__.py,sha256=gQgelJXb-pdXlmEB1emplvz7LMwt23p26a1zVOI0_Ak,1127
2
- actions/first_interaction.py,sha256=28WnEN0TWV0RYeQJ81ai5uuTnUhEsOopmbBx14W4I2o,17292
3
- actions/summarize_pr.py,sha256=BUZ8MuIxzXU4Tl5L8UXiUyUXYWKYEe_34lbK4IP2TuA,3531
4
- actions/summarize_release.py,sha256=Gu1RM6bQ8dJ6Vd5lWFtisL9TBI3hQZA9QbgNMTFaVyo,8009
5
- actions/update_markdown_code_blocks.py,sha256=J5gQAxAt9giogDxMf6zFQ2ZS7XtoJZ9gWO9GmvTeRRY,6308
6
- actions/utils/__init__.py,sha256=3PHLC4zhx_6bGi83msXeusuacmzNjE5Q8iV7FK9EBsI,791
7
- actions/utils/common_utils.py,sha256=A_lzlzyTkrwd5fpqvDnIT1hhdBlHDdwtlxjAcKn4da0,278
8
- actions/utils/github_utils.py,sha256=CLzS0k9aaCpaaeObSDRq00sN_gsOEaspyeqiqpeqHxs,1853
9
- actions/utils/openai_utils.py,sha256=-N63TWvlw0lfDJBVeZMhONgwe41_3TQT_3fTlkpWrwo,849
10
- ultralytics_actions-0.0.5.dist-info/LICENSE,sha256=hIahDEOTzuHCU5J2nd07LWwkLW7Hko4UFO__ffsvB-8,34523
11
- ultralytics_actions-0.0.5.dist-info/METADATA,sha256=eqJ7Gck8Bdf4gwZ28wDjxUo_vrfToiExX_UD4CVhg74,10305
12
- ultralytics_actions-0.0.5.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
13
- ultralytics_actions-0.0.5.dist-info/entry_points.txt,sha256=S8UXCzr5pW9tUAPck-gZTfdvX10a-gambB9wmFRWDnw,312
14
- ultralytics_actions-0.0.5.dist-info/top_level.txt,sha256=5apM5x80QlJcGbACn1v3fkmIuL1-XQCKcItJre7w7Tw,8
15
- ultralytics_actions-0.0.5.dist-info/RECORD,,