airbyte-internal-ops 0.8.0__py3-none-any.whl → 0.9.0__py3-none-any.whl

This diff represents the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: airbyte-internal-ops
- Version: 0.8.0
+ Version: 0.9.0
  Summary: MCP and API interfaces that let the agents do the admin work
  Author-email: Aaron Steers <aj@airbyte.io>
  Keywords: admin,airbyte,api,mcp
@@ -32,6 +32,7 @@ Requires-Dist: jinja2<4.0,>=3.1.2
  Requires-Dist: markdown-it-py<3.0,>=2.2.0
  Requires-Dist: pydantic>=2.0.0
  Requires-Dist: pydash<8.0,>=6.0.2
+ Requires-Dist: pygithub<3.0,>=2.0
  Requires-Dist: python-dotenv<2.0,>=1.0.0
  Requires-Dist: pyyaml<7.0,>=6.0.0
  Requires-Dist: requests<3.0,>=2.31.0
@@ -4,7 +4,7 @@ airbyte_ops_mcp/constants.py,sha256=xU8ARMs7jG0-1hseKn3K1dmhyMOMM3JTetRZugLSKEo,
  airbyte_ops_mcp/docker_hub.py,sha256=qdOYpj2KOFOsEGsl2b2rcVPzyYDharOVM_lJxNTytds,5833
  airbyte_ops_mcp/gcp_auth.py,sha256=i0cm1_xX4fj_31iKlfARpNvTaSr85iGTSw9KMf4f4MU,7206
  airbyte_ops_mcp/github_actions.py,sha256=FSi_tjS9TbwRVp8dwlDZhFOi7lJXEZQLhPm2KpcjNlY,7022
- airbyte_ops_mcp/github_api.py,sha256=ezpMR1vjqQ-1f5yOLBVbxW70OPtUferl1uA0u_gUVo8,12733
+ airbyte_ops_mcp/github_api.py,sha256=Hz3ZCGfLfnfSsytV4zpAFS6q3u0Xh8hUhSSRAwjf6G4,14398
  airbyte_ops_mcp/metadata_validator.py,sha256=kRCr3uzZmFLv4gCzY4KzLrApycqoE1YgDRpld04EBaU,15002
  airbyte_ops_mcp/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  airbyte_ops_mcp/telemetry.py,sha256=GUQX-oD6besFl-kAyycci1HCk1jov_JFwUAqKVN0yKE,5230
@@ -218,6 +218,7 @@ airbyte_ops_mcp/_legacy/airbyte_ci/metadata_service/templates/connector_registry
  airbyte_ops_mcp/_legacy/airbyte_ci/metadata_service/templates/render.py,sha256=gX5Fk5HJ8a_gnZ7N2rgFL8aKURHemWSKd_Z5RBISkpI,2831
  airbyte_ops_mcp/airbyte_repo/__init__.py,sha256=3yEpCOop1h33UuCtU7-1UB3kun2QsxtgvsfVEpuqN6s,1572
  airbyte_ops_mcp/airbyte_repo/bump_version.py,sha256=rtXYv105BhElj7twQmy20VjNsj6vCb0Ulrzmc4Z-1cE,15384
+ airbyte_ops_mcp/airbyte_repo/changelog_fix.py,sha256=sT94FkH5YuPc_NZCoMr9IUydcD4CO-i5ifGm8FOuI9g,13890
  airbyte_ops_mcp/airbyte_repo/list_connectors.py,sha256=UI4RKMfFWOXHBtvh7TIw1v0BnV4rQQGuUgh8e9yvpos,19907
  airbyte_ops_mcp/airbyte_repo/utils.py,sha256=TXlOAfhiu_hVRNjCxB4PRPVDhTWCU5lYmgqz4QG_-EA,3201
  airbyte_ops_mcp/cli/__init__.py,sha256=XpL7FyVfgabfBF2JR7u7NwJ2krlYqjd_OwLcWf-Xc7s,114
@@ -226,8 +227,8 @@ airbyte_ops_mcp/cli/_shared.py,sha256=jg-xMyGzTCGPqKd8VTfE_3kGPIyO_3Kx5sQbG4rPc0
  airbyte_ops_mcp/cli/app.py,sha256=xxlyJzlFvZuy-qKAhTPPmzj-t0o7MSqQ7aPHonfT1vU,790
  airbyte_ops_mcp/cli/cloud.py,sha256=jac0FcG3UvNNeNNhqZR04uY9BCyVvins1kjO0LRZ86Y,45861
  airbyte_ops_mcp/cli/gh.py,sha256=koJPu0MDB6AW7mJq2z4dZV65ofvsZTkqoeitGF8KJR8,5364
- airbyte_ops_mcp/cli/local.py,sha256=57F_FrC8oEEa9eb85sptJOSDncS_ItlWtnO6DcJomKk,20874
- airbyte_ops_mcp/cli/registry.py,sha256=L4nDKhlegr31gSE-GUvDFSq10KgDz5kJuZXgLIxYIyg,9785
+ airbyte_ops_mcp/cli/local.py,sha256=kJe3rO6AWdXyjvrlcyP5MC_wIGu9cm1DaCNuO2gxkhU,40806
+ airbyte_ops_mcp/cli/registry.py,sha256=CVm15_6YWpLD99DZ8f8y68npXJw01Ckde7Ty3o89YNE,16631
  airbyte_ops_mcp/cloud_admin/__init__.py,sha256=cqE96Q10Kp6elhH9DAi6TVsIwSUy3sooDLLrxTaktGk,816
  airbyte_ops_mcp/cloud_admin/api_client.py,sha256=pzdnZpIxOaXJsF_vQuw9_goiDbqwgEQxFwZwsoupY40,57459
  airbyte_ops_mcp/cloud_admin/auth.py,sha256=qE2Aqe0qbZB755KscL65s54Jz78-F-X5a8fXKsrYEOQ,3749
@@ -283,8 +284,10 @@ airbyte_ops_mcp/prod_db_access/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMp
  airbyte_ops_mcp/prod_db_access/queries.py,sha256=PyZ0HU-E6jDhSgQ94XRz4wzh61vCr0ACxfMoGjeTKUU,22312
  airbyte_ops_mcp/prod_db_access/sql.py,sha256=E7HQratZC5LbCXdvpQzZRd09SmCVnY_4Ru-HluWaQ_c,38308
  airbyte_ops_mcp/registry/__init__.py,sha256=iEaPlt9GrnlaLbc__98TguNeZG8wuQu7S-_2QkhHcbA,858
+ airbyte_ops_mcp/registry/_gcs_util.py,sha256=3dL7iqpyoSPfl2nGBPp73TqrqGWI4Ks6egKpQXDnw24,2838
+ airbyte_ops_mcp/registry/connector_stubs.py,sha256=pBzaQIOgCfBvHuy3_N45amU9Do2ALDpU8q5HnZySPTQ,5807
  airbyte_ops_mcp/registry/models.py,sha256=B4L4TKr52wo0xs0CqvCBrpowqjShzVnZ5eTr2-EyhNs,2346
- airbyte_ops_mcp/registry/publish.py,sha256=VoPxsM2_0zJ829orzCRN-kjgcJtuBNyXgW4I9J680ro,12717
+ airbyte_ops_mcp/registry/publish.py,sha256=BHOvj7qximKytGKTXMXCO6_1yTkmSvUije5jHKpQj64,12640
  airbyte_ops_mcp/regression_tests/__init__.py,sha256=qdveN866w50VmiDmTD0IW8Buo4UUn_1m9mKNS6hgj5U,1092
  airbyte_ops_mcp/regression_tests/cdk_secrets.py,sha256=iRjqqBS96KZoswfgT7ju-pE_pfbYoDy4PfrK-K8uyYs,3204
  airbyte_ops_mcp/regression_tests/ci_output.py,sha256=os69gcEhbomrGRAfnwPYAXhsAiJSWOuyW1gWZPx9-hw,15498
@@ -305,7 +308,7 @@ airbyte_ops_mcp/regression_tests/regression/comparators.py,sha256=MJkLZEKHivgrG0
  airbyte_ops_mcp/regression_tests/validation/__init__.py,sha256=MBEwGOoNuqT4_oCahtoK62OKWIjUCfWa7vZTxNj_0Ek,1532
  airbyte_ops_mcp/regression_tests/validation/catalog_validators.py,sha256=jqqVAMOk0mtdPgwu4d0hA0ZEjtsNh5gapvGydRv3_qk,12553
  airbyte_ops_mcp/regression_tests/validation/record_validators.py,sha256=RjauAhKWNwxMBTu0eNS2hMFNQVs5CLbQU51kp6FOVDk,7432
- airbyte_internal_ops-0.8.0.dist-info/METADATA,sha256=b9pAOYSCdbb5hYDF-Wuk8wWG1r2uh7-Su-8wzwPOE_s,5964
- airbyte_internal_ops-0.8.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
- airbyte_internal_ops-0.8.0.dist-info/entry_points.txt,sha256=WxP0l7bRFss4Cr5uQqVj9mTEKwnRKouNuphXQF0lotA,171
- airbyte_internal_ops-0.8.0.dist-info/RECORD,,
+ airbyte_internal_ops-0.9.0.dist-info/METADATA,sha256=bE6ARIIiGF7okmQ779VU9T5KgHrX-2cdsv4wOtianI8,5998
+ airbyte_internal_ops-0.9.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+ airbyte_internal_ops-0.9.0.dist-info/entry_points.txt,sha256=WxP0l7bRFss4Cr5uQqVj9mTEKwnRKouNuphXQF0lotA,171
+ airbyte_internal_ops-0.9.0.dist-info/RECORD,,
@@ -0,0 +1,437 @@
+ # Copyright (c) 2025 Airbyte, Inc., all rights reserved.
+ """Changelog date fixing and checking utilities for Airbyte connectors.
+
+ This module provides functionality to fix and check changelog entry dates by looking up
+ the actual PR merge dates from GitHub.
+ """
+
+ from __future__ import annotations
+
+ import datetime
+ import re
+ from dataclasses import dataclass
+ from enum import StrEnum
+ from pathlib import Path
+
+ from airbyte_ops_mcp.airbyte_repo.bump_version import (
+     AIRBYTE_GITHUB_REPO,
+     get_connector_doc_path,
+ )
+ from airbyte_ops_mcp.airbyte_repo.list_connectors import get_all_connectors
+ from airbyte_ops_mcp.github_api import GitHubAPIError, get_pr_merge_date
+
+
+ class ChangelogIssueType(StrEnum):
+     """Types of changelog issues that can be detected."""
+
+     PR_MISMATCH = "pr_mismatch"
+
+
+ @dataclass
+ class ChangelogDateFix:
+     """A single changelog date fix."""
+
+     line_number: int
+     version: str
+     pr_number: int
+     old_date: datetime.date
+     new_date: datetime.date
+
+     @property
+     def changed(self) -> bool:
+         """Return True if the date was changed."""
+         return self.old_date != self.new_date
+
+
+ @dataclass
+ class ChangelogIssue:
+     """A single changelog issue found during checking."""
+
+     line_number: int
+     version: str
+     issue_type: ChangelogIssueType
+     message: str
+
+
+ @dataclass
+ class ChangelogCheckResult:
+     """Result of checking changelog for a connector."""
+
+     connector: str
+     doc_path: Path | None
+     date_issues: list[ChangelogDateFix]
+     pr_mismatch_warnings: list[ChangelogIssue]
+     errors: list[str]
+
+     @property
+     def has_issues(self) -> bool:
+         """Return True if any issues were found."""
+         return (
+             any(fix.changed for fix in self.date_issues)
+             or len(self.pr_mismatch_warnings) > 0
+         )
+
+     @property
+     def issue_count(self) -> int:
+         """Return the total number of issues found."""
+         return sum(1 for fix in self.date_issues if fix.changed) + len(
+             self.pr_mismatch_warnings
+         )
+
+
+ @dataclass
+ class ChangelogFixResult:
+     """Result of fixing changelog dates for a connector."""
+
+     connector: str
+     doc_path: Path | None
+     fixes: list[ChangelogDateFix]
+     warnings: list[ChangelogIssue]
+     errors: list[str]
+     dry_run: bool
+
+     @property
+     def has_changes(self) -> bool:
+         """Return True if any dates were changed."""
+         return any(fix.changed for fix in self.fixes)
+
+     @property
+     def changed_count(self) -> int:
+         """Return the number of dates that were changed."""
+         return sum(1 for fix in self.fixes if fix.changed)
+
+
+ def _parse_changelog_entries(
+     content: str,
+     github_repo: str = AIRBYTE_GITHUB_REPO,
+ ) -> list[tuple[int, str, str, int, int, str]]:
+     """Parse changelog entries from markdown content.
+
+     Args:
+         content: The markdown content of the documentation file.
+         github_repo: GitHub repository for PR links.
+
+     Returns:
+         List of tuples: (line_number, version, date_str, displayed_pr_number, url_pr_number, full_line)
+     """
+     # Regex to parse changelog table rows in the format:
+     # | version | date | [pr_num](url) | comment |
+     changelog_entry_re = (
+         # Match table row start and capture semantic version (e.g., "1.2.3")
+         r"^\| *(?P<version>[0-9]+\.[0-9]+\.[0-9]+) *\| *"
+         # Capture date in ISO format YYYY-MM-DD
+         r"(?P<date>[0-9]{4}-[0-9]{2}-[0-9]{2}) *\| *"
+         # Capture displayed PR number (may be in brackets as markdown link)
+         r"\[?(?P<displayed_pr>[0-9]+)\]?\(https://github.com/"
+         # GitHub repo portion (escaped to handle special chars)
+         + re.escape(github_repo)
+         # Capture PR number from URL path
+         + r"/pull/(?P<url_pr>[0-9]+)\) *\| *"
+         # Capture comment text until end of row
+         r"(?P<comment>.*?) *\| *$"
+     )
+
+     entries = []
+     lines = content.splitlines()
+
+     for line_num, line in enumerate(lines, start=1):
+         match = re.match(changelog_entry_re, line)
+         if match:
+             version = match.group("version")
+             date_str = match.group("date")
+             displayed_pr = int(match.group("displayed_pr"))
+             url_pr = int(match.group("url_pr"))
+             entries.append((line_num, version, date_str, displayed_pr, url_pr, line))
+
+     return entries
+
+
+ def check_changelog(
+     repo_path: str | Path,
+     connector_name: str,
+     lookback_days: int | None = None,
+     github_repo: str = AIRBYTE_GITHUB_REPO,
+     token: str | None = None,
+ ) -> ChangelogCheckResult:
+     """Check changelog for issues (incorrect dates, mismatched PR numbers).
+
+     Args:
+         repo_path: Path to the Airbyte monorepo.
+         connector_name: Technical name of the connector (e.g., "source-github").
+         lookback_days: Only check entries with dates within this many days. None for all.
+         github_repo: GitHub repository for PR links.
+         token: GitHub API token. If None, will be resolved from environment.
+
+     Returns:
+         ChangelogCheckResult with details of any issues found.
+     """
+     repo_path = Path(repo_path)
+     date_issues: list[ChangelogDateFix] = []
+     pr_mismatch_warnings: list[ChangelogIssue] = []
+     errors: list[str] = []
+
+     doc_path = get_connector_doc_path(repo_path, connector_name)
+     if doc_path is None or not doc_path.exists():
+         return ChangelogCheckResult(
+             connector=connector_name,
+             doc_path=None,
+             date_issues=[],
+             pr_mismatch_warnings=[],
+             errors=[f"Documentation file not found for {connector_name}"],
+         )
+
+     content = doc_path.read_text()
+     entries = _parse_changelog_entries(content, github_repo)
+
+     if not entries:
+         return ChangelogCheckResult(
+             connector=connector_name,
+             doc_path=doc_path,
+             date_issues=[],
+             pr_mismatch_warnings=[],
+             errors=[],
+         )
+
+     owner, repo = github_repo.split("/")
+     cutoff_date = None
+     if lookback_days is not None:
+         cutoff_date = datetime.date.today() - datetime.timedelta(days=lookback_days)
+
+     for line_num, version, date_str, displayed_pr, url_pr, _full_line in entries:
+         entry_date = datetime.datetime.strptime(date_str, "%Y-%m-%d").date()
+
+         # Skip entries older than the lookback threshold
+         if cutoff_date is not None and entry_date < cutoff_date:
+             continue
+
+         if displayed_pr != url_pr:
+             pr_mismatch_warnings.append(
+                 ChangelogIssue(
+                     line_number=line_num,
+                     version=version,
+                     issue_type=ChangelogIssueType.PR_MISMATCH,
+                     message=f"Displayed PR number ({displayed_pr}) does not match URL PR number ({url_pr})",
+                 )
+             )
+
+         merge_date = None
+         try:
+             merge_date = get_pr_merge_date(owner, repo, url_pr, token)
+         except GitHubAPIError as e:
+             errors.append(f"Failed to fetch PR {url_pr}: {e}")
+             continue
+
+         if merge_date is None:
+             errors.append(f"PR {url_pr} is not merged")
+             continue
+
+         date_issues.append(
+             ChangelogDateFix(
+                 line_number=line_num,
+                 version=version,
+                 pr_number=url_pr,
+                 old_date=entry_date,
+                 new_date=merge_date,
+             )
+         )
+
+     return ChangelogCheckResult(
+         connector=connector_name,
+         doc_path=doc_path,
+         date_issues=date_issues,
+         pr_mismatch_warnings=pr_mismatch_warnings,
+         errors=errors,
+     )
+
+
+ def fix_changelog_dates(
+     repo_path: str | Path,
+     connector_name: str,
+     dry_run: bool = False,
+     lookback_days: int | None = None,
+     github_repo: str = AIRBYTE_GITHUB_REPO,
+     token: str | None = None,
+ ) -> ChangelogFixResult:
+     """Fix changelog dates for a connector by looking up PR merge dates.
+
+     Args:
+         repo_path: Path to the Airbyte monorepo.
+         connector_name: Technical name of the connector (e.g., "source-github").
+         dry_run: If True, don't actually modify the file.
+         lookback_days: Only fix entries with dates within this many days. None for all.
+         github_repo: GitHub repository for PR links.
+         token: GitHub API token. If None, will be resolved from environment.
+
+     Returns:
+         ChangelogFixResult with details of the fixes applied.
+     """
+     repo_path = Path(repo_path)
+     fixes: list[ChangelogDateFix] = []
+     warnings: list[ChangelogIssue] = []
+     errors: list[str] = []
+
+     doc_path = get_connector_doc_path(repo_path, connector_name)
+     if doc_path is None or not doc_path.exists():
+         return ChangelogFixResult(
+             connector=connector_name,
+             doc_path=None,
+             fixes=[],
+             warnings=[],
+             errors=[f"Documentation file not found for {connector_name}"],
+             dry_run=dry_run,
+         )
+
+     content = doc_path.read_text()
+     entries = _parse_changelog_entries(content, github_repo)
+
+     if not entries:
+         return ChangelogFixResult(
+             connector=connector_name,
+             doc_path=doc_path,
+             fixes=[],
+             warnings=[],
+             errors=[],
+             dry_run=dry_run,
+         )
+
+     owner, repo = github_repo.split("/")
+     lines = content.splitlines()
+     # Track which lines need to be modified: line_num (1-indexed) -> new_line
+     line_replacements: dict[int, str] = {}
+     cutoff_date = None
+     if lookback_days is not None:
+         cutoff_date = datetime.date.today() - datetime.timedelta(days=lookback_days)
+
+     for line_num, version, date_str, displayed_pr, url_pr, full_line in entries:
+         entry_date = datetime.datetime.strptime(date_str, "%Y-%m-%d").date()
+
+         # Skip entries older than the lookback threshold
+         if cutoff_date is not None and entry_date < cutoff_date:
+             continue
+
+         if displayed_pr != url_pr:
+             warnings.append(
+                 ChangelogIssue(
+                     line_number=line_num,
+                     version=version,
+                     issue_type=ChangelogIssueType.PR_MISMATCH,
+                     message=f"Displayed PR number ({displayed_pr}) does not match URL PR number ({url_pr})",
+                 )
+             )
+
+         merge_date = None
+         try:
+             merge_date = get_pr_merge_date(owner, repo, url_pr, token)
+         except GitHubAPIError as e:
+             errors.append(f"Failed to fetch PR {url_pr}: {e}")
+             continue
+
+         if merge_date is None:
+             errors.append(f"PR {url_pr} is not merged")
+             continue
+
+         fix = ChangelogDateFix(
+             line_number=line_num,
+             version=version,
+             pr_number=url_pr,
+             old_date=entry_date,
+             new_date=merge_date,
+         )
+         fixes.append(fix)
+
+         if fix.changed:
+             new_line = full_line.replace(date_str, merge_date.strftime("%Y-%m-%d"))
+             line_replacements[line_num] = new_line
+
+     # Apply line replacements by line number to avoid issues with duplicate lines
+     if line_replacements:
+         for line_num, new_line in line_replacements.items():
+             lines[line_num - 1] = new_line  # Convert 1-indexed to 0-indexed
+         new_content = "\n".join(lines)
+         if content.endswith("\n"):
+             new_content += "\n"
+     else:
+         new_content = content
+
+     if not dry_run and new_content != content:
+         doc_path.write_text(new_content)
+
+     return ChangelogFixResult(
+         connector=connector_name,
+         doc_path=doc_path,
+         fixes=fixes,
+         warnings=warnings,
+         errors=errors,
+         dry_run=dry_run,
+     )
+
+
+ def check_all_changelogs(
+     repo_path: str | Path,
+     lookback_days: int | None = None,
+     github_repo: str = AIRBYTE_GITHUB_REPO,
+     token: str | None = None,
+ ) -> list[ChangelogCheckResult]:
+     """Check changelogs for all connectors in the repository.
+
+     Args:
+         repo_path: Path to the Airbyte monorepo.
+         lookback_days: Only check entries with dates within this many days. None for all.
+         github_repo: GitHub repository for PR links.
+         token: GitHub API token. If None, will be resolved from environment.
+
+     Returns:
+         List of ChangelogCheckResult for each connector processed.
+     """
+     repo_path = Path(repo_path)
+     connectors = get_all_connectors(repo_path)
+
+     results = []
+     for connector_name in sorted(connectors):
+         result = check_changelog(
+             repo_path=repo_path,
+             connector_name=connector_name,
+             lookback_days=lookback_days,
+             github_repo=github_repo,
+             token=token,
+         )
+         results.append(result)
+
+     return results
+
+
+ def fix_all_changelog_dates(
+     repo_path: str | Path,
+     dry_run: bool = False,
+     lookback_days: int | None = None,
+     github_repo: str = AIRBYTE_GITHUB_REPO,
+     token: str | None = None,
+ ) -> list[ChangelogFixResult]:
+     """Fix changelog dates for all connectors in the repository.
+
+     Args:
+         repo_path: Path to the Airbyte monorepo.
+         dry_run: If True, don't actually modify files.
+         lookback_days: Only fix entries with dates within this many days. None for all.
+         github_repo: GitHub repository for PR links.
+         token: GitHub API token. If None, will be resolved from environment.
+
+     Returns:
+         List of ChangelogFixResult for each connector processed.
+     """
+     repo_path = Path(repo_path)
+     connectors = get_all_connectors(repo_path)
+
+     results = []
+     for connector_name in sorted(connectors):
+         result = fix_changelog_dates(
+             repo_path=repo_path,
+             connector_name=connector_name,
+             dry_run=dry_run,
+             lookback_days=lookback_days,
+             github_repo=github_repo,
+             token=token,
+         )
+         results.append(result)
+
+     return results
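
For orientation, here is a minimal usage sketch of the new airbyte_ops_mcp.airbyte_repo.changelog_fix module added above. It uses only the public signatures visible in this diff; the repository path is illustrative, and the GitHub token is left as None so that it is resolved from the environment as the docstrings describe.

# Sketch only: dry-run the date fixer for one connector and report what would change.
# "/path/to/airbyte" is a placeholder for a local checkout of the Airbyte monorepo.
from airbyte_ops_mcp.airbyte_repo.changelog_fix import fix_changelog_dates

result = fix_changelog_dates(
    repo_path="/path/to/airbyte",
    connector_name="source-github",
    dry_run=True,       # report changes without rewriting the doc file
    lookback_days=30,   # only inspect recent changelog entries
)
for fix in result.fixes:
    if fix.changed:
        print(f"{fix.version}: {fix.old_date} -> {fix.new_date} (PR #{fix.pr_number})")
for warning in result.warnings:
    print(f"line {warning.line_number}: {warning.message}")

check_changelog and check_all_changelogs follow the same pattern but never write to disk, returning ChangelogCheckResult objects instead.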