airbyte-internal-ops 0.7.1__py3-none-any.whl → 0.9.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {airbyte_internal_ops-0.7.1.dist-info → airbyte_internal_ops-0.9.0.dist-info}/METADATA +2 -1
- {airbyte_internal_ops-0.7.1.dist-info → airbyte_internal_ops-0.9.0.dist-info}/RECORD +12 -9
- airbyte_ops_mcp/airbyte_repo/changelog_fix.py +437 -0
- airbyte_ops_mcp/cli/local.py +584 -6
- airbyte_ops_mcp/cli/registry.py +215 -1
- airbyte_ops_mcp/connector_qa/cli.py +37 -0
- airbyte_ops_mcp/github_api.py +56 -0
- airbyte_ops_mcp/registry/_gcs_util.py +100 -0
- airbyte_ops_mcp/registry/connector_stubs.py +186 -0
- airbyte_ops_mcp/registry/publish.py +4 -4
- {airbyte_internal_ops-0.7.1.dist-info → airbyte_internal_ops-0.9.0.dist-info}/WHEEL +0 -0
- {airbyte_internal_ops-0.7.1.dist-info → airbyte_internal_ops-0.9.0.dist-info}/entry_points.txt +0 -0
airbyte_ops_mcp/cli/local.py
CHANGED
@@ -7,6 +7,10 @@ Commands:
     airbyte-ops local connector bump-version - Bump connector version
     airbyte-ops local connector qa - Run QA checks on a connector
     airbyte-ops local connector qa-docs-generate - Generate QA checks documentation
+    airbyte-ops local connector changelog check - Check changelog entries for issues
+    airbyte-ops local connector changelog fix - Fix changelog entry dates
+    airbyte-ops local connector enterprise-stub check - Validate enterprise stub entries
+    airbyte-ops local connector enterprise-stub sync - Sync stub from connector metadata
 """
 
 from __future__ import annotations
@@ -26,6 +30,14 @@ from airbyte_ops_mcp.airbyte_repo.bump_version import (
     VersionNotFoundError,
     bump_connector_version,
 )
+from airbyte_ops_mcp.airbyte_repo.changelog_fix import (
+    ChangelogCheckResult,
+    ChangelogFixResult,
+    check_all_changelogs,
+    check_changelog,
+    fix_all_changelog_dates,
+    fix_changelog_dates,
+)
 from airbyte_ops_mcp.airbyte_repo.list_connectors import (
     CONNECTOR_PATH_PREFIX,
     METADATA_FILE_NAME,
@@ -33,7 +45,7 @@ from airbyte_ops_mcp.airbyte_repo.list_connectors import (
     get_connectors_with_local_cdk,
 )
 from airbyte_ops_mcp.cli._base import app
-from airbyte_ops_mcp.cli._shared import exit_with_error, print_json
+from airbyte_ops_mcp.cli._shared import error_console, exit_with_error, print_json
 from airbyte_ops_mcp.connector_ops.utils import Connector
 from airbyte_ops_mcp.connector_qa.checks import ENABLED_CHECKS
 from airbyte_ops_mcp.connector_qa.consts import CONNECTORS_QA_DOC_TEMPLATE_NAME
@@ -48,6 +60,13 @@ from airbyte_ops_mcp.connector_qa.utils import (
     remove_strict_encrypt_suffix,
 )
 from airbyte_ops_mcp.mcp.github_repo_ops import list_connectors_in_repo
+from airbyte_ops_mcp.registry.connector_stubs import (
+    CONNECTOR_STUBS_FILE,
+    ConnectorStub,
+    find_stub_by_connector,
+    load_local_stubs,
+    save_local_stubs,
+)
 
 console = Console()
 
@@ -559,11 +578,12 @@ def run_qa_checks(
     for connector in connectors:
         for qa_check in checks_to_run:
             result = qa_check.run(connector)
-
-                "[green]PASS[/green]"
-
-
-
+            if result.status == CheckStatus.PASSED:
+                status_icon = "[green]✅ PASS[/green]"
+            elif result.status == CheckStatus.SKIPPED:
+                status_icon = "[yellow]🔶 SKIP[/yellow]"
+            else:
+                status_icon = "[red]❌ FAIL[/red]"
             console.print(
                 f"{status_icon} {connector.technical_name}: {result.check.name}"
             )
@@ -609,3 +629,561 @@ def generate_qa_docs(
     output_path = Path(output_file)
     output_path.write_text(documentation)
     console.print(f"Documentation written to {output_file}")
+
+
+# Create the changelog sub-app under connector
+changelog_app = App(name="changelog", help="Changelog operations for connectors.")
+connector_app.command(changelog_app)
+
+
+@changelog_app.command(name="check")
+def changelog_check(
+    connector_name: Annotated[
+        str | None,
+        Parameter(help="Connector technical name (e.g., source-github)."),
+    ] = None,
+    all_connectors: Annotated[
+        bool,
+        Parameter("--all", help="Check all connectors in the repository."),
+    ] = False,
+    repo_path: Annotated[
+        str | None,
+        Parameter(help="Path to the Airbyte monorepo. Can be inferred from context."),
+    ] = None,
+    lookback_days: Annotated[
+        int | None,
+        Parameter(help="Only check entries with dates within this many days."),
+    ] = None,
+    strict: Annotated[
+        bool,
+        Parameter(help="Exit with error code if any issues are found."),
+    ] = False,
+) -> None:
+    """Check changelog entries for issues.
+
+    Validates changelog dates match PR merge dates and checks for PR number mismatches.
+    """
+    if not connector_name and not all_connectors:
+        exit_with_error("Either --connector-name or --all must be specified.")
+
+    if connector_name and all_connectors:
+        exit_with_error("Cannot specify both --connector-name and --all.")
+
+    if repo_path is None:
+        cwd = Path.cwd()
+        for parent in [cwd, *cwd.parents]:
+            if (parent / CONNECTOR_PATH_PREFIX).exists():
+                repo_path = str(parent)
+                break
+        if repo_path is None:
+            exit_with_error(
+                "Could not infer repo path. Please provide --repo-path or run from within the Airbyte monorepo."
+            )
+
+    total_issues = 0
+
+    if all_connectors:
+        results = check_all_changelogs(repo_path=repo_path, lookback_days=lookback_days)
+        for result in results:
+            if result.has_issues or result.errors:
+                _print_check_result(result)
+            total_issues += result.issue_count
+    else:
+        result = check_changelog(
+            repo_path=repo_path,
+            connector_name=connector_name,
+            lookback_days=lookback_days,
+        )
+        _print_check_result(result)
+        total_issues = result.issue_count
+
+    if total_issues > 0:
+        console.print(f"\n[bold]Total issues found: {total_issues}[/bold]")
+        if strict:
+            exit_with_error(f"Found {total_issues} issue(s) in changelog(s).")
+    else:
+        console.print("[green]No issues found.[/green]")
+
+
+def _print_check_result(result: ChangelogCheckResult) -> None:
+    """Print a changelog check result."""
+    if not result.has_issues and not result.errors:
+        return
+
+    console.print(f"\n[bold]{result.connector}[/bold]")
+
+    for warning in result.pr_mismatch_warnings:
+        console.print(
+            f" [yellow]WARNING[/yellow] Line {warning.line_number} (v{warning.version}): {warning.message}"
+        )
+
+    for fix in result.date_issues:
+        if fix.changed:
+            console.print(
+                f" [red]DATE MISMATCH[/red] Line {fix.line_number} (v{fix.version}): "
+                f"changelog has {fix.old_date}, PR merged on {fix.new_date}"
+            )
+
+    for error in result.errors:
+        console.print(f" [red]ERROR[/red] {error}")
+
+
+@changelog_app.command(name="fix")
+def changelog_fix(
+    connector_name: Annotated[
+        str | None,
+        Parameter(help="Connector technical name (e.g., source-github)."),
+    ] = None,
+    all_connectors: Annotated[
+        bool,
+        Parameter("--all", help="Fix all connectors in the repository."),
+    ] = False,
+    repo_path: Annotated[
+        str | None,
+        Parameter(help="Path to the Airbyte monorepo. Can be inferred from context."),
+    ] = None,
+    lookback_days: Annotated[
+        int | None,
+        Parameter(help="Only fix entries with dates within this many days."),
+    ] = None,
+    dry_run: Annotated[
+        bool,
+        Parameter(help="Print changes without modifying files."),
+    ] = False,
+) -> None:
+    """Fix changelog entry dates to match PR merge dates.
+
+    Looks up the actual merge date for each PR referenced in the changelog
+    and updates the date column to match.
+    """
+    if not connector_name and not all_connectors:
+        exit_with_error("Either --connector-name or --all must be specified.")
+
+    if connector_name and all_connectors:
+        exit_with_error("Cannot specify both --connector-name and --all.")
+
+    if repo_path is None:
+        cwd = Path.cwd()
+        for parent in [cwd, *cwd.parents]:
+            if (parent / CONNECTOR_PATH_PREFIX).exists():
+                repo_path = str(parent)
+                break
+        if repo_path is None:
+            exit_with_error(
+                "Could not infer repo path. Please provide --repo-path or run from within the Airbyte monorepo."
+            )
+
+    total_fixed = 0
+    total_warnings = 0
+
+    if all_connectors:
+        results = fix_all_changelog_dates(
+            repo_path=repo_path, dry_run=dry_run, lookback_days=lookback_days
+        )
+        for result in results:
+            if result.has_changes or result.warnings or result.errors:
+                _print_fix_result(result)
+            total_fixed += result.changed_count
+            total_warnings += len(result.warnings)
+    else:
+        result = fix_changelog_dates(
+            repo_path=repo_path,
+            connector_name=connector_name,
+            dry_run=dry_run,
+            lookback_days=lookback_days,
+        )
+        _print_fix_result(result)
+        total_fixed = result.changed_count
+        total_warnings = len(result.warnings)
+
+    action = "Would fix" if dry_run else "Fixed"
+    console.print(f"\n[bold]{action} {total_fixed} date(s).[/bold]")
+    if total_warnings > 0:
+        console.print(
+            f"[yellow]{total_warnings} warning(s) about PR number mismatches.[/yellow]"
+        )
+
+
+def _print_fix_result(result: ChangelogFixResult) -> None:
+    """Print a changelog fix result."""
+    if not result.has_changes and not result.warnings and not result.errors:
+        return
+
+    console.print(f"\n[bold]{result.connector}[/bold]")
+
+    for warning in result.warnings:
+        console.print(
+            f" [yellow]WARNING[/yellow] Line {warning.line_number} (v{warning.version}): {warning.message}"
+        )
+
+    for fix in result.fixes:
+        if fix.changed:
+            action = "Would fix" if result.dry_run else "Fixed"
+            console.print(
+                f" [green]{action}[/green] Line {fix.line_number} (v{fix.version}): "
+                f"{fix.old_date} -> {fix.new_date}"
+            )
+
+    for error in result.errors:
+        console.print(f" [red]ERROR[/red] {error}")
+
+
+# Create the enterprise-stub sub-app under connector
+enterprise_stub_app = App(
+    name="enterprise-stub",
+    help="Enterprise connector stub operations (local file validation and updates).",
+)
+connector_app.command(enterprise_stub_app)
+
+# Path to connectors in the airbyte-enterprise repo
+ENTERPRISE_CONNECTOR_PATH_PREFIX = "airbyte-integrations/connectors"
+
+
+def _build_stub_from_metadata(
+    connector_name: str,
+    metadata: dict,
+    existing_stub: dict | None = None,
+) -> dict:
+    """Build a connector stub from metadata.yaml.
+
+    Args:
+        connector_name: The connector name (e.g., 'source-oracle-enterprise').
+        metadata: The parsed metadata.yaml content.
+        existing_stub: Optional existing stub to preserve extra fields from.
+
+    Returns:
+        A connector stub dictionary.
+    """
+    data = metadata.get("data", {})
+
+    # Determine connector type for the stub
+    connector_type = data.get("connectorType", "source")
+    stub_type = f"enterprise_{connector_type}"
+
+    # Preserve existing stub ID if available, otherwise use connector name
+    stub_id = (existing_stub.get("id") if existing_stub else None) or connector_name
+
+    # Get the icon URL - construct from icon filename if available
+    icon_filename = data.get("icon", "")
+    if icon_filename and not icon_filename.startswith("http"):
+        # Construct icon URL from the standard GCS path
+        icon_url = f"https://storage.googleapis.com/prod-airbyte-cloud-connector-metadata-service/resources/connector_stubs/v0/icons/{icon_filename}"
+    else:
+        icon_url = icon_filename or ""
+
+    # Build the stub
+    stub: dict = {
+        "id": stub_id,
+        "name": data.get("name", connector_name.replace("-", " ").title()),
+        "label": "enterprise",
+        "icon": icon_url,
+        "url": data.get("documentationUrl", ""),
+        "type": stub_type,
+    }
+
+    # Add definitionId if available
+    definition_id = data.get("definitionId")
+    if definition_id:
+        stub["definitionId"] = definition_id
+
+    # Preserve extra fields from existing stub (like codename)
+    if existing_stub:
+        for key in existing_stub:
+            if key not in stub:
+                stub[key] = existing_stub[key]
+
+    return stub
+
+
+@enterprise_stub_app.command(name="check")
+def enterprise_stub_check(
+    connector: Annotated[
+        str | None,
+        Parameter(help="Connector name to check (e.g., 'source-oracle-enterprise')."),
+    ] = None,
+    all_connectors: Annotated[
+        bool,
+        Parameter("--all", help="Check all stubs in the file."),
+    ] = False,
+    repo_root: Annotated[
+        Path | None,
+        Parameter(
+            help="Path to the airbyte-enterprise repository root. Defaults to current directory."
+        ),
+    ] = None,
+) -> None:
+    """Validate enterprise connector stub entries.
+
+    Checks that stub entries have valid required fields (id, name, url, icon)
+    and optionally validates that the stub matches the connector's metadata.yaml.
+
+    Exit codes:
+        0: All checks passed
+        1: Validation errors found
+
+    Output:
+        STDOUT: JSON validation result
+        STDERR: Informational messages
+
+    Example:
+        airbyte-ops local connector enterprise-stub check --connector source-oracle-enterprise --repo-root /path/to/airbyte-enterprise
+        airbyte-ops local connector enterprise-stub check --all --repo-root /path/to/airbyte-enterprise
+    """
+    if not connector and not all_connectors:
+        exit_with_error("Either --connector or --all must be specified.")
+
+    if connector and all_connectors:
+        exit_with_error("Cannot specify both --connector and --all.")
+
+    if repo_root is None:
+        repo_root = Path.cwd()
+
+    # Load local stubs
+    try:
+        stubs = load_local_stubs(repo_root)
+    except FileNotFoundError as e:
+        exit_with_error(str(e))
+    except ValueError as e:
+        exit_with_error(str(e))
+
+    stubs_to_check = stubs if all_connectors else []
+    if connector:
+        stub = find_stub_by_connector(stubs, connector)
+        if stub is None:
+            exit_with_error(
+                f"Connector stub '{connector}' not found in {CONNECTOR_STUBS_FILE}"
+            )
+        stubs_to_check = [stub]
+
+    errors: list[dict] = []
+    warnings: list[dict] = []
+    placeholders: list[dict] = []
+
+    for stub in stubs_to_check:
+        stub_id = stub.get("id", "<unknown>")
+        stub_name = stub.get("name", stub_id)
+
+        # Check required fields
+        required_fields = ["id", "name", "url", "icon"]
+        for field in required_fields:
+            if not stub.get(field):
+                errors.append(
+                    {"stub_id": stub_id, "error": f"Missing required field: {field}"}
+                )
+
+        # Check if corresponding connector exists and validate against metadata
+        connector_dir = repo_root / ENTERPRISE_CONNECTOR_PATH_PREFIX / stub_id
+        metadata_file = connector_dir / METADATA_FILE_NAME
+
+        if metadata_file.exists():
+            metadata = yaml.safe_load(metadata_file.read_text())
+            data = metadata.get("data", {})
+
+            # Check if definitionId matches
+            metadata_def_id = data.get("definitionId")
+            stub_def_id = stub.get("definitionId")
+            if metadata_def_id and stub_def_id and metadata_def_id != stub_def_id:
+                errors.append(
+                    {
+                        "stub_id": stub_id,
+                        "error": f"definitionId mismatch: stub has '{stub_def_id}', metadata has '{metadata_def_id}'",
+                    }
+                )
+
+            # Check if name matches
+            metadata_name = data.get("name")
+            if metadata_name and stub_name and metadata_name != stub_name:
+                warnings.append(
+                    {
+                        "stub_id": stub_id,
+                        "warning": f"name mismatch: stub has '{stub_name}', metadata has '{metadata_name}'",
+                    }
+                )
+        else:
+            # No connector directory - this is a registry placeholder for a future connector
+            placeholders.append(
+                {
+                    "stub_id": stub_id,
+                    "name": stub_name,
+                }
+            )
+
+    result = {
+        "checked_count": len(stubs_to_check),
+        "error_count": len(errors),
+        "warning_count": len(warnings),
+        "placeholder_count": len(placeholders),
+        "valid": len(errors) == 0,
+        "errors": errors,
+        "warnings": warnings,
+        "placeholders": placeholders,
+    }
+
+    # Print placeholders as info (not warnings - these are valid registry placeholders)
+    if placeholders:
+        error_console.print(
+            f"[blue]Found {len(placeholders)} registry placeholder(s) (no local directory):[/blue]"
+        )
+        for placeholder in placeholders:
+            error_console.print(
+                f" Found Connector Registry Placeholder (no local directory): {placeholder['name']}"
+            )
+
+    if errors:
+        error_console.print(f"[red]Found {len(errors)} error(s):[/red]")
+        for err in errors:
+            error_console.print(f" {err['stub_id']}: {err['error']}")
+
+    if warnings:
+        error_console.print(f"[yellow]Found {len(warnings)} warning(s):[/yellow]")
+        for warn in warnings:
+            error_console.print(f" {warn['stub_id']}: {warn['warning']}")
+
+    if not errors and not warnings:
+        error_console.print(
+            f"[green]All {len(stubs_to_check)} stub(s) passed validation[/green]"
+        )
+
+    print_json(result)
+
+    if errors:
+        exit_with_error("Validation failed", code=1)
+
+
+@enterprise_stub_app.command(name="sync")
+def enterprise_stub_sync(
+    connector: Annotated[
+        str | None,
+        Parameter(help="Connector name to sync (e.g., 'source-oracle-enterprise')."),
+    ] = None,
+    all_connectors: Annotated[
+        bool,
+        Parameter("--all", help="Sync all connectors that have metadata.yaml files."),
+    ] = False,
+    repo_root: Annotated[
+        Path | None,
+        Parameter(
+            help="Path to the airbyte-enterprise repository root. Defaults to current directory."
+        ),
+    ] = None,
+    dry_run: Annotated[
+        bool,
+        Parameter(help="Show what would be synced without making changes."),
+    ] = False,
+) -> None:
+    """Sync connector stub(s) from connector metadata.yaml file(s).
+
+    Reads the connector's metadata.yaml file and updates the corresponding
+    entry in connector_stubs.json with the current values.
+
+    Exit codes:
+        0: Sync successful (or dry-run completed)
+        1: Error (connector not found, no metadata, etc.)
+
+    Output:
+        STDOUT: JSON representation of the synced stub(s)
+        STDERR: Informational messages
+
+    Example:
+        airbyte-ops local connector enterprise-stub sync --connector source-oracle-enterprise --repo-root /path/to/airbyte-enterprise
+        airbyte-ops local connector enterprise-stub sync --all --repo-root /path/to/airbyte-enterprise
+        airbyte-ops local connector enterprise-stub sync --connector source-oracle-enterprise --dry-run
+    """
+    if not connector and not all_connectors:
+        exit_with_error("Either --connector or --all must be specified.")
+
+    if connector and all_connectors:
+        exit_with_error("Cannot specify both --connector and --all.")
+
+    if repo_root is None:
+        repo_root = Path.cwd()
+
+    # Load existing stubs
+    try:
+        stubs = load_local_stubs(repo_root)
+    except FileNotFoundError:
+        stubs = []
+    except ValueError as e:
+        exit_with_error(str(e))
+
+    # Determine which connectors to sync
+    connectors_to_sync: list[str] = []
+    if connector:
+        connectors_to_sync = [connector]
+    else:
+        # Find all connectors with metadata.yaml in the enterprise connectors directory
+        connectors_dir = repo_root / ENTERPRISE_CONNECTOR_PATH_PREFIX
+        if connectors_dir.exists():
+            for item in connectors_dir.iterdir():
+                if item.is_dir() and (item / METADATA_FILE_NAME).exists():
+                    connectors_to_sync.append(item.name)
+        connectors_to_sync.sort()
+
+    if not connectors_to_sync:
+        exit_with_error("No connectors found to sync.")
+
+    synced_stubs: list[dict] = []
+    updated_count = 0
+    added_count = 0
+
+    for conn_name in connectors_to_sync:
+        connector_dir = repo_root / ENTERPRISE_CONNECTOR_PATH_PREFIX / conn_name
+        metadata_file = connector_dir / METADATA_FILE_NAME
+
+        if not connector_dir.exists():
+            if connector:
+                exit_with_error(f"Connector directory not found: {connector_dir}")
+            continue
+
+        if not metadata_file.exists():
+            if connector:
+                exit_with_error(f"Metadata file not found: {metadata_file}")
+            continue
+
+        # Load metadata
+        metadata = yaml.safe_load(metadata_file.read_text())
+
+        # Find existing stub if any
+        existing_stub = find_stub_by_connector(stubs, conn_name)
+
+        # Build new stub from metadata
+        new_stub = _build_stub_from_metadata(conn_name, metadata, existing_stub)
+
+        # Validate the new stub
+        ConnectorStub(**new_stub)
+
+        if dry_run:
+            action = "update" if existing_stub else "create"
+            error_console.print(f"[DRY RUN] Would {action} stub for '{conn_name}'")
+            synced_stubs.append(new_stub)
+            continue
+
+        # Update or add the stub
+        if existing_stub:
+            # Find and replace
+            for i, stub in enumerate(stubs):
+                if stub.get("id") == existing_stub.get("id"):
+                    stubs[i] = new_stub
+                    break
+            updated_count += 1
+        else:
+            stubs.append(new_stub)
+            added_count += 1
+
+        synced_stubs.append(new_stub)
+
+    if not dry_run:
+        # Save the updated stubs
+        save_local_stubs(repo_root, stubs)
+        error_console.print(
+            f"[green]Synced {len(synced_stubs)} stub(s) to {CONNECTOR_STUBS_FILE} "
+            f"({added_count} added, {updated_count} updated)[/green]"
+        )
+    else:
+        error_console.print(
+            f"[DRY RUN] Would sync {len(synced_stubs)} stub(s) to {CONNECTOR_STUBS_FILE}"
+        )
+
+    print_json(
+        synced_stubs if all_connectors else synced_stubs[0] if synced_stubs else {}
+    )
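For reference, a minimal invocation sketch for the commands added above. The enterprise-stub lines mirror the Example blocks in the diff; the changelog flag names are inferred from the snake_case parameters and the in-code error messages (kebab-case, cyclopts-style), so treat them as illustrative rather than verified:

    airbyte-ops local connector changelog check --connector-name source-github --lookback-days 30 --strict
    airbyte-ops local connector changelog fix --all --dry-run
    airbyte-ops local connector enterprise-stub check --all --repo-root /path/to/airbyte-enterprise
    airbyte-ops local connector enterprise-stub sync --connector source-oracle-enterprise --dry-run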