airbyte-agent-amazon-ads 0.1.15__py3-none-any.whl → 0.1.24__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airbyte_agent_amazon_ads/__init__.py +2 -0
- airbyte_agent_amazon_ads/_vendored/connector_sdk/cloud_utils/client.py +62 -0
- airbyte_agent_amazon_ads/_vendored/connector_sdk/connector_model_loader.py +5 -0
- airbyte_agent_amazon_ads/_vendored/connector_sdk/executor/hosted_executor.py +59 -25
- airbyte_agent_amazon_ads/_vendored/connector_sdk/executor/local_executor.py +87 -12
- airbyte_agent_amazon_ads/_vendored/connector_sdk/executor/models.py +12 -0
- airbyte_agent_amazon_ads/_vendored/connector_sdk/http/adapters/httpx_adapter.py +10 -1
- airbyte_agent_amazon_ads/_vendored/connector_sdk/introspection.py +12 -5
- airbyte_agent_amazon_ads/_vendored/connector_sdk/schema/base.py +11 -0
- airbyte_agent_amazon_ads/_vendored/connector_sdk/schema/operations.py +10 -0
- airbyte_agent_amazon_ads/_vendored/connector_sdk/schema/security.py +5 -0
- airbyte_agent_amazon_ads/_vendored/connector_sdk/telemetry/tracker.py +4 -4
- airbyte_agent_amazon_ads/_vendored/connector_sdk/types.py +20 -1
- airbyte_agent_amazon_ads/_vendored/connector_sdk/utils.py +67 -0
- airbyte_agent_amazon_ads/_vendored/connector_sdk/validation.py +171 -2
- airbyte_agent_amazon_ads/_vendored/connector_sdk/validation_replication.py +970 -0
- airbyte_agent_amazon_ads/connector.py +154 -11
- airbyte_agent_amazon_ads/connector_model.py +2 -1
- airbyte_agent_amazon_ads/models.py +19 -0
- {airbyte_agent_amazon_ads-0.1.15.dist-info → airbyte_agent_amazon_ads-0.1.24.dist-info}/METADATA +13 -9
- {airbyte_agent_amazon_ads-0.1.15.dist-info → airbyte_agent_amazon_ads-0.1.24.dist-info}/RECORD +22 -21
- {airbyte_agent_amazon_ads-0.1.15.dist-info → airbyte_agent_amazon_ads-0.1.24.dist-info}/WHEEL +0 -0
airbyte_agent_amazon_ads/_vendored/connector_sdk/utils.py

@@ -1,7 +1,13 @@
 """Utility functions for working with connectors."""
 
+from __future__ import annotations
+
 from collections.abc import AsyncIterator
 from pathlib import Path
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from .types import AuthOption
 
 
 async def save_download(
@@ -58,3 +64,64 @@ async def save_download(
         raise OSError(f"Failed to write file {file_path}: {e}") from e
 
     return file_path
+
+
+def find_matching_auth_options(
+    provided_keys: set[str],
+    auth_options: list[AuthOption],
+) -> list[AuthOption]:
+    """Find auth options that match the provided credential keys.
+
+    This is the single source of truth for auth scheme inference logic,
+    used by both the executor (at runtime) and validation (for cassettes).
+
+    Matching logic:
+    - An option matches if all its required fields are present in provided_keys
+    - Options with no required fields match any credentials
+
+    Args:
+        provided_keys: Set of credential/auth_config keys
+        auth_options: List of AuthOption from the connector model
+
+    Returns:
+        List of AuthOption that match the provided keys
+    """
+    matching_options: list[AuthOption] = []
+
+    for option in auth_options:
+        if option.user_config_spec and option.user_config_spec.required:
+            required_fields = set(option.user_config_spec.required)
+            if required_fields.issubset(provided_keys):
+                matching_options.append(option)
+        elif not option.user_config_spec or not option.user_config_spec.required:
+            # Option has no required fields - it matches any credentials
+            matching_options.append(option)
+
+    return matching_options
+
+
+def infer_auth_scheme_name(
+    provided_keys: set[str],
+    auth_options: list[AuthOption],
+) -> str | None:
+    """Infer the auth scheme name from provided credential keys.
+
+    Uses find_matching_auth_options to find matches, then returns
+    the scheme name only if exactly one option matches.
+
+    Args:
+        provided_keys: Set of credential/auth_config keys
+        auth_options: List of AuthOption from the connector model
+
+    Returns:
+        The scheme_name if exactly one match, None otherwise
+    """
+    if not provided_keys or not auth_options:
+        return None
+
+    matching = find_matching_auth_options(provided_keys, auth_options)
+
+    if len(matching) == 1:
+        return matching[0].scheme_name
+
+    return None
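Together these helpers make auth-scheme inference deterministic: a scheme name is returned only when the provided credential keys satisfy the required fields of exactly one option. A minimal sketch of that behaviour is below; the `_Spec`/`_Option` dataclasses are simplified stand-ins for the SDK's `user_config_spec`/`AuthOption` types (modelling only the attributes the matching logic reads), and the scheme names are illustrative.

```python
from __future__ import annotations

from dataclasses import dataclass, field

# Real code would use AuthOption from the vendored connector_sdk.types module;
# these stand-ins only model the attributes infer_auth_scheme_name reads.
from airbyte_agent_amazon_ads._vendored.connector_sdk.utils import infer_auth_scheme_name


@dataclass
class _Spec:
    required: list[str] = field(default_factory=list)


@dataclass
class _Option:
    scheme_name: str
    user_config_spec: _Spec | None = None


options = [
    _Option("oauth2", _Spec(["client_id", "client_secret", "refresh_token"])),
    _Option("api_key", _Spec(["api_key"])),
]

# All of oauth2's required fields are present and only oauth2 matches -> inferred.
assert infer_auth_scheme_name({"client_id", "client_secret", "refresh_token"}, options) == "oauth2"

# Keys that satisfy no option (or more than one) give no unique match -> None.
assert infer_auth_scheme_name({"username"}, options) is None
```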
airbyte_agent_amazon_ads/_vendored/connector_sdk/validation.py

@@ -5,6 +5,7 @@ These tools help ensure that connectors are ready to ship by:
 - Checking that all entity/action operations have corresponding test cassettes
 - Validating that response schemas match the actual cassette responses
 - Detecting fields present in responses but not declared in schemas
+- Validating replication compatibility with Airbyte source connectors
 """
 
 from collections import defaultdict
@@ -20,7 +21,9 @@ from .connector_model_loader import (
     load_connector_model,
 )
 from .testing.spec_loader import load_test_spec
-from .types import Action, EndpointDefinition
+from .types import Action, ConnectorModel, EndpointDefinition
+from .utils import infer_auth_scheme_name
+from .validation_replication import validate_replication_compatibility
 
 
 def build_cassette_map(cassettes_dir: Path) -> Dict[Tuple[str, str], List[Path]]:
@@ -51,6 +54,112 @@ def build_cassette_map(cassettes_dir: Path) -> Dict[Tuple[str, str], List[Path]]
     return dict(cassette_map)
 
 
+def build_auth_scheme_coverage(
+    cassettes_dir: Path,
+    auth_options: list | None = None,
+) -> Tuple[Dict[str | None, List[Path]], List[Tuple[Path, set[str]]]]:
+    """Build a map of auth_scheme -> list of cassette paths.
+
+    For multi-auth connectors, infers the auth scheme from the cassette's auth_config
+    keys using the same matching logic as the executor.
+
+    Args:
+        cassettes_dir: Directory containing cassette YAML files
+        auth_options: List of AuthOption from the connector model (for inference)
+
+    Returns:
+        Tuple of:
+        - Dictionary mapping auth_scheme names (or None for single-auth) to cassette paths
+        - List of (cassette_path, auth_config_keys) for cassettes that couldn't be matched
+    """
+    auth_scheme_map: Dict[str | None, List[Path]] = defaultdict(list)
+    unmatched_cassettes: List[Tuple[Path, set[str]]] = []
+
+    if not cassettes_dir.exists() or not cassettes_dir.is_dir():
+        return {}, []
+
+    for cassette_file in cassettes_dir.glob("*.yaml"):
+        try:
+            spec = load_test_spec(cassette_file, auth_config={})
+
+            # First, check if auth_scheme is explicitly set in the cassette
+            if spec.auth_scheme:
+                auth_scheme_map[spec.auth_scheme].append(cassette_file)
+            # Otherwise, try to infer from auth_config keys
+            elif spec.auth_config and auth_options:
+                auth_config_keys = set(spec.auth_config.keys())
+                inferred_scheme = infer_auth_scheme_name(auth_config_keys, auth_options)
+                if inferred_scheme is not None:
+                    auth_scheme_map[inferred_scheme].append(cassette_file)
+                else:
+                    # Couldn't infer - track as unmatched
+                    unmatched_cassettes.append((cassette_file, auth_config_keys))
+            else:
+                # No auth_scheme and no auth_config - treat as None
+                auth_scheme_map[None].append(cassette_file)
+        except Exception:
+            continue
+
+    return dict(auth_scheme_map), unmatched_cassettes
+
+
+def validate_auth_scheme_coverage(
+    config: ConnectorModel,
+    cassettes_dir: Path,
+) -> Tuple[bool, List[str], List[str], List[str], List[Tuple[Path, set[str]]]]:
+    """Validate that each auth scheme has at least one cassette.
+
+    For multi-auth connectors, every defined auth scheme must have coverage
+    unless marked with x-airbyte-untested: true.
+    For single-auth connectors, this check is skipped (existing cassette checks suffice).
+
+    Args:
+        config: Loaded connector model
+        cassettes_dir: Directory containing cassette files
+
+    Returns:
+        Tuple of (is_valid, errors, warnings, covered_schemes, unmatched_cassettes)
+    """
+    errors: List[str] = []
+    warnings: List[str] = []
+
+    # Skip check for single-auth connectors
+    if not config.auth.is_multi_auth():
+        return True, errors, warnings, [], []
+
+    # Get all defined auth schemes, separating tested from untested
+    options = config.auth.options or []
+
+    # Build auth scheme coverage from cassettes (pass options for inference)
+    auth_scheme_coverage, unmatched_cassettes = build_auth_scheme_coverage(cassettes_dir, options)
+    tested_schemes = {opt.scheme_name for opt in options if not opt.untested}
+    untested_schemes = {opt.scheme_name for opt in options if opt.untested}
+    covered_schemes = {scheme for scheme in auth_scheme_coverage.keys() if scheme is not None}
+
+    # Find missing tested schemes (errors)
+    missing_tested = tested_schemes - covered_schemes
+    for scheme in sorted(missing_tested):
+        errors.append(
+            f"Auth scheme '{scheme}' has no cassette coverage. "
+            f"Record at least one cassette using this authentication method, "
+            f"or add 'x-airbyte-untested: true' to skip this check."
+        )
+
+    # Warn about untested schemes without coverage
+    missing_untested = untested_schemes - covered_schemes
+    for scheme in sorted(missing_untested):
+        warnings.append(
+            f"Auth scheme '{scheme}' is marked as untested (x-airbyte-untested: true) " f"and has no cassette coverage. Validation skipped."
+        )
+
+    # Warn about cassettes that couldn't be matched to any auth scheme
+    for cassette_path, auth_config_keys in unmatched_cassettes:
+        warnings.append(f"Cassette '{cassette_path.name}' could not be matched to any auth scheme. " f"auth_config keys: {sorted(auth_config_keys)}")
+
+    is_valid = len(missing_tested) == 0
+    return is_valid, errors, warnings, sorted(covered_schemes), unmatched_cassettes
+
+
 def validate_response_against_schema(response_body: Any, schema: Dict[str, Any]) -> Tuple[bool, List[str]]:
     """Validate a response body against a JSON schema.
 
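For multi-auth connectors the coverage rule is strict: every scheme not flagged `x-airbyte-untested: true` must appear in at least one cassette, either via an explicit `auth_scheme` field or inferred from the cassette's `auth_config` keys. A rough sketch of consuming the five-value result follows; `config` is assumed to be an already-loaded `ConnectorModel`, and the cassettes path is illustrative.

```python
# Sketch only: `config` is assumed to be an already-loaded ConnectorModel and
# "tests/cassettes" an illustrative directory; only the tuple shape from the
# signature above is relied upon.
from pathlib import Path

from airbyte_agent_amazon_ads._vendored.connector_sdk.validation import (
    validate_auth_scheme_coverage,
)

is_valid, errors, warnings, covered, unmatched = validate_auth_scheme_coverage(
    config, Path("tests/cassettes")
)

for message in errors:    # tested schemes with no cassette coverage (blocking)
    print("ERROR:", message)
for message in warnings:  # untested schemes and cassettes that matched nothing
    print("WARNING:", message)

print("covered schemes:", covered)
for cassette_path, keys in unmatched:
    print(f"{cassette_path.name}: no scheme inferred from keys {sorted(keys)}")
```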
@@ -586,6 +695,9 @@ def validate_connector_readiness(connector_dir: str | Path) -> Dict[str, Any]:
     cassettes_dir = connector_path / "tests" / "cassettes"
     cassette_map = build_cassette_map(cassettes_dir)
 
+    # Validate auth scheme coverage for multi-auth connectors
+    auth_valid, auth_errors, auth_warnings, auth_covered_schemes, auth_unmatched_cassettes = validate_auth_scheme_coverage(config, cassettes_dir)
+
     validation_results = []
     total_operations = 0
     operations_with_cassettes = 0
@@ -808,13 +920,70 @@ def validate_connector_readiness(connector_dir: str | Path) -> Dict[str, Any]:
         }
     )
 
-
+    # Validate replication compatibility with Airbyte
+    replication_result = validate_replication_compatibility(
+        connector_yaml_path=config_file,
+        raw_spec=raw_spec,
+    )
+
+    # Merge replication errors/warnings into totals
+    # Note: If connector is not in registry, we don't count warnings since this is expected for test connectors
+    replication_errors = replication_result.get("errors", [])
+    replication_warnings = replication_result.get("warnings", [])
+    total_errors += len(replication_errors)
+
+    # Only count replication warnings if the connector was found in the registry
+    # (i.e., there are actual validation issues, not just "not found in registry")
+    if replication_result.get("registry_found", False):
+        total_warnings += len(replication_warnings)
+
+    # Merge auth scheme validation errors/warnings into totals
+    total_errors += len(auth_errors)
+    total_warnings += len(auth_warnings)
+
+    # Update success criteria to include replication and auth scheme validation
+    success = operations_missing_cassettes == 0 and cassettes_invalid == 0 and total_operations > 0 and len(replication_errors) == 0 and auth_valid
+
+    # Check for preferred_for_check on at least one list operation
+    has_preferred_check = False
+    for entity in config.entities:
+        for action_val in entity.actions:
+            endpoint = entity.endpoints.get(action_val)
+            if endpoint and getattr(endpoint, "preferred_for_check", False):
+                has_preferred_check = True
+                break
+        if has_preferred_check:
+            break
+
+    readiness_warnings = []
+    if not has_preferred_check:
+        readiness_warnings.append(
+            "No operation has x-airbyte-preferred-for-check: true. "
+            "Add this extension to a lightweight list operation (e.g., users.list) "
+            "to enable reliable health checks."
+        )
+
+    # Build auth scheme validation result
+    options = config.auth.options or []
+    tested_schemes = [opt.scheme_name for opt in options if not opt.untested]
+    untested_schemes_list = [opt.scheme_name for opt in options if opt.untested]
+    missing_tested = [s for s in tested_schemes if s not in auth_covered_schemes]
 
     return {
         "success": success,
         "connector_name": config.name,
         "connector_path": str(connector_path),
         "validation_results": validation_results,
+        "replication_validation": replication_result,
+        "auth_scheme_validation": {
+            "valid": auth_valid,
+            "errors": auth_errors,
+            "warnings": auth_warnings,
+            "covered_schemes": auth_covered_schemes,
+            "missing_schemes": missing_tested,
+            "untested_schemes": untested_schemes_list,
+        },
+        "readiness_warnings": readiness_warnings,
         "summary": {
             "total_operations": total_operations,
             "operations_with_cassettes": operations_with_cassettes,