airbyte-agent-facebook-marketing 0.1.2__py3-none-any.whl → 0.1.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airbyte_agent_facebook_marketing/_vendored/connector_sdk/cloud_utils/client.py +125 -0
- airbyte_agent_facebook_marketing/_vendored/connector_sdk/connector_model_loader.py +1 -0
- airbyte_agent_facebook_marketing/_vendored/connector_sdk/executor/hosted_executor.py +54 -25
- airbyte_agent_facebook_marketing/_vendored/connector_sdk/executor/local_executor.py +5 -12
- airbyte_agent_facebook_marketing/_vendored/connector_sdk/schema/base.py +11 -0
- airbyte_agent_facebook_marketing/_vendored/connector_sdk/schema/security.py +5 -0
- airbyte_agent_facebook_marketing/_vendored/connector_sdk/types.py +4 -0
- airbyte_agent_facebook_marketing/_vendored/connector_sdk/utils.py +67 -0
- airbyte_agent_facebook_marketing/_vendored/connector_sdk/validation.py +151 -2
- airbyte_agent_facebook_marketing/_vendored/connector_sdk/validation_replication.py +970 -0
- airbyte_agent_facebook_marketing/connector.py +201 -11
- airbyte_agent_facebook_marketing/connector_model.py +21 -7
- airbyte_agent_facebook_marketing/models.py +9 -4
- {airbyte_agent_facebook_marketing-0.1.2.dist-info → airbyte_agent_facebook_marketing-0.1.6.dist-info}/METADATA +8 -6
- {airbyte_agent_facebook_marketing-0.1.2.dist-info → airbyte_agent_facebook_marketing-0.1.6.dist-info}/RECORD +16 -15
- {airbyte_agent_facebook_marketing-0.1.2.dist-info → airbyte_agent_facebook_marketing-0.1.6.dist-info}/WHEEL +0 -0
@@ -161,6 +161,131 @@ class AirbyteCloudClient:
         connector_id = connectors[0]["id"]
         return connector_id
 
+    async def initiate_oauth(
+        self,
+        definition_id: str,
+        external_user_id: str,
+        redirect_url: str,
+    ) -> str:
+        """Initiate a server-side OAuth flow.
+
+        Starts the OAuth flow for a connector. Returns a consent URL where the
+        end user should be redirected to grant access. After completing consent,
+        they'll be redirected to your redirect_url with a `server_side_oauth_secret_id`
+        query parameter that can be used with `create_source()`.
+
+        Args:
+            definition_id: Connector definition UUID
+            external_user_id: Workspace identifier
+            redirect_url: URL where users will be redirected after OAuth consent
+
+        Returns:
+            The OAuth consent URL
+
+        Raises:
+            httpx.HTTPStatusError: If the request fails
+
+        Example:
+            consent_url = await client.initiate_oauth(
+                definition_id="d8313939-3782-41b0-be29-b3ca20d8dd3a",
+                external_user_id="my-workspace",
+                redirect_url="https://myapp.com/oauth/callback",
+            )
+            # Redirect user to: consent_url
+            # After consent: https://myapp.com/oauth/callback?server_side_oauth_secret_id=...
+        """
+        token = await self.get_bearer_token()
+        url = f"{self.API_BASE_URL}/api/v1/integrations/connectors/oauth/initiate"
+        headers = {"Authorization": f"Bearer {token}"}
+        request_body = {
+            "external_user_id": external_user_id,
+            "definition_id": definition_id,
+            "redirect_url": redirect_url,
+        }
+
+        response = await self._http_client.post(url, json=request_body, headers=headers)
+        response.raise_for_status()
+        return response.json()["consent_url"]
+
+    async def create_source(
+        self,
+        name: str,
+        connector_definition_id: str,
+        external_user_id: str,
+        credentials: dict[str, Any] | None = None,
+        replication_config: dict[str, Any] | None = None,
+        server_side_oauth_secret_id: str | None = None,
+        source_template_id: str | None = None,
+    ) -> str:
+        """Create a new source on Airbyte Cloud.
+
+        Supports two authentication modes:
+        1. Direct credentials: Provide `credentials` dict
+        2. Server-side OAuth: Provide `server_side_oauth_secret_id` from OAuth flow
+
+        Args:
+            name: Source name
+            connector_definition_id: UUID of the connector definition
+            external_user_id: User identifier
+            credentials: Connector auth config dict. Required unless using OAuth.
+            replication_config: Optional replication settings (e.g., start_date for
+                connectors with x-airbyte-replication-config). Required for REPLICATION
+                mode sources like Intercom.
+            server_side_oauth_secret_id: OAuth secret ID from initiate_oauth redirect.
+                When provided, credentials are not required.
+            source_template_id: Source template ID. Required when organization has
+                multiple source templates for this connector type.
+
+        Returns:
+            The created source ID (UUID string)
+
+        Raises:
+            httpx.HTTPStatusError: If creation fails
+
+        Example:
+            # With direct credentials:
+            source_id = await client.create_source(
+                name="My Intercom Source",
+                connector_definition_id="d8313939-3782-41b0-be29-b3ca20d8dd3a",
+                external_user_id="my-workspace",
+                credentials={"access_token": "..."},
+                replication_config={"start_date": "2024-01-01T00:00:00Z"}
+            )
+
+            # With server-side OAuth:
+            source_id = await client.create_source(
+                name="My Intercom Source",
+                connector_definition_id="d8313939-3782-41b0-be29-b3ca20d8dd3a",
+                external_user_id="my-workspace",
+                server_side_oauth_secret_id="airbyte_oauth_..._secret_...",
+                replication_config={"start_date": "2024-01-01T00:00:00Z"}
+            )
+        """
+        token = await self.get_bearer_token()
+        url = f"{self.API_BASE_URL}/v1/integrations/connectors"
+        headers = {"Authorization": f"Bearer {token}"}
+
+        request_body: dict[str, Any] = {
+            "name": name,
+            "definition_id": connector_definition_id,
+            "external_user_id": external_user_id,
+        }
+
+        if credentials is not None:
+            request_body["credentials"] = credentials
+        if replication_config is not None:
+            request_body["replication_config"] = replication_config
+        if server_side_oauth_secret_id is not None:
+            request_body["server_side_oauth_secret_id"] = server_side_oauth_secret_id
+        if source_template_id is not None:
+            request_body["source_template_id"] = source_template_id
+
+        response = await self._http_client.post(url, json=request_body, headers=headers)
+        response.raise_for_status()
+
+        data = response.json()
+        return data["id"]
+
     async def execute_connector(
         self,
         connector_id: str,
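Taken together, the two new client methods support a server-side OAuth onboarding flow: `initiate_oauth()` returns a consent URL, and the `server_side_oauth_secret_id` that arrives on the redirect is passed to `create_source()` in place of raw credentials. The sketch below shows one way a caller might wire this up; it assumes an already-constructed `AirbyteCloudClient` (`client`), and the handler names, callback URL, and source name are illustrative rather than part of the SDK.

```python
# Sketch only: `client` is assumed to be an AirbyteCloudClient from this SDK, and the
# web framework is assumed to hand the OAuth redirect's query parameters to the callback.
async def start_onboarding(client, definition_id: str, workspace: str) -> str:
    # Step 1: obtain the consent URL and send the end user there.
    consent_url = await client.initiate_oauth(
        definition_id=definition_id,
        external_user_id=workspace,
        redirect_url="https://myapp.example.com/oauth/callback",  # hypothetical URL
    )
    return consent_url  # redirect the user's browser to this URL


async def handle_oauth_callback(client, definition_id: str, workspace: str, query_params: dict) -> str:
    # Step 2: the redirect carries server_side_oauth_secret_id; use it instead of raw credentials.
    secret_id = query_params["server_side_oauth_secret_id"]
    source_id = await client.create_source(
        name="Facebook Marketing (OAuth)",  # illustrative name
        connector_definition_id=definition_id,
        external_user_id=workspace,
        server_side_oauth_secret_id=secret_id,
    )
    return source_id
```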
@@ -1014,6 +1014,7 @@ def _parse_security_scheme_to_option(scheme_name: str, scheme: Any) -> AuthOption:
         type=single_auth.type,
         config=single_auth.config,
         user_config_spec=single_auth.user_config_spec,
+        untested=getattr(scheme, "x_airbyte_untested", False),
     )
 
 
@@ -19,19 +19,26 @@ class HostedExecutor:
     instead of directly calling external services. The cloud API handles all
     connector logic, secrets management, and execution.
 
-    The executor
+    The executor uses the AirbyteCloudClient to:
     1. Authenticate with the Airbyte Platform (bearer token with caching)
-    2. Look up the user's connector
+    2. Look up the user's connector (if connector_id not provided)
     3. Execute the connector operation via the cloud API
 
     Implements ExecutorProtocol.
 
     Example:
-        # Create executor with
+        # Create executor with explicit connector_id (no lookup needed)
+        executor = HostedExecutor(
+            airbyte_client_id="client_abc123",
+            airbyte_client_secret="secret_xyz789",
+            connector_id="existing-source-uuid",
+        )
+
+        # Or create executor with user ID for lookup
         executor = HostedExecutor(
-            external_user_id="user-123",
             airbyte_client_id="client_abc123",
             airbyte_client_secret="secret_xyz789",
+            external_user_id="user-123",
            connector_definition_id="abc123-def456-ghi789",
        )
 
@@ -51,28 +58,48 @@ class HostedExecutor:
 
     def __init__(
         self,
-        external_user_id: str,
         airbyte_client_id: str,
         airbyte_client_secret: str,
-
+        connector_id: str | None = None,
+        external_user_id: str | None = None,
+        connector_definition_id: str | None = None,
     ):
         """Initialize hosted executor.
 
+        Either provide connector_id directly OR (external_user_id + connector_definition_id)
+        for lookup.
+
         Args:
-            external_user_id: User identifier in the Airbyte system
             airbyte_client_id: Airbyte client ID for authentication
             airbyte_client_secret: Airbyte client secret for authentication
-
-
+            connector_id: Direct connector/source ID (skips lookup if provided)
+            external_user_id: User identifier in the Airbyte system (for lookup)
+            connector_definition_id: Connector definition ID (for lookup)
+
+        Raises:
+            ValueError: If neither connector_id nor (external_user_id + connector_definition_id) provided
 
         Example:
+            # With explicit connector_id (no lookup)
+            executor = HostedExecutor(
+                airbyte_client_id="client_abc123",
+                airbyte_client_secret="secret_xyz789",
+                connector_id="existing-source-uuid",
+            )
+
+            # With lookup by user + definition
             executor = HostedExecutor(
-                external_user_id="user-123",
                 airbyte_client_id="client_abc123",
                 airbyte_client_secret="secret_xyz789",
+                external_user_id="user-123",
                 connector_definition_id="abc123-def456-ghi789",
             )
         """
+        # Validate: either connector_id OR (external_user_id + connector_definition_id) required
+        if not connector_id and not (external_user_id and connector_definition_id):
+            raise ValueError("Either connector_id OR (external_user_id + connector_definition_id) must be provided")
+
+        self._connector_id = connector_id
         self._external_user_id = external_user_id
         self._connector_definition_id = connector_definition_id
 
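The reworked constructor enforces an either/or contract: pass `connector_id` directly, or pass both `external_user_id` and `connector_definition_id` for lookup. A minimal sketch of the failure mode, assuming the vendored module path implied by the file list above; the credential values are placeholders.

```python
# Sketch: demonstrates the new argument validation. The import path below is assumed
# from the vendored layout shown in this diff; adjust it to your installation.
from airbyte_agent_facebook_marketing._vendored.connector_sdk.executor.hosted_executor import (
    HostedExecutor,
)

try:
    # Neither connector_id nor (external_user_id + connector_definition_id) is given here.
    HostedExecutor(
        airbyte_client_id="client_abc123",
        airbyte_client_secret="secret_xyz789",
    )
except ValueError as err:
    print(err)  # "Either connector_id OR (external_user_id + connector_definition_id) must be provided"
```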
@@ -86,10 +113,9 @@ class HostedExecutor:
         """Execute connector via cloud API (ExecutorProtocol implementation).
 
         Flow:
-        1.
-        2.
-        3.
-        4. Parse the response into ExecutionResult
+        1. Use provided connector_id or look up from external_user_id + definition_id
+        2. Execute the connector operation via the cloud API
+        3. Parse the response into ExecutionResult
 
         Args:
             config: Execution configuration (entity, action, params)
@@ -98,7 +124,7 @@ class HostedExecutor:
             ExecutionResult with success/failure status
 
         Raises:
-            ValueError: If no connector or multiple connectors found for user
+            ValueError: If no connector or multiple connectors found for user (when doing lookup)
             httpx.HTTPStatusError: If API returns 4xx/5xx status code
             httpx.RequestError: If network request fails
 
@@ -114,23 +140,26 @@ class HostedExecutor:
 
         with tracer.start_as_current_span("airbyte.hosted_executor.execute") as span:
             # Add span attributes for observability
-
+            if self._connector_definition_id:
+                span.set_attribute("connector.definition_id", self._connector_definition_id)
             span.set_attribute("connector.entity", config.entity)
             span.set_attribute("connector.action", config.action)
-
+            if self._external_user_id:
+                span.set_attribute("user.external_id", self._external_user_id)
             if config.params:
                 # Only add non-sensitive param keys
                 span.set_attribute("connector.param_keys", list(config.params.keys()))
 
             try:
-                #
-
-
-
-
-
-
-
+                # Use provided connector_id or look it up
+                if self._connector_id:
+                    connector_id = self._connector_id
+                else:
+                    # Look up connector by external_user_id + definition_id
+                    connector_id = await self._cloud_client.get_connector_id(
+                        external_user_id=self._external_user_id,  # type: ignore[arg-type]
+                        connector_definition_id=self._connector_definition_id,  # type: ignore[arg-type]
+                    )
 
                 span.set_attribute("connector.connector_id", connector_id)
 
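The `execute()` change reduces to a small precedence rule: use the explicit `connector_id` when the executor was constructed with one, otherwise fall back to a lookup by `external_user_id` and `connector_definition_id`. A standalone sketch of that control flow, with a stub standing in for `AirbyteCloudClient.get_connector_id`:

```python
import asyncio


async def lookup_connector_id(external_user_id: str, connector_definition_id: str) -> str:
    # Stub standing in for AirbyteCloudClient.get_connector_id in this sketch.
    return "looked-up-source-uuid"


async def resolve_connector_id(
    connector_id: str | None,
    external_user_id: str | None,
    connector_definition_id: str | None,
) -> str:
    # Same precedence as the executor: an explicit ID wins, lookup is the fallback.
    if connector_id:
        return connector_id
    return await lookup_connector_id(external_user_id, connector_definition_id)


print(asyncio.run(resolve_connector_id("existing-source-uuid", None, None)))
print(asyncio.run(resolve_connector_id(None, "user-123", "abc123-def456-ghi789")))
```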
@@ -36,6 +36,7 @@ from ..types import (
     EndpointDefinition,
     EntityDefinition,
 )
+from ..utils import find_matching_auth_options
 
 from .models import (
     ActionNotSupportedError,
@@ -356,8 +357,8 @@ class LocalExecutor:
     ) -> tuple[AuthOption, dict[str, SecretStr]]:
         """Infer authentication scheme from provided credentials.
 
-
-
+        Uses shared utility find_matching_auth_options to match credentials
+        against each auth option's required fields.
 
         Args:
             user_credentials: User-provided credentials
@@ -375,16 +376,8 @@ class LocalExecutor:
         # Get the credential keys provided by the user
         provided_keys = set(user_credentials.keys())
 
-        #
-        matching_options
-        for option in options:
-            if option.user_config_spec and option.user_config_spec.required:
-                required_fields = set(option.user_config_spec.required)
-                if required_fields.issubset(provided_keys):
-                    matching_options.append(option)
-            elif not option.user_config_spec or not option.user_config_spec.required:
-                # Option has no required fields - it matches any credentials
-                matching_options.append(option)
+        # Use shared utility to find matching options
+        matching_options = find_matching_auth_options(provided_keys, options)
 
         # Handle matching results
         if len(matching_options) == 0:
@@ -126,6 +126,17 @@ class Info(BaseModel):
     x_airbyte_example_questions: ExampleQuestions | None = Field(None, alias="x-airbyte-example-questions")
     x_airbyte_cache: CacheConfig | None = Field(None, alias="x-airbyte-cache")
     x_airbyte_replication_config: ReplicationConfig | None = Field(None, alias="x-airbyte-replication-config")
+    x_airbyte_skip_suggested_streams: list[str] = Field(
+        default_factory=list,
+        alias="x-airbyte-skip-suggested-streams",
+        description="List of Airbyte suggested streams to skip when validating cache entity coverage",
+    )
+    x_airbyte_skip_auth_methods: list[str] = Field(
+        default_factory=list,
+        alias="x-airbyte-skip-auth-methods",
+        description="List of Airbyte auth methods to skip when validating auth compatibility. "
+        "Use the SelectiveAuthenticator option key (e.g., 'Private App Credentials', 'oauth2.0')",
+    )
 
 
 class ServerVariable(BaseModel):
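The two new `Info` fields follow the same hyphenated-alias pattern as the existing `x-airbyte-*` extensions, with `default_factory=list` so an absent key becomes an empty list rather than `None`. A minimal standalone Pydantic model mirroring just that pattern (it is not the SDK's actual `Info` class):

```python
# Minimal sketch of the alias/default_factory pattern used by the new Info fields.
from pydantic import BaseModel, Field


class InfoSketch(BaseModel):
    x_airbyte_skip_suggested_streams: list[str] = Field(
        default_factory=list, alias="x-airbyte-skip-suggested-streams"
    )
    x_airbyte_skip_auth_methods: list[str] = Field(
        default_factory=list, alias="x-airbyte-skip-auth-methods"
    )


# Values are populated from the hyphenated YAML/JSON keys...
info = InfoSketch.model_validate({"x-airbyte-skip-auth-methods": ["oauth2.0"]})
print(info.x_airbyte_skip_auth_methods)       # ['oauth2.0']
# ...and omitted keys default to an empty list rather than None.
print(info.x_airbyte_skip_suggested_streams)  # []
```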
@@ -199,6 +199,11 @@ class SecurityScheme(BaseModel):
         alias="x-airbyte-token-extract",
         description="List of fields to extract from OAuth2 token responses and use as server variables",
     )
+    x_airbyte_untested: bool = Field(
+        False,
+        alias="x-airbyte-untested",
+        description="Mark this auth scheme as untested to skip cassette coverage validation",
+    )
 
     @field_validator("x_airbyte_token_extract", mode="after")
     @classmethod
@@ -90,6 +90,10 @@ class AuthOption(BaseModel):
         None,
         description="User-facing credential specification from x-airbyte-auth-config",
     )
+    untested: bool = Field(
+        False,
+        description="Mark this auth scheme as untested to skip cassette coverage validation",
+    )
 
 
 class AuthConfig(BaseModel):
@@ -1,7 +1,13 @@
 """Utility functions for working with connectors."""
 
+from __future__ import annotations
+
 from collections.abc import AsyncIterator
 from pathlib import Path
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from .types import AuthOption
 
 
 async def save_download(
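The `TYPE_CHECKING` guard plus `from __future__ import annotations` keeps `AuthOption` visible to type checkers without importing `.types` at runtime, presumably to avoid a circular import between `utils` and `types`. A generic sketch of the idiom, using `Decimal` as a stand-in for the guarded import:

```python
# Generic sketch of the import pattern adopted in utils.py. With postponed evaluation
# of annotations, the guarded import is only needed by type checkers.
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Stands in for `from .types import AuthOption`; never executed at runtime.
    from decimal import Decimal


def first_price(prices: list[Decimal]) -> Decimal:
    # The hints above are plain strings at runtime, so no runtime import is required.
    return prices[0]


print(first_price.__annotations__)  # {'prices': 'list[Decimal]', 'return': 'Decimal'}
```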
@@ -58,3 +64,64 @@ async def save_download(
         raise OSError(f"Failed to write file {file_path}: {e}") from e
 
     return file_path
+
+
+def find_matching_auth_options(
+    provided_keys: set[str],
+    auth_options: list[AuthOption],
+) -> list[AuthOption]:
+    """Find auth options that match the provided credential keys.
+
+    This is the single source of truth for auth scheme inference logic,
+    used by both the executor (at runtime) and validation (for cassettes).
+
+    Matching logic:
+    - An option matches if all its required fields are present in provided_keys
+    - Options with no required fields match any credentials
+
+    Args:
+        provided_keys: Set of credential/auth_config keys
+        auth_options: List of AuthOption from the connector model
+
+    Returns:
+        List of AuthOption that match the provided keys
+    """
+    matching_options: list[AuthOption] = []
+
+    for option in auth_options:
+        if option.user_config_spec and option.user_config_spec.required:
+            required_fields = set(option.user_config_spec.required)
+            if required_fields.issubset(provided_keys):
+                matching_options.append(option)
+        elif not option.user_config_spec or not option.user_config_spec.required:
+            # Option has no required fields - it matches any credentials
+            matching_options.append(option)
+
+    return matching_options
+
+
+def infer_auth_scheme_name(
+    provided_keys: set[str],
+    auth_options: list[AuthOption],
+) -> str | None:
+    """Infer the auth scheme name from provided credential keys.
+
+    Uses find_matching_auth_options to find matches, then returns
+    the scheme name only if exactly one option matches.
+
+    Args:
+        provided_keys: Set of credential/auth_config keys
+        auth_options: List of AuthOption from the connector model
+
+    Returns:
+        The scheme_name if exactly one match, None otherwise
+    """
+    if not provided_keys or not auth_options:
+        return None
+
+    matching = find_matching_auth_options(provided_keys, auth_options)
+
+    if len(matching) == 1:
+        return matching[0].scheme_name
+
+    return None
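Since the helpers only read `scheme_name` and `user_config_spec.required`, their matching behavior can be exercised with duck-typed stand-ins. A sketch, assuming the vendored module path used by this package; the option names and required fields are invented for illustration:

```python
# Sketch of the matching semantics with duck-typed stand-ins for AuthOption.
from dataclasses import dataclass, field

from airbyte_agent_facebook_marketing._vendored.connector_sdk.utils import (
    find_matching_auth_options,
    infer_auth_scheme_name,
)


@dataclass
class _Spec:
    required: list[str] = field(default_factory=list)


@dataclass
class _Option:  # duck-types the attributes the helpers read
    scheme_name: str
    user_config_spec: _Spec | None = None


options = [
    _Option("oauth2.0", _Spec(["client_id", "client_secret", "refresh_token"])),
    _Option("access_token", _Spec(["access_token"])),
]

provided = {"access_token"}
print([o.scheme_name for o in find_matching_auth_options(provided, options)])  # ['access_token']
print(infer_auth_scheme_name(provided, options))  # 'access_token'

# Ambiguous credentials (matching more than one option) yield None:
print(infer_auth_scheme_name({"client_id", "client_secret", "refresh_token", "access_token"}, options))
```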
@@ -5,6 +5,7 @@ These tools help ensure that connectors are ready to ship by:
 - Checking that all entity/action operations have corresponding test cassettes
 - Validating that response schemas match the actual cassette responses
 - Detecting fields present in responses but not declared in schemas
+- Validating replication compatibility with Airbyte source connectors
 """
 
 from collections import defaultdict
@@ -20,7 +21,9 @@ from .connector_model_loader import (
     load_connector_model,
 )
 from .testing.spec_loader import load_test_spec
-from .types import Action, EndpointDefinition
+from .types import Action, ConnectorModel, EndpointDefinition
+from .utils import infer_auth_scheme_name
+from .validation_replication import validate_replication_compatibility
 
 
 def build_cassette_map(cassettes_dir: Path) -> Dict[Tuple[str, str], List[Path]]:
@@ -51,6 +54,112 @@ def build_cassette_map(cassettes_dir: Path) -> Dict[Tuple[str, str], List[Path]]:
     return dict(cassette_map)
 
 
+def build_auth_scheme_coverage(
+    cassettes_dir: Path,
+    auth_options: list | None = None,
+) -> Tuple[Dict[str | None, List[Path]], List[Tuple[Path, set[str]]]]:
+    """Build a map of auth_scheme -> list of cassette paths.
+
+    For multi-auth connectors, infers the auth scheme from the cassette's auth_config
+    keys using the same matching logic as the executor.
+
+    Args:
+        cassettes_dir: Directory containing cassette YAML files
+        auth_options: List of AuthOption from the connector model (for inference)
+
+    Returns:
+        Tuple of:
+        - Dictionary mapping auth_scheme names (or None for single-auth) to cassette paths
+        - List of (cassette_path, auth_config_keys) for cassettes that couldn't be matched
+    """
+    auth_scheme_map: Dict[str | None, List[Path]] = defaultdict(list)
+    unmatched_cassettes: List[Tuple[Path, set[str]]] = []
+
+    if not cassettes_dir.exists() or not cassettes_dir.is_dir():
+        return {}, []
+
+    for cassette_file in cassettes_dir.glob("*.yaml"):
+        try:
+            spec = load_test_spec(cassette_file, auth_config={})
+
+            # First, check if auth_scheme is explicitly set in the cassette
+            if spec.auth_scheme:
+                auth_scheme_map[spec.auth_scheme].append(cassette_file)
+            # Otherwise, try to infer from auth_config keys
+            elif spec.auth_config and auth_options:
+                auth_config_keys = set(spec.auth_config.keys())
+                inferred_scheme = infer_auth_scheme_name(auth_config_keys, auth_options)
+                if inferred_scheme is not None:
+                    auth_scheme_map[inferred_scheme].append(cassette_file)
+                else:
+                    # Couldn't infer - track as unmatched
+                    unmatched_cassettes.append((cassette_file, auth_config_keys))
+            else:
+                # No auth_scheme and no auth_config - treat as None
+                auth_scheme_map[None].append(cassette_file)
+        except Exception:
+            continue
+
+    return dict(auth_scheme_map), unmatched_cassettes
+
+
+def validate_auth_scheme_coverage(
+    config: ConnectorModel,
+    cassettes_dir: Path,
+) -> Tuple[bool, List[str], List[str], List[str], List[Tuple[Path, set[str]]]]:
+    """Validate that each auth scheme has at least one cassette.
+
+    For multi-auth connectors, every defined auth scheme must have coverage
+    unless marked with x-airbyte-untested: true.
+    For single-auth connectors, this check is skipped (existing cassette checks suffice).
+
+    Args:
+        config: Loaded connector model
+        cassettes_dir: Directory containing cassette files
+
+    Returns:
+        Tuple of (is_valid, errors, warnings, covered_schemes, unmatched_cassettes)
+    """
+    errors: List[str] = []
+    warnings: List[str] = []
+
+    # Skip check for single-auth connectors
+    if not config.auth.is_multi_auth():
+        return True, errors, warnings, [], []
+
+    # Get all defined auth schemes, separating tested from untested
+    options = config.auth.options or []
+
+    # Build auth scheme coverage from cassettes (pass options for inference)
+    auth_scheme_coverage, unmatched_cassettes = build_auth_scheme_coverage(cassettes_dir, options)
+    tested_schemes = {opt.scheme_name for opt in options if not opt.untested}
+    untested_schemes = {opt.scheme_name for opt in options if opt.untested}
+    covered_schemes = {scheme for scheme in auth_scheme_coverage.keys() if scheme is not None}
+
+    # Find missing tested schemes (errors)
+    missing_tested = tested_schemes - covered_schemes
+    for scheme in sorted(missing_tested):
+        errors.append(
+            f"Auth scheme '{scheme}' has no cassette coverage. "
+            f"Record at least one cassette using this authentication method, "
+            f"or add 'x-airbyte-untested: true' to skip this check."
+        )
+
+    # Warn about untested schemes without coverage
+    missing_untested = untested_schemes - covered_schemes
+    for scheme in sorted(missing_untested):
+        warnings.append(
+            f"Auth scheme '{scheme}' is marked as untested (x-airbyte-untested: true) " f"and has no cassette coverage. Validation skipped."
+        )
+
+    # Warn about cassettes that couldn't be matched to any auth scheme
+    for cassette_path, auth_config_keys in unmatched_cassettes:
+        warnings.append(f"Cassette '{cassette_path.name}' could not be matched to any auth scheme. " f"auth_config keys: {sorted(auth_config_keys)}")
+
+    is_valid = len(missing_tested) == 0
+    return is_valid, errors, warnings, sorted(covered_schemes), unmatched_cassettes
+
+
 def validate_response_against_schema(response_body: Any, schema: Dict[str, Any]) -> Tuple[bool, List[str]]:
     """Validate a response body against a JSON schema.
 
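`validate_auth_scheme_coverage` only touches `config.auth.is_multi_auth()`, `config.auth.options`, and each option's `scheme_name` and `untested` flag, so its behavior can likewise be sketched with stand-ins instead of a full `ConnectorModel`. Assumptions in this sketch: the vendored import path below, stub dataclasses in place of the real models, and a cassettes directory that does not exist (so no coverage is found).

```python
# Sketch: exercising validate_auth_scheme_coverage with duck-typed stand-ins.
from dataclasses import dataclass, field
from pathlib import Path

from airbyte_agent_facebook_marketing._vendored.connector_sdk.validation import (
    validate_auth_scheme_coverage,
)


@dataclass
class _Option:
    scheme_name: str
    untested: bool = False
    user_config_spec: object | None = None


@dataclass
class _Auth:
    options: list[_Option] = field(default_factory=list)

    def is_multi_auth(self) -> bool:
        return len(self.options) > 1


@dataclass
class _Config:
    auth: _Auth = field(default_factory=_Auth)


config = _Config(auth=_Auth(options=[_Option("oauth2.0"), _Option("access_token", untested=True)]))

# With no cassettes recorded, the tested scheme is reported as an error while the
# untested one only produces a warning.
is_valid, errors, warnings, covered, unmatched = validate_auth_scheme_coverage(
    config, Path("tests/cassettes")  # directory need not exist for this sketch
)
print(is_valid)   # False
print(errors)     # one error about 'oauth2.0'
print(warnings)   # one warning about 'access_token'
```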
@@ -586,6 +695,9 @@ def validate_connector_readiness(connector_dir: str | Path) -> Dict[str, Any]:
     cassettes_dir = connector_path / "tests" / "cassettes"
     cassette_map = build_cassette_map(cassettes_dir)
 
+    # Validate auth scheme coverage for multi-auth connectors
+    auth_valid, auth_errors, auth_warnings, auth_covered_schemes, auth_unmatched_cassettes = validate_auth_scheme_coverage(config, cassettes_dir)
+
     validation_results = []
     total_operations = 0
     operations_with_cassettes = 0
@@ -808,7 +920,29 @@ def validate_connector_readiness(connector_dir: str | Path) -> Dict[str, Any]:
             }
         )
 
-
+    # Validate replication compatibility with Airbyte
+    replication_result = validate_replication_compatibility(
+        connector_yaml_path=config_file,
+        raw_spec=raw_spec,
+    )
+
+    # Merge replication errors/warnings into totals
+    # Note: If connector is not in registry, we don't count warnings since this is expected for test connectors
+    replication_errors = replication_result.get("errors", [])
+    replication_warnings = replication_result.get("warnings", [])
+    total_errors += len(replication_errors)
+
+    # Only count replication warnings if the connector was found in the registry
+    # (i.e., there are actual validation issues, not just "not found in registry")
+    if replication_result.get("registry_found", False):
+        total_warnings += len(replication_warnings)
+
+    # Merge auth scheme validation errors/warnings into totals
+    total_errors += len(auth_errors)
+    total_warnings += len(auth_warnings)
+
+    # Update success criteria to include replication and auth scheme validation
+    success = operations_missing_cassettes == 0 and cassettes_invalid == 0 and total_operations > 0 and len(replication_errors) == 0 and auth_valid
 
     # Check for preferred_for_check on at least one list operation
     has_preferred_check = False
@@ -829,11 +963,26 @@ def validate_connector_readiness(connector_dir: str | Path) -> Dict[str, Any]:
             "to enable reliable health checks."
         )
 
+    # Build auth scheme validation result
+    options = config.auth.options or []
+    tested_schemes = [opt.scheme_name for opt in options if not opt.untested]
+    untested_schemes_list = [opt.scheme_name for opt in options if opt.untested]
+    missing_tested = [s for s in tested_schemes if s not in auth_covered_schemes]
+
     return {
         "success": success,
         "connector_name": config.name,
         "connector_path": str(connector_path),
         "validation_results": validation_results,
+        "replication_validation": replication_result,
+        "auth_scheme_validation": {
+            "valid": auth_valid,
+            "errors": auth_errors,
+            "warnings": auth_warnings,
+            "covered_schemes": auth_covered_schemes,
+            "missing_schemes": missing_tested,
+            "untested_schemes": untested_schemes_list,
+        },
         "readiness_warnings": readiness_warnings,
         "summary": {
             "total_operations": total_operations,