airbyte-agent-mcp 0.1.53__py3-none-any.whl → 0.1.64__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airbyte_agent_mcp/_vendored/connector_sdk/auth_strategies.py +2 -5
- airbyte_agent_mcp/_vendored/connector_sdk/auth_template.py +1 -1
- airbyte_agent_mcp/_vendored/connector_sdk/cloud_utils/client.py +26 -26
- airbyte_agent_mcp/_vendored/connector_sdk/connector_model_loader.py +4 -4
- airbyte_agent_mcp/_vendored/connector_sdk/executor/hosted_executor.py +10 -11
- airbyte_agent_mcp/_vendored/connector_sdk/executor/local_executor.py +72 -13
- airbyte_agent_mcp/_vendored/connector_sdk/extensions.py +1 -2
- airbyte_agent_mcp/_vendored/connector_sdk/http/response.py +2 -0
- airbyte_agent_mcp/_vendored/connector_sdk/logging/logger.py +9 -9
- airbyte_agent_mcp/_vendored/connector_sdk/logging/types.py +10 -10
- airbyte_agent_mcp/_vendored/connector_sdk/observability/config.py +2 -2
- airbyte_agent_mcp/_vendored/connector_sdk/observability/models.py +6 -6
- airbyte_agent_mcp/_vendored/connector_sdk/observability/session.py +7 -5
- airbyte_agent_mcp/_vendored/connector_sdk/performance/metrics.py +3 -3
- airbyte_agent_mcp/_vendored/connector_sdk/schema/base.py +21 -18
- airbyte_agent_mcp/_vendored/connector_sdk/schema/components.py +58 -58
- airbyte_agent_mcp/_vendored/connector_sdk/schema/connector.py +22 -33
- airbyte_agent_mcp/_vendored/connector_sdk/schema/extensions.py +131 -10
- airbyte_agent_mcp/_vendored/connector_sdk/schema/operations.py +31 -31
- airbyte_agent_mcp/_vendored/connector_sdk/schema/security.py +36 -36
- airbyte_agent_mcp/_vendored/connector_sdk/secrets.py +2 -2
- airbyte_agent_mcp/_vendored/connector_sdk/telemetry/events.py +7 -7
- airbyte_agent_mcp/_vendored/connector_sdk/telemetry/tracker.py +6 -5
- airbyte_agent_mcp/_vendored/connector_sdk/types.py +2 -2
- airbyte_agent_mcp/server.py +34 -1
- {airbyte_agent_mcp-0.1.53.dist-info → airbyte_agent_mcp-0.1.64.dist-info}/METADATA +1 -1
- {airbyte_agent_mcp-0.1.53.dist-info → airbyte_agent_mcp-0.1.64.dist-info}/RECORD +28 -28
- {airbyte_agent_mcp-0.1.53.dist-info → airbyte_agent_mcp-0.1.64.dist-info}/WHEEL +0 -0
airbyte_agent_mcp/_vendored/connector_sdk/auth_strategies.py

@@ -610,9 +610,7 @@ class OAuth2AuthStrategy(AuthStrategy):
         has_refresh_token = bool(secrets.get("refresh_token"))
 
         if not has_access_token and not has_refresh_token:
-            raise AuthenticationError(
-                "Missing OAuth2 credentials. Provide either 'access_token' " "or 'refresh_token' (for refresh-token-only mode)."
-            )
+            raise AuthenticationError("Missing OAuth2 credentials. Provide either 'access_token' or 'refresh_token' (for refresh-token-only mode).")
 
     def can_refresh(self, secrets: OAuth2RefreshSecrets) -> bool:
         """Check if token refresh is possible.

@@ -1106,8 +1104,7 @@ class AuthStrategyFactory:
         strategy = cls._strategies.get(auth_type)
         if strategy is None:
             raise AuthenticationError(
-                f"Authentication type '{auth_type.value}' is not implemented. "
-                f"Supported types: {', '.join(s.value for s in cls._strategies.keys())}"
+                f"Authentication type '{auth_type.value}' is not implemented. Supported types: {', '.join(s.value for s in cls._strategies.keys())}"
             )
         return strategy
 
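The first hunk only reflows a wrapped error message into one line, but the check it sits in is worth spelling out: either OAuth2 credential is sufficient, and only the absence of both is an error. A minimal standalone sketch of that check (the real method lives on `OAuth2AuthStrategy`; the `access_token` lookup is assumed to mirror the `refresh_token` one shown in the hunk):

```python
# Sketch of the credential check shown above. AuthenticationError is the SDK's
# exception; it is redefined here only so the snippet runs on its own.
class AuthenticationError(Exception):
    pass


def validate_oauth2_secrets(secrets: dict) -> None:
    has_access_token = bool(secrets.get("access_token"))  # assumed, symmetric to refresh_token
    has_refresh_token = bool(secrets.get("refresh_token"))
    if not has_access_token and not has_refresh_token:
        raise AuthenticationError(
            "Missing OAuth2 credentials. Provide either 'access_token' or 'refresh_token' (for refresh-token-only mode)."
        )


validate_oauth2_secrets({"refresh_token": "rt-123"})  # passes: refresh-token-only mode
```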
airbyte_agent_mcp/_vendored/connector_sdk/auth_template.py

@@ -17,7 +17,7 @@ class MissingVariableError(ValueError):
     def __init__(self, var_name: str, available_fields: list):
         self.var_name = var_name
         self.available_fields = available_fields
-        super().__init__(f"Template variable '${{{var_name}}}' not found in config. …
+        super().__init__(f"Template variable '${{{var_name}}}' not found in config. Available fields: {available_fields}")
 
 
 def apply_template(template: str, values: Dict[str, str]) -> str:
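The rewritten message now names the available fields, which makes template failures much easier to debug. A hedged sketch of how such an error surfaces during substitution (only the exception class and its message format come from the diff; this `apply_template` body is an assumption built on `string.Template`):

```python
from string import Template
from typing import Dict


class MissingVariableError(ValueError):
    def __init__(self, var_name: str, available_fields: list):
        self.var_name = var_name
        self.available_fields = available_fields
        super().__init__(f"Template variable '${{{var_name}}}' not found in config. Available fields: {available_fields}")


def apply_template(template: str, values: Dict[str, str]) -> str:
    # Assumed implementation: substitute ${var} placeholders, failing loudly on unknowns.
    try:
        return Template(template).substitute(values)
    except KeyError as exc:
        raise MissingVariableError(exc.args[0], list(values.keys())) from None


print(apply_template("https://${subdomain}.example.com/v1", {"subdomain": "acme"}))
# apply_template("https://${subdomain}.example.com/v1", {}) would raise:
# MissingVariableError: Template variable '${subdomain}' not found in config. Available fields: []
```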
airbyte_agent_mcp/_vendored/connector_sdk/cloud_utils/client.py

@@ -13,7 +13,7 @@ class AirbyteCloudClient:
 
     Handles authentication, token caching, and API calls to:
     - Get bearer tokens for authentication
-    - Look up …
+    - Look up connectors for users
     - Execute connectors via the cloud API
 
     Example:

@@ -22,15 +22,15 @@ class AirbyteCloudClient:
             client_secret="your-client-secret"
         )
 
-        # Get a connector
-        …
+        # Get a connector ID
+        connector_id = await client.get_connector_id(
             external_user_id="user-123",
-            connector_definition_id="…
+            connector_definition_id="550e8400-e29b-41d4-a716-446655440000"
         )
 
         # Execute the connector
         result = await client.execute_connector(
-            …
+            connector_id=connector_id,
             entity="customers",
             action="list",
             params={"limit": 10}

@@ -105,37 +105,37 @@ class AirbyteCloudClient:
 
         return access_token
 
-    async def …
+    async def get_connector_id(
         self,
         external_user_id: str,
         connector_definition_id: str,
     ) -> str:
-        """Get connector …
+        """Get connector ID for a user.
 
-        Looks up the connector …
-        and connector definition. Validates that exactly one …
+        Looks up the connector that belongs to the specified user
+        and connector definition. Validates that exactly one connector exists.
 
         Args:
             external_user_id: User identifier in the Airbyte system
             connector_definition_id: UUID of the connector definition
 
         Returns:
-            Connector …
+            Connector ID (UUID string)
 
         Raises:
-            ValueError: If 0 or more than 1 …
+            ValueError: If 0 or more than 1 connector is found
            httpx.HTTPStatusError: If API returns 4xx/5xx status code
            httpx.RequestError: If network request fails
 
        Example:
-            …
+            connector_id = await client.get_connector_id(
                external_user_id="user-123",
                connector_definition_id="550e8400-e29b-41d4-a716-446655440000"
            )
        """
        token = await self.get_bearer_token()
-        url = f"{self.API_BASE_URL}/api/v1/connectors/…
+        url = f"{self.API_BASE_URL}/api/v1/connectors/connectors_for_user"
        params = {
            "external_user_id": external_user_id,
            "definition_id": connector_definition_id,

@@ -146,24 +146,24 @@ class AirbyteCloudClient:
            response.raise_for_status()
 
            data = response.json()
-            …
+            connectors = data["connectors"]
 
-            if len(…
-                raise ValueError(f"No connector …
+            if len(connectors) == 0:
+                raise ValueError(f"No connector found for user '{external_user_id}' and connector definition '{connector_definition_id}'")
 
-            if len(…
+            if len(connectors) > 1:
                raise ValueError(
-                    f"Multiple …
-                    f"and connector '{connector_definition_id}'. Expected exactly 1, "
-                    f"found {len(…
+                    f"Multiple connectors found for user '{external_user_id}' "
+                    f"and connector definition '{connector_definition_id}'. Expected exactly 1, "
+                    f"found {len(connectors)}"
                )
 
-            …
-            return …
+            connector_id = connectors[0]["id"]
+            return connector_id
 
    async def execute_connector(
        self,
-        …
+        connector_id: str,
        entity: str,
        action: str,
        params: dict[str, Any] | None,

@@ -171,7 +171,7 @@ class AirbyteCloudClient:
        """Execute a connector operation.
 
        Args:
-            …
+            connector_id: Connector UUID (source ID)
            entity: Entity name (e.g., "customers", "invoices")
            action: Operation action (e.g., "list", "get", "create")
            params: Optional parameters for the operation

@@ -185,14 +185,14 @@ class AirbyteCloudClient:
 
        Example:
            result = await client.execute_connector(
-                …
+                connector_id="550e8400-e29b-41d4-a716-446655440000",
                entity="customers",
                action="list",
                params={"limit": 10}
            )
        """
        token = await self.get_bearer_token()
-        url = f"{self.API_BASE_URL}/api/v1/connectors/…
+        url = f"{self.API_BASE_URL}/api/v1/connectors/sources/{connector_id}/execute"
        headers = {"Authorization": f"Bearer {token}"}
        request_body = {
            "entity": entity,
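Taken together, the renamed pieces above describe a two-step flow: resolve the user's connector ID from a connector definition, then execute an operation against it. A hedged usage sketch (argument names follow the docstring example in the diff; the `client_id` constructor keyword and the vendored import path are assumptions):

```python
import asyncio

from airbyte_agent_mcp._vendored.connector_sdk.cloud_utils.client import AirbyteCloudClient


async def main() -> None:
    client = AirbyteCloudClient(
        client_id="your-client-id",  # assumed keyword; client_secret appears verbatim in the docstring
        client_secret="your-client-secret",
    )

    # Step 1: look up the single connector this user has for the given definition.
    connector_id = await client.get_connector_id(
        external_user_id="user-123",
        connector_definition_id="550e8400-e29b-41d4-a716-446655440000",
    )

    # Step 2: execute an operation against that connector.
    result = await client.execute_connector(
        connector_id=connector_id,
        entity="customers",
        action="list",
        params={"limit": 10},
    )
    print(result)


asyncio.run(main())
```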
airbyte_agent_mcp/_vendored/connector_sdk/connector_model_loader.py

@@ -2,6 +2,7 @@
 
 from __future__ import annotations
 
+import logging
 import re
 from pathlib import Path
 from typing import Any

@@ -518,13 +519,14 @@ def _parse_oauth2_config(scheme: Any) -> dict[str, str]:
         config["refresh_url"] = refresh_url
 
     # Extract custom refresh configuration from x-airbyte-token-refresh extension
+    # Note: x_token_refresh is a Dict[str, Any], not a Pydantic model, so use .get()
     x_token_refresh = getattr(scheme, "x_token_refresh", None)
     if x_token_refresh:
-        auth_style = …
+        auth_style = x_token_refresh.get("auth_style")
         if auth_style:
             config["auth_style"] = auth_style
 
-        body_format = …
+        body_format = x_token_refresh.get("body_format")
         if body_format:
             config["body_format"] = body_format
 

@@ -768,8 +770,6 @@ def _parse_auth_from_openapi(spec: OpenAPIConnector) -> AuthConfig:
             options.append(auth_option)
         except Exception as e:
             # Log warning but continue - skip invalid schemes
-            import logging
-
             logger = logging.getLogger(__name__)
             logger.warning(f"Skipping invalid security scheme '{scheme_name}': {e}")
             continue
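The new comment in `_parse_oauth2_config` is the important part of this hunk: `x_token_refresh` is a plain `Dict[str, Any]`, so its values have to be read with `.get()` rather than attribute access. A small sketch of that extraction pattern (the scheme object and the example values are stand-ins; the field names come from the hunk):

```python
from types import SimpleNamespace
from typing import Any


def parse_refresh_extension(scheme: Any) -> dict[str, str]:
    config: dict[str, str] = {}
    # x_token_refresh is a Dict[str, Any], not a Pydantic model, so use .get()
    x_token_refresh = getattr(scheme, "x_token_refresh", None)
    if x_token_refresh:
        auth_style = x_token_refresh.get("auth_style")
        if auth_style:
            config["auth_style"] = auth_style

        body_format = x_token_refresh.get("body_format")
        if body_format:
            config["body_format"] = body_format
    return config


scheme = SimpleNamespace(x_token_refresh={"auth_style": "basic", "body_format": "json"})
print(parse_refresh_extension(scheme))  # {'auth_style': 'basic', 'body_format': 'json'}
```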
airbyte_agent_mcp/_vendored/connector_sdk/executor/hosted_executor.py

@@ -21,7 +21,7 @@ class HostedExecutor:
 
     The executor takes an external_user_id and uses the AirbyteCloudClient to:
     1. Authenticate with the Airbyte Platform (bearer token with caching)
-    2. Look up the user's connector …
+    2. Look up the user's connector
     3. Execute the connector operation via the cloud API
 
     Implements ExecutorProtocol.

@@ -63,7 +63,7 @@ class HostedExecutor:
         airbyte_client_id: Airbyte client ID for authentication
         airbyte_client_secret: Airbyte client secret for authentication
         connector_definition_id: Connector definition ID used to look up
-            the user's connector …
+            the user's connector.
 
     Example:
         executor = HostedExecutor(

@@ -86,8 +86,8 @@ class HostedExecutor:
         """Execute connector via cloud API (ExecutorProtocol implementation).
 
         Flow:
-        1. Get connector id from …
-        2. Look up the user's connector …
+        1. Get connector definition id from executor config
+        2. Look up the user's connector ID
         3. Execute the connector operation via the cloud API
         4. Parse the response into ExecutionResult
 

@@ -98,7 +98,7 @@ class HostedExecutor:
             ExecutionResult with success/failure status
 
         Raises:
-            ValueError: If no …
+            ValueError: If no connector or multiple connectors found for user
             httpx.HTTPStatusError: If API returns 4xx/5xx status code
             httpx.RequestError: If network request fails
 

@@ -126,24 +126,23 @@ class HostedExecutor:
             # Step 1: Get connector definition id
             connector_definition_id = self._connector_definition_id
 
-            # Step 2: Get the connector …
-            …
+            # Step 2: Get the connector ID for this user
+            connector_id = await self._cloud_client.get_connector_id(
                 external_user_id=self._external_user_id,
                 connector_definition_id=connector_definition_id,
             )
 
-            span.set_attribute("connector.…
+            span.set_attribute("connector.connector_id", connector_id)
 
             # Step 3: Execute the connector via the cloud API
             response = await self._cloud_client.execute_connector(
-                …
+                connector_id=connector_id,
                 entity=config.entity,
                 action=config.action,
                 params=config.params,
             )
 
             # Step 4: Parse the response into ExecutionResult
-            # The response_data is a dict from the API
             result = self._parse_execution_result(response)
 
             # Mark span as successful

@@ -152,7 +151,7 @@ class HostedExecutor:
             return result
 
         except ValueError as e:
-            # …
+            # Connector lookup validation error (0 or >1 connectors)
             span.set_attribute("connector.success", False)
             span.set_attribute("connector.error_type", "ValueError")
             span.record_exception(e)
airbyte_agent_mcp/_vendored/connector_sdk/executor/local_executor.py

@@ -3,6 +3,7 @@
 from __future__ import annotations
 
 import asyncio
+import inspect
 import logging
 import os
 import re

@@ -11,6 +12,7 @@ from collections.abc import AsyncIterator
 from typing import Any, Protocol
 from urllib.parse import quote
 
+from jinja2 import Environment, StrictUndefined
 from jsonpath_ng import parse as parse_jsonpath
 from opentelemetry import trace
 

@@ -506,8 +508,6 @@ class LocalExecutor:
             result = handler.execute_operation(config.entity, action, params)
 
             # Check if it's an async generator (download) or awaitable (standard)
-            import inspect
-
             if inspect.isasyncgen(result):
                 # Download operation: return generator directly
                 return ExecutionResult(

@@ -814,7 +814,6 @@ class LocalExecutor:
         >>> _substitute_file_field_params("attachments[{attachment_index}].url", {"attachment_index": 0})
         "attachments[0].url"
         """
-        from jinja2 import Environment, StrictUndefined
 
         # Use custom delimiters to match OpenAPI path parameter syntax {var}
         # StrictUndefined raises clear error if a template variable is missing
@@ -1099,35 +1098,93 @@ class LocalExecutor:
 
         return interpolate_value(variables)
 
+    def _wrap_primitives(self, data: Any) -> dict[str, Any] | list[dict[str, Any]] | None:
+        """Wrap primitive values in dict format for consistent response structure.
+
+        Transforms primitive API responses into dict format so downstream code
+        can always expect dict-based data structures.
+
+        Args:
+            data: Response data (could be primitive, list, dict, or None)
+
+        Returns:
+            - If data is a primitive (str, int, float, bool): {"value": data}
+            - If data is a list: wraps all non-dict elements as {"value": item}
+            - If data is already a dict or list of dicts: unchanged
+            - If data is None: None
+
+        Examples:
+            >>> executor._wrap_primitives(42)
+            {"value": 42}
+            >>> executor._wrap_primitives([1, 2, 3])
+            [{"value": 1}, {"value": 2}, {"value": 3}]
+            >>> executor._wrap_primitives([1, {"id": 2}, 3])
+            [{"value": 1}, {"id": 2}, {"value": 3}]
+            >>> executor._wrap_primitives([[1, 2], 3])
+            [{"value": [1, 2]}, {"value": 3}]
+            >>> executor._wrap_primitives({"id": 1})
+            {"id": 1}  # unchanged
+        """
+        if data is None:
+            return None
+
+        # Handle primitive scalars
+        if isinstance(data, (bool, str, int, float)):
+            return {"value": data}
+
+        # Handle lists - wrap non-dict elements
+        if isinstance(data, list):
+            if not data:
+                return []  # Empty list unchanged
+
+            wrapped = []
+            for item in data:
+                if isinstance(item, dict):
+                    wrapped.append(item)
+                else:
+                    wrapped.append({"value": item})
+            return wrapped
+
+        # Dict - return unchanged
+        if isinstance(data, dict):
+            return data
+
+        # Unknown type - wrap for safety
+        return {"value": data}
+
     def _extract_records(
         self,
-        response_data: …
+        response_data: Any,
         endpoint: EndpointDefinition,
-    ) -> dict[str, Any] | list[Any] | None:
+    ) -> dict[str, Any] | list[dict[str, Any]] | None:
         """Extract records from response using record extractor.
 
         Type inference based on action:
         - list, search: Returns array ([] if not found)
         - get, create, update, delete: Returns single record (None if not found)
 
+        Automatically wraps primitive values (int, str, float, bool) in {"value": primitive}
+        format to ensure consistent dict-based responses for downstream code.
+
         Args:
-            response_data: Full API response
+            response_data: Full API response (can be dict, list, primitive, or None)
             endpoint: Endpoint with optional record extractor and action
 
         Returns:
             - Extracted data if extractor configured and path found
             - [] or None if path not found (based on action)
             - Original response if no extractor configured or on error
+            - Primitives are wrapped as {"value": primitive}
         """
         # Check if endpoint has record extractor
         extractor = endpoint.record_extractor
         if not extractor:
-            return response_data
+            return self._wrap_primitives(response_data)
 
         # Determine if this action returns array or single record
         action = endpoint.action
         if not action:
-            return response_data
+            return self._wrap_primitives(response_data)
 
         is_array_action = action in (Action.LIST, Action.API_SEARCH)
 
@@ -1140,17 +1197,19 @@ class LocalExecutor:
                 # Path not found - return empty based on action
                 return [] if is_array_action else None
 
-            # Return extracted data
+            # Return extracted data with primitive wrapping
             if is_array_action:
                 # For array actions, return the array (or list of matches)
-                …
+                result = matches[0] if len(matches) == 1 else matches
             else:
                 # For single record actions, return first match
-                …
+                result = matches[0]
+
+            return self._wrap_primitives(result)
 
         except Exception as e:
             logging.warning(f"Failed to apply record extractor '{extractor}': {e}. Returning original response.")
-            return response_data
+            return self._wrap_primitives(response_data)
 
     def _extract_metadata(
         self,

@@ -1235,7 +1294,7 @@ class LocalExecutor:
 
         if missing_fields:
             raise MissingParameterError(
-                f"Missing required body fields for {entity}.{action.value}: {missing_fields}. …
+                f"Missing required body fields for {entity}.{action.value}: {missing_fields}. Provided parameters: {list(params.keys())}"
             )
 
     async def close(self):
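The bulk of this file's change is the new `_wrap_primitives` helper, which `_extract_records` now routes every return path through so callers always receive dict-shaped records. A standalone sketch of the same wrapping rules (logic copied from the hunk, lifted out of the executor class):

```python
from typing import Any


def wrap_primitives(data: Any) -> Any:
    # Mirrors LocalExecutor._wrap_primitives: scalars become {"value": ...}, lists wrap
    # their non-dict elements, dicts and None pass through, anything else is wrapped too.
    if data is None:
        return None
    if isinstance(data, (bool, str, int, float)):
        return {"value": data}
    if isinstance(data, list):
        return [item if isinstance(item, dict) else {"value": item} for item in data]
    if isinstance(data, dict):
        return data
    return {"value": data}


assert wrap_primitives(42) == {"value": 42}
assert wrap_primitives([1, {"id": 2}, 3]) == [{"value": 1}, {"id": 2}, {"value": 3}]
assert wrap_primitives({"id": 1}) == {"id": 1}
assert wrap_primitives(None) is None
```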
airbyte_agent_mcp/_vendored/connector_sdk/extensions.py

@@ -666,8 +666,7 @@ EXTENSION_REGISTRY = {
         "type": "dict[str, str]",
         "required": False,
         "description": (
-            "Dictionary mapping field names to JSONPath expressions for extracting metadata "
-            "(pagination, request IDs, etc.) from response envelopes"
+            "Dictionary mapping field names to JSONPath expressions for extracting metadata (pagination, request IDs, etc.) from response envelopes"
         ),
     },
     AIRBYTE_FILE_URL: {
airbyte_agent_mcp/_vendored/connector_sdk/http/response.py

@@ -80,6 +80,8 @@ class HTTPResponse:
             HTTPStatusError: For 4xx or 5xx status codes.
         """
         if 400 <= self._status_code < 600:
+            # NOTE: Import here intentionally to avoid circular import.
+            # exceptions.py imports HTTPResponse for type hints.
             from .exceptions import HTTPStatusError
 
             raise HTTPStatusError(
airbyte_agent_mcp/_vendored/connector_sdk/logging/logger.py

@@ -5,7 +5,7 @@ import json
 import time
 import uuid
 from pathlib import Path
-from typing import Any, Dict, …
+from typing import Any, Dict, Set
 
 from .types import LogSession, RequestLog
 

@@ -31,9 +31,9 @@ class RequestLogger:
 
     def __init__(
         self,
-        log_file: …
-        connector_name: …
-        max_logs: …
+        log_file: str | None = None,
+        connector_name: str | None = None,
+        max_logs: int | None = 10000,
     ):
         """
         Initialize the request logger.

@@ -99,9 +99,9 @@ class RequestLogger:
         method: str,
         url: str,
         path: str,
-        headers: …
-        params: …
-        body: …
+        headers: Dict[str, str] | None = None,
+        params: Dict[str, Any] | None = None,
+        body: Any | None = None,
     ) -> str:
         """
         Log the start of an HTTP request.

@@ -133,7 +133,7 @@
         self,
         request_id: str,
         status_code: int,
-        response_body: …
+        response_body: Any | None = None,
     ) -> None:
         """
         Log a successful HTTP response.

@@ -176,7 +176,7 @@
         self,
         request_id: str,
         error: str,
-        status_code: …
+        status_code: int | None = None,
     ) -> None:
         """
         Log an HTTP request error.
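The truncated removed lines in these logging and observability files all follow the same pattern: `Optional[X]` annotations rewritten as PEP 604 `X | None` unions, which require Python 3.10+. A minimal before/after sketch, reconstructing the old side on the assumption it used `Optional` (the hunk header for `get_public_ip` below still shows the old `Optional[str]` return type):

```python
from typing import Any, Dict, Optional


# Pre-0.1.64 style (assumed; the removed lines are truncated in the rendered diff):
def log_request_start_old(headers: Optional[Dict[str, str]] = None, body: Optional[Any] = None) -> str:
    return "request-id"


# 0.1.64 style, as written in the hunks (PEP 604 union syntax):
def log_request_start_new(headers: Dict[str, str] | None = None, body: Any | None = None) -> str:
    return "request-id"
```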
airbyte_agent_mcp/_vendored/connector_sdk/logging/types.py

@@ -2,7 +2,7 @@
 
 import base64
 from datetime import UTC, datetime
-from typing import Any, Dict, List…
+from typing import Any, Dict, List
 
 from pydantic import BaseModel, ConfigDict, Field, field_serializer, field_validator
 

@@ -27,12 +27,12 @@ class RequestLog(BaseModel):
     url: str
     path: str
     headers: Dict[str, str] = Field(default_factory=dict)
-    params: …
-    body: …
-    response_status: …
-    response_body: …
-    timing_ms: …
-    error: …
+    params: Dict[str, Any] | None = None
+    body: Any | None = None
+    response_status: int | None = None
+    response_body: Any | None = None
+    timing_ms: float | None = None
+    error: str | None = None
 
     @field_serializer("timestamp")
     def serialize_datetime(self, value: datetime) -> str:

@@ -50,9 +50,9 @@ class LogSession(BaseModel):
 
     session_id: str
     started_at: datetime = Field(default_factory=_utc_now)
-    connector_name: …
+    connector_name: str | None = None
     logs: List[RequestLog] = Field(default_factory=list)
-    max_logs: …
+    max_logs: int | None = Field(
         default=10000,
         description="Maximum number of logs to keep in memory. "
         "When limit is reached, oldest logs should be flushed before removal. "

@@ -60,7 +60,7 @@ class LogSession(BaseModel):
     )
     chunk_logs: List[bytes] = Field(
         default_factory=list,
-        description="Captured chunks from streaming responses. …
+        description="Captured chunks from streaming responses. Each chunk is logged when log_chunk_fetch() is called.",
     )
 
     @field_validator("chunk_logs", mode="before")
airbyte_agent_mcp/_vendored/connector_sdk/observability/config.py

@@ -6,7 +6,7 @@ import tempfile
 import uuid
 from dataclasses import dataclass, field
 from pathlib import Path
-from typing import Any…
+from typing import Any
 
 import yaml
 

@@ -53,7 +53,7 @@ def _delete_legacy_files() -> None:
         logger.debug(f"Could not delete legacy file {legacy_path}: {e}")
 
 
-def _migrate_legacy_config() -> …
+def _migrate_legacy_config() -> SDKConfig | None:
     """
     Migrate from legacy file-based config to new YAML format.
 
airbyte_agent_mcp/_vendored/connector_sdk/observability/models.py

@@ -2,7 +2,7 @@
 
 from dataclasses import dataclass
 from datetime import datetime
-from typing import Any, Dict…
+from typing import Any, Dict
 
 
 @dataclass

@@ -12,8 +12,8 @@ class OperationMetadata:
     entity: str
     action: str
     timestamp: datetime
-    timing_ms: …
-    status_code: …
-    error_type: …
-    error_message: …
-    params: …
+    timing_ms: float | None = None
+    status_code: int | None = None
+    error_type: str | None = None
+    error_message: str | None = None
+    params: Dict[str, Any] | None = None
airbyte_agent_mcp/_vendored/connector_sdk/observability/session.py

@@ -3,14 +3,14 @@
 import logging
 import uuid
 from datetime import UTC, datetime
-from typing import Any, Dict…
+from typing import Any, Dict
 
 from .config import SDKConfig, load_config
 
 logger = logging.getLogger(__name__)
 
 # Cache the config at module level to avoid repeated reads
-_cached_config: …
+_cached_config: SDKConfig | None = None
 
 
 def _get_config() -> SDKConfig:

@@ -39,7 +39,7 @@ def get_persistent_user_id() -> str:
     return _get_config().user_id
 
 
-def get_public_ip() -> …
+def get_public_ip() -> str | None:
     """
     Fetch the public IP address of the user.
 

@@ -47,6 +47,8 @@ def get_public_ip() -> Optional[str]:
     Uses httpx for a robust HTTP request to a public IP service.
     """
     try:
+        # NOTE: Import here intentionally - this is a non-critical network call
+        # that may fail. Importing at module level would make httpx a hard dependency.
         import httpx
 
         # Use a short timeout to avoid blocking
@@ -77,9 +79,9 @@ class ObservabilitySession:
     def __init__(
         self,
         connector_name: str,
-        connector_version: …
+        connector_version: str | None = None,
         execution_context: str = "direct",
-        session_id: …
+        session_id: str | None = None,
     ):
         self.session_id = session_id or str(uuid.uuid4())
         self.user_id = get_persistent_user_id()
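The comment added to `get_public_ip` documents a deliberate pattern: import `httpx` lazily inside the try block so this best-effort lookup cannot turn it into a hard import-time dependency, and keep the call bounded by a short timeout. A hedged sketch of that pattern (the IP service URL and timeout value are assumptions; the diff does not show them):

```python
def get_public_ip() -> str | None:
    """Best-effort public IP lookup; returns None on any failure."""
    try:
        # NOTE: Import here intentionally - this is a non-critical network call
        # that may fail. Importing at module level would make httpx a hard dependency.
        import httpx

        # Use a short timeout to avoid blocking (service URL and timeout are assumed).
        response = httpx.get("https://api.ipify.org", timeout=2.0)
        response.raise_for_status()
        return response.text.strip()
    except Exception:
        return None


print(get_public_ip())
```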