airbyte-agent-slack 0.1.15__py3-none-any.whl → 0.1.25__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -18,8 +18,8 @@ from .models import (
     ChannelsListResponse,
     ChannelResponse,
     Attachment,
-    File,
     Reaction,
+    File,
     Message,
     Thread,
     EditedInfo,
@@ -50,7 +50,13 @@ from .models import (
     UsersListResult,
     ChannelsListResult,
     ChannelMessagesListResult,
-    ThreadsListResult
+    ThreadsListResult,
+    AirbyteSearchHit,
+    AirbyteSearchResult,
+    ChannelsSearchData,
+    ChannelsSearchResult,
+    UsersSearchData,
+    UsersSearchResult
 )
 from .types import (
     UsersListParams,
@@ -65,7 +71,15 @@ from .types import (
     ChannelsUpdateParams,
     ChannelTopicsCreateParams,
     ChannelPurposesCreateParams,
-    ReactionsCreateParams
+    ReactionsCreateParams,
+    AirbyteSearchParams,
+    AirbyteSortOrder,
+    ChannelsSearchFilter,
+    ChannelsSearchQuery,
+    ChannelsCondition,
+    UsersSearchFilter,
+    UsersSearchQuery,
+    UsersCondition
 )
 
 __all__ = [
@@ -82,8 +96,8 @@ __all__ = [
     "ChannelsListResponse",
     "ChannelResponse",
     "Attachment",
-    "File",
     "Reaction",
+    "File",
     "Message",
     "Thread",
     "EditedInfo",
@@ -115,6 +129,12 @@ __all__ = [
     "ChannelsListResult",
     "ChannelMessagesListResult",
     "ThreadsListResult",
+    "AirbyteSearchHit",
+    "AirbyteSearchResult",
+    "ChannelsSearchData",
+    "ChannelsSearchResult",
+    "UsersSearchData",
+    "UsersSearchResult",
     "UsersListParams",
     "UsersGetParams",
     "ChannelsListParams",
@@ -128,4 +148,12 @@ __all__ = [
     "ChannelTopicsCreateParams",
     "ChannelPurposesCreateParams",
     "ReactionsCreateParams",
+    "AirbyteSearchParams",
+    "AirbyteSortOrder",
+    "ChannelsSearchFilter",
+    "ChannelsSearchQuery",
+    "ChannelsCondition",
+    "UsersSearchFilter",
+    "UsersSearchQuery",
+    "UsersCondition",
 ]
@@ -634,9 +634,7 @@ class OAuth2AuthStrategy(AuthStrategy):
             Headers dict with additional headers added
         """
         # Build template context with extracted secret values
-        template_context = {
-            key: extract_secret_value(value) for key, value in secrets.items()
-        }
+        template_context = {key: extract_secret_value(value) for key, value in secrets.items()}
 
         for header_name, value_template in additional_headers.items():
             # Use Jinja2 templating for variable substitution
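For reference, the templating step above follows this shape; a minimal sketch with plain strings (the real code builds template_context via extract_secret_value and reads additional_headers from the connector spec; the header name and secret value below are illustrative only):

    from jinja2 import Template

    template_context = {"access_token": "xoxb-123"}  # assumed, already-extracted secret value
    additional_headers = {"Authorization": "Bearer {{ access_token }}"}  # assumed header template

    # Render each header template against the secret context, as the loop above does
    rendered = {name: Template(tpl).render(**template_context) for name, tpl in additional_headers.items()}
    assert rendered == {"Authorization": "Bearer xoxb-123"}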
@@ -62,6 +62,53 @@ class TokenExtractValidationError(ConnectorModelLoaderError):
     pass
 
 
+# Expected auth_mapping keys for each auth type.
+# These are the auth parameters that each security scheme expects, NOT the user's credential field names.
+EXPECTED_AUTH_MAPPING_KEYS: dict[AuthType, set[str]] = {
+    AuthType.BEARER: {"token"},
+    AuthType.BASIC: {"username", "password"},
+    AuthType.API_KEY: {"api_key"},
+    AuthType.OAUTH2: {"access_token", "refresh_token", "client_id", "client_secret"},
+}
+
+
+def _validate_auth_mapping_keys(
+    auth_type: AuthType,
+    auth_config: AirbyteAuthConfig | None,
+    scheme_name: str = "default",
+) -> None:
+    """Validate that auth_mapping keys match expected parameters for the auth type.
+
+    The auth_mapping keys must be the parameters expected by the security scheme
+    (e.g., "token" for bearer), not the user's credential field names.
+
+    Args:
+        auth_type: The authentication type
+        auth_config: The x-airbyte-auth-config containing auth_mapping
+        scheme_name: Name of the security scheme for error messages
+
+    Raises:
+        InvalidOpenAPIError: If auth_mapping keys don't match expected parameters
+    """
+    if auth_config is None or auth_config.auth_mapping is None:
+        return  # No explicit auth_mapping, will use defaults
+
+    expected_keys = EXPECTED_AUTH_MAPPING_KEYS.get(auth_type)
+    if expected_keys is None:
+        return  # Unknown auth type, skip validation
+
+    actual_keys = set(auth_config.auth_mapping.keys())
+    invalid_keys = actual_keys - expected_keys
+
+    if invalid_keys:
+        raise InvalidOpenAPIError(
+            f"Invalid auth_mapping keys for {auth_type.value} auth in scheme '{scheme_name}': {invalid_keys}. "
+            f"Expected keys for {auth_type.value}: {sorted(expected_keys)}. "
+            f"Note: auth_mapping keys must be the auth parameters (e.g., 'token' for bearer), "
+            f'not your credential field names. Use template syntax to map: token: "${{your_field}}"'
+        )
+
+
 def extract_path_params(path: str) -> list[str]:
     """Extract parameter names from path template.
 
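A minimal sketch of what this validation enforces, using plain sets (the real helper takes AuthType and AirbyteAuthConfig and raises InvalidOpenAPIError; the credential field name below is hypothetical):

    expected_keys = {"token"}  # bearer auth expects the "token" parameter
    auth_mapping = {"my_api_token": "${my_api_token}"}  # wrong: uses the user's field name as the key
    assert set(auth_mapping) - expected_keys == {"my_api_token"}  # the loader would reject this

    auth_mapping = {"token": "${my_api_token}"}  # correct: map the credential via template syntax
    assert not set(auth_mapping) - expected_keys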
@@ -145,6 +192,87 @@ def _deproxy_schema(obj: Any) -> Any:
     return obj
 
 
+def _type_includes(type_value: Any, target: str) -> bool:
+    if isinstance(type_value, list):
+        return target in type_value
+    return type_value == target
+
+
+def _flatten_cache_properties(properties: dict[str, Any], prefix: str) -> list[str]:
+    entries: list[str] = []
+    for prop_name, prop in properties.items():
+        path = f"{prefix}{prop_name}" if prefix else prop_name
+        entries.append(path)
+
+        prop_type = getattr(prop, "type", None) if not isinstance(prop, dict) else prop.get("type")
+        prop_properties = getattr(prop, "properties", None) if not isinstance(prop, dict) else prop.get("properties")
+
+        if _type_includes(prop_type, "array"):
+            array_path = f"{path}[]"
+            entries.append(array_path)
+            if isinstance(prop_properties, dict):
+                entries.extend(_flatten_cache_properties(prop_properties, prefix=f"{array_path}."))
+        elif isinstance(prop_properties, dict):
+            entries.extend(_flatten_cache_properties(prop_properties, prefix=f"{path}."))
+
+    return entries
+
+
+def _flatten_cache_field_paths(field: Any) -> list[str]:
+    field_name = getattr(field, "name", None) if not isinstance(field, dict) else field.get("name")
+    if not isinstance(field_name, str) or not field_name:
+        return []
+
+    field_type = getattr(field, "type", None) if not isinstance(field, dict) else field.get("type")
+    field_properties = getattr(field, "properties", None) if not isinstance(field, dict) else field.get("properties")
+
+    entries = [field_name]
+    if _type_includes(field_type, "array"):
+        array_path = f"{field_name}[]"
+        entries.append(array_path)
+        if isinstance(field_properties, dict):
+            entries.extend(_flatten_cache_properties(field_properties, prefix=f"{array_path}."))
+    elif isinstance(field_properties, dict):
+        entries.extend(_flatten_cache_properties(field_properties, prefix=f"{field_name}."))
+
+    return entries
+
+
+def _dedupe_strings(values: list[str]) -> list[str]:
+    seen: set[str] = set()
+    ordered: list[str] = []
+    for value in values:
+        if value not in seen:
+            seen.add(value)
+            ordered.append(value)
+    return ordered
+
+
+def _extract_search_field_paths(spec: OpenAPIConnector) -> dict[str, list[str]]:
+    cache_config = getattr(spec.info, "x_airbyte_cache", None)
+    entities = getattr(cache_config, "entities", None)
+    if not isinstance(entities, list):
+        return {}
+
+    search_fields: dict[str, list[str]] = {}
+    for entity in entities:
+        entity_name = getattr(entity, "entity", None) if not isinstance(entity, dict) else entity.get("entity")
+        if not isinstance(entity_name, str) or not entity_name:
+            continue
+
+        fields = getattr(entity, "fields", None) if not isinstance(entity, dict) else entity.get("fields")
+        if not isinstance(fields, list):
+            continue
+
+        field_paths: list[str] = []
+        for field in fields:
+            field_paths.extend(_flatten_cache_field_paths(field))
+
+        search_fields[entity_name] = _dedupe_strings(field_paths)
+
+    return search_fields
+
+
 def parse_openapi_spec(raw_config: dict) -> OpenAPIConnector:
     """Parse OpenAPI specification from YAML.
 
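To illustrate the dotted paths these helpers produce, here is a single-level, plain-dict sketch (the helpers above also accept pydantic objects and recurse through nested properties; the "reactions" field is a hypothetical x-airbyte-cache entry, not taken from the Slack spec):

    from typing import Any

    def flatten_one_level(field: dict[str, Any]) -> list[str]:
        # Mirrors the shape of _flatten_cache_field_paths for one nesting level only
        base = field["name"]
        paths = [base]
        if field.get("type") == "array":
            base = f"{base}[]"
            paths.append(base)
        for prop_name in (field.get("properties") or {}):
            paths.append(f"{base}.{prop_name}")
        return paths

    print(flatten_one_level({"name": "reactions", "type": "array", "properties": {"name": {}, "count": {}}}))
    # ['reactions', 'reactions[]', 'reactions[].name', 'reactions[].count']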
@@ -434,6 +562,8 @@ def convert_openapi_to_connector_model(spec: OpenAPIConnector) -> ConnectorModel
     if not connector_id:
         raise InvalidOpenAPIError("Missing required x-airbyte-connector-id field")
 
+    search_field_paths = _extract_search_field_paths(spec)
+
     # Create ConnectorModel
     model = ConnectorModel(
         id=connector_id,
@@ -444,6 +574,7 @@ def convert_openapi_to_connector_model(spec: OpenAPIConnector) -> ConnectorModel
         entities=entities,
         openapi_spec=spec,
         retry_config=retry_config,
+        search_field_paths=search_field_paths,
     )
 
     return model
@@ -840,6 +971,9 @@ def _parse_single_security_scheme(scheme: Any) -> AuthConfig:
         oauth2_config = _parse_oauth2_config(scheme)
         # Use explicit x-airbyte-auth-config if present, otherwise generate default
         auth_config_obj = scheme.x_airbyte_auth_config or _generate_default_auth_config(AuthType.OAUTH2)
+        # Validate auth_mapping keys if explicitly provided
+        if scheme.x_airbyte_auth_config:
+            _validate_auth_mapping_keys(AuthType.OAUTH2, scheme.x_airbyte_auth_config)
         return AuthConfig(
             type=AuthType.OAUTH2,
             config=oauth2_config,
@@ -850,6 +984,10 @@ def _parse_single_security_scheme(scheme: Any) -> AuthConfig:
     # Use explicit x-airbyte-auth-config if present, otherwise generate default
     auth_config_obj = scheme.x_airbyte_auth_config or _generate_default_auth_config(auth_type)
 
+    # Validate auth_mapping keys if explicitly provided
+    if scheme.x_airbyte_auth_config:
+        _validate_auth_mapping_keys(auth_type, scheme.x_airbyte_auth_config)
+
     return AuthConfig(
         type=auth_type,
         config=auth_config,
@@ -694,9 +694,7 @@ class LocalExecutor:
         """
         return {key: value for key, value in params.items() if key in allowed_fields and value is not None}
 
-    def _extract_header_params(
-        self, endpoint: EndpointDefinition, params: dict[str, Any], body: dict[str, Any] | None = None
-    ) -> dict[str, str]:
+    def _extract_header_params(self, endpoint: EndpointDefinition, params: dict[str, Any], body: dict[str, Any] | None = None) -> dict[str, str]:
         """Extract header parameters from params and schema defaults.
 
         Also adds Content-Type header when there's a request body (unless already specified
@@ -1034,7 +1032,9 @@ class LocalExecutor:
         if "variables" in graphql_config and graphql_config["variables"]:
             variables = self._interpolate_variables(graphql_config["variables"], params, param_defaults)
             # Filter out None values (optional fields not provided) - matches REST _extract_body() behavior
-            body["variables"] = {k: v for k, v in variables.items() if v is not None}
+            # But preserve None for variables explicitly marked as nullable (e.g., to unassign a user)
+            nullable_vars = set(graphql_config.get("x-airbyte-nullable-variables") or [])
+            body["variables"] = {k: v for k, v in variables.items() if v is not None or k in nullable_vars}
 
         # Add operation name if specified
         if "operationName" in graphql_config:
@@ -18,6 +18,185 @@ from typing import Any, Protocol
 MAX_EXAMPLE_QUESTIONS = 5  # Maximum number of example questions to include in description
 
 
+def _type_includes(type_value: Any, target: str) -> bool:
+    if isinstance(type_value, list):
+        return target in type_value
+    return type_value == target
+
+
+def _is_object_schema(schema: dict[str, Any]) -> bool:
+    if "properties" in schema:
+        return True
+    return _type_includes(schema.get("type"), "object")
+
+
+def _is_array_schema(schema: dict[str, Any]) -> bool:
+    if "items" in schema:
+        return True
+    return _type_includes(schema.get("type"), "array")
+
+
+def _dedupe_param_entries(entries: list[tuple[str, bool]]) -> list[tuple[str, bool]]:
+    seen: dict[str, bool] = {}
+    ordered: list[str] = []
+    for name, required in entries:
+        if name not in seen:
+            seen[name] = required
+            ordered.append(name)
+        else:
+            seen[name] = seen[name] or required
+    return [(name, seen[name]) for name in ordered]
+
+
+def _flatten_schema_params(
+    schema: dict[str, Any],
+    prefix: str = "",
+    parent_required: bool = True,
+    seen_stack: set[int] | None = None,
+) -> list[tuple[str, bool]]:
+    if not isinstance(schema, dict):
+        return []
+
+    if seen_stack is None:
+        seen_stack = set()
+
+    schema_id = id(schema)
+    if schema_id in seen_stack:
+        return []
+
+    seen_stack.add(schema_id)
+    try:
+        entries: list[tuple[str, bool]] = []
+
+        for subschema in schema.get("allOf", []) or []:
+            if isinstance(subschema, dict):
+                entries.extend(_flatten_schema_params(subschema, prefix, parent_required, seen_stack))
+
+        for keyword in ("anyOf", "oneOf"):
+            for subschema in schema.get(keyword, []) or []:
+                if isinstance(subschema, dict):
+                    entries.extend(_flatten_schema_params(subschema, prefix, False, seen_stack))
+
+        properties = schema.get("properties")
+        if isinstance(properties, dict):
+            required_fields = set(schema.get("required", [])) if isinstance(schema.get("required"), list) else set()
+            for prop_name, prop_schema in properties.items():
+                path = f"{prefix}{prop_name}" if prefix else prop_name
+                is_required = parent_required and prop_name in required_fields
+                entries.append((path, is_required))
+
+                if isinstance(prop_schema, dict):
+                    if _is_array_schema(prop_schema):
+                        array_path = f"{path}[]"
+                        entries.append((array_path, is_required))
+                        items = prop_schema.get("items")
+                        if isinstance(items, dict):
+                            entries.extend(_flatten_schema_params(items, prefix=f"{array_path}.", parent_required=is_required, seen_stack=seen_stack))
+                    if _is_object_schema(prop_schema):
+                        entries.extend(_flatten_schema_params(prop_schema, prefix=f"{path}.", parent_required=is_required, seen_stack=seen_stack))
+
+        return _dedupe_param_entries(entries)
+    finally:
+        seen_stack.remove(schema_id)
+
+
+def _cache_field_value(field: Any, key: str) -> Any:
+    if isinstance(field, dict):
+        return field.get(key)
+    return getattr(field, key, None)
+
+
+def _flatten_cache_properties(properties: dict[str, Any], prefix: str) -> list[str]:
+    entries: list[str] = []
+    for prop_name, prop in properties.items():
+        path = f"{prefix}{prop_name}" if prefix else prop_name
+        entries.append(path)
+
+        prop_type = _cache_field_value(prop, "type")
+        prop_properties = _cache_field_value(prop, "properties")
+
+        if _type_includes(prop_type, "array"):
+            array_path = f"{path}[]"
+            entries.append(array_path)
+            if isinstance(prop_properties, dict):
+                entries.extend(_flatten_cache_properties(prop_properties, prefix=f"{array_path}."))
+        elif isinstance(prop_properties, dict):
+            entries.extend(_flatten_cache_properties(prop_properties, prefix=f"{path}."))
+
+    return entries
+
+
+def _flatten_cache_field_paths(field: Any) -> list[str]:
+    field_name = _cache_field_value(field, "name")
+    if not isinstance(field_name, str) or not field_name:
+        return []
+
+    field_type = _cache_field_value(field, "type")
+    field_properties = _cache_field_value(field, "properties")
+
+    entries = [field_name]
+    if _type_includes(field_type, "array"):
+        array_path = f"{field_name}[]"
+        entries.append(array_path)
+        if isinstance(field_properties, dict):
+            entries.extend(_flatten_cache_properties(field_properties, prefix=f"{array_path}."))
+    elif isinstance(field_properties, dict):
+        entries.extend(_flatten_cache_properties(field_properties, prefix=f"{field_name}."))
+
+    return entries
+
+
+def _dedupe_strings(values: list[str]) -> list[str]:
+    seen: set[str] = set()
+    ordered: list[str] = []
+    for value in values:
+        if value not in seen:
+            seen.add(value)
+            ordered.append(value)
+    return ordered
+
+
+def _collect_search_field_paths(model: ConnectorModelProtocol) -> dict[str, list[str]]:
+    search_field_paths = getattr(model, "search_field_paths", None)
+    if isinstance(search_field_paths, dict) and search_field_paths:
+        normalized: dict[str, list[str]] = {}
+        for entity, fields in search_field_paths.items():
+            if not isinstance(entity, str) or not entity:
+                continue
+            if isinstance(fields, list):
+                normalized[entity] = _dedupe_strings([field for field in fields if isinstance(field, str) and field])
+        return normalized
+
+    openapi_spec = getattr(model, "openapi_spec", None)
+    info = getattr(openapi_spec, "info", None)
+    cache_config = getattr(info, "x_airbyte_cache", None)
+    entities = getattr(cache_config, "entities", None)
+    if not isinstance(entities, list):
+        return {}
+
+    search_fields: dict[str, list[str]] = {}
+    for entity in entities:
+        entity_name = _cache_field_value(entity, "entity")
+        if not isinstance(entity_name, str) or not entity_name:
+            continue
+
+        fields = _cache_field_value(entity, "fields") or []
+        if not isinstance(fields, list):
+            continue
+        field_paths: list[str] = []
+        for field in fields:
+            field_paths.extend(_flatten_cache_field_paths(field))
+
+        search_fields[entity_name] = _dedupe_strings(field_paths)
+
+    return search_fields
+
+
+def _format_search_param_signature() -> str:
+    params = ["query*", "limit?", "cursor?", "fields?"]
+    return f"({', '.join(params)})"
+
+
 class EndpointProtocol(Protocol):
     """Protocol defining the expected interface for endpoint parameters.
 
@@ -54,6 +233,9 @@ class ConnectorModelProtocol(Protocol):
     @property
     def openapi_spec(self) -> Any: ...
 
+    @property
+    def search_field_paths(self) -> dict[str, list[str]] | None: ...
+
 
 def format_param_signature(endpoint: EndpointProtocol) -> str:
     """Format parameter signature for an endpoint action.
@@ -86,9 +268,12 @@ def format_param_signature(endpoint: EndpointProtocol) -> str:
         required = schema.get("required", False)
         params.append(f"{name}{'*' if required else '?'}")
 
-    # Body fields
-    if request_schema:
-        required_fields = set(request_schema.get("required", []))
+    # Body fields (include nested params from schema when available)
+    if isinstance(request_schema, dict):
+        for name, required in _flatten_schema_params(request_schema):
+            params.append(f"{name}{'*' if required else '?'}")
+    elif request_schema:
+        required_fields = set(request_schema.get("required", [])) if isinstance(request_schema, dict) else set()
         for name in body_fields:
             params.append(f"{name}{'*' if name in required_fields else '?'}")
 
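For an assumed nested request schema, the flattening should yield entries roughly like the pairs below, which then render into the */? markers of the signature (a sketch; the schema and action are hypothetical, not taken from the Slack spec):

    request_schema = {
        "type": "object",
        "required": ["name"],
        "properties": {
            "name": {"type": "string"},
            "is_private": {"type": "boolean"},
            "members": {"type": "array", "items": {"type": "object", "properties": {"user_id": {"type": "string"}}}},
        },
    }
    entries = [("name", True), ("is_private", False), ("members", False), ("members[]", False), ("members[].user_id", False)]
    signature = "(" + ", ".join(f"{n}{'*' if req else '?'}" for n, req in entries) + ")"
    print(signature)  # (name*, is_private?, members?, members[]?, members[].user_id?)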
@@ -99,7 +284,7 @@ def describe_entities(model: ConnectorModelProtocol) -> list[dict[str, Any]]:
     """Generate entity descriptions from ConnectorModel.
 
     Returns a list of entity descriptions with detailed parameter information
-    for each action. This is used by generated connectors' describe() method.
+    for each action. This is used by generated connectors' list_entities() method.
 
     Args:
         model: Object conforming to ConnectorModelProtocol (e.g., ConnectorModel)
@@ -203,8 +388,8 @@ def generate_tool_description(model: ConnectorModelProtocol) -> str:
     - Response structure documentation with pagination hints
     - Example questions if available in the OpenAPI spec
 
-    This is used by the Connector.describe class method decorator to populate
-    function docstrings for AI framework integration.
+    This is used by the Connector.tool_utils decorator to populate function
+    docstrings for AI framework integration.
 
     Args:
         model: Object conforming to ConnectorModelProtocol (e.g., ConnectorModel)
@@ -213,8 +398,11 @@ def generate_tool_description(model: ConnectorModelProtocol) -> str:
         Formatted description string suitable for AI tool documentation
     """
     lines = []
+    # NOTE: Do not insert blank lines in the docstring; pydantic-ai parsing truncates
+    # at the first empty line and only keeps the initial section.
 
     # Entity/action parameter details (including pagination params like limit, starting_after)
+    search_field_paths = _collect_search_field_paths(model)
     lines.append("ENTITIES AND PARAMETERS:")
     for entity in model.entities:
         lines.append(f" {entity.name}:")
@@ -228,14 +416,41 @@ def generate_tool_description(model: ConnectorModelProtocol) -> str:
                 lines.append(f" - {action_str}{param_sig}")
             else:
                 lines.append(f" - {action_str}()")
+        if entity.name in search_field_paths:
+            search_sig = _format_search_param_signature()
+            lines.append(f" - search{search_sig}")
 
     # Response structure (brief, includes pagination hint)
-    lines.append("")
     lines.append("RESPONSE STRUCTURE:")
     lines.append(" - list/api_search: {data: [...], meta: {has_more: bool}}")
     lines.append(" - get: Returns entity directly (no envelope)")
     lines.append(" To paginate: pass starting_after=<last_id> while has_more is true")
 
+    lines.append("GUIDELINES:")
+    lines.append(' - Prefer cached search over direct API calls when using execute(): action="search" whenever possible.')
+    lines.append(" - Direct API actions (list/get/download) are slower and should be used only if search cannot answer the query.")
+    lines.append(" - Keep results small: use params.fields, params.query.filter, small params.limit, and cursor pagination.")
+    lines.append(" - If output is too large, refine the query with tighter filters/fields/limit.")
+
+    if search_field_paths:
+        lines.append("SEARCH (PREFERRED):")
+        lines.append(' execute(entity, action="search", params={')
+        lines.append(' "query": {"filter": <condition>, "sort": [{"field": "asc|desc"}, ...]},')
+        lines.append(' "limit": <int>, "cursor": <str>, "fields": ["field", "nested.field", ...]')
+        lines.append(" })")
+        lines.append(' Example: {"query": {"filter": {"eq": {"title": "Intro to Airbyte | Miinto"}}}, "limit": 1,')
+        lines.append(' "fields": ["id", "title", "started", "primaryUserId"]}')
+        lines.append(" Conditions are composable:")
+        lines.append(" - eq, neq, gt, gte, lt, lte, in, like, fuzzy, keyword, contains, any")
+        lines.append(' - and/or/not to combine conditions (e.g., {"and": [cond1, cond2]})')
+
+        lines.append("SEARCHABLE FIELDS:")
+        for entity_name, field_paths in search_field_paths.items():
+            if field_paths:
+                lines.append(f" {entity_name}: {', '.join(field_paths)}")
+            else:
+                lines.append(f" {entity_name}: (no fields listed)")
+
     # Add example questions if available in openapi_spec
     openapi_spec = getattr(model, "openapi_spec", None)
     if openapi_spec:
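A hedged sketch of a composed search request in the shape the docstring above describes (the "messages" entity, the field names, and the connector handle are assumptions for illustration):

    params = {
        "query": {
            "filter": {"and": [
                {"eq": {"user": "U012ABC"}},
                {"contains": {"text": "deploy"}},
            ]},
            "sort": [{"ts": "desc"}],
        },
        "limit": 5,
        "fields": ["text", "user", "ts"],
    }
    # connector.execute("messages", action="search", params=params)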
@@ -245,18 +460,15 @@ def generate_tool_description(model: ConnectorModelProtocol) -> str:
         if example_questions:
             supported = getattr(example_questions, "supported", None)
             if supported:
-                lines.append("")
                 lines.append("EXAMPLE QUESTIONS:")
                 for q in supported[:MAX_EXAMPLE_QUESTIONS]:
                     lines.append(f" - {q}")
 
     # Generic parameter description for function signature
-    lines.append("")
     lines.append("FUNCTION PARAMETERS:")
     lines.append(" - entity: Entity name (string)")
     lines.append(" - action: Operation to perform (string)")
     lines.append(" - params: Operation parameters (dict) - see entity details above")
-    lines.append("")
     lines.append("Parameter markers: * = required, ? = optional")
 
     return "\n".join(lines)
@@ -13,7 +13,7 @@ from uuid import UUID
 from pydantic import BaseModel, ConfigDict, Field, field_validator
 from pydantic_core import Url
 
-from .extensions import CacheConfig, RetryConfig
+from .extensions import CacheConfig, ReplicationConfig, RetryConfig
 
 
 class ExampleQuestions(BaseModel):
@@ -106,6 +106,7 @@ class Info(BaseModel):
     - x-airbyte-retry-config: Retry configuration for transient errors (Airbyte extension)
     - x-airbyte-example-questions: Example questions for AI connector README (Airbyte extension)
     - x-airbyte-cache: Cache configuration for field mapping between API and cache schemas (Airbyte extension)
+    - x-airbyte-replication-config: Replication configuration for MULTI mode connectors (Airbyte extension)
     """
 
     model_config = ConfigDict(populate_by_name=True, extra="forbid")
@@ -124,6 +125,7 @@ class Info(BaseModel):
     x_airbyte_retry_config: RetryConfig | None = Field(None, alias="x-airbyte-retry-config")
     x_airbyte_example_questions: ExampleQuestions | None = Field(None, alias="x-airbyte-example-questions")
     x_airbyte_cache: CacheConfig | None = Field(None, alias="x-airbyte-cache")
+    x_airbyte_replication_config: ReplicationConfig | None = Field(None, alias="x-airbyte-replication-config")
 
 
 class ServerVariable(BaseModel):
@@ -140,6 +142,33 @@ class ServerVariable(BaseModel):
     description: str | None = None
 
 
+class EnvironmentMappingTransform(BaseModel):
+    """
+    Structured transform for environment mapping values.
+
+    Allows transforming environment values before storing in source_config.
+
+    Example:
+        source: subdomain
+        format: "{value}.atlassian.net"
+
+    The format string uses {value} as a placeholder for the source value.
+    """
+
+    model_config = ConfigDict(populate_by_name=True, extra="forbid")
+
+    source: str = Field(description="The environment config key to read the value from")
+    format: str | None = Field(
+        default=None,
+        description="Optional format string to transform the value. Use {value} as placeholder.",
+    )
+
+
+# Type alias for environment mapping values: either a simple string (config key)
+# or a structured transform with source and optional transform template
+EnvironmentMappingValue = str | EnvironmentMappingTransform
+
+
 class Server(BaseModel):
     """
     Server URL and variable definitions.
@@ -152,7 +181,10 @@ class Server(BaseModel):
     url: str
     description: str | None = None
     variables: Dict[str, ServerVariable] = Field(default_factory=dict)
-    x_airbyte_replication_environment_mapping: Dict[str, str] | None = Field(default=None, alias="x-airbyte-replication-environment-mapping")
+    x_airbyte_replication_environment_mapping: Dict[str, EnvironmentMappingValue] | None = Field(
+        default=None,
+        alias="x-airbyte-replication-environment-mapping",
+    )
     x_airbyte_replication_environment_constants: Dict[str, Any] | None = Field(
         default=None,
         alias="x-airbyte-replication-environment-constants",
@@ -134,6 +134,11 @@ class GraphQLBodyConfig(BaseModel):
         None,
         description="Default fields to select if not provided in request parameters. Can be a string or array of field names.",
     )
+    nullable_variables: List[str] | None = Field(
+        default=None,
+        alias="x-airbyte-nullable-variables",
+        description="Variable names that can be explicitly set to null (e.g., to unassign a user)",
+    )
 
 
 # Union type for all body type configs (extensible for future types like XML, SOAP, etc.)