airbyte-agent-hubspot 0.15.25__py3-none-any.whl → 0.15.43__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. airbyte_agent_hubspot/__init__.py +100 -25
  2. airbyte_agent_hubspot/_vendored/connector_sdk/auth_strategies.py +2 -5
  3. airbyte_agent_hubspot/_vendored/connector_sdk/auth_template.py +1 -1
  4. airbyte_agent_hubspot/_vendored/connector_sdk/cloud_utils/client.py +26 -26
  5. airbyte_agent_hubspot/_vendored/connector_sdk/connector_model_loader.py +11 -4
  6. airbyte_agent_hubspot/_vendored/connector_sdk/constants.py +1 -1
  7. airbyte_agent_hubspot/_vendored/connector_sdk/executor/hosted_executor.py +10 -11
  8. airbyte_agent_hubspot/_vendored/connector_sdk/executor/local_executor.py +163 -34
  9. airbyte_agent_hubspot/_vendored/connector_sdk/extensions.py +43 -5
  10. airbyte_agent_hubspot/_vendored/connector_sdk/http/response.py +2 -0
  11. airbyte_agent_hubspot/_vendored/connector_sdk/introspection.py +262 -0
  12. airbyte_agent_hubspot/_vendored/connector_sdk/logging/logger.py +9 -9
  13. airbyte_agent_hubspot/_vendored/connector_sdk/logging/types.py +10 -10
  14. airbyte_agent_hubspot/_vendored/connector_sdk/observability/config.py +179 -0
  15. airbyte_agent_hubspot/_vendored/connector_sdk/observability/models.py +6 -6
  16. airbyte_agent_hubspot/_vendored/connector_sdk/observability/session.py +41 -32
  17. airbyte_agent_hubspot/_vendored/connector_sdk/performance/metrics.py +3 -3
  18. airbyte_agent_hubspot/_vendored/connector_sdk/schema/base.py +20 -18
  19. airbyte_agent_hubspot/_vendored/connector_sdk/schema/components.py +59 -58
  20. airbyte_agent_hubspot/_vendored/connector_sdk/schema/connector.py +22 -33
  21. airbyte_agent_hubspot/_vendored/connector_sdk/schema/extensions.py +103 -10
  22. airbyte_agent_hubspot/_vendored/connector_sdk/schema/operations.py +32 -32
  23. airbyte_agent_hubspot/_vendored/connector_sdk/schema/security.py +44 -34
  24. airbyte_agent_hubspot/_vendored/connector_sdk/secrets.py +2 -2
  25. airbyte_agent_hubspot/_vendored/connector_sdk/telemetry/events.py +9 -8
  26. airbyte_agent_hubspot/_vendored/connector_sdk/telemetry/tracker.py +9 -5
  27. airbyte_agent_hubspot/_vendored/connector_sdk/types.py +7 -3
  28. airbyte_agent_hubspot/connector.py +182 -87
  29. airbyte_agent_hubspot/connector_model.py +17 -12
  30. airbyte_agent_hubspot/models.py +21 -21
  31. airbyte_agent_hubspot/types.py +45 -45
  32. {airbyte_agent_hubspot-0.15.25.dist-info → airbyte_agent_hubspot-0.15.43.dist-info}/METADATA +25 -22
  33. {airbyte_agent_hubspot-0.15.25.dist-info → airbyte_agent_hubspot-0.15.43.dist-info}/RECORD +34 -32
  34. {airbyte_agent_hubspot-0.15.25.dist-info → airbyte_agent_hubspot-0.15.43.dist-info}/WHEEL +0 -0
airbyte_agent_hubspot/_vendored/connector_sdk/introspection.py
@@ -0,0 +1,262 @@
+ """
+ Shared introspection utilities for connector metadata.
+
+ This module provides utilities for introspecting connector metadata,
+ generating descriptions, and formatting parameter signatures. These
+ functions are used by both the runtime decorators and the generated
+ connector code.
+
+ The module is designed to work with any object conforming to the
+ ConnectorModel and EndpointDefinition interfaces from connector_sdk.types.
+ """
+
+ from __future__ import annotations
+
+ from typing import Any, Protocol
+
+ # Constants
+ MAX_EXAMPLE_QUESTIONS = 5  # Maximum number of example questions to include in description
+
+
+ class EndpointProtocol(Protocol):
+     """Protocol defining the expected interface for endpoint parameters.
+
+     This allows functions to work with any endpoint-like object
+     that has these attributes, including EndpointDefinition and mock objects.
+     """
+
+     path_params: list[str]
+     path_params_schema: dict[str, dict[str, Any]]
+     query_params: list[str]
+     query_params_schema: dict[str, dict[str, Any]]
+     body_fields: list[str]
+     request_schema: dict[str, Any] | None
+
+
+ class EntityProtocol(Protocol):
+     """Protocol defining the expected interface for entity definitions."""
+
+     name: str
+     actions: list[Any]
+     endpoints: dict[Any, EndpointProtocol]
+
+
+ class ConnectorModelProtocol(Protocol):
+     """Protocol defining the expected interface for connector model parameters.
+
+     This allows functions to work with any connector-like object
+     that has these attributes, including ConnectorModel and mock objects.
+     """
+
+     @property
+     def entities(self) -> list[EntityProtocol]: ...
+
+     @property
+     def openapi_spec(self) -> Any: ...
+
+
+ def format_param_signature(endpoint: EndpointProtocol) -> str:
+     """Format parameter signature for an endpoint action.
+
+     Returns a string like: (id*) or (limit?, starting_after?, email?)
+     where * = required, ? = optional
+
+     Args:
+         endpoint: Object conforming to EndpointProtocol (e.g., EndpointDefinition)
+
+     Returns:
+         Formatted parameter signature string
+     """
+     params = []
+
+     # Defensive: safely access attributes with defaults for malformed endpoints
+     path_params = getattr(endpoint, "path_params", []) or []
+     query_params = getattr(endpoint, "query_params", []) or []
+     query_params_schema = getattr(endpoint, "query_params_schema", {}) or {}
+     body_fields = getattr(endpoint, "body_fields", []) or []
+     request_schema = getattr(endpoint, "request_schema", None)
+
+     # Path params (always required)
+     for name in path_params:
+         params.append(f"{name}*")
+
+     # Query params
+     for name in query_params:
+         schema = query_params_schema.get(name, {})
+         required = schema.get("required", False)
+         params.append(f"{name}{'*' if required else '?'}")
+
+     # Body fields
+     if request_schema:
+         required_fields = set(request_schema.get("required", []))
+         for name in body_fields:
+             params.append(f"{name}{'*' if name in required_fields else '?'}")
+
+     return f"({', '.join(params)})" if params else "()"
+
+
+ def describe_entities(model: ConnectorModelProtocol) -> list[dict[str, Any]]:
+     """Generate entity descriptions from ConnectorModel.
+
+     Returns a list of entity descriptions with detailed parameter information
+     for each action. This is used by generated connectors' describe() method.
+
+     Args:
+         model: Object conforming to ConnectorModelProtocol (e.g., ConnectorModel)
+
+     Returns:
+         List of entity description dicts with keys:
+         - entity_name: Name of the entity (e.g., "contacts", "deals")
+         - description: Entity description from the first endpoint
+         - available_actions: List of actions (e.g., ["list", "get", "create"])
+         - parameters: Dict mapping action -> list of parameter dicts
+     """
+     entities = []
+     for entity_def in model.entities:
+         description = ""
+         parameters: dict[str, list[dict[str, Any]]] = {}
+
+         endpoints = getattr(entity_def, "endpoints", {}) or {}
+         if endpoints:
+             for action, endpoint in endpoints.items():
+                 # Get description from first endpoint that has one
+                 if not description:
+                     endpoint_desc = getattr(endpoint, "description", None)
+                     if endpoint_desc:
+                         description = endpoint_desc
+
+                 action_params: list[dict[str, Any]] = []
+
+                 # Defensive: safely access endpoint attributes
+                 path_params = getattr(endpoint, "path_params", []) or []
+                 path_params_schema = getattr(endpoint, "path_params_schema", {}) or {}
+                 query_params = getattr(endpoint, "query_params", []) or []
+                 query_params_schema = getattr(endpoint, "query_params_schema", {}) or {}
+                 body_fields = getattr(endpoint, "body_fields", []) or []
+                 request_schema = getattr(endpoint, "request_schema", None)
+
+                 # Path params (always required)
+                 for param_name in path_params:
+                     schema = path_params_schema.get(param_name, {})
+                     action_params.append(
+                         {
+                             "name": param_name,
+                             "in": "path",
+                             "required": True,
+                             "type": schema.get("type", "string"),
+                             "description": schema.get("description", ""),
+                         }
+                     )
+
+                 # Query params
+                 for param_name in query_params:
+                     schema = query_params_schema.get(param_name, {})
+                     action_params.append(
+                         {
+                             "name": param_name,
+                             "in": "query",
+                             "required": schema.get("required", False),
+                             "type": schema.get("type", "string"),
+                             "description": schema.get("description", ""),
+                         }
+                     )
+
+                 # Body fields
+                 if request_schema:
+                     required_fields = request_schema.get("required", [])
+                     properties = request_schema.get("properties", {})
+                     for param_name in body_fields:
+                         prop = properties.get(param_name, {})
+                         action_params.append(
+                             {
+                                 "name": param_name,
+                                 "in": "body",
+                                 "required": param_name in required_fields,
+                                 "type": prop.get("type", "string"),
+                                 "description": prop.get("description", ""),
+                             }
+                         )
+
+                 if action_params:
+                     # Action is an enum, use .value to get string
+                     action_key = action.value if hasattr(action, "value") else str(action)
+                     parameters[action_key] = action_params
+
+         actions = getattr(entity_def, "actions", []) or []
+         entities.append(
+             {
+                 "entity_name": entity_def.name,
+                 "description": description,
+                 "available_actions": [a.value if hasattr(a, "value") else str(a) for a in actions],
+                 "parameters": parameters,
+             }
+         )
+
+     return entities
+
+
+ def generate_tool_description(model: ConnectorModelProtocol) -> str:
+     """Generate AI tool description from connector metadata.
+
+     Produces a detailed description that includes:
+     - Per-entity/action parameter signatures with required (*) and optional (?) markers
+     - Response structure documentation with pagination hints
+     - Example questions if available in the OpenAPI spec
+
+     This is used by the Connector.describe class method decorator to populate
+     function docstrings for AI framework integration.
+
+     Args:
+         model: Object conforming to ConnectorModelProtocol (e.g., ConnectorModel)
+
+     Returns:
+         Formatted description string suitable for AI tool documentation
+     """
+     lines = []
+
+     # Entity/action parameter details (including pagination params like limit, starting_after)
+     lines.append("ENTITIES AND PARAMETERS:")
+     for entity in model.entities:
+         lines.append(f" {entity.name}:")
+         actions = getattr(entity, "actions", []) or []
+         endpoints = getattr(entity, "endpoints", {}) or {}
+         for action in actions:
+             action_str = action.value if hasattr(action, "value") else str(action)
+             endpoint = endpoints.get(action)
+             if endpoint:
+                 param_sig = format_param_signature(endpoint)
+                 lines.append(f" - {action_str}{param_sig}")
+             else:
+                 lines.append(f" - {action_str}()")
+
+     # Response structure (brief, includes pagination hint)
+     lines.append("")
+     lines.append("RESPONSE STRUCTURE:")
+     lines.append(" - list/api_search: {data: [...], meta: {has_more: bool}}")
+     lines.append(" - get: Returns entity directly (no envelope)")
+     lines.append(" To paginate: pass starting_after=<last_id> while has_more is true")
+
+     # Add example questions if available in openapi_spec
+     openapi_spec = getattr(model, "openapi_spec", None)
+     if openapi_spec:
+         info = getattr(openapi_spec, "info", None)
+         if info:
+             example_questions = getattr(info, "x_airbyte_example_questions", None)
+             if example_questions:
+                 supported = getattr(example_questions, "supported", None)
+                 if supported:
+                     lines.append("")
+                     lines.append("EXAMPLE QUESTIONS:")
+                     for q in supported[:MAX_EXAMPLE_QUESTIONS]:
+                         lines.append(f" - {q}")
+
+     # Generic parameter description for function signature
+     lines.append("")
+     lines.append("FUNCTION PARAMETERS:")
+     lines.append(" - entity: Entity name (string)")
+     lines.append(" - action: Operation to perform (string)")
+     lines.append(" - params: Operation parameters (dict) - see entity details above")
+     lines.append("")
+     lines.append("Parameter markers: * = required, ? = optional")
+
+     return "\n".join(lines)
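Because these helpers reach into their arguments only via getattr(), any duck-typed object shaped like the Protocols above will do. A minimal sketch of what the new signature formatter produces; the SimpleNamespace endpoint here is an illustrative stand-in, not part of the package:

    from types import SimpleNamespace

    from airbyte_agent_hubspot._vendored.connector_sdk.introspection import (
        format_param_signature,
    )

    # Hypothetical endpoint shaped like EndpointProtocol; any object with
    # these attributes works because the helpers only call getattr().
    endpoint = SimpleNamespace(
        path_params=["id"],
        path_params_schema={"id": {"type": "string"}},
        query_params=["limit", "starting_after"],
        query_params_schema={
            "limit": {"required": False, "type": "integer"},
            "starting_after": {"required": False, "type": "string"},
        },
        body_fields=[],
        request_schema=None,
    )

    # Path params are always required (*); optional query params get "?".
    print(format_param_signature(endpoint))  # -> (id*, limit?, starting_after?)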
airbyte_agent_hubspot/_vendored/connector_sdk/logging/logger.py
@@ -5,7 +5,7 @@ import json
  import time
  import uuid
  from pathlib import Path
- from typing import Any, Dict, Optional, Set
+ from typing import Any, Dict, Set

  from .types import LogSession, RequestLog

@@ -31,9 +31,9 @@ class RequestLogger:

      def __init__(
          self,
-         log_file: Optional[str] = None,
-         connector_name: Optional[str] = None,
-         max_logs: Optional[int] = 10000,
+         log_file: str | None = None,
+         connector_name: str | None = None,
+         max_logs: int | None = 10000,
      ):
          """
          Initialize the request logger.
@@ -99,9 +99,9 @@ class RequestLogger:
          method: str,
          url: str,
          path: str,
-         headers: Optional[Dict[str, str]] = None,
-         params: Optional[Dict[str, Any]] = None,
-         body: Optional[Any] = None,
+         headers: Dict[str, str] | None = None,
+         params: Dict[str, Any] | None = None,
+         body: Any | None = None,
      ) -> str:
          """
          Log the start of an HTTP request.
@@ -133,7 +133,7 @@ class RequestLogger:
          self,
          request_id: str,
          status_code: int,
-         response_body: Optional[Any] = None,
+         response_body: Any | None = None,
      ) -> None:
          """
          Log a successful HTTP response.
@@ -176,7 +176,7 @@ class RequestLogger:
          self,
          request_id: str,
          error: str,
-         status_code: Optional[int] = None,
+         status_code: int | None = None,
      ) -> None:
          """
          Log an HTTP request error.
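The bulk of the churn in logger.py, and in several of the modules below, is mechanical: Optional[X] annotations become X | None, the PEP 604 union syntax. On Python 3.10+ the two spellings compare equal at runtime, so this is a pure style migration. A quick illustrative check, not from the package:

    from typing import Optional

    # PEP 604 unions compare equal to the typing module's equivalents (Python 3.10+).
    assert (str | None) == Optional[str]

    # Same contract as the old Optional[str] parameter in RequestLogger.__init__.
    def configure(log_file: str | None = None) -> None:
        ...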
airbyte_agent_hubspot/_vendored/connector_sdk/logging/types.py
@@ -2,7 +2,7 @@

  import base64
  from datetime import UTC, datetime
- from typing import Any, Dict, List, Optional
+ from typing import Any, Dict, List

  from pydantic import BaseModel, ConfigDict, Field, field_serializer, field_validator

@@ -27,12 +27,12 @@ class RequestLog(BaseModel):
      url: str
      path: str
      headers: Dict[str, str] = Field(default_factory=dict)
-     params: Optional[Dict[str, Any]] = None
-     body: Optional[Any] = None
-     response_status: Optional[int] = None
-     response_body: Optional[Any] = None
-     timing_ms: Optional[float] = None
-     error: Optional[str] = None
+     params: Dict[str, Any] | None = None
+     body: Any | None = None
+     response_status: int | None = None
+     response_body: Any | None = None
+     timing_ms: float | None = None
+     error: str | None = None

      @field_serializer("timestamp")
      def serialize_datetime(self, value: datetime) -> str:
@@ -50,9 +50,9 @@ class LogSession(BaseModel):

      session_id: str
      started_at: datetime = Field(default_factory=_utc_now)
-     connector_name: Optional[str] = None
+     connector_name: str | None = None
      logs: List[RequestLog] = Field(default_factory=list)
-     max_logs: Optional[int] = Field(
+     max_logs: int | None = Field(
          default=10000,
          description="Maximum number of logs to keep in memory. "
          "When limit is reached, oldest logs should be flushed before removal. "
@@ -60,7 +60,7 @@ class LogSession(BaseModel):
      )
      chunk_logs: List[bytes] = Field(
          default_factory=list,
-         description="Captured chunks from streaming responses. " "Each chunk is logged when log_chunk_fetch() is called.",
+         description="Captured chunks from streaming responses. Each chunk is logged when log_chunk_fetch() is called.",
      )

      @field_validator("chunk_logs", mode="before")
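Alongside the same Optional migration, the chunk_logs change collapses two adjacent string literals into one. Python concatenates adjacent literals at compile time, so the value is unchanged; the single literal is simply easier to read. An illustrative check:

    # Two adjacent literals (the old line-wrapped form) compile to one string...
    wrapped = "Captured chunks from streaming responses. " "Each chunk is logged when log_chunk_fetch() is called."

    # ...identical to the new single-literal form.
    single = "Captured chunks from streaming responses. Each chunk is logged when log_chunk_fetch() is called."

    assert wrapped == single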
airbyte_agent_hubspot/_vendored/connector_sdk/observability/config.py
@@ -0,0 +1,179 @@
+ """Unified configuration for connector-sdk."""
+
+ import logging
+ import os
+ import tempfile
+ import uuid
+ from dataclasses import dataclass, field
+ from pathlib import Path
+ from typing import Any
+
+ import yaml
+
+ logger = logging.getLogger(__name__)
+
+ # New config location
+ CONFIG_DIR = Path.home() / ".airbyte" / "connector-sdk"
+ CONFIG_PATH = CONFIG_DIR / "config.yaml"
+
+ # Legacy file locations (for migration)
+ LEGACY_USER_ID_PATH = Path.home() / ".airbyte" / "ai_sdk_user_id"
+ LEGACY_INTERNAL_MARKER_PATH = Path.home() / ".airbyte" / "internal_user"
+
+
+ @dataclass
+ class SDKConfig:
+     """Connector SDK configuration."""
+
+     user_id: str = field(default_factory=lambda: str(uuid.uuid4()))
+     is_internal_user: bool = False
+
+     def to_dict(self) -> dict[str, Any]:
+         """Convert to dictionary for YAML serialization."""
+         return {
+             "user_id": self.user_id,
+             "is_internal_user": self.is_internal_user,
+         }
+
+
+ def _delete_legacy_files() -> None:
+     """
+     Delete legacy config files after successful migration.
+
+     Removes:
+     - ~/.airbyte/ai_sdk_user_id
+     - ~/.airbyte/internal_user
+     """
+     for legacy_path in [LEGACY_USER_ID_PATH, LEGACY_INTERNAL_MARKER_PATH]:
+         try:
+             if legacy_path.exists():
+                 legacy_path.unlink()
+                 logger.debug(f"Deleted legacy config file: {legacy_path}")
+         except Exception as e:
+             logger.debug(f"Could not delete legacy file {legacy_path}: {e}")
+
+
+ def _migrate_legacy_config() -> SDKConfig | None:
+     """
+     Migrate from legacy file-based config to new YAML format.
+
+     Reads from:
+     - ~/.airbyte/ai_sdk_user_id (user_id)
+     - ~/.airbyte/internal_user (is_internal_user marker)
+
+     Returns SDKConfig if migration was successful, None otherwise.
+     """
+     user_id = None
+     is_internal = False
+
+     # Try to read legacy user_id
+     try:
+         if LEGACY_USER_ID_PATH.exists():
+             user_id = LEGACY_USER_ID_PATH.read_text().strip()
+             if not user_id:
+                 user_id = None
+     except Exception:
+         pass
+
+     # Check legacy internal_user marker
+     try:
+         is_internal = LEGACY_INTERNAL_MARKER_PATH.exists()
+     except Exception:
+         pass
+
+     if user_id or is_internal:
+         return SDKConfig(
+             user_id=user_id or str(uuid.uuid4()),
+             is_internal_user=is_internal,
+         )
+
+     return None
+
+
+ def load_config() -> SDKConfig:
+     """
+     Load SDK configuration from config file.
+
+     Checks (in order):
+     1. New config file at ~/.airbyte/connector-sdk/config.yaml
+     2. Legacy files at ~/.airbyte/ai_sdk_user_id and ~/.airbyte/internal_user
+     3. Creates new config with generated user_id if nothing exists
+
+     Environment variable AIRBYTE_INTERNAL_USER can override is_internal_user.
+
+     Returns:
+         SDKConfig with user_id and is_internal_user
+     """
+     config = None
+
+     # Try to load from new config file
+     try:
+         if CONFIG_PATH.exists():
+             content = CONFIG_PATH.read_text()
+             data = yaml.safe_load(content) or {}
+             config = SDKConfig(
+                 user_id=data.get("user_id", str(uuid.uuid4())),
+                 is_internal_user=data.get("is_internal_user", False),
+             )
+             # Always clean up legacy files if they exist (even if new config exists)
+             _delete_legacy_files()
+     except Exception as e:
+         logger.debug(f"Could not load config from {CONFIG_PATH}: {e}")
+
+     # Try to migrate from legacy files if new config doesn't exist
+     if config is None:
+         config = _migrate_legacy_config()
+         if config:
+             # Save migrated config to new location
+             try:
+                 save_config(config)
+                 logger.debug("Migrated legacy config to new location")
+                 # Delete legacy files after successful migration
+                 _delete_legacy_files()
+             except Exception as e:
+                 logger.debug(f"Could not save migrated config: {e}")
+
+     # Create new config if nothing exists
+     if config is None:
+         config = SDKConfig()
+         try:
+             save_config(config)
+         except Exception as e:
+             logger.debug(f"Could not save new config: {e}")
+
+     # Environment variable override for is_internal_user
+     env_value = os.getenv("AIRBYTE_INTERNAL_USER", "").lower()
+     if env_value in ("true", "1", "yes"):
+         config.is_internal_user = True
+     elif env_value:
+         # Any other non-empty value (including "false", "0", "no") defaults to False
+         config.is_internal_user = False
+
+     return config
+
+
+ def save_config(config: SDKConfig) -> None:
+     """
+     Save SDK configuration to config file.
+
+     Creates the config directory if it doesn't exist.
+     Uses atomic writes to prevent corruption from concurrent access.
+
+     Args:
+         config: SDKConfig to save
+     """
+     CONFIG_DIR.mkdir(parents=True, exist_ok=True)
+
+     # Use atomic write: write to temp file then rename (atomic on POSIX)
+     fd, temp_path = tempfile.mkstemp(dir=CONFIG_DIR, suffix=".tmp")
+     try:
+         with os.fdopen(fd, "w") as f:
+             yaml.dump(config.to_dict(), f, default_flow_style=False)
+         os.rename(temp_path, CONFIG_PATH)
+     except Exception:
+         # Clean up temp file on failure
+         try:
+             os.unlink(temp_path)
+         except OSError:
+             pass
+         raise
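Together, load_config() and save_config() define a read-migrate-write lifecycle for the new config.yaml. A minimal usage sketch, assuming the vendored import path from this wheel's file list:

    import os

    from airbyte_agent_hubspot._vendored.connector_sdk.observability.config import (
        SDKConfig,
        load_config,
        save_config,
    )

    # First load creates ~/.airbyte/connector-sdk/config.yaml with a fresh UUID,
    # or migrates the legacy ~/.airbyte/ai_sdk_user_id file if one exists.
    config = load_config()
    print(config.user_id)

    # The AIRBYTE_INTERNAL_USER env var overrides whatever the file says.
    os.environ["AIRBYTE_INTERNAL_USER"] = "true"
    assert load_config().is_internal_user is True

    # Saves go through a temp file plus rename, so an interrupted write
    # cannot leave a half-written config.yaml behind (atomic on POSIX).
    save_config(SDKConfig(user_id=config.user_id, is_internal_user=False))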
airbyte_agent_hubspot/_vendored/connector_sdk/observability/models.py
@@ -2,7 +2,7 @@

  from dataclasses import dataclass
  from datetime import datetime
- from typing import Any, Dict, Optional
+ from typing import Any, Dict


  @dataclass
@@ -12,8 +12,8 @@ class OperationMetadata:
      entity: str
      action: str
      timestamp: datetime
-     timing_ms: Optional[float] = None
-     status_code: Optional[int] = None
-     error_type: Optional[str] = None
-     error_message: Optional[str] = None
-     params: Optional[Dict[str, Any]] = None
+     timing_ms: float | None = None
+     status_code: int | None = None
+     error_type: str | None = None
+     error_message: str | None = None
+     params: Dict[str, Any] | None = None
airbyte_agent_hubspot/_vendored/connector_sdk/observability/session.py
@@ -3,49 +3,43 @@
  import logging
  import uuid
  from datetime import UTC, datetime
- from pathlib import Path
- from typing import Any, Dict, Optional
+ from typing import Any, Dict
+
+ from .config import SDKConfig, load_config

  logger = logging.getLogger(__name__)

+ # Cache the config at module level to avoid repeated reads
+ _cached_config: SDKConfig | None = None
+
+
+ def _get_config() -> SDKConfig:
+     """Get cached SDK config or load from file."""
+     global _cached_config
+     if _cached_config is None:
+         _cached_config = load_config()
+     return _cached_config
+
+
+ def _clear_config_cache() -> None:
+     """Clear the cached config. Used for testing."""
+     global _cached_config
+     _cached_config = None
+

  def get_persistent_user_id() -> str:
      """
-     Get or create an anonymous user ID stored in the home directory.
+     Get the persistent anonymous user ID.

-     The ID is stored in ~/.airbyte/ai_sdk_user_id and persists across all sessions.
-     If the file doesn't exist, a new UUID is generated and saved.
+     Now reads from ~/.airbyte/connector-sdk/config.yaml

      Returns:
          An anonymous UUID string that uniquely identifies this user across sessions.
      """
-     try:
-         # Create .airbyte directory in home folder if it doesn't exist
-         airbyte_dir = Path.home() / ".airbyte"
-         airbyte_dir.mkdir(exist_ok=True)
-
-         # Path to user ID file
-         user_id_file = airbyte_dir / "ai_sdk_user_id"
-
-         # Try to read existing user ID
-         if user_id_file.exists():
-             user_id = user_id_file.read_text().strip()
-             if user_id:  # Validate it's not empty
-                 return user_id
+     return _get_config().user_id

-         # Generate new user ID if file doesn't exist or is empty
-         user_id = str(uuid.uuid4())
-         user_id_file.write_text(user_id)
-         logger.debug(f"Generated new anonymous user ID: {user_id}")

-         return user_id
-     except Exception as e:
-         # If we can't read/write the file, generate a session-only ID
-         logger.debug(f"Could not access anonymous user ID file: {e}")
-         return str(uuid.uuid4())
-
-
- def get_public_ip() -> Optional[str]:
+ def get_public_ip() -> str | None:
@@ -53,6 +47,8 @@ def get_public_ip() -> Optional[str]:
      Uses httpx for a robust HTTP request to a public IP service.
      """
      try:
+         # NOTE: Import here intentionally - this is a non-critical network call
+         # that may fail. Importing at module level would make httpx a hard dependency.
          import httpx

          # Use a short timeout to avoid blocking
@@ -65,15 +61,27 @@
      return None


+ def get_is_internal_user() -> bool:
+     """
+     Check if the current user is an internal Airbyte user.
+
+     Now reads from ~/.airbyte/connector-sdk/config.yaml
+     Environment variable AIRBYTE_INTERNAL_USER can override.
+
+     Returns False if not set or on any error.
+     """
+     return _get_config().is_internal_user
+
+
  class ObservabilitySession:
      """Shared session context for both logging and telemetry."""

      def __init__(
          self,
          connector_name: str,
-         connector_version: Optional[str] = None,
+         connector_version: str | None = None,
          execution_context: str = "direct",
-         session_id: Optional[str] = None,
+         session_id: str | None = None,
      ):
          self.session_id = session_id or str(uuid.uuid4())
          self.user_id = get_persistent_user_id()
@@ -84,6 +92,7 @@ class ObservabilitySession:
          self.operation_count = 0
          self.metadata: Dict[str, Any] = {}
          self.public_ip = get_public_ip()
+         self.is_internal_user = get_is_internal_user()

      def increment_operations(self):
          """Increment the operation counter."""