airbyte-agent-greenhouse 0.17.48__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airbyte_agent_greenhouse/__init__.py +105 -0
- airbyte_agent_greenhouse/_vendored/__init__.py +1 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/__init__.py +82 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/auth_strategies.py +1120 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/auth_template.py +135 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/cloud_utils/__init__.py +5 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/cloud_utils/client.py +213 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/connector_model_loader.py +965 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/constants.py +78 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/exceptions.py +23 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/executor/__init__.py +31 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/executor/hosted_executor.py +196 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/executor/local_executor.py +1724 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/executor/models.py +190 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/extensions.py +693 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/http/__init__.py +37 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/http/adapters/__init__.py +9 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/http/adapters/httpx_adapter.py +251 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/http/config.py +98 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/http/exceptions.py +119 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/http/protocols.py +114 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/http/response.py +104 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/http_client.py +693 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/introspection.py +262 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/logging/__init__.py +11 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/logging/logger.py +273 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/logging/types.py +93 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/observability/__init__.py +11 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/observability/config.py +179 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/observability/models.py +19 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/observability/redactor.py +81 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/observability/session.py +103 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/performance/__init__.py +6 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/performance/instrumentation.py +57 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/performance/metrics.py +93 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/schema/__init__.py +75 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/schema/base.py +164 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/schema/components.py +239 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/schema/connector.py +120 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/schema/extensions.py +230 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/schema/operations.py +146 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/schema/security.py +223 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/secrets.py +182 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/telemetry/__init__.py +10 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/telemetry/config.py +32 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/telemetry/events.py +59 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/telemetry/tracker.py +155 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/types.py +245 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/utils.py +60 -0
- airbyte_agent_greenhouse/_vendored/connector_sdk/validation.py +828 -0
- airbyte_agent_greenhouse/connector.py +1391 -0
- airbyte_agent_greenhouse/connector_model.py +2356 -0
- airbyte_agent_greenhouse/models.py +281 -0
- airbyte_agent_greenhouse/types.py +136 -0
- airbyte_agent_greenhouse-0.17.48.dist-info/METADATA +116 -0
- airbyte_agent_greenhouse-0.17.48.dist-info/RECORD +57 -0
- airbyte_agent_greenhouse-0.17.48.dist-info/WHEEL +4 -0

+++ airbyte_agent_greenhouse/_vendored/connector_sdk/observability/config.py
@@ -0,0 +1,179 @@
+"""Unified configuration for connector-sdk."""
+
+import logging
+import os
+import tempfile
+import uuid
+from dataclasses import dataclass, field
+from pathlib import Path
+from typing import Any
+
+import yaml
+
+logger = logging.getLogger(__name__)
+
+# New config location
+CONFIG_DIR = Path.home() / ".airbyte" / "connector-sdk"
+CONFIG_PATH = CONFIG_DIR / "config.yaml"
+
+# Legacy file locations (for migration)
+LEGACY_USER_ID_PATH = Path.home() / ".airbyte" / "ai_sdk_user_id"
+LEGACY_INTERNAL_MARKER_PATH = Path.home() / ".airbyte" / "internal_user"
+
+
+@dataclass
+class SDKConfig:
+    """Connector SDK configuration."""
+
+    user_id: str = field(default_factory=lambda: str(uuid.uuid4()))
+    is_internal_user: bool = False
+
+    def to_dict(self) -> dict[str, Any]:
+        """Convert to dictionary for YAML serialization."""
+        return {
+            "user_id": self.user_id,
+            "is_internal_user": self.is_internal_user,
+        }
+
+
+def _delete_legacy_files() -> None:
+    """
+    Delete legacy config files after successful migration.
+
+    Removes:
+    - ~/.airbyte/ai_sdk_user_id
+    - ~/.airbyte/internal_user
+    """
+    for legacy_path in [LEGACY_USER_ID_PATH, LEGACY_INTERNAL_MARKER_PATH]:
+        try:
+            if legacy_path.exists():
+                legacy_path.unlink()
+                logger.debug(f"Deleted legacy config file: {legacy_path}")
+        except Exception as e:
+            logger.debug(f"Could not delete legacy file {legacy_path}: {e}")
+
+
+def _migrate_legacy_config() -> SDKConfig | None:
+    """
+    Migrate from legacy file-based config to new YAML format.
+
+    Reads from:
+    - ~/.airbyte/ai_sdk_user_id (user_id)
+    - ~/.airbyte/internal_user (is_internal_user marker)
+
+    Returns SDKConfig if migration was successful, None otherwise.
+    """
+    user_id = None
+    is_internal = False
+
+    # Try to read legacy user_id
+    try:
+        if LEGACY_USER_ID_PATH.exists():
+            user_id = LEGACY_USER_ID_PATH.read_text().strip()
+            if not user_id:
+                user_id = None
+    except Exception:
+        pass
+
+    # Check legacy internal_user marker
+    try:
+        is_internal = LEGACY_INTERNAL_MARKER_PATH.exists()
+    except Exception:
+        pass
+
+    if user_id or is_internal:
+        return SDKConfig(
+            user_id=user_id or str(uuid.uuid4()),
+            is_internal_user=is_internal,
+        )
+
+    return None
+
+
+def load_config() -> SDKConfig:
+    """
+    Load SDK configuration from config file.
+
+    Checks (in order):
+    1. New config file at ~/.airbyte/connector-sdk/config.yaml
+    2. Legacy files at ~/.airbyte/ai_sdk_user_id and ~/.airbyte/internal_user
+    3. Creates new config with generated user_id if nothing exists
+
+    Environment variable AIRBYTE_INTERNAL_USER can override is_internal_user.
+
+    Returns:
+        SDKConfig with user_id and is_internal_user
+    """
+    config = None
+
+    # Try to load from new config file
+    try:
+        if CONFIG_PATH.exists():
+            content = CONFIG_PATH.read_text()
+            data = yaml.safe_load(content) or {}
+            config = SDKConfig(
+                user_id=data.get("user_id", str(uuid.uuid4())),
+                is_internal_user=data.get("is_internal_user", False),
+            )
+            # Always clean up legacy files if they exist (even if new config exists)
+            _delete_legacy_files()
+    except Exception as e:
+        logger.debug(f"Could not load config from {CONFIG_PATH}: {e}")
+
+    # Try to migrate from legacy files if new config doesn't exist
+    if config is None:
+        config = _migrate_legacy_config()
+        if config:
+            # Save migrated config to new location
+            try:
+                save_config(config)
+                logger.debug("Migrated legacy config to new location")
+                # Delete legacy files after successful migration
+                _delete_legacy_files()
+            except Exception as e:
+                logger.debug(f"Could not save migrated config: {e}")
+
+    # Create new config if nothing exists
+    if config is None:
+        config = SDKConfig()
+        try:
+            save_config(config)
+        except Exception as e:
+            logger.debug(f"Could not save new config: {e}")
+
+    # Environment variable override for is_internal_user
+    env_value = os.getenv("AIRBYTE_INTERNAL_USER", "").lower()
+    if env_value in ("true", "1", "yes"):
+        config.is_internal_user = True
+    elif env_value:
+        # Any other non-empty value (including "false", "0", "no") defaults to False
+        config.is_internal_user = False
+
+    return config
+
+
+def save_config(config: SDKConfig) -> None:
+    """
+    Save SDK configuration to config file.
+
+    Creates the config directory if it doesn't exist.
+    Uses atomic writes to prevent corruption from concurrent access.
+
+    Args:
+        config: SDKConfig to save
+    """
+    CONFIG_DIR.mkdir(parents=True, exist_ok=True)
+
+    # Use atomic write: write to temp file then rename (atomic on POSIX)
+    fd, temp_path = tempfile.mkstemp(dir=CONFIG_DIR, suffix=".tmp")
+    try:
+        with os.fdopen(fd, "w") as f:
+            yaml.dump(config.to_dict(), f, default_flow_style=False)
+        os.rename(temp_path, CONFIG_PATH)
+    except Exception:
+        # Clean up temp file on failure
+        try:
+            os.unlink(temp_path)
+        except OSError:
+            pass
+        raise
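
A usage sketch (not part of the package diff): the module above loads configuration from ~/.airbyte/connector-sdk/config.yaml, migrates the two legacy files when present, and writes updates atomically via a temp file plus rename. The import path below follows the vendored layout in the file listing; these vendored modules are internal to the package, so importing them directly like this is for illustration only.

# Sketch: load, inspect, and persist the SDK config (assumes the vendored path above).
from airbyte_agent_greenhouse._vendored.connector_sdk.observability.config import (
    SDKConfig,
    load_config,
    save_config,
)

config = load_config()          # reads config.yaml, migrates legacy files, or creates a fresh config
print(config.user_id)           # stable anonymous UUID
print(config.is_internal_user)  # may be overridden by the AIRBYTE_INTERNAL_USER env var

# Persist a modified config; save_config() writes a temp file and renames it into place.
save_config(SDKConfig(user_id=config.user_id, is_internal_user=True))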

+++ airbyte_agent_greenhouse/_vendored/connector_sdk/observability/models.py
@@ -0,0 +1,19 @@
+"""Shared operation metadata models."""
+
+from dataclasses import dataclass
+from datetime import datetime
+from typing import Any, Dict
+
+
+@dataclass
+class OperationMetadata:
+    """Shared operation metadata."""
+
+    entity: str
+    action: str
+    timestamp: datetime
+    timing_ms: float | None = None
+    status_code: int | None = None
+    error_type: str | None = None
+    error_message: str | None = None
+    params: Dict[str, Any] | None = None
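
OperationMetadata is a plain dataclass, so records are constructed directly; only entity, action, and timestamp are required. A small illustration (the entity, action, and parameter values below are hypothetical):

# Illustration: building an OperationMetadata record for a successful list call.
from datetime import datetime, timezone

from airbyte_agent_greenhouse._vendored.connector_sdk.observability.models import OperationMetadata

meta = OperationMetadata(
    entity="candidates",
    action="list",
    timestamp=datetime.now(timezone.utc),
    timing_ms=142.7,
    status_code=200,
    params={"per_page": 100},
)
print(meta.entity, meta.status_code, meta.timing_ms)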

+++ airbyte_agent_greenhouse/_vendored/connector_sdk/observability/redactor.py
@@ -0,0 +1,81 @@
+"""Shared redaction logic for both logging and telemetry."""
+
+from typing import Any, Dict
+from urllib.parse import parse_qs, urlencode, urlparse, urlunparse
+
+
+class DataRedactor:
+    """Shared redaction logic for both logging and telemetry."""
+
+    SENSITIVE_HEADER_PATTERNS = [
+        "authorization",
+        "api-key",
+        "x-api-key",
+        "token",
+        "bearer",
+        "secret",
+        "password",
+        "credential",
+    ]
+
+    SENSITIVE_PARAM_PATTERNS = [
+        "password",
+        "secret",
+        "api_key",
+        "apikey",
+        "token",
+        "credentials",
+        "auth",
+        "key",
+    ]
+
+    @staticmethod
+    def redact_headers(headers: Dict[str, str]) -> Dict[str, str]:
+        """Redact sensitive headers."""
+        redacted = {}
+        for key, value in headers.items():
+            if any(pattern in key.lower() for pattern in DataRedactor.SENSITIVE_HEADER_PATTERNS):
+                redacted[key] = "***REDACTED***"
+            else:
+                redacted[key] = value
+        return redacted
+
+    @staticmethod
+    def redact_params(params: Dict[str, Any]) -> Dict[str, Any]:
+        """Redact sensitive parameters."""
+        redacted = {}
+        for key, value in params.items():
+            if any(pattern in key.lower() for pattern in DataRedactor.SENSITIVE_PARAM_PATTERNS):
+                redacted[key] = "***REDACTED***"
+            else:
+                redacted[key] = value
+        return redacted
+
+    @staticmethod
+    def redact_url(url: str) -> str:
+        """Redact sensitive query params from URL."""
+        parsed = urlparse(url)
+        if not parsed.query:
+            return url
+
+        params = parse_qs(parsed.query)
+        redacted_params = {}
+
+        for key, values in params.items():
+            if any(pattern in key.lower() for pattern in DataRedactor.SENSITIVE_PARAM_PATTERNS):
+                redacted_params[key] = ["***REDACTED***"] * len(values)
+            else:
+                redacted_params[key] = values
+
+        # Reconstruct URL with redacted params
+        new_query = urlencode(redacted_params, doseq=True)
+        return urlunparse(
+            (
+                parsed.scheme,
+                parsed.netloc,
+                parsed.path,
+                parsed.params,
+                new_query,
+                parsed.fragment,
+            )
+        )
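
DataRedactor matches sensitive names case-insensitively by substring, so headers such as Authorization and query parameters such as api_key are replaced with a marker while everything else passes through unchanged. A quick sketch with illustrative values:

# Sketch: redacting headers and a URL before they are logged.
from airbyte_agent_greenhouse._vendored.connector_sdk.observability.redactor import DataRedactor

headers = {"Authorization": "Bearer abc123", "Accept": "application/json"}
print(DataRedactor.redact_headers(headers))
# -> {'Authorization': '***REDACTED***', 'Accept': 'application/json'}

url = "https://example.com/v1/items?per_page=50&api_key=secret123"
print(DataRedactor.redact_url(url))
# per_page is kept; the api_key value is replaced with the (URL-encoded) redaction marker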

+++ airbyte_agent_greenhouse/_vendored/connector_sdk/observability/session.py
@@ -0,0 +1,103 @@
+"""Shared session context for both logging and telemetry."""
+
+import logging
+import uuid
+from datetime import UTC, datetime
+from typing import Any, Dict
+
+from .config import SDKConfig, load_config
+
+logger = logging.getLogger(__name__)
+
+# Cache the config at module level to avoid repeated reads
+_cached_config: SDKConfig | None = None
+
+
+def _get_config() -> SDKConfig:
+    """Get cached SDK config or load from file."""
+    global _cached_config
+    if _cached_config is None:
+        _cached_config = load_config()
+    return _cached_config
+
+
+def _clear_config_cache() -> None:
+    """Clear the cached config. Used for testing."""
+    global _cached_config
+    _cached_config = None
+
+
+def get_persistent_user_id() -> str:
+    """
+    Get the persistent anonymous user ID.
+
+    Now reads from ~/.airbyte/connector-sdk/config.yaml
+
+    Returns:
+        An anonymous UUID string that uniquely identifies this user across sessions.
+    """
+    return _get_config().user_id
+
+
+def get_public_ip() -> str | None:
+    """
+    Fetch the public IP address of the user.
+
+    Returns None if unable to fetch (network issues, etc).
+    Uses httpx for a robust HTTP request to a public IP service.
+    """
+    try:
+        # NOTE: Import here intentionally - this is a non-critical network call
+        # that may fail. Importing at module level would make httpx a hard dependency.
+        import httpx
+
+        # Use a short timeout to avoid blocking
+        with httpx.Client(timeout=2.0) as client:
+            response = client.get("https://api.ipify.org?format=text")
+            response.raise_for_status()
+            return response.text.strip()
+    except Exception:
+        # Never fail - just return None
+        return None
+
+
+def get_is_internal_user() -> bool:
+    """
+    Check if the current user is an internal Airbyte user.
+
+    Now reads from ~/.airbyte/connector-sdk/config.yaml
+    Environment variable AIRBYTE_INTERNAL_USER can override.
+
+    Returns False if not set or on any error.
+    """
+    return _get_config().is_internal_user
+
+
+class ObservabilitySession:
+    """Shared session context for both logging and telemetry."""
+
+    def __init__(
+        self,
+        connector_name: str,
+        connector_version: str | None = None,
+        execution_context: str = "direct",
+        session_id: str | None = None,
+    ):
+        self.session_id = session_id or str(uuid.uuid4())
+        self.user_id = get_persistent_user_id()
+        self.connector_name = connector_name
+        self.connector_version = connector_version
+        self.execution_context = execution_context
+        self.started_at = datetime.now(UTC)
+        self.operation_count = 0
+        self.metadata: Dict[str, Any] = {}
+        self.public_ip = get_public_ip()
+        self.is_internal_user = get_is_internal_user()
+
+    def increment_operations(self):
+        """Increment the operation counter."""
+        self.operation_count += 1
+
+    def duration_seconds(self) -> float:
+        """Calculate session duration in seconds."""
+        return (datetime.now(UTC) - self.started_at).total_seconds()
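
Constructing a session pulls the persistent user ID and internal-user flag from the config module above and attempts a best-effort public IP lookup with a 2-second timeout; the rest is bookkeeping. A sketch (the connector name and version below are illustrative):

# Sketch: one ObservabilitySession per connector run.
from airbyte_agent_greenhouse._vendored.connector_sdk.observability.session import ObservabilitySession

session = ObservabilitySession(
    connector_name="greenhouse",
    connector_version="0.17.48",
    execution_context="direct",
)
session.increment_operations()              # bump the operation counter
print(session.session_id, session.user_id)  # per-run UUID and persistent anonymous user ID
print(session.operation_count, session.duration_seconds())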

+++ airbyte_agent_greenhouse/_vendored/connector_sdk/performance/instrumentation.py
@@ -0,0 +1,57 @@
+"""Performance instrumentation decorator for async functions."""
+
+import functools
+import logging
+import time
+from typing import Any, Callable, TypeVar
+
+# Type variable for generic function decoration
+F = TypeVar("F", bound=Callable[..., Any])
+
+logger = logging.getLogger(__name__)
+
+
+def instrument(metric_name: str) -> Callable[[F], F]:
+    """Decorator to instrument async functions with performance tracking.
+
+    Args:
+        metric_name: Name of the metric to track
+
+    Returns:
+        Decorator function
+
+    Example:
+        @instrument("stripe.customer.list")
+        async def list_customers():
+            ...
+    """
+
+    def decorator(func: F) -> F:
+        @functools.wraps(func)
+        async def wrapper(*args, **kwargs):
+            start_time = time.time()
+            success = True
+            error = None
+
+            try:
+                result = await func(*args, **kwargs)
+                return result
+
+            except Exception as e:
+                success = False
+                error = e
+                raise
+
+            finally:
+                duration = time.time() - start_time
+                duration_ms = duration * 1000
+
+                # Log performance metrics
+                if success:
+                    logger.debug(f"[{metric_name}] completed in {duration_ms:.2f}ms")
+                else:
+                    logger.warning(f"[{metric_name}] failed after {duration_ms:.2f}ms: {error}")
+
+        return wrapper  # type: ignore
+
+    return decorator
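
The @instrument decorator only logs timings through the standard logging module and re-raises any exception, so the wrapped coroutine behaves exactly as before. A sketch with a stand-in coroutine (the metric name is arbitrary):

# Sketch: timing a coroutine with @instrument.
import asyncio
import logging

from airbyte_agent_greenhouse._vendored.connector_sdk.performance.instrumentation import instrument

logging.basicConfig(level=logging.DEBUG)  # successes are logged at DEBUG level

@instrument("greenhouse.candidates.list")
async def list_candidates():
    await asyncio.sleep(0.1)  # stand-in for a real API call
    return ["candidate-1", "candidate-2"]

print(asyncio.run(list_candidates()))
# emits a debug line like: [greenhouse.candidates.list] completed in 100.12ms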

+++ airbyte_agent_greenhouse/_vendored/connector_sdk/performance/metrics.py
@@ -0,0 +1,93 @@
+"""Performance metrics tracking."""
+
+import time
+from contextlib import asynccontextmanager
+from typing import Dict
+
+
+class PerformanceMonitor:
+    """Monitor and track performance metrics for operations."""
+
+    def __init__(self):
+        """Initialize performance monitor."""
+        self._metrics: Dict[str, Dict[str, float]] = {}
+
+    def record(self, metric_name: str, duration: float):
+        """Record a metric.
+
+        Args:
+            metric_name: Name of the metric
+            duration: Duration in seconds
+        """
+        if metric_name not in self._metrics:
+            self._metrics[metric_name] = {
+                "count": 0,
+                "total": 0.0,
+                "min": float("inf"),
+                "max": 0.0,
+            }
+
+        metrics = self._metrics[metric_name]
+        metrics["count"] += 1
+        metrics["total"] += duration
+        metrics["min"] = min(metrics["min"], duration)
+        metrics["max"] = max(metrics["max"], duration)
+
+    def get_stats(self, metric_name: str) -> Dict[str, float] | None:
+        """Get statistics for a metric.
+
+        Args:
+            metric_name: Name of the metric
+
+        Returns:
+            Dictionary with count, total, mean, min, max or None if metric not found
+        """
+        if metric_name not in self._metrics:
+            return None
+
+        metrics = self._metrics[metric_name]
+        return {
+            "count": metrics["count"],
+            "total": metrics["total"],
+            "mean": metrics["total"] / metrics["count"] if metrics["count"] > 0 else 0.0,
+            "min": metrics["min"] if metrics["min"] != float("inf") else 0.0,
+            "max": metrics["max"],
+        }
+
+    def get_all_stats(self) -> Dict[str, Dict[str, float]]:
+        """Get statistics for all metrics.
+
+        Returns:
+            Dictionary mapping metric names to their statistics
+        """
+        return {name: self.get_stats(name) for name in self._metrics.keys()}
+
+    def reset(self, metric_name: str | None = None):
+        """Reset metrics.
+
+        Args:
+            metric_name: Specific metric to reset, or None to reset all
+        """
+        if metric_name:
+            if metric_name in self._metrics:
+                del self._metrics[metric_name]
+        else:
+            self._metrics.clear()
+
+    @asynccontextmanager
+    async def track(self, metric_name: str):
+        """Context manager for tracking operation duration.
+
+        Args:
+            metric_name: Name of the metric to track
+
+        Example:
+            async with monitor.track("api_call"):
+                result = await some_async_operation()
+        """
+        start_time = time.time()
+        try:
+            yield
+        finally:
+            duration = time.time() - start_time
+            self.record(metric_name, duration)
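
PerformanceMonitor aggregates count, total, min, and max per metric name and derives the mean on read; track() is an async context manager that feeds record(). A sketch:

# Sketch: recording durations directly and via the async track() context manager.
import asyncio

from airbyte_agent_greenhouse._vendored.connector_sdk.performance.metrics import PerformanceMonitor

monitor = PerformanceMonitor()
monitor.record("api_call", 0.120)  # durations are in seconds
monitor.record("api_call", 0.080)

async def main():
    async with monitor.track("api_call"):
        await asyncio.sleep(0.05)

asyncio.run(main())
print(monitor.get_stats("api_call"))  # {'count': 3, 'total': ..., 'mean': ..., 'min': ..., 'max': ...}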

+++ airbyte_agent_greenhouse/_vendored/connector_sdk/schema/__init__.py
@@ -0,0 +1,75 @@
+"""
+Pydantic 2 schema models for OpenAPI 3.0 connector specifications.
+
+This package provides strongly-typed Pydantic models that mirror the OpenAPI 3.0
+specification while supporting Airbyte-specific extensions.
+
+Usage:
+    import yaml
+    from . import OpenAPIConnector
+
+    with open('connector.yaml') as f:
+        data = yaml.safe_load(f)
+
+    connector = OpenAPIConnector(**data)
+    print(connector.list_resources())
+"""
+
+from .base import Contact, Info, License, Server, ServerVariable
+from .components import (
+    Components,
+    Header,
+    MediaType,
+    Parameter,
+    RequestBody,
+    Response,
+    Schema,
+)
+from .connector import ExternalDocs, OpenAPIConnector, Tag
+from .extensions import PaginationConfig, RateLimitConfig, RetryConfig
+from .operations import Operation, PathItem
+from .security import (
+    AirbyteAuthConfig,
+    AuthConfigFieldSpec,
+    AuthConfigOption,
+    OAuth2Flow,
+    OAuth2Flows,
+    SecurityRequirement,
+    SecurityScheme,
+)
+
+__all__ = [
+    # Root model
+    "OpenAPIConnector",
+    "Tag",
+    "ExternalDocs",
+    # Base models
+    "Info",
+    "Server",
+    "ServerVariable",
+    "Contact",
+    "License",
+    # Security models
+    "SecurityScheme",
+    "SecurityRequirement",
+    "OAuth2Flow",
+    "OAuth2Flows",
+    "AirbyteAuthConfig",
+    "AuthConfigOption",
+    "AuthConfigFieldSpec",
+    # Component models
+    "Components",
+    "Schema",
+    "Parameter",
+    "RequestBody",
+    "Response",
+    "MediaType",
+    "Header",
+    # Operation models
+    "PathItem",
+    "Operation",
+    # Extension models (for future use)
+    "PaginationConfig",
+    "RateLimitConfig",
+    "RetryConfig",
+]
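
The schema package's own docstring shows the intended usage; the same models are reachable through the vendored path, as in this sketch (connector.yaml is a hypothetical OpenAPI 3.0 connector spec, and the required fields are defined by the Pydantic models, which are not shown in this diff):

# Sketch: loading a spec into the vendored schema models, mirroring the docstring above.
import yaml

from airbyte_agent_greenhouse._vendored.connector_sdk.schema import OpenAPIConnector

with open("connector.yaml") as f:  # hypothetical spec file
    data = yaml.safe_load(f)

connector = OpenAPIConnector(**data)
print(connector.list_resources())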