liquid-api 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- liquid/__init__.py +60 -0
- liquid/_defaults.py +58 -0
- liquid/auth/__init__.py +8 -0
- liquid/auth/classifier.py +73 -0
- liquid/auth/manager.py +108 -0
- liquid/client.py +213 -0
- liquid/discovery/__init__.py +18 -0
- liquid/discovery/base.py +53 -0
- liquid/discovery/browser.py +175 -0
- liquid/discovery/diff.py +66 -0
- liquid/discovery/graphql.py +180 -0
- liquid/discovery/mcp.py +159 -0
- liquid/discovery/openapi.py +227 -0
- liquid/discovery/rest_heuristic.py +157 -0
- liquid/events.py +37 -0
- liquid/exceptions.py +51 -0
- liquid/mapping/__init__.py +9 -0
- liquid/mapping/learning.py +62 -0
- liquid/mapping/proposer.py +150 -0
- liquid/mapping/reviewer.py +84 -0
- liquid/models/__init__.py +36 -0
- liquid/models/adapter.py +35 -0
- liquid/models/llm.py +42 -0
- liquid/models/schema.py +84 -0
- liquid/models/sync.py +35 -0
- liquid/protocols.py +29 -0
- liquid/py.typed +0 -0
- liquid/sync/__init__.py +29 -0
- liquid/sync/auto_repair.py +64 -0
- liquid/sync/engine.py +176 -0
- liquid/sync/fetcher.py +92 -0
- liquid/sync/mapper.py +73 -0
- liquid/sync/pagination.py +102 -0
- liquid/sync/retry.py +47 -0
- liquid/sync/selector.py +32 -0
- liquid/sync/transform.py +103 -0
- liquid_api-0.2.0.dist-info/METADATA +177 -0
- liquid_api-0.2.0.dist-info/RECORD +39 -0
- liquid_api-0.2.0.dist-info/WHEEL +4 -0
liquid/__init__.py
ADDED
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
"""Liquid — AI discovers APIs. Code syncs data. No adapters to write."""

__version__ = "0.2.0"

# Public re-exports: the orchestrator, the exception hierarchy, the data
# models, and the pluggable backend protocols. `__all__` below is the
# authoritative public API surface.
from liquid.client import Liquid
from liquid.exceptions import (
    AuthError,
    AuthSetupError,
    DiscoveryError,
    EndpointGoneError,
    FieldNotFoundError,
    LiquidError,
    MappingError,
    RateLimitError,
    ReDiscoveryNeededError,
    ServiceDownError,
    SyncRuntimeError,
    VaultError,
)
from liquid.models import (
    AdapterConfig,
    APISchema,
    AuthRequirement,
    DeliveryResult,
    Endpoint,
    FieldMapping,
    MappedRecord,
    SyncConfig,
    SyncResult,
)
from liquid.protocols import DataSink, KnowledgeStore, LLMBackend, Vault

# Kept case-sensitively sorted (ruff RUF022 style).
__all__ = [
    "APISchema",
    "AdapterConfig",
    "AuthError",
    "AuthRequirement",
    "AuthSetupError",
    "DataSink",
    "DeliveryResult",
    "DiscoveryError",
    "Endpoint",
    "EndpointGoneError",
    "FieldMapping",
    "FieldNotFoundError",
    "KnowledgeStore",
    "LLMBackend",
    "Liquid",
    "LiquidError",
    "MappedRecord",
    "MappingError",
    "RateLimitError",
    "ReDiscoveryNeededError",
    "ServiceDownError",
    "SyncConfig",
    "SyncResult",
    "SyncRuntimeError",
    "Vault",
    "VaultError",
]
|
liquid/_defaults.py
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
"""Default in-memory implementations of protocols for testing and quick starts."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from liquid.exceptions import VaultError
|
|
6
|
+
from liquid.models.adapter import FieldMapping # noqa: TC001
|
|
7
|
+
from liquid.models.llm import DeliveryResult, MappedRecord
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class InMemoryVault:
    """Dict-based vault for testing. Not for production."""

    def __init__(self) -> None:
        # Plain-text secret storage; never use outside tests.
        self._data: dict[str, str] = {}

    async def store(self, key: str, value: str) -> None:
        """Persist *value* under *key*, overwriting any previous value."""
        self._data[key] = value

    async def get(self, key: str) -> str:
        """Return the value for *key*; raise VaultError when absent."""
        if key in self._data:
            return self._data[key]
        raise VaultError(f"Key not found: {key}")

    async def delete(self, key: str) -> None:
        """Remove *key* if present; deleting a missing key is a no-op."""
        if key in self._data:
            del self._data[key]
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
class InMemoryKnowledgeStore:
    """Dict-based knowledge store for testing."""

    def __init__(self) -> None:
        # Mappings keyed by the composite "<service>:<target_model>" string.
        self._data: dict[str, list[FieldMapping]] = {}

    @staticmethod
    def _key(service: str, target_model: str) -> str:
        """Build the composite lookup key for a (service, model) pair."""
        return f"{service}:{target_model}"

    async def find_mapping(self, service: str, target_model: str) -> list[FieldMapping] | None:
        """Return stored mappings for the pair, or None when unknown."""
        return self._data.get(self._key(service, target_model))

    async def store_mapping(self, service: str, target_model: str, mappings: list[FieldMapping]) -> None:
        """Replace the stored mappings for the pair."""
        self._data[self._key(service, target_model)] = mappings
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
class StdoutSink:
    """Prints records to stdout. For debugging only."""

    async def deliver(self, records: list[MappedRecord]) -> DeliveryResult:
        """Print each record and report how many were delivered."""
        delivered = 0
        for item in records:
            print(f"[StdoutSink] {item.source_endpoint}: {item.mapped_data}")
            delivered += 1
        return DeliveryResult(delivered=delivered)
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
class CollectorSink:
    """Collects records in memory. Useful for testing."""

    def __init__(self) -> None:
        # All records ever delivered, in arrival order, across calls.
        self.records: list[MappedRecord] = []

    async def deliver(self, records: list[MappedRecord]) -> DeliveryResult:
        """Append the batch to the in-memory list and acknowledge it."""
        for item in records:
            self.records.append(item)
        return DeliveryResult(delivered=len(records))
|
liquid/auth/__init__.py
ADDED
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from typing import TYPE_CHECKING
|
|
4
|
+
|
|
5
|
+
from pydantic import BaseModel
|
|
6
|
+
|
|
7
|
+
if TYPE_CHECKING:
|
|
8
|
+
from liquid.models.schema import AuthRequirement
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class EscalationInfo(BaseModel):
    """Escalation plan produced by AuthClassifier for an AuthRequirement."""

    # Auth tier label copied from the requirement (e.g. "A", "B", "C").
    tier: str
    # Machine-readable next step, e.g. "none", "admin_registration",
    # "provide_api_key", "provide_credentials", "manual_configuration".
    action_required: str
    # Link to the service's auth documentation, when known.
    docs_url: str | None = None
    # Free-form human guidance for completing the escalation.
    instructions: str = ""
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class AuthClassifier:
    """Classifies auth requirements and generates escalation info."""

    def classify(self, auth: AuthRequirement) -> EscalationInfo:
        """Turn an AuthRequirement into an actionable EscalationInfo.

        Tier "A" proceeds automatically, "B" needs admin app registration,
        "C" is delegated to a per-auth-type classifier, anything else is
        flagged for manual configuration.
        """
        if auth.tier == "A":
            return EscalationInfo(
                tier="A",
                action_required="none",
                docs_url=auth.docs_url,
                instructions="OAuth flow can proceed automatically. Redirect user to authorize.",
            )
        if auth.tier == "B":
            return EscalationInfo(
                tier="B",
                action_required="admin_registration",
                docs_url=auth.docs_url,
                instructions=(
                    "This service requires creating a developer application first. "
                    "An admin needs to register the app at the service's developer portal, "
                    "then provide client_id and client_secret."
                ),
            )
        if auth.tier == "C":
            return self._classify_tier_c(auth)
        # Unknown tier: surface it as-is and ask for manual setup.
        return EscalationInfo(
            tier=auth.tier,
            action_required="manual",
            docs_url=auth.docs_url,
            instructions="Unknown auth tier. Manual configuration required.",
        )

    def _classify_tier_c(self, auth: AuthRequirement) -> EscalationInfo:
        """Refine tier "C" by the concrete credential type required."""
        if auth.type == "api_key":
            return EscalationInfo(
                tier="C",
                action_required="provide_api_key",
                docs_url=auth.docs_url,
                instructions="This service requires an API key. Obtain it from the service dashboard.",
            )
        if auth.type == "basic":
            return EscalationInfo(
                tier="C",
                action_required="provide_credentials",
                docs_url=auth.docs_url,
                instructions="This service requires username and password for Basic auth.",
            )
        # Any other credential type needs bespoke setup.
        return EscalationInfo(
            tier="C",
            action_required="manual_configuration",
            docs_url=auth.docs_url,
            instructions="This service requires custom authentication. Contact support for setup.",
        )
|
liquid/auth/manager.py
ADDED
|
@@ -0,0 +1,108 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import logging
|
|
4
|
+
from typing import TYPE_CHECKING, Any
|
|
5
|
+
|
|
6
|
+
import httpx
|
|
7
|
+
|
|
8
|
+
from liquid.exceptions import AuthError, VaultError
|
|
9
|
+
|
|
10
|
+
if TYPE_CHECKING:
|
|
11
|
+
from liquid.models.schema import AuthRequirement, OAuthConfig
|
|
12
|
+
from liquid.protocols import Vault
|
|
13
|
+
|
|
14
|
+
logger = logging.getLogger(__name__)
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class AuthManager:
|
|
18
|
+
"""Manages credentials storage and auth header generation."""
|
|
19
|
+
|
|
20
|
+
def __init__(self, vault: Vault) -> None:
|
|
21
|
+
self.vault = vault
|
|
22
|
+
|
|
23
|
+
async def store_credentials(self, adapter_id: str, credentials: dict[str, Any]) -> str:
|
|
24
|
+
"""Store credentials in vault with per-adapter isolation. Returns vault key prefix."""
|
|
25
|
+
prefix = f"liquid/{adapter_id}"
|
|
26
|
+
try:
|
|
27
|
+
for key, value in credentials.items():
|
|
28
|
+
await self.vault.store(f"{prefix}/{key}", str(value))
|
|
29
|
+
except Exception as e:
|
|
30
|
+
raise VaultError(f"Failed to store credentials for {adapter_id}: {e}") from e
|
|
31
|
+
return prefix
|
|
32
|
+
|
|
33
|
+
async def get_auth_headers(self, auth: AuthRequirement, vault_key: str) -> dict[str, str]:
|
|
34
|
+
"""Build HTTP headers for the given auth type."""
|
|
35
|
+
try:
|
|
36
|
+
match auth.type:
|
|
37
|
+
case "bearer" | "oauth2":
|
|
38
|
+
token = await self.vault.get(f"{vault_key}/access_token")
|
|
39
|
+
return {"Authorization": f"Bearer {token}"}
|
|
40
|
+
case "api_key":
|
|
41
|
+
key = await self.vault.get(f"{vault_key}/api_key")
|
|
42
|
+
return {"X-API-Key": key}
|
|
43
|
+
case "basic":
|
|
44
|
+
username = await self.vault.get(f"{vault_key}/username")
|
|
45
|
+
password = await self.vault.get(f"{vault_key}/password")
|
|
46
|
+
import base64
|
|
47
|
+
|
|
48
|
+
encoded = base64.b64encode(f"{username}:{password}".encode()).decode()
|
|
49
|
+
return {"Authorization": f"Basic {encoded}"}
|
|
50
|
+
case _:
|
|
51
|
+
token = await self.vault.get(f"{vault_key}/token")
|
|
52
|
+
return {"Authorization": f"Bearer {token}"}
|
|
53
|
+
except VaultError:
|
|
54
|
+
raise
|
|
55
|
+
except Exception as e:
|
|
56
|
+
raise AuthError(f"Failed to build auth headers: {e}") from e
|
|
57
|
+
|
|
58
|
+
async def refresh_oauth_token(
|
|
59
|
+
self,
|
|
60
|
+
vault_key: str,
|
|
61
|
+
oauth_config: OAuthConfig,
|
|
62
|
+
http_client: httpx.AsyncClient | None = None,
|
|
63
|
+
) -> str:
|
|
64
|
+
"""Refresh an OAuth2 access token using the stored refresh token."""
|
|
65
|
+
try:
|
|
66
|
+
refresh_token = await self.vault.get(f"{vault_key}/refresh_token")
|
|
67
|
+
client_id = await self.vault.get(f"{vault_key}/client_id")
|
|
68
|
+
client_secret = await self.vault.get(f"{vault_key}/client_secret")
|
|
69
|
+
except Exception as e:
|
|
70
|
+
raise AuthError(f"Missing OAuth credentials for refresh: {e}") from e
|
|
71
|
+
|
|
72
|
+
client = http_client or httpx.AsyncClient()
|
|
73
|
+
try:
|
|
74
|
+
resp = await client.post(
|
|
75
|
+
oauth_config.token_url,
|
|
76
|
+
data={
|
|
77
|
+
"grant_type": "refresh_token",
|
|
78
|
+
"refresh_token": refresh_token,
|
|
79
|
+
"client_id": client_id,
|
|
80
|
+
"client_secret": client_secret,
|
|
81
|
+
},
|
|
82
|
+
)
|
|
83
|
+
|
|
84
|
+
if not resp.is_success:
|
|
85
|
+
raise AuthError(f"Token refresh failed ({resp.status_code}): {resp.text[:200]}")
|
|
86
|
+
|
|
87
|
+
token_data = resp.json()
|
|
88
|
+
new_access_token = token_data["access_token"]
|
|
89
|
+
await self.vault.store(f"{vault_key}/access_token", new_access_token)
|
|
90
|
+
|
|
91
|
+
if "refresh_token" in token_data:
|
|
92
|
+
await self.vault.store(f"{vault_key}/refresh_token", token_data["refresh_token"])
|
|
93
|
+
|
|
94
|
+
logger.info("OAuth token refreshed for %s", vault_key)
|
|
95
|
+
return new_access_token
|
|
96
|
+
finally:
|
|
97
|
+
if not http_client:
|
|
98
|
+
await client.aclose()
|
|
99
|
+
|
|
100
|
+
async def delete_credentials(self, adapter_id: str, keys: list[str] | None = None) -> None:
|
|
101
|
+
"""Remove stored credentials for an adapter."""
|
|
102
|
+
prefix = f"liquid/{adapter_id}"
|
|
103
|
+
key_names = keys or ["access_token", "refresh_token", "client_id", "client_secret", "api_key"]
|
|
104
|
+
import contextlib
|
|
105
|
+
|
|
106
|
+
for key in key_names:
|
|
107
|
+
with contextlib.suppress(Exception):
|
|
108
|
+
await self.vault.delete(f"{prefix}/{key}")
|
liquid/client.py
ADDED
|
@@ -0,0 +1,213 @@
|
|
|
1
|
+
"""Liquid — the main orchestrator tying all phases together."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from typing import TYPE_CHECKING, Any
|
|
6
|
+
|
|
7
|
+
import httpx
|
|
8
|
+
|
|
9
|
+
from liquid.auth.classifier import AuthClassifier, EscalationInfo
|
|
10
|
+
from liquid.auth.manager import AuthManager
|
|
11
|
+
from liquid.discovery.base import DiscoveryPipeline
|
|
12
|
+
from liquid.discovery.browser import BrowserDiscovery
|
|
13
|
+
from liquid.discovery.diff import diff_schemas
|
|
14
|
+
from liquid.discovery.graphql import GraphQLDiscovery
|
|
15
|
+
from liquid.discovery.mcp import MCPDiscovery
|
|
16
|
+
from liquid.discovery.openapi import OpenAPIDiscovery
|
|
17
|
+
from liquid.discovery.rest_heuristic import RESTHeuristicDiscovery
|
|
18
|
+
from liquid.mapping.learning import MappingLearner
|
|
19
|
+
from liquid.mapping.proposer import MappingProposer
|
|
20
|
+
from liquid.mapping.reviewer import MappingReview
|
|
21
|
+
from liquid.models.adapter import AdapterConfig, FieldMapping, SyncConfig
|
|
22
|
+
from liquid.models.schema import APISchema # noqa: TC001
|
|
23
|
+
from liquid.sync.engine import SyncEngine
|
|
24
|
+
from liquid.sync.fetcher import Fetcher
|
|
25
|
+
from liquid.sync.mapper import RecordMapper
|
|
26
|
+
|
|
27
|
+
if TYPE_CHECKING:
|
|
28
|
+
from liquid.events import EventHandler
|
|
29
|
+
from liquid.models.sync import SyncResult
|
|
30
|
+
from liquid.protocols import DataSink, KnowledgeStore, LLMBackend, Vault
|
|
31
|
+
from liquid.sync.retry import RetryPolicy
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
class Liquid:
    """Main entry point for the Liquid library.

    Orchestrates: discover → classify auth → propose mappings → sync.
    """

    def __init__(
        self,
        llm: LLMBackend,
        vault: Vault,
        sink: DataSink,
        knowledge: KnowledgeStore | None = None,
        event_handler: EventHandler | None = None,
        http_client: httpx.AsyncClient | None = None,
        retry_policy: RetryPolicy | None = None,
    ) -> None:
        """Wire up the pluggable backends.

        Args:
            llm: Backend used by discovery heuristics and mapping proposals.
            vault: Credential storage backend (wrapped by AuthManager).
            sink: Destination that receives mapped records during sync.
            knowledge: Optional store of previously learned field mappings.
            event_handler: Optional async handler notified of lifecycle events.
            http_client: Optional shared HTTP client; when provided the caller
                owns its lifecycle (Liquid only closes clients it creates).
            retry_policy: Optional retry policy forwarded to the sync engine.
        """
        self.llm = llm
        self.vault = vault
        self.sink = sink
        self.knowledge = knowledge
        self.event_handler = event_handler
        self._http_client = http_client
        self._retry_policy = retry_policy

        self._auth_classifier = AuthClassifier()
        self._auth_manager = AuthManager(vault)
        self._mapping_proposer = MappingProposer(llm, knowledge)
        self._mapping_learner = MappingLearner(knowledge)

    async def discover(self, url: str) -> APISchema:
        """Phase 1: Discover the API at the given URL."""
        # Use the injected client if any; otherwise create a throwaway one
        # and close it in `finally` below.
        client = self._http_client or httpx.AsyncClient()
        try:
            # Strategies are tried in order: cheapest/most specific first
            # (MCP, OpenAPI, GraphQL), then LLM-assisted fallbacks.
            pipeline = DiscoveryPipeline(
                [
                    MCPDiscovery(),
                    OpenAPIDiscovery(http_client=client),
                    GraphQLDiscovery(http_client=client),
                    RESTHeuristicDiscovery(llm=self.llm, http_client=client),
                    BrowserDiscovery(llm=self.llm),
                ]
            )
            return await pipeline.discover(url)
        finally:
            # Only close the client when we created it ourselves.
            if not self._http_client:
                await client.aclose()

    def classify_auth(self, schema: APISchema) -> EscalationInfo:
        """Phase 2: Classify auth requirements and return escalation info."""
        return self._auth_classifier.classify(schema.auth)

    async def store_credentials(self, adapter_id: str, credentials: dict[str, Any]) -> str:
        """Phase 2b: Store credentials after human provides them."""
        return await self._auth_manager.store_credentials(adapter_id, credentials)

    async def propose_mappings(
        self,
        schema: APISchema,
        target_model: dict[str, Any],
    ) -> MappingReview:
        """Phase 3: AI proposes field mappings for human review."""
        proposals = await self._mapping_proposer.propose(schema, target_model)
        return MappingReview(proposals)

    async def create_adapter(
        self,
        schema: APISchema,
        auth_ref: str,
        mappings: list[FieldMapping],
        sync_config: SyncConfig,
        verified_by: str | None = None,
    ) -> AdapterConfig:
        """Phase 3b: Create the final adapter config after human approval."""
        # Local import keeps datetime out of module import time; UTC requires
        # Python 3.11+.
        from datetime import UTC, datetime

        return AdapterConfig(
            schema=schema,
            auth_ref=auth_ref,
            mappings=mappings,
            sync=sync_config,
            verified_by=verified_by,
            # Only timestamp the verification when someone actually verified.
            verified_at=datetime.now(UTC) if verified_by else None,
        )

    async def sync(self, config: AdapterConfig, cursor: str | None = None) -> SyncResult:
        """Phase 4: Run a deterministic sync cycle."""
        client = self._http_client or httpx.AsyncClient()
        try:
            fetcher = Fetcher(http_client=client, vault=self.vault)
            mapper = RecordMapper(config.mappings)
            engine = SyncEngine(
                fetcher=fetcher,
                mapper=mapper,
                sink=self.sink,
                event_handler=self.event_handler,
                retry_policy=self._retry_policy,
            )
            return await engine.run(config, cursor)
        finally:
            # Same ownership rule as discover(): close only our own client.
            if not self._http_client:
                await client.aclose()

    async def repair_adapter(
        self,
        config: AdapterConfig,
        target_model: dict[str, Any],
        auto_approve: bool = False,
        confidence_threshold: float = 0.8,
    ) -> AdapterConfig | MappingReview:
        """Re-discover API, diff schemas, selectively re-map broken fields.

        Returns AdapterConfig if auto_approve=True and all mappings are confident,
        otherwise returns MappingReview for human review.
        """
        # Local import — presumably to avoid a circular import with
        # liquid.events; confirm before hoisting to module level.
        from liquid.events import AdapterRepaired

        new_schema = await self.discover(config.schema_.source_url)
        diff = diff_schemas(config.schema_, new_schema)

        if not diff.has_breaking_changes:
            # No mapping work needed: just adopt the new schema and bump the
            # version. This path auto-approves regardless of `auto_approve`.
            updated = config.model_copy(update={"schema_": new_schema, "version": config.version + 1})
            if self.event_handler:
                await self.event_handler.handle(
                    AdapterRepaired(
                        adapter_id=config.config_id,
                        diff=diff,
                        auto_approved=True,
                    )
                )
            return updated

        # Breaking changes: ask the proposer to re-map, reusing existing
        # mappings and flagging the fields the diff says were removed.
        proposals = await self._mapping_proposer.propose(
            new_schema,
            target_model,
            existing_mappings=config.mappings,
            removed_fields=diff.removed_fields,
        )

        review = MappingReview(proposals)

        if auto_approve and all(m.confidence >= confidence_threshold for m in proposals):
            # Every proposal clears the confidence bar — finalize without a human.
            review.approve_all()
            mappings = review.finalize()
            updated = AdapterConfig(
                config_id=config.config_id,
                schema=new_schema,
                auth_ref=config.auth_ref,
                mappings=mappings,
                sync=config.sync,
                verified_by=config.verified_by,
                version=config.version + 1,
            )
            if self.event_handler:
                await self.event_handler.handle(
                    AdapterRepaired(
                        adapter_id=config.config_id,
                        diff=diff,
                        auto_approved=True,
                    )
                )
            return updated

        # At least one low-confidence proposal (or auto_approve off): a human
        # must review before the adapter is rebuilt.
        return review

    async def learn_from_review(
        self,
        schema: APISchema,
        target_model: dict[str, Any],
        review: MappingReview,
    ) -> None:
        """Record corrections from a mapping review for future learning."""
        import json

        corrections = review.corrections()
        if corrections:
            # The target model is serialized so the learner can key on it.
            await self._mapping_learner.record_corrections(
                schema.service_name,
                json.dumps(target_model),
                corrections,
            )
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
# Public surface of the discovery subpackage: the pipeline and strategy
# protocol, the concrete discovery strategies, and the schema-diff helper.
from liquid.discovery.base import DiscoveryPipeline, DiscoveryStrategy
from liquid.discovery.browser import BrowserDiscovery
from liquid.discovery.diff import diff_schemas
from liquid.discovery.graphql import GraphQLDiscovery
from liquid.discovery.mcp import MCPDiscovery
from liquid.discovery.openapi import OpenAPIDiscovery
from liquid.discovery.rest_heuristic import RESTHeuristicDiscovery

__all__ = [
    "BrowserDiscovery",
    "DiscoveryPipeline",
    "DiscoveryStrategy",
    "GraphQLDiscovery",
    "MCPDiscovery",
    "OpenAPIDiscovery",
    "RESTHeuristicDiscovery",
    "diff_schemas",
]
|
liquid/discovery/base.py
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import logging
|
|
4
|
+
from typing import Protocol, runtime_checkable
|
|
5
|
+
|
|
6
|
+
from liquid.exceptions import DiscoveryError
|
|
7
|
+
from liquid.models.schema import APISchema # noqa: TC001
|
|
8
|
+
|
|
9
|
+
logger = logging.getLogger(__name__)
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
# Structural (duck-typed) interface: any object with a matching async
# `discover` method satisfies it; `runtime_checkable` additionally allows
# isinstance() checks against the protocol.
@runtime_checkable
class DiscoveryStrategy(Protocol):
    async def discover(self, url: str) -> APISchema | None:
        """Try to discover the API at the given URL.

        Returns APISchema on success, None if this strategy doesn't apply.
        Raises DiscoveryError on unexpected failures.
        """
        ...
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class DiscoveryPipeline:
    """Tries discovery strategies in order, returns first success."""

    def __init__(self, strategies: list[DiscoveryStrategy]) -> None:
        self.strategies = strategies

    async def discover(self, url: str) -> APISchema:
        """Run each strategy until one returns a schema.

        A strategy returning None means "doesn't apply — try the next one";
        a strategy raising is recorded and the pipeline moves on. When every
        strategy is exhausted, a DiscoveryError summarizing all recorded
        failures is raised.
        """
        failures: list[tuple[str, Exception]] = []

        for candidate in self.strategies:
            name = type(candidate).__name__
            logger.info("Trying discovery strategy: %s for %s", name, url)
            try:
                schema = await candidate.discover(url)
            except Exception as exc:
                # Expected strategy failures (DiscoveryError) and unexpected
                # ones are both collected; only the log message differs.
                if isinstance(exc, DiscoveryError):
                    logger.warning("Strategy %s failed: %s", name, exc)
                else:
                    logger.warning("Strategy %s unexpected error: %s", name, exc)
                failures.append((name, exc))
                continue
            if schema is not None:
                logger.info("Discovery succeeded with %s", name)
                return schema
            logger.debug("Strategy %s returned None, trying next", name)

        if failures:
            error_summary = "; ".join(f"{name}: {err}" for name, err in failures)
            message = f"All discovery strategies failed for {url}. Errors: {error_summary}"
        else:
            message = f"No discovery strategy could handle {url}"
        raise DiscoveryError(message)
|