levelapp 0.1.3__py3-none-any.whl → 0.1.5__py3-none-any.whl
This diff shows the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of levelapp might be problematic.
- levelapp/aspects/loader.py +4 -4
- levelapp/config/api_config.yaml +156 -0
- levelapp/config/dashq_api.yaml +94 -0
- levelapp/config/endpoint_.py +325 -5
- levelapp/config/endpoints.yaml +47 -0
- levelapp/core/session.py +8 -0
- levelapp/endpoint/__init__.py +0 -0
- levelapp/endpoint/client.py +102 -0
- levelapp/endpoint/manager.py +114 -0
- levelapp/endpoint/parsers.py +120 -0
- levelapp/endpoint/schemas.py +38 -0
- levelapp/endpoint/tester.py +53 -0
- levelapp/endpoint/usage_example.py +39 -0
- levelapp/evaluator/evaluator.py +9 -1
- levelapp/repository/filesystem.py +203 -0
- levelapp/simulator/schemas.py +4 -4
- levelapp/simulator/simulator.py +57 -43
- levelapp/simulator/utils.py +52 -81
- levelapp/workflow/base.py +33 -2
- levelapp/workflow/config.py +6 -2
- levelapp/workflow/context.py +3 -1
- levelapp/workflow/runtime.py +3 -3
- {levelapp-0.1.3.dist-info → levelapp-0.1.5.dist-info}/METADATA +146 -31
- {levelapp-0.1.3.dist-info → levelapp-0.1.5.dist-info}/RECORD +26 -15
- {levelapp-0.1.3.dist-info → levelapp-0.1.5.dist-info}/WHEEL +0 -0
- {levelapp-0.1.3.dist-info → levelapp-0.1.5.dist-info}/licenses/LICENSE +0 -0

levelapp/endpoint/client.py
ADDED
@@ -0,0 +1,102 @@
+"""levelapp/endpoint/client.py"""
+import os
+import httpx
+import asyncio
+import logging
+
+from dataclasses import dataclass, field
+from typing import List, Dict, Any
+from pydantic import BaseModel, Field
+
+from levelapp.endpoint.schemas import HttpMethod, HeaderConfig, RequestSchemaConfig, ResponseMappingConfig
+
+
+class EndpointConfig(BaseModel):
+    """Complete endpoint configuration."""
+    name: str
+    base_url: str
+    path: str
+    method: HttpMethod
+    headers: List[HeaderConfig] = Field(default_factory=list)
+    request_schema: List[RequestSchemaConfig] = Field(default_factory=list)
+    response_mapping: List[ResponseMappingConfig] = Field(default_factory=list)
+    timeout: int = Field(default=30)
+    retry_count: int = Field(default=3)
+    retry_backoff: float = Field(default=1.0)
+
+    @classmethod
+    def validate_path(cls, v: str) -> str:
+        if not v.startswith('/'):
+            return f"/{v}"
+        return v
+
+
+@dataclass
+class APIClient:
+    """HTTP client for REST API interactions"""
+    config: EndpointConfig
+    client: httpx.AsyncClient = field(init=False)
+    logger: logging.Logger = field(init=False)
+
+    def __post_init__(self):
+        self.logger = logging.getLogger(f"AsyncAPIClient.{self.config.name}")
+        self.client = httpx.AsyncClient(
+            base_url=self.config.base_url,
+            timeout=self.config.timeout,
+            follow_redirects=True
+        )
+
+    async def __aenter__(self) -> "APIClient":
+        return self
+
+    async def __aexit__(self, *args) -> None:
+        await self.client.aclose()
+
+    def _build_headers(self) -> Dict[str, str]:
+        """Build headers with secure value resolution."""
+        headers = {}
+
+        for header in self.config.headers:
+            if header.secure:
+                value = os.getenv(header.value)
+                if value is None:
+                    self.logger.warning(f"Secure header '{header.name}' env var '{header.value}' not found")
+                    continue
+                headers[header.name] = value
+            else:
+                headers[header.name] = header.value
+
+        return headers
+
+    async def execute(
+        self,
+        payload: Dict[str, Any] | None = None,
+        query_params: Dict[str, Any] | None = None,
+    ) -> httpx.Response:
+        """Execute asynchronous REST API request with retry logic."""
+        headers = self._build_headers()
+
+        for attempt in range(self.config.retry_count):
+            try:
+                response = await self.client.request(
+                    method=self.config.method.value,
+                    url=self.config.path,
+                    json=payload,
+                    params=query_params,
+                    headers=headers,
+                )
+                response.raise_for_status()
+                return response
+
+            except httpx.HTTPStatusError as e:
+                self.logger.error(f"HTTP {e.response.status_code}: {e}")
+                if attempt == self.config.retry_count - 1:
+                    raise
+
+            except httpx.RequestError as e:
+                self.logger.error(f"Request failed (attempt {attempt + 1}): {e}")
+                if attempt == self.config.retry_count - 1:
+                    raise
+            await asyncio.sleep(delay=self.config.retry_backoff * (attempt + 1))
+
+        raise RuntimeError("Max retries exceeded")
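
A minimal usage sketch for the new client, assuming the package is installed and the target host is reachable (httpbin.org is used purely as a stand-in echo service, not something the package references). Note that validate_path is a plain classmethod here; without a pydantic @field_validator decorator it is never invoked during validation, so paths should be supplied with a leading slash.

import asyncio
from levelapp.endpoint.client import APIClient, EndpointConfig
from levelapp.endpoint.schemas import HttpMethod, HeaderConfig

async def demo():
    config = EndpointConfig(
        name="echo",
        base_url="https://httpbin.org",
        path="/post",
        method=HttpMethod.POST,
        headers=[HeaderConfig(name="X-Demo", value="1")],  # secure=False: sent as a literal value
        timeout=10,
        retry_count=2,
    )
    # APIClient is an async context manager; __aexit__ closes the
    # underlying httpx.AsyncClient.
    async with APIClient(config=config) as client:
        response = await client.execute(payload={"message": "ping"})
        print(response.status_code, response.json()["json"])

asyncio.run(demo())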

levelapp/endpoint/manager.py
ADDED
@@ -0,0 +1,114 @@
+"""levelapp/endpoint/manager.py"""
+import httpx
+import yaml
+import logging
+
+from pathlib import Path
+from typing import Dict, List, Any
+from pydantic import ValidationError
+
+from levelapp.endpoint.schemas import ResponseMappingConfig
+from levelapp.endpoint.tester import ConnectivityTester
+from levelapp.endpoint.client import EndpointConfig, APIClient
+from levelapp.endpoint.parsers import RequestPayloadBuilder, ResponseDataExtractor
+
+
+class EndpointConfigManager:
+    """Manages endpoint configurations and creates testers."""
+    def __init__(self, config_path: Path | None = None):
+        self.config_path = config_path
+        self.endpoints: Dict[str, EndpointConfig] = {}
+        self.logger = logging.getLogger("ConfigurationManager")
+
+        if config_path:
+            self._load_config()
+
+    def _load_config(self) -> None:
+        """Load and validate YAML configuration file."""
+        try:
+            with open(self.config_path, "r") as f:
+                data = yaml.safe_load(f)
+
+            for endpoint_data in data.get("endpoints", []):
+                config = EndpointConfig.model_validate(endpoint_data)
+                self.endpoints[config.name] = config
+                self.logger.info(f"Loaded endpoint config: {config.name}")
+
+        except ValidationError as e:
+            self.logger.error(f"Failed to load endpoint config: {e}")
+
+        except Exception as e:
+            self.logger.error(f"Failed to load endpoint config: {e}", exc_info=e)
+            raise RuntimeError("Failed to extract endpoints data from YAML file:\n{e}")
+
+    def set_endpoints(self, endpoints_config: List[EndpointConfig]):
+        for endpoint in endpoints_config:
+            try:
+                config = EndpointConfig.model_validate(endpoint)
+                self.endpoints[config.name] = config
+
+            except ValidationError as e:
+                self.logger.error(f"Failed to load endpoint config: {e}", exc_info=e)
+                continue
+
+    def build_response_mapping(self, content: List[Dict[str, Any]]) -> List[ResponseMappingConfig]:
+        mappings = []
+        for el in content:
+            try:
+                mappings.append(ResponseMappingConfig.model_validate(el))
+            except ValidationError as e:
+                self.logger.error(f"Failed to validate response mapping: {e}", exc_info=e)
+
+        return mappings
+
+    async def send_request(
+        self,
+        endpoint_config: EndpointConfig,
+        context: Dict[str, Any],
+        contextual_mode: bool = False
+    ) -> httpx.Response:
+        payload_builder = RequestPayloadBuilder()
+        client = APIClient(config=endpoint_config)
+
+        if not contextual_mode:
+            context = payload_builder.build(
+                schema=endpoint_config.request_schema,
+                context=context,
+            )
+
+        async with client:
+            response = await client.execute(payload=context)
+
+        self.logger.info(f"Response status: {response.status_code}")
+
+        return response
+
+    @staticmethod
+    def extract_response_data(
+        response: httpx.Response,
+        mappings: List[ResponseMappingConfig],
+    ) -> Dict[str, Any]:
+        extractor = ResponseDataExtractor()
+        response_data = response.json() if response.text else {}
+        extracted = extractor.extract(
+            response_data=response_data,
+            mappings=mappings
+        )
+
+        return extracted
+
+    def get_tester(self, endpoint_name: str) -> ConnectivityTester:
+        """Factory method: create connectivity tester for endpoint."""
+        if endpoint_name not in self.endpoints:
+            raise KeyError(f"Endpoint '{endpoint_name}' not found in configuration")
+
+        return ConnectivityTester(self.endpoints[endpoint_name])
+
+    def test_all(self, context: Dict[str, Any] | None = None) -> Dict[str, Dict[str, Any]]:
+        """Test all configured endpoints."""
+        results = {}
+        for name in self.endpoints:
+            tester = self.get_tester(name)
+            results[name] = tester.test(context)

+        return results
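
One caveat worth flagging: ConnectivityTester.test (defined in tester.py below) is a coroutine, but test_all calls it synchronously, so as written it collects coroutine objects rather than results. A sketch of an awaited variant; the wrapper function itself is illustrative, only the manager methods it calls come from the diff:

import asyncio
from typing import Any, Dict

async def test_all_async(manager, context: Dict[str, Any] | None = None) -> Dict[str, Dict[str, Any]]:
    """Run every configured endpoint's connectivity test concurrently."""
    names = list(manager.endpoints)
    testers = [manager.get_tester(name) for name in names]
    # gather awaits each ConnectivityTester.test coroutine in order
    results = await asyncio.gather(*(t.test(context) for t in testers))
    return dict(zip(names, results))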

levelapp/endpoint/parsers.py
ADDED
@@ -0,0 +1,120 @@
+"""levelapp/endpoint/parsers.py"""
+from typing import List, Dict, Any
+
+from levelapp.config.endpoint_ import ResponseMappingConfig
+from levelapp.endpoint.schemas import RequestSchemaConfig
+
+
+class RequestPayloadBuilder:
+    def build(self, schema: List[RequestSchemaConfig], context: Dict[str, Any]) -> Dict[str, Any]:
+        """
+        Builds nested JSON payloads using dot-notation paths.
+
+        Args:
+            schema (List[RequestSchemaConfig]): List of request schema configurations.
+            context (Dict[str, Any]): Context for building the payload.
+
+        Returns:
+            payload (Dict[str, Any]): Request payload.
+        """
+        payload = {}
+
+        for field_config in schema:
+            value = self._resolve_value(config=field_config, context=context)
+            if value is None and field_config.required:
+                raise ValueError(f"Required field '{field_config.field_path}' has no value")
+
+            self._set_nested_value(obj=payload, path=field_config.field_path, value=value)
+
+        return payload
+
+    @staticmethod
+    def _resolve_value(config: RequestSchemaConfig, context: Dict[str, Any]) -> Any:
+        """
+        Resolve value based on type: static, env, or dynamic.
+
+        Args:
+            config (RequestSchemaConfig): Request schema configuration.
+            context (Dict[str, Any]): Context for building the payload.
+
+        Returns:
+            Any: Value resolved.
+        """
+        if config.value_type == "static":
+            return config.value
+        elif config.value_type == "env":
+            import os
+            return os.getenv(config.value)
+        elif config.value_type == "dynamic":
+            return context.get(config.value, None)
+
+        return config.value
+
+    @staticmethod
+    def _set_nested_value(obj: Dict, path: str, value: Any) -> None:
+        parts: List[str] = path.split(".")
+        for part in parts[:-1]:
+            obj = obj.setdefault(part, {})
+
+        obj[parts[-1]] = value
+
+
+class ResponseDataExtractor:
+    """Extracts data from API response using mapping-based config."""
+    def extract(
+        self,
+        response_data: Dict[str, Any],
+        mappings: List[ResponseMappingConfig]
+    ) -> Dict[str, Any]:
+        """
+        Extracts data from API response using mapping-based config.
+
+        Args:
+            response_data (Dict[str, Any]): API response data.
+            mappings (List[ResponseMappingConfig]): List of response mappings.
+
+        Returns:
+            Dict[str, Any]: Extracted data.
+        """
+        result: Dict[str, Any] = {}
+
+        for mapping in mappings:
+            try:
+                value = self._extract_by_path(obj=response_data, path=mapping.field_path, default=mapping.default)
+                result[mapping.extract_as] = value
+
+            except Exception as e:
+                print(f"Failed to extract '{mapping.field_path}':\n{e}")
+                result[mapping.extract_as] = mapping.default
+
+        return result
+
+    @staticmethod
+    def _extract_by_path(obj: Dict, path: str, default: Any = "N/A") -> Any:
+        """
+        Extracts value using JSON path-like notation.
+        """
+        parts = path.split(".")
+        current = obj
+
+        for part in parts:
+            if not isinstance(current, dict):
+                print("[extract_by_path][WARNING] the response data is not a dict.")
+                return default
+
+            try:
+                if '[' in part and ']' in part:
+                    key, idx = part.split('[')
+                    idx = int(idx.rstrip(']'))
+                    current = current[key][idx] if key else current[idx]
+                else:
+                    if part not in current:
+                        print(f"[extract_by_path][WARNING] Key '{part}' is missing from response.")
+                        return default
+                    current = current.get(part)
+
+            except (KeyError, IndexError, TypeError, AttributeError) as e:
+                print(f"[extract_by_path][ERROR] Error type <{e.__class__.__name__}> : {e.args[0]}")
+                return default
+
+        return current
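
The builder/extractor pair can be exercised standalone; a small sketch (the field paths, values, and context keys below are invented for illustration):

from levelapp.endpoint.parsers import RequestPayloadBuilder, ResponseDataExtractor
from levelapp.endpoint.schemas import RequestSchemaConfig, ResponseMappingConfig

builder = RequestPayloadBuilder()
payload = builder.build(
    schema=[
        # "dynamic" resolves the value from the context dict by key
        RequestSchemaConfig(field_path="data.user.id", value="user_id", value_type="dynamic"),
        # "static" passes the value through unchanged
        RequestSchemaConfig(field_path="data.source", value="levelapp", value_type="static"),
    ],
    context={"user_id": 42},
)
# payload == {"data": {"user": {"id": 42}, "source": "levelapp"}}

extractor = ResponseDataExtractor()
extracted = extractor.extract(
    response_data={"payload": {"items": [{"msg": "hi"}]}},
    # bracket notation indexes into lists: "items[0]" -> items list, element 0
    mappings=[ResponseMappingConfig(field_path="payload.items[0].msg", extract_as="reply", default="N/A")],
)
# extracted == {"reply": "hi"}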

levelapp/endpoint/schemas.py
ADDED
@@ -0,0 +1,38 @@
+"""levelapp/endpoint/schemas.py"""
+from enum import Enum
+from typing import Any
+
+from pydantic import BaseModel
+
+
+class HttpMethod(str, Enum):
+    GET = "GET"
+    POST = "POST"
+    PUT = "PUT"
+    PATCH = "PATCH"
+    DELETE = "DELETE"
+
+
+class HeaderConfig(BaseModel):
+    """Secure header configuration with environment variables support."""
+    name: str
+    value: str
+    secure: bool = False
+
+    class Config:
+        frozen = True
+
+
+class RequestSchemaConfig(BaseModel):
+    """Schema Definition for request payload population."""
+    field_path: str  # JSON path-like: "data.user.id"
+    value: Any
+    value_type: str = "static"
+    required: bool = True
+
+
+class ResponseMappingConfig(BaseModel):
+    """Response data extraction mapping."""
+    field_path: str
+    extract_as: str
+    default: Any = None
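
A sketch of how a single YAML endpoint entry maps onto these models; the dict below mirrors what EndpointConfigManager._load_config feeds to EndpointConfig.model_validate, and every field value is invented:

from levelapp.endpoint.client import EndpointConfig

entry = {
    "name": "chat",
    "base_url": "https://api.example.com",
    "path": "/v1/messages",
    "method": "POST",  # coerced to HttpMethod.POST (str-valued enum)
    "headers": [
        # secure=True means `value` names an env var resolved at request time
        {"name": "Authorization", "value": "API_TOKEN", "secure": True},
    ],
    "request_schema": [
        {"field_path": "message.text", "value": "user_message", "value_type": "dynamic"},
    ],
    "response_mapping": [
        {"field_path": "payload.message", "extract_as": "agent_reply", "default": "N/A"},
    ],
}
config = EndpointConfig.model_validate(entry)
print(config.method, config.headers[0].secure)  # HttpMethod.POST True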

levelapp/endpoint/tester.py
ADDED
@@ -0,0 +1,53 @@
+"""levelapp/endpoint/tester.py"""
+import logging
+from typing import Dict, Any
+
+from levelapp.config.endpoint_ import ResponseExtractor
+from levelapp.endpoint.client import EndpointConfig, APIClient
+from levelapp.endpoint.parsers import RequestPayloadBuilder
+
+
+class ConnectivityTester:
+    """Tests REST endpoint connectivity with configurable behavior."""
+    def __init__(self, config: EndpointConfig):
+        self.config = config
+        self.client = APIClient(config=config)
+        self.payload_builder = RequestPayloadBuilder()
+        self.response_extractor = ResponseExtractor()
+        self.logger = logging.getLogger(f"ConnectivityTester.{self.config.name}")
+
+    async def test(self, context: Dict[str, Any] = None) -> Dict[str, Any]:
+        """Execute connectivity test (template method)."""
+        context = context or {}
+
+        self.logger.info(f"Starting connectivity test for '{self.config.name}'")
+
+        try:
+            payload = None
+            if self.config.request_schema:
+                payload = self.payload_builder.build(schema=self.config.request_schema, context=context)
+                self.logger.debug(f"Request payload: {payload}")
+
+            response = await self.client.execute(payload=payload)
+            self.logger.debug(f"Response status: {response.status_code}")
+
+            response_data = response.json() if response.text else {}
+            extracted = self.response_extractor.extract(
+                response_data=response_data,
+                mappings=self.config.response_mapping,
+            )
+
+            return {
+                "success": True,
+                "status_code": response.status_code,
+                "extracted_data": extracted,
+                "raw_response": response,
+            }
+
+        except Exception as e:
+            self.logger.error(f"Connectivity test failed: {e}", exc_info=e)
+            return {
+                "success": False,
+                "error": str(e),
+                "error_type": type(e).__name__,
+            }
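
Running a single connectivity test end to end might look like the following; the config path comes from the file list above (levelapp/config/endpoints.yaml), while the endpoint name "chat" and the context key are illustrative and depend on what that YAML actually defines:

import asyncio
from pathlib import Path
from levelapp.endpoint.manager import EndpointConfigManager

async def check():
    manager = EndpointConfigManager(config_path=Path("levelapp/config/endpoints.yaml"))
    tester = manager.get_tester("chat")  # raises KeyError if the name is absent
    result = await tester.test(context={"user_message": "ping"})
    # On success: {"success": True, "status_code": ..., "extracted_data": ..., "raw_response": ...}
    # On failure: {"success": False, "error": ..., "error_type": ...}
    print(result["success"])

asyncio.run(check())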

levelapp/endpoint/usage_example.py
ADDED
@@ -0,0 +1,39 @@
+import asyncio
+import os
+from pathlib import Path
+from levelapp.endpoint.manager import EndpointConfig
+
+
+async def main():
+    manager = EndpointConfig(config_path=Path("../config/dashq_api.yaml"))
+
+    mappings = manager.build_response_mapping(
+        [
+            {"field_path": "payload.message", "extract_as": "agent_reply"},
+            {"field_path": "payload.metadata", "extract_as": "metadata"},
+            {"field_path": "eventType", "extract_as": "event_type"},
+            {"field_path": "payload.handoffMetadata", "extract_as": "handoff_metadata"},
+        ]
+    )
+
+    print(f"Generated Mappings:\n{mappings}\n---")
+
+    context = {
+        "conversation_id": "238484ef-403b-43c5-9908-884486149d0b",
+        "user_message": "Hello, world!"
+    }
+
+    tester = manager.get_tester(endpoint_name="dashq")
+    os.environ["ENVIRONMENT"] = "test"
+    results = await tester.test(context=context)
+    print(f"Test results:\n{results}\n---")
+
+    extracted_data = await manager.extract_response_data(
+        endpoint_name="dashq",
+        context=context,
+        mappings=mappings,
+    )
+    print(f"Extracted Data:\n{extracted_data}\n---")
+
+if __name__ == '__main__':
+    asyncio.run(main())
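
As shipped, this example appears inconsistent with the modules above: it constructs EndpointConfig with a config_path keyword (an EndpointConfigManager argument) yet calls manager methods on it, and it awaits extract_response_data, which the manager defines as a plain staticmethod taking (response, mappings). A corrected sketch under that reading:

import asyncio
from pathlib import Path
from levelapp.endpoint.manager import EndpointConfigManager

async def main():
    # EndpointConfigManager, not EndpointConfig, accepts config_path
    manager = EndpointConfigManager(config_path=Path("../config/dashq_api.yaml"))

    mappings = manager.build_response_mapping([
        {"field_path": "payload.message", "extract_as": "agent_reply"},
        {"field_path": "eventType", "extract_as": "event_type"},
    ])

    tester = manager.get_tester(endpoint_name="dashq")
    results = await tester.test(context={"user_message": "Hello, world!"})
    print(f"Test results:\n{results}\n---")

    # extract_response_data is synchronous and operates on an httpx.Response
    if results["success"]:
        extracted = EndpointConfigManager.extract_response_data(
            response=results["raw_response"],
            mappings=mappings,
        )
        print(f"Extracted Data:\n{extracted}\n---")

if __name__ == '__main__':
    asyncio.run(main())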
levelapp/evaluator/evaluator.py
CHANGED
@@ -279,7 +279,15 @@ class MetadataEvaluator(BaseEvaluator):
         for k, v in output.items():
             field = v.get("field_name", "N/A")
             score = v.get("set_scores", -1)
-
+
+            if score is None:
+                results[field] = -1
+
+            try:
+                results[field] = int(score[0]) if isinstance(score, list) else int(score)
+
+            except (TypeError, ValueError):
+                results[field] = -1
 
         return results
 
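
The new guard coerces set_scores into an integer with a -1 fallback. Note that the None branch also falls through into the try block, where int(None) raises TypeError and lands on the same -1. A small worked example (the field names and scores are invented):

results = {}
output = {
    "a": {"field_name": "intent", "set_scores": [1.0, 0.0]},  # list -> int(score[0]) == 1
    "b": {"field_name": "topic", "set_scores": "2"},          # str  -> int("2") == 2
    "c": {"field_name": "tone", "set_scores": None},          # None -> int(None) raises TypeError -> -1
}
for k, v in output.items():
    field = v.get("field_name", "N/A")
    score = v.get("set_scores", -1)
    if score is None:
        results[field] = -1
    try:
        results[field] = int(score[0]) if isinstance(score, list) else int(score)
    except (TypeError, ValueError):
        results[field] = -1

print(results)  # {'intent': 1, 'topic': 2, 'tone': -1}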

levelapp/repository/filesystem.py
ADDED
@@ -0,0 +1,203 @@
+import json
+from pathlib import Path
+
+from typing import List, Dict, Any, Type, TYPE_CHECKING
+
+from pydantic.v1 import ValidationError
+
+from levelapp.core.base import BaseRepository, Model
+from levelapp.aspects import logger
+
+if TYPE_CHECKING:
+    from levelapp.workflow.config import WorkflowConfig
+
+
+class FileSystemRepository(BaseRepository):
+    """
+    File-system implementation of BaseRepository.
+    Persists Pydantic model data as JSON files under the configured base path.
+    """
+    def __init__(self, config: "WorkflowConfig | None" = None):
+        self._CLASS_NAME = self.__class__.__name__
+
+        self.config = config
+        base_path = getattr(config.repository, "base_path", "./data") if config else "./data"
+        self.base_path = Path(base_path).resolve()
+        self.base_path.mkdir(parents=True, exist_ok=True)
+        logger.info(f"[{self.__class__.__name__}] Base path: {base_path}")
+
+    def connect(self) -> None:
+        """No-op for local storage."""
+        if not self.base_path.exists():
+            self.base_path.mkdir(parents=True, exist_ok=True)
+        logger.info(f"[{self._CLASS_NAME}] connected to {self.base_path}")
+
+    def close(self) -> None:
+        """No-op for local storage."""
+        logger.info(f"[{self._CLASS_NAME}] Closed (no active connections)")
+
+    def _compose_path(
+        self,
+        collection_id: str,
+        section_id: str,
+        sub_collection_id: str,
+        document_id: str,
+    ) -> Path:
+        """
+        Compose the hierarchical path for a document.
+
+        Args:
+            collection_id (str): the ID for the whole collection.
+            section_id (str): the ID for the section.
+            sub_collection_id (str): the ID for the sub collection.
+            document_id (str): the ID for the document.
+
+        Returns:
+            Path: the composed path.
+        """
+        path = self.base_path / collection_id / section_id / sub_collection_id
+        path.mkdir(parents=True, exist_ok=True)
+        return path / f"{document_id}.json"
+
+    def retrieve_document(
+        self,
+        collection_id: str,
+        section_id: str,
+        sub_collection_id: str,
+        document_id: str,
+        model_type: Type[Model]
+    ) -> Model | None:
+        """
+        Retrieve a document from the local JSON file system.
+
+        Args:
+            collection_id (str): the ID for the whole collection.
+            section_id (str): the ID for the section.
+            sub_collection_id (str): the ID for the sub collection.
+            document_id (str): the ID for the document.
+            model_type (Type[Model]): Pydantic model for parsing.
+
+        Returns:
+            Model | None: An instance of the provided model.
+        """
+        path = self._compose_path(collection_id, section_id, sub_collection_id, document_id)
+        if not path.exists():
+            logger.warning(f"[{self._CLASS_NAME}] Document '{path}' no found")
+            return None
+
+        try:
+            with path.open("r", encoding="utf-8") as f:
+                data = json.load(f)
+
+            return model_type.model_validate(data)
+
+        except json.JSONDecodeError as e:
+            logger.error(f"[{self._CLASS_NAME}] Failed to load the JSON file '{document_id}':\n{e}")
+            return None
+
+        except ValidationError as e:
+            logger.error(f"[{self._CLASS_NAME}] Failed to instantiate a Pydantic model for file '{document_id}':\n{e}")
+            return None
+
+        except Exception as e:
+            logger.exception(f"[{self._CLASS_NAME}] Unexpected error retrieving file '{document_id}':\n{e}")
+            return None
+
+    def store_document(
+        self,
+        collection_id: str,
+        section_id: str,
+        sub_collection_id: str,
+        document_id: str,
+        data: Model
+    ) -> None:
+        """
+        Store a document as JSON file locally.
+
+        Args:
+            collection_id (str): the ID for the whole collection.
+            section_id (str): the ID for the section.
+            sub_collection_id (str): the ID for the sub collection.
+            document_id (str): the ID for the document.
+            data (Model): Pydantic model for parsing.
+        """
+        path = self._compose_path(collection_id, section_id, sub_collection_id, document_id)
+
+        try:
+            with path.open("w", encoding="utf-8") as f:
+                json.dump(data.model_dump(), f, ensure_ascii=False, indent=2)
+            logger.info(f"[{self._CLASS_NAME}] Stored document '{document_id}' in '{path}'")
+
+        except Exception as e:
+            logger.exception(f"[{self._CLASS_NAME}] Failed to store document '{document_id}' in '{path}':\n{e}'")
+
+    def query_collection(
+        self,
+        collection_id: str,
+        section_id: str,
+        sub_collection_id: str,
+        filters: Dict[str, Any],
+        model_type: Type[Model]
+    ) -> List[Model]:
+        """
+        Query all document in a sub collection, applying simple equality filters.
+
+        Args:
+            collection_id (str): the ID for the whole collection.
+            section_id (str): the ID for the section.
+            sub_collection_id (str): the ID for the sub collection.
+            filters (Dict[str, Any]): Pydantic model for parsing.
+            model_type (Type[Model]): Pydantic model for parsing.
+
+        Returns:
+            List[Model]: List of deserialized models that match the query.
+        """
+        path = self.base_path / collection_id / section_id / sub_collection_id
+
+        if not path.exists():
+            logger.warning(f"[{self._CLASS_NAME}] Sub-collection '{path}' not found")
+            return []
+
+        results = []
+        try:
+            for file in path.glob("*.json"):
+                with file.open("r", encoding="utf-8") as f:
+                    data = json.load(f)
+
+                if all(data.get(k) == v for k, v in filters.items()):
+                    results.append(model_type.model_validate(data))
+
+        except json.JSONDecodeError as e:
+            logger.error(f"[{self._CLASS_NAME}] Failed to read JSON files content:\n{e}")
+
+        except ValidationError as e:
+            logger.error(f"[{self._CLASS_NAME}] Failed to parse JSON files content:\n{e}")
+
+        return results
+
+    def delete_document(
+        self,
+        collection_id: str,
+        section_id: str,
+        sub_collection_id: str,
+        document_id: str
+    ) -> bool:
+        """Delete a JSON document from the local file system."""
+        path = self._compose_path(collection_id, section_id, sub_collection_id, document_id)
+
+        if not path.exists():
+            logger.warning(f"[{self._CLASS_NAME}] Document '{path}' not found")
+            return False
+
+        try:
+            path.unlink()
+            logger.info(f"[{self._CLASS_NAME}] Deleted document '{document_id}'")
+            return True
+
+        except FileNotFoundError:
+            logger.warning(f"[{self._CLASS_NAME}] Document '{document_id}' not found")
+            return False
+
+        except Exception as e:
+            logger.exception(f"[{self._CLASS_NAME}] Failed to delete document '{document_id}':\n{e}")
+            return False
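
A round-trip sketch for the repository; the Run model is invented, and any pydantic BaseModel should satisfy the Model type variable. One observation: the module imports ValidationError from pydantic.v1 while parsing with the v2 model_validate API, so v2 validation failures appear to bypass the ValidationError branches and, in retrieve_document, land in the generic Exception handler instead.

from pydantic import BaseModel
from levelapp.repository.filesystem import FileSystemRepository

class Run(BaseModel):
    run_id: str
    status: str

repo = FileSystemRepository()  # no WorkflowConfig -> defaults to ./data
repo.connect()

# Writes ./data/projects/demo/runs/run-001.json
repo.store_document("projects", "demo", "runs", "run-001",
                    Run(run_id="run-001", status="passed"))

loaded = repo.retrieve_document("projects", "demo", "runs", "run-001", model_type=Run)
matches = repo.query_collection("projects", "demo", "runs",
                                filters={"status": "passed"}, model_type=Run)
repo.delete_document("projects", "demo", "runs", "run-001")
repo.close()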