d365fo-client 0.3.0__py3-none-any.whl → 0.3.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- d365fo_client/cli.py +135 -0
- d365fo_client/client.py +371 -19
- d365fo_client/crud.py +33 -10
- d365fo_client/main.py +58 -0
- d365fo_client/mcp/auth_server/auth/providers/apikey.py +83 -0
- d365fo_client/mcp/auth_server/auth/providers/azure.py +91 -23
- d365fo_client/mcp/fastmcp_main.py +92 -43
- d365fo_client/mcp/fastmcp_utils.py +4 -0
- d365fo_client/mcp/mixins/base_tools_mixin.py +13 -12
- d365fo_client/mcp/mixins/crud_tools_mixin.py +181 -40
- d365fo_client/mcp/server.py +10 -1
- d365fo_client/mcp/tools/__init__.py +2 -0
- d365fo_client/mcp/tools/json_service_tools.py +326 -0
- d365fo_client/models.py +45 -0
- d365fo_client/odata_serializer.py +300 -0
- d365fo_client/query.py +30 -20
- d365fo_client/settings.py +14 -2
- {d365fo_client-0.3.0.dist-info → d365fo_client-0.3.2.dist-info}/METADATA +114 -3
- {d365fo_client-0.3.0.dist-info → d365fo_client-0.3.2.dist-info}/RECORD +23 -20
- {d365fo_client-0.3.0.dist-info → d365fo_client-0.3.2.dist-info}/WHEEL +0 -0
- {d365fo_client-0.3.0.dist-info → d365fo_client-0.3.2.dist-info}/entry_points.txt +0 -0
- {d365fo_client-0.3.0.dist-info → d365fo_client-0.3.2.dist-info}/licenses/LICENSE +0 -0
- {d365fo_client-0.3.0.dist-info → d365fo_client-0.3.2.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,326 @@
|
|
1
|
+
"""JSON service tools for MCP server."""
|
2
|
+
|
3
|
+
import json
|
4
|
+
import logging
|
5
|
+
import time
|
6
|
+
from typing import Any, Dict, List, Optional
|
7
|
+
|
8
|
+
from mcp import Tool
|
9
|
+
from mcp.types import TextContent
|
10
|
+
|
11
|
+
from ..client_manager import D365FOClientManager
|
12
|
+
|
13
|
+
logger = logging.getLogger(__name__)
|
14
|
+
|
15
|
+
|
16
|
+
class JsonServiceTools:
    """JSON service tools for the MCP server.

    Exposes two MCP tools:

    * ``d365fo_call_json_service`` - generic invocation of any D365 F&O JSON
      service operation via the ``/api/services`` URL pattern.
    * ``d365fo_call_sql_diagnostic_service`` - convenience wrapper around the
      common ``SysSqlDiagnosticService`` operations.
    """

    # Fixed service coordinates used by the SQL diagnostic convenience tool.
    SQL_DIAGNOSTIC_SERVICE_GROUP = "SysSqlDiagnosticService"
    SQL_DIAGNOSTIC_SERVICE_NAME = "SysSqlDiagnosticServiceOperations"

    # Human-readable summary templates per SQL diagnostic operation
    # ({n} is replaced with the record count).
    _SUMMARY_TEMPLATES: Dict[str, str] = {
        "GetAxSqlExecuting": "Found {n} currently executing SQL statements",
        "GetAxSqlResourceStats": "Retrieved {n} SQL resource statistics records",
        "GetAxSqlBlocking": "Found {n} SQL blocking situations",
        "GetAxSqlLockInfo": "Retrieved {n} SQL lock information records",
        "GetAxSqlDisabledIndexes": "Found {n} disabled indexes",
    }

    def __init__(self, client_manager: D365FOClientManager):
        """Initialize JSON service tools.

        Args:
            client_manager: D365FO client manager instance
        """
        self.client_manager = client_manager

    def get_tools(self) -> List[Tool]:
        """Get list of JSON service tools.

        Returns:
            List of Tool definitions
        """
        return [
            self._get_call_json_service_tool(),
            self._get_call_sql_diagnostic_service_tool(),
        ]

    def _get_call_json_service_tool(self) -> Tool:
        """Get generic JSON service call tool definition."""
        return Tool(
            name="d365fo_call_json_service",
            description="Call a D365 F&O JSON service endpoint using the /api/services pattern. This provides a generic way to invoke any JSON service operation in D365 F&O.",
            inputSchema={
                "type": "object",
                "properties": {
                    "serviceGroup": {
                        "type": "string",
                        "description": "Service group name (e.g., 'SysSqlDiagnosticService')",
                    },
                    "serviceName": {
                        "type": "string",
                        "description": "Service name (e.g., 'SysSqlDiagnosticServiceOperations')",
                    },
                    "operationName": {
                        "type": "string",
                        "description": "Operation name (e.g., 'GetAxSqlExecuting')",
                    },
                    "parameters": {
                        "type": "object",
                        "description": "Optional parameters to send in the POST body",
                        "additionalProperties": True,
                    },
                    "profile": {
                        "type": "string",
                        "description": "Configuration profile to use (optional - uses default profile if not specified)",
                    },
                },
                "required": ["serviceGroup", "serviceName", "operationName"],
            },
        )

    def _get_call_sql_diagnostic_service_tool(self) -> Tool:
        """Get SQL diagnostic service tool definition with predefined operations."""
        return Tool(
            name="d365fo_call_sql_diagnostic_service",
            description="Call SQL diagnostic service operations for D365 F&O performance monitoring. Provides convenient access to common SQL diagnostic operations.",
            inputSchema={
                "type": "object",
                "properties": {
                    "operation": {
                        "type": "string",
                        "enum": [
                            "GetAxSqlExecuting",
                            "GetAxSqlResourceStats",
                            "GetAxSqlBlocking",
                            "GetAxSqlLockInfo",
                            "GetAxSqlDisabledIndexes",
                        ],
                        "description": "SQL diagnostic operation to execute",
                    },
                    "parameters": {
                        "type": "object",
                        "description": "Operation-specific parameters",
                        "properties": {
                            "start": {
                                "type": "string",
                                "description": "Start date/time (ISO format) for GetAxSqlResourceStats",
                            },
                            "end": {
                                "type": "string",
                                "description": "End date/time (ISO format) for GetAxSqlResourceStats",
                            },
                            "sinceLastMinutes": {
                                "type": "integer",
                                "description": "Alternative to start/end - get stats for last N minutes",
                                "minimum": 1,
                                "maximum": 1440,
                            },
                        },
                    },
                    "profile": {
                        "type": "string",
                        "description": "Configuration profile to use (optional - uses default profile if not specified)",
                    },
                },
                "required": ["operation"],
            },
        )

    @staticmethod
    def _json_text_response(payload: Dict[str, Any]) -> List[TextContent]:
        """Serialize a payload dict to pretty-printed JSON in a TextContent list.

        ``default=str`` lets non-JSON-native values (dates, decimals, ...)
        degrade to their string representation instead of raising.
        """
        return [
            TextContent(
                type="text",
                text=json.dumps(payload, indent=2, default=str),
            )
        ]

    async def execute_call_json_service(self, arguments: dict) -> List[TextContent]:
        """Execute generic JSON service call tool.

        Args:
            arguments: Tool arguments

        Returns:
            List of TextContent responses
        """
        try:
            start_time = time.time()
            profile = arguments.get("profile", "default")

            service_group = arguments["serviceGroup"]
            service_name = arguments["serviceName"]
            operation_name = arguments["operationName"]
            parameters = arguments.get("parameters")

            # Get client and call service
            client = await self.client_manager.get_client(profile)
            response = await client.post_json_service(
                service_group=service_group,
                service_name=service_name,
                operation_name=operation_name,
                parameters=parameters,
            )

            execution_time = time.time() - start_time

            # Format response
            result: Dict[str, Any] = {
                "success": response.success,
                "statusCode": response.status_code,
                "data": response.data,
                "executionTimeMs": round(execution_time * 1000, 2),
                "serviceGroup": service_group,
                "serviceName": service_name,
                "operationName": operation_name,
            }
            if response.error_message:
                result["errorMessage"] = response.error_message

            return self._json_text_response(result)

        except Exception as e:
            logger.error(f"Error calling JSON service: {e}")
            return self._json_text_response(
                {
                    "success": False,
                    "error": str(e),
                    "serviceGroup": arguments.get("serviceGroup"),
                    "serviceName": arguments.get("serviceName"),
                    "operationName": arguments.get("operationName"),
                }
            )

    async def execute_call_sql_diagnostic_service(self, arguments: dict) -> List[TextContent]:
        """Execute SQL diagnostic service call tool.

        Args:
            arguments: Tool arguments

        Returns:
            List of TextContent responses
        """
        try:
            start_time = time.time()
            profile = arguments.get("profile", "default")
            operation = arguments["operation"]
            parameters = arguments.get("parameters", {})

            # GetAxSqlResourceStats needs its time window normalized to
            # explicit start/end values; other operations pass through as-is.
            service_parameters = None
            if operation == "GetAxSqlResourceStats":
                service_parameters = self._prepare_resource_stats_parameters(parameters)
            elif parameters:
                service_parameters = parameters

            # Get client and call service
            client = await self.client_manager.get_client(profile)
            response = await client.post_json_service(
                service_group=self.SQL_DIAGNOSTIC_SERVICE_GROUP,
                service_name=self.SQL_DIAGNOSTIC_SERVICE_NAME,
                operation_name=operation,
                parameters=service_parameters,
            )

            execution_time = time.time() - start_time

            # Format response
            result: Dict[str, Any] = {
                "success": response.success,
                "statusCode": response.status_code,
                "data": response.data,
                "executionTimeMs": round(execution_time * 1000, 2),
                "operation": operation,
                "parameters": service_parameters,
            }
            if response.error_message:
                result["errorMessage"] = response.error_message

            # Add operation-specific formatting
            if response.success and response.data:
                result["summary"] = self._format_operation_summary(operation, response.data)

            return self._json_text_response(result)

        except Exception as e:
            logger.error(f"Error calling SQL diagnostic service: {e}")
            return self._json_text_response(
                {
                    "success": False,
                    "error": str(e),
                    "operation": arguments.get("operation"),
                }
            )

    def _prepare_resource_stats_parameters(self, parameters: Dict[str, Any]) -> Optional[Dict[str, Any]]:
        """Prepare parameters for GetAxSqlResourceStats operation.

        Precedence: ``sinceLastMinutes`` wins over an explicit ``start``/``end``
        pair; when neither is given, a default 10-minute window ending "now"
        (UTC) is used.

        Args:
            parameters: Input parameters

        Returns:
            Formatted parameters for the service call
        """
        from datetime import datetime, timedelta, timezone

        if "sinceLastMinutes" in parameters:
            minutes = parameters["sinceLastMinutes"]
        elif "start" in parameters and "end" in parameters:
            # Use the caller-provided window verbatim.
            return {
                "start": parameters["start"],
                "end": parameters["end"],
            }
        else:
            # Default to the last 10 minutes.
            minutes = 10

        end_time = datetime.now(timezone.utc)
        start_time = end_time - timedelta(minutes=minutes)
        return {
            "start": start_time.isoformat(),
            "end": end_time.isoformat(),
        }

    def _format_operation_summary(self, operation: str, data: Any) -> Dict[str, Any]:
        """Format operation-specific summary information.

        Only list-shaped payloads get a record count and description; any other
        payload shape yields just the operation name.

        Args:
            operation: Operation name
            data: Response data

        Returns:
            Summary information
        """
        summary: Dict[str, Any] = {"operation": operation}

        if isinstance(data, list):
            # NOTE(review): count is stringified to preserve the historical
            # output format — confirm before switching to a plain int.
            summary["recordCount"] = str(len(data))

            template = self._SUMMARY_TEMPLATES.get(operation)
            if template:
                summary["description"] = template.format(n=len(data))
            else:
                summary["description"] = f"Operation {operation} completed successfully"

        return summary
|
d365fo_client/models.py
CHANGED
@@ -693,6 +693,51 @@ class SearchResults:
|
|
693
693
|
}
|
694
694
|
|
695
695
|
|
696
|
+
# ============================================================================
|
697
|
+
# JSON Service Models
|
698
|
+
# ============================================================================
|
699
|
+
|
700
|
+
|
701
|
+
@dataclass
class JsonServiceRequest:
    """Describes one invocation of a D365 F&O JSON service operation.

    Attributes:
        service_group: Service group the operation belongs to.
        service_name: Name of the service within the group.
        operation_name: Operation to invoke on the service.
        parameters: Optional POST-body parameters for the call.
    """

    service_group: str
    service_name: str
    operation_name: str
    parameters: Optional[Dict[str, Any]] = None

    def get_endpoint_path(self) -> str:
        """Build the ``/api/services/...`` URL path for this request."""
        segments = (self.service_group, self.service_name, self.operation_name)
        return "/api/services/" + "/".join(segments)

    def to_dict(self) -> Dict[str, Any]:
        """Return a plain-dict representation of this request."""
        payload: Dict[str, Any] = {}
        payload["service_group"] = self.service_group
        payload["service_name"] = self.service_name
        payload["operation_name"] = self.operation_name
        payload["parameters"] = self.parameters
        return payload
|
721
|
+
|
722
|
+
|
723
|
+
@dataclass
class JsonServiceResponse:
    """Outcome of a D365 F&O JSON service call.

    Attributes:
        success: Whether the call completed successfully.
        data: Parsed response payload (shape depends on the operation).
        status_code: HTTP status code returned by the endpoint.
        error_message: Error text when the call failed, otherwise ``None``.
    """

    success: bool
    data: Any
    status_code: int
    error_message: Optional[str] = None

    def to_dict(self) -> Dict[str, Any]:
        """Return a plain-dict representation of this response."""
        return dict(
            success=self.success,
            data=self.data,
            status_code=self.status_code,
            error_message=self.error_message,
        )
|
739
|
+
|
740
|
+
|
696
741
|
# ============================================================================
|
697
742
|
# Enhanced V2 Models for Advanced Metadata Caching
|
698
743
|
# ============================================================================
|
@@ -0,0 +1,300 @@
|
|
1
|
+
"""OData serialization utilities for D365 F&O client.
|
2
|
+
|
3
|
+
This module provides shared OData value serialization functionality
|
4
|
+
that can be used by both QueryBuilder and other components requiring
|
5
|
+
type-aware OData serialization.
|
6
|
+
"""
|
7
|
+
|
8
|
+
import logging
|
9
|
+
from typing import Any, Dict, Optional, TYPE_CHECKING
|
10
|
+
from urllib.parse import quote
|
11
|
+
|
12
|
+
if TYPE_CHECKING:
|
13
|
+
from .models import PublicEntityInfo, PublicEntityPropertyInfo
|
14
|
+
|
15
|
+
logger = logging.getLogger(__name__)
|
16
|
+
|
17
|
+
|
18
|
+
class ODataSerializer:
    """Shared OData value serialization utilities.

    This class provides comprehensive OData serialization support for all
    D365 Finance & Operations data types, ensuring proper URL encoding
    and OData protocol compliance.

    Type categories are shared between :meth:`serialize_value`,
    :meth:`_serialize_for_key`, and :meth:`_needs_quotes` so the three
    methods cannot drift out of sync.
    """

    # Core string-like types, including D365 F&O X++ types:
    # VarString (variable-length string), Memo (long text),
    # Container (ODataXppType.CONTAINER), Record (ODataXppType.RECORD).
    _CORE_STRING_TYPES = frozenset({
        "String", "Guid", "Binary", "Memo", "Container", "VarString", "Record",
    })

    # Common string-type aliases seen in metadata; treated as strings
    # without emitting an "unknown type" warning.
    _STRING_ALIAS_TYPES = frozenset({
        "Text", "Char", "Character", "Varchar", "NVarchar", "LongText",
        "ShortText", "Description", "Name", "Code", "Id", "Key",
    })

    # Numeric types; Real is D365 F&O's primary floating-point type
    # (ODataXppType.REAL), Money carries currency values.
    _NUMERIC_TYPES = frozenset({
        "Int32", "Int64", "Decimal", "Double", "Single", "Real", "Float",
        "Money", "Byte", "SByte", "Int16", "UInt16", "UInt32", "UInt64",
        "Number",
    })

    # Date/time types, including D365 F&O Date (date only), Time (time only)
    # and UtcDateTime (ODataXppType.UTC_DATETIME).
    _DATETIME_TYPES = frozenset({
        "DateTime", "DateTimeOffset", "Date", "Time", "UtcDateTime",
    })

    @staticmethod
    def serialize_value(value: Any, data_type: str, type_name: str) -> str:
        """Serialize a value according to OData protocol standards based on data type.

        Handles all OData EDM types and D365 F&O specific ODataXppType values.

        Args:
            value: The value to serialize
            data_type: The simplified data type (e.g., "String", "Int32", "DateTime")
            type_name: The full OData type name (e.g., "Edm.String", "Edm.Int32")

        Returns:
            Properly serialized value for OData URL
        """
        if value is None:
            return "null"

        str_value = str(value)

        if data_type in ODataSerializer._NUMERIC_TYPES:
            # Numeric types need no quotes or URL encoding.
            return str_value

        if data_type == "Boolean":
            # OData booleans are lowercase literals; anything else passes through.
            return (
                str_value.lower()
                if str_value.lower() in ["true", "false"]
                else str_value
            )

        if data_type == "Void":
            # D365 F&O ODataXppType.VOID - represents void/empty values.
            return "null"

        if (
            data_type in ODataSerializer._CORE_STRING_TYPES
            or data_type in ODataSerializer._DATETIME_TYPES
            or data_type == "Enum"
        ):
            # String-like, date/time and enum values are all URL-encoded.
            # Date/time values are expected in ISO 8601 form; enum values are
            # expected already fully qualified
            # (e.g. Microsoft.Dynamics.DataEntities.NoYes'Yes').
            return quote(str_value, safe="")

        # Unknown type: treat as string, warning only for types that are not
        # common string aliases.
        if data_type not in ODataSerializer._STRING_ALIAS_TYPES:
            logger.warning(
                f"Unknown data type '{data_type}' for OData serialization, treating as string"
            )
        return quote(str_value, safe="")

    @staticmethod
    def serialize_key_dict(
        key_dict: Dict[str, Any],
        entity_schema: Optional["PublicEntityInfo"] = None
    ) -> Dict[str, str]:
        """Serialize key dictionary with proper data type handling.

        Args:
            key_dict: Dictionary of key field names to values
            entity_schema: Optional entity schema for type-aware serialization

        Returns:
            Dictionary with properly serialized key values (URL-encoded but not OData-quoted)
        """
        if not entity_schema:
            # No schema available - fall back to plain string serialization.
            return {
                name: quote(str(val), safe="") for name, val in key_dict.items()
            }

        # Property lookup avoids a linear scan per key field.
        property_lookup = {prop.name: prop for prop in entity_schema.properties}

        serialized_dict: Dict[str, str] = {}
        for field_name, field_value in key_dict.items():
            prop = property_lookup.get(field_name)
            if prop:
                # Type-aware serialization: URL-encode but defer OData quoting
                # to format_composite_key.
                serialized_dict[field_name] = ODataSerializer._serialize_for_key(
                    field_value,
                    prop.data_type or "String",
                    prop.type_name or "Edm.String",
                )
            else:
                # Field not in schema - treat as string.
                serialized_dict[field_name] = quote(str(field_value), safe="")
        return serialized_dict

    @staticmethod
    def _serialize_for_key(value: Any, data_type: str, type_name: str) -> str:
        """Serialize a value for use in OData keys (URL-encoded, no OData quotes).

        Args:
            value: The value to serialize
            data_type: The simplified data type
            type_name: The full OData type name

        Returns:
            URL-encoded value ready for key formatting
        """
        if value is None:
            return "null"

        str_value = str(value)

        if data_type in ODataSerializer._NUMERIC_TYPES:
            # Numeric types don't need URL encoding.
            return str_value

        if data_type == "Boolean":
            # Boolean values should be lowercase, no encoding needed.
            return (
                str_value.lower()
                if str_value.lower() in ["true", "false"]
                else str_value
            )

        if data_type == "Void":
            return "null"

        # Strings (core + aliases), date/time, enums and unknown types are
        # all URL-encoded; unlike serialize_value, no warning is logged here.
        return quote(str_value, safe="")

    @staticmethod
    def format_composite_key(
        key_dict: Dict[str, str],
        entity_schema: Optional["PublicEntityInfo"] = None
    ) -> str:
        """Format a composite key dictionary into OData key string.

        Args:
            key_dict: Dictionary of serialized key field names to values
            entity_schema: Optional entity schema for determining quote requirements

        Returns:
            Formatted composite key string (e.g., "key1='value1',key2=123")
        """
        key_parts = []

        if entity_schema:
            # Use schema information to decide per-field quoting.
            property_lookup = {prop.name: prop for prop in entity_schema.properties}

            for key_name, key_value in key_dict.items():
                prop = property_lookup.get(key_name)
                if prop and not ODataSerializer._needs_quotes(prop.data_type):
                    # Numeric/boolean types go unquoted.
                    key_parts.append(f"{key_name}={key_value}")
                else:
                    # String-like types - and unknown fields, which default to
                    # string behavior - are quoted.
                    key_parts.append(f"{key_name}='{key_value}'")
        else:
            # No schema - quote every value (backward compatibility).
            for key_name, key_value in key_dict.items():
                key_parts.append(f"{key_name}='{key_value}'")

        return ",".join(key_parts)

    @staticmethod
    def _needs_quotes(data_type: str) -> bool:
        """Determine if a data type needs quotes in OData key formatting.

        Args:
            data_type: The data type to check

        Returns:
            True if the data type needs quotes, False otherwise
        """
        # String-like types (core + aliases + Enum) and date/time types are
        # quoted; numeric, boolean and everything else is not.
        return (
            data_type in ODataSerializer._CORE_STRING_TYPES
            or data_type in ODataSerializer._STRING_ALIAS_TYPES
            or data_type == "Enum"
            or data_type in ODataSerializer._DATETIME_TYPES
        )
|