d365fo-client 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- d365fo_client/__init__.py +305 -0
- d365fo_client/auth.py +93 -0
- d365fo_client/cli.py +700 -0
- d365fo_client/client.py +1454 -0
- d365fo_client/config.py +304 -0
- d365fo_client/crud.py +200 -0
- d365fo_client/exceptions.py +49 -0
- d365fo_client/labels.py +528 -0
- d365fo_client/main.py +502 -0
- d365fo_client/mcp/__init__.py +16 -0
- d365fo_client/mcp/client_manager.py +276 -0
- d365fo_client/mcp/main.py +98 -0
- d365fo_client/mcp/models.py +371 -0
- d365fo_client/mcp/prompts/__init__.py +43 -0
- d365fo_client/mcp/prompts/action_execution.py +480 -0
- d365fo_client/mcp/prompts/sequence_analysis.py +349 -0
- d365fo_client/mcp/resources/__init__.py +15 -0
- d365fo_client/mcp/resources/database_handler.py +555 -0
- d365fo_client/mcp/resources/entity_handler.py +176 -0
- d365fo_client/mcp/resources/environment_handler.py +132 -0
- d365fo_client/mcp/resources/metadata_handler.py +283 -0
- d365fo_client/mcp/resources/query_handler.py +135 -0
- d365fo_client/mcp/server.py +432 -0
- d365fo_client/mcp/tools/__init__.py +17 -0
- d365fo_client/mcp/tools/connection_tools.py +175 -0
- d365fo_client/mcp/tools/crud_tools.py +579 -0
- d365fo_client/mcp/tools/database_tools.py +813 -0
- d365fo_client/mcp/tools/label_tools.py +189 -0
- d365fo_client/mcp/tools/metadata_tools.py +766 -0
- d365fo_client/mcp/tools/profile_tools.py +706 -0
- d365fo_client/metadata_api.py +793 -0
- d365fo_client/metadata_v2/__init__.py +59 -0
- d365fo_client/metadata_v2/cache_v2.py +1372 -0
- d365fo_client/metadata_v2/database_v2.py +585 -0
- d365fo_client/metadata_v2/global_version_manager.py +573 -0
- d365fo_client/metadata_v2/search_engine_v2.py +423 -0
- d365fo_client/metadata_v2/sync_manager_v2.py +819 -0
- d365fo_client/metadata_v2/version_detector.py +439 -0
- d365fo_client/models.py +862 -0
- d365fo_client/output.py +181 -0
- d365fo_client/profile_manager.py +342 -0
- d365fo_client/profiles.py +178 -0
- d365fo_client/query.py +162 -0
- d365fo_client/session.py +60 -0
- d365fo_client/utils.py +196 -0
- d365fo_client-0.1.0.dist-info/METADATA +1084 -0
- d365fo_client-0.1.0.dist-info/RECORD +51 -0
- d365fo_client-0.1.0.dist-info/WHEEL +5 -0
- d365fo_client-0.1.0.dist-info/entry_points.txt +3 -0
- d365fo_client-0.1.0.dist-info/licenses/LICENSE +21 -0
- d365fo_client-0.1.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,1372 @@
|
|
1
|
+
"""Version-aware metadata cache implementation."""
|
2
|
+
|
3
|
+
import json
|
4
|
+
import logging
|
5
|
+
from datetime import datetime, timezone
|
6
|
+
from pathlib import Path
|
7
|
+
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
|
8
|
+
|
9
|
+
import aiosqlite
|
10
|
+
|
11
|
+
# Use TYPE_CHECKING to avoid circular import
|
12
|
+
if TYPE_CHECKING:
|
13
|
+
from ..metadata_api import MetadataAPIOperations
|
14
|
+
|
15
|
+
from ..models import (
|
16
|
+
ActionInfo,
|
17
|
+
ActionParameterInfo,
|
18
|
+
ActionParameterTypeInfo,
|
19
|
+
ActionReturnTypeInfo,
|
20
|
+
Cardinality,
|
21
|
+
DataEntityInfo,
|
22
|
+
EnumerationInfo,
|
23
|
+
EnumerationMemberInfo,
|
24
|
+
EnvironmentVersionInfo,
|
25
|
+
FixedConstraintInfo,
|
26
|
+
GlobalVersionInfo,
|
27
|
+
LabelInfo,
|
28
|
+
ModuleVersionInfo,
|
29
|
+
NavigationPropertyInfo,
|
30
|
+
ODataBindingKind,
|
31
|
+
PropertyGroupInfo,
|
32
|
+
PublicEntityActionInfo,
|
33
|
+
PublicEntityInfo,
|
34
|
+
PublicEntityPropertyInfo,
|
35
|
+
ReferentialConstraintInfo,
|
36
|
+
RelatedFixedConstraintInfo,
|
37
|
+
RelationConstraintInfo,
|
38
|
+
VersionDetectionResult,
|
39
|
+
)
|
40
|
+
from .database_v2 import MetadataDatabaseV2
|
41
|
+
from .global_version_manager import GlobalVersionManager
|
42
|
+
from .version_detector import ModuleVersionDetector
|
43
|
+
|
44
|
+
logger = logging.getLogger(__name__)
|
45
|
+
|
46
|
+
|
47
|
+
class MetadataCacheV2:
|
48
|
+
"""Version-aware metadata cache with intelligent invalidation"""
|
49
|
+
|
50
|
+
    def __init__(
        self,
        cache_dir: Path,
        base_url: str,
        metadata_api: Optional["MetadataAPIOperations"] = None,
    ):
        """Initialize metadata cache v2

        Args:
            cache_dir: Directory for cache storage (created if missing)
            base_url: D365 F&O environment base URL
            metadata_api: Optional MetadataAPIOperations instance for version detection
        """
        self.cache_dir = cache_dir
        self.base_url = base_url
        self.metadata_api = metadata_api
        # Ensure the cache directory exists before placing the database in it
        self.cache_dir.mkdir(parents=True, exist_ok=True)

        # Database and managers (both share the same SQLite file)
        self.db_path = cache_dir / "metadata_v2.db"
        self.database = MetadataDatabaseV2(self.db_path)
        self.version_manager = GlobalVersionManager(self.db_path)

        # Version detector - initialized when metadata_api is available
        # (may also be attached later via set_metadata_api())
        self.version_detector = None
        if self.metadata_api:
            self.version_detector = ModuleVersionDetector(self.metadata_api)

        # Cache state, populated lazily by initialize()/check_version_and_sync()
        self._environment_id: Optional[int] = None
        self._current_version_info: Optional[EnvironmentVersionInfo] = None
        self._current_global_version_id: Optional[int] = None
        self._initialized = False
|
83
|
+
|
84
|
+
async def initialize(self):
|
85
|
+
"""Initialize cache database and environment"""
|
86
|
+
if self._initialized:
|
87
|
+
return
|
88
|
+
|
89
|
+
await self.database.initialize()
|
90
|
+
self._environment_id = await self.database.get_or_create_environment(
|
91
|
+
self.base_url
|
92
|
+
)
|
93
|
+
self._initialized = True
|
94
|
+
|
95
|
+
logger.info(
|
96
|
+
f"MetadataCacheV2 initialized for environment {self._environment_id}"
|
97
|
+
)
|
98
|
+
|
99
|
+
def set_metadata_api(self, metadata_api: "MetadataAPIOperations"):
|
100
|
+
"""Set metadata API operations instance and initialize version detector
|
101
|
+
|
102
|
+
Args:
|
103
|
+
metadata_api: MetadataAPIOperations instance
|
104
|
+
"""
|
105
|
+
self.metadata_api = metadata_api
|
106
|
+
self.version_detector = ModuleVersionDetector(metadata_api)
|
107
|
+
logger.debug("Version detector initialized with metadata API")
|
108
|
+
|
109
|
+
    async def check_version_and_sync(
        self, metadata_api: Optional["MetadataAPIOperations"] = None
    ) -> Tuple[bool, Optional[int]]:
        """Check environment version and determine if sync is needed

        Fail-open behavior: any failure (no detector, detection error,
        unexpected exception) reports sync_needed=True with no version id.

        Args:
            metadata_api: Optional MetadataAPIOperations instance for version detection

        Returns:
            Tuple of (sync_needed, global_version_id)
        """
        await self.initialize()

        # Set up version detector if metadata_api is provided
        # (only when none exists yet - an already-attached detector is kept)
        if metadata_api and not self.version_detector:
            self.set_metadata_api(metadata_api)

        # Check if version detector is available
        if not self.version_detector:
            logger.warning("Version detector not available - sync needed")
            return True, None

        try:
            # Detect current version
            detection_result = await self.version_detector.get_environment_version()

            if not detection_result.success or not detection_result.version_info:
                logger.warning(
                    f"Version detection failed: {detection_result.error_message}"
                )
                return True, None

            version_info = detection_result.version_info
            logger.info(
                f"Version detected: {len(version_info.modules)} modules, "
                f"hash: {version_info.version_hash}"
            )

            # Set environment ID on version info
            version_info.environment_id = self._environment_id

            # Register/find global version (creates a new global version row
            # when this module combination has not been seen before)
            global_version_id, was_created = (
                await self.version_manager.register_environment_version(
                    self._environment_id, version_info.modules
                )
            )

            # Update current version info (cached for later *_get_current_* lookups)
            self._current_version_info = version_info
            self._current_global_version_id = global_version_id

            if was_created:
                # Brand-new version: nothing can be cached for it yet
                logger.info(f"New version detected: {global_version_id}")
                return True, global_version_id

            # Check if metadata exists for this version
            if await self._has_complete_metadata(global_version_id):
                logger.info(f"Using cached metadata for version {global_version_id}")
                return False, global_version_id
            else:
                logger.info(
                    f"Metadata incomplete for version {global_version_id}, sync needed"
                )
                return True, global_version_id

        except Exception as e:
            # Broad catch is deliberate: version detection is best-effort and
            # any error degrades to "sync needed" rather than failing the caller.
            logger.error(f"Version detection failed: {e}")
            return True, None
|
178
|
+
|
179
|
+
async def _has_complete_metadata(self, global_version_id: int) -> bool:
|
180
|
+
"""Check if metadata is complete for a global version
|
181
|
+
|
182
|
+
Args:
|
183
|
+
global_version_id: Global version ID to check
|
184
|
+
|
185
|
+
Returns:
|
186
|
+
True if metadata is complete
|
187
|
+
"""
|
188
|
+
async with aiosqlite.connect(self.db_path) as db:
|
189
|
+
# Check metadata version record
|
190
|
+
cursor = await db.execute(
|
191
|
+
"""SELECT sync_completed_at, entity_count, action_count, enumeration_count
|
192
|
+
FROM metadata_versions
|
193
|
+
WHERE global_version_id = ?""",
|
194
|
+
(global_version_id,),
|
195
|
+
)
|
196
|
+
|
197
|
+
row = await cursor.fetchone()
|
198
|
+
if not row or not row[0]: # No completed sync
|
199
|
+
return False
|
200
|
+
|
201
|
+
# Check basic entity count
|
202
|
+
cursor = await db.execute(
|
203
|
+
"SELECT COUNT(*) FROM data_entities WHERE global_version_id = ?",
|
204
|
+
(global_version_id,),
|
205
|
+
)
|
206
|
+
|
207
|
+
entity_count = (await cursor.fetchone())[0]
|
208
|
+
return entity_count > 0 # Has some entities
|
209
|
+
|
210
|
+
async def store_data_entities(
|
211
|
+
self, global_version_id: int, entities: List[DataEntityInfo]
|
212
|
+
):
|
213
|
+
"""Store data entities for global version
|
214
|
+
|
215
|
+
Args:
|
216
|
+
global_version_id: Global version ID
|
217
|
+
entities: List of data entity information
|
218
|
+
"""
|
219
|
+
async with aiosqlite.connect(self.db_path) as db:
|
220
|
+
# Clear existing entities for this version
|
221
|
+
await db.execute(
|
222
|
+
"DELETE FROM data_entities WHERE global_version_id = ?",
|
223
|
+
(global_version_id,),
|
224
|
+
)
|
225
|
+
|
226
|
+
# Insert new entities
|
227
|
+
for entity in entities:
|
228
|
+
await db.execute(
|
229
|
+
"""INSERT INTO data_entities
|
230
|
+
(global_version_id, name, public_entity_name, public_collection_name,
|
231
|
+
label_id, label_text, entity_category, data_service_enabled,
|
232
|
+
data_management_enabled, is_read_only)
|
233
|
+
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
|
234
|
+
(
|
235
|
+
global_version_id,
|
236
|
+
entity.name,
|
237
|
+
entity.public_entity_name,
|
238
|
+
entity.public_collection_name,
|
239
|
+
entity.label_id,
|
240
|
+
entity.label_text,
|
241
|
+
entity.entity_category,
|
242
|
+
entity.data_service_enabled,
|
243
|
+
entity.data_management_enabled,
|
244
|
+
entity.is_read_only,
|
245
|
+
),
|
246
|
+
)
|
247
|
+
|
248
|
+
await db.commit()
|
249
|
+
logger.info(
|
250
|
+
f"Stored {len(entities)} data entities for version {global_version_id}"
|
251
|
+
)
|
252
|
+
|
253
|
+
async def get_data_entities(
|
254
|
+
self,
|
255
|
+
global_version_id: Optional[int] = None,
|
256
|
+
data_service_enabled: Optional[bool] = None,
|
257
|
+
entity_category: Optional[str] = None,
|
258
|
+
name_pattern: Optional[str] = None,
|
259
|
+
) -> List[DataEntityInfo]:
|
260
|
+
"""Get data entities with filtering
|
261
|
+
|
262
|
+
Args:
|
263
|
+
global_version_id: Global version ID (uses current if None)
|
264
|
+
data_service_enabled: Filter by data service enabled status
|
265
|
+
entity_category: Filter by entity category
|
266
|
+
name_pattern: Filter by name pattern (SQL LIKE) - searches across all text fields:
|
267
|
+
name, public_entity_name, public_collection_name, label_id, label_text, entity_category
|
268
|
+
|
269
|
+
Returns:
|
270
|
+
List of matching data entities
|
271
|
+
"""
|
272
|
+
if global_version_id is None:
|
273
|
+
global_version_id = await self._get_current_global_version_id()
|
274
|
+
if global_version_id is None:
|
275
|
+
return []
|
276
|
+
|
277
|
+
# Build query conditions
|
278
|
+
conditions = ["global_version_id = ?"]
|
279
|
+
params = [global_version_id]
|
280
|
+
|
281
|
+
if data_service_enabled is not None:
|
282
|
+
conditions.append("data_service_enabled = ?")
|
283
|
+
params.append(data_service_enabled)
|
284
|
+
|
285
|
+
if entity_category is not None:
|
286
|
+
conditions.append("entity_category = ?")
|
287
|
+
params.append(entity_category)
|
288
|
+
|
289
|
+
if name_pattern is not None:
|
290
|
+
# Search across all text fields with OR conditions
|
291
|
+
conditions.append(
|
292
|
+
"(name LIKE ? OR public_entity_name LIKE ? OR public_collection_name LIKE ? OR label_id LIKE ? OR label_text LIKE ? OR entity_category LIKE ?)"
|
293
|
+
)
|
294
|
+
# Add the pattern 6 times for each field
|
295
|
+
params.extend([name_pattern] * 6)
|
296
|
+
|
297
|
+
where_clause = " AND ".join(conditions)
|
298
|
+
|
299
|
+
async with aiosqlite.connect(self.db_path) as db:
|
300
|
+
cursor = await db.execute(
|
301
|
+
f"""SELECT name, public_entity_name, public_collection_name,
|
302
|
+
label_id, label_text, entity_category, data_service_enabled,
|
303
|
+
data_management_enabled, is_read_only
|
304
|
+
FROM data_entities
|
305
|
+
WHERE {where_clause}
|
306
|
+
ORDER BY name""",
|
307
|
+
params,
|
308
|
+
)
|
309
|
+
|
310
|
+
entities = []
|
311
|
+
for row in await cursor.fetchall():
|
312
|
+
entities.append(
|
313
|
+
DataEntityInfo(
|
314
|
+
name=row[0],
|
315
|
+
public_entity_name=row[1],
|
316
|
+
public_collection_name=row[2],
|
317
|
+
label_id=row[3],
|
318
|
+
label_text=row[4],
|
319
|
+
entity_category=row[5],
|
320
|
+
data_service_enabled=row[6],
|
321
|
+
data_management_enabled=row[7],
|
322
|
+
is_read_only=row[8],
|
323
|
+
)
|
324
|
+
)
|
325
|
+
|
326
|
+
return entities
|
327
|
+
|
328
|
+
    async def store_public_entity_schema(
        self, global_version_id: int, entity_schema: PublicEntityInfo
    ):
        """Store public entity schema

        Performs a full replace: any prior rows for this entity name/version
        (and their child rows) are deleted before the new schema is inserted.

        Args:
            global_version_id: Global version ID
            entity_schema: Public entity schema information
        """
        async with aiosqlite.connect(self.db_path) as db:
            # First, get existing entity ID if it exists for this name and version
            cursor = await db.execute(
                """SELECT id FROM public_entities
                   WHERE name = ? AND global_version_id = ?""",
                (entity_schema.name, global_version_id),
            )

            existing_entity = await cursor.fetchone()
            existing_entity_id = existing_entity[0] if existing_entity else None

            # Clear existing related data for this entity and version
            if existing_entity_id:
                logger.debug(
                    f"Clearing existing data for entity {entity_schema.name} (ID: {existing_entity_id})"
                )

                # Delete related data in correct order (respecting foreign key constraints)
                # 1. Delete relation constraints first
                await db.execute(
                    """DELETE FROM relation_constraints
                       WHERE navigation_property_id IN (
                           SELECT id FROM navigation_properties
                           WHERE entity_id = ? AND global_version_id = ?
                       )""",
                    (existing_entity_id, global_version_id),
                )

                # 2. Delete action parameters
                await db.execute(
                    """DELETE FROM action_parameters
                       WHERE action_id IN (
                           SELECT id FROM entity_actions
                           WHERE entity_id = ? AND global_version_id = ?
                       )""",
                    (existing_entity_id, global_version_id),
                )

                # 3. Delete property group members
                await db.execute(
                    """DELETE FROM property_group_members
                       WHERE property_group_id IN (
                           SELECT id FROM property_groups
                           WHERE entity_id = ? AND global_version_id = ?
                       )""",
                    (existing_entity_id, global_version_id),
                )

                # 4. Delete direct child records
                await db.execute(
                    "DELETE FROM entity_properties WHERE entity_id = ? AND global_version_id = ?",
                    (existing_entity_id, global_version_id),
                )
                await db.execute(
                    "DELETE FROM navigation_properties WHERE entity_id = ? AND global_version_id = ?",
                    (existing_entity_id, global_version_id),
                )
                await db.execute(
                    "DELETE FROM property_groups WHERE entity_id = ? AND global_version_id = ?",
                    (existing_entity_id, global_version_id),
                )
                await db.execute(
                    "DELETE FROM entity_actions WHERE entity_id = ? AND global_version_id = ?",
                    (existing_entity_id, global_version_id),
                )

                # 5. Finally delete the entity itself
                await db.execute(
                    "DELETE FROM public_entities WHERE id = ? AND global_version_id = ?",
                    (existing_entity_id, global_version_id),
                )

            # Insert new entity
            cursor = await db.execute(
                """INSERT INTO public_entities
                   (global_version_id, name, entity_set_name, label_id, label_text,
                    is_read_only, configuration_enabled)
                   VALUES (?, ?, ?, ?, ?, ?, ?)""",
                (
                    global_version_id,
                    entity_schema.name,
                    entity_schema.entity_set_name,
                    entity_schema.label_id,
                    entity_schema.label_text,
                    entity_schema.is_read_only,
                    entity_schema.configuration_enabled,
                ),
            )

            entity_id = cursor.lastrowid

            # Store properties (property_order preserves the incoming list order, 1-based)
            prop_order = 0
            for prop in entity_schema.properties:
                prop_order += 1
                await db.execute(
                    """INSERT INTO entity_properties
                       (entity_id, global_version_id, name, type_name, data_type,
                        odata_xpp_type, label_id, label_text, is_key, is_mandatory,
                        configuration_enabled, allow_edit, allow_edit_on_create,
                        is_dimension, dimension_relation, is_dynamic_dimension,
                        dimension_legal_entity_property, dimension_type_property,
                        property_order)
                       VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
                    (
                        entity_id,
                        global_version_id,
                        prop.name,
                        prop.type_name,
                        prop.data_type,
                        # NOTE(review): prop.data_type is written into BOTH the
                        # data_type and odata_xpp_type columns; the read path
                        # treats them as distinct fields. Confirm whether
                        # prop.odata_xpp_type should be stored here instead.
                        prop.data_type,
                        prop.label_id,
                        prop.label_text,
                        prop.is_key,
                        prop.is_mandatory,
                        prop.configuration_enabled,
                        prop.allow_edit,
                        prop.allow_edit_on_create,
                        prop.is_dimension,
                        prop.dimension_relation,
                        prop.is_dynamic_dimension,
                        prop.dimension_legal_entity_property,
                        prop.dimension_type_property,
                        prop_order,
                    ),
                )

            # Store navigation properties
            for nav_prop in entity_schema.navigation_properties:
                nav_cursor = await db.execute(
                    """INSERT INTO navigation_properties
                       (entity_id, global_version_id, name, related_entity,
                        related_relation_name, cardinality)
                       VALUES (?, ?, ?, ?, ?, ?)""",
                    (
                        entity_id,
                        global_version_id,
                        nav_prop.name,
                        nav_prop.related_entity,
                        nav_prop.related_relation_name,
                        nav_prop.cardinality.value,  # Convert enum to string value
                    ),
                )

                nav_prop_id = nav_cursor.lastrowid

                # Store relation constraints. getattr() with a None default is
                # used because each constraint subtype carries a different
                # subset of these attributes.
                for constraint in nav_prop.constraints:
                    await db.execute(
                        """INSERT INTO relation_constraints
                           (navigation_property_id, global_version_id, constraint_type,
                            property_name, referenced_property, related_property,
                            fixed_value, fixed_value_str)
                           VALUES (?, ?, ?, ?, ?, ?, ?, ?)""",
                        (
                            nav_prop_id,
                            global_version_id,
                            constraint.constraint_type,
                            getattr(constraint, "property", None),
                            getattr(constraint, "referenced_property", None),
                            getattr(constraint, "related_property", None),
                            getattr(constraint, "value", None),
                            getattr(constraint, "value_str", None),
                        ),
                    )

            # Store actions
            for action in entity_schema.actions:
                action_cursor = await db.execute(
                    """INSERT INTO entity_actions
                       (entity_id, global_version_id, name, binding_kind, entity_name,
                        entity_set_name, return_type_name, return_is_collection,
                        return_odata_xpp_type, field_lookup)
                       VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
                    (
                        entity_id,
                        global_version_id,
                        action.name,
                        action.binding_kind.value,  # Convert enum to string value
                        entity_schema.name,
                        entity_schema.entity_set_name,
                        action.return_type.type_name if action.return_type else None,
                        (
                            action.return_type.is_collection
                            if action.return_type
                            else False
                        ),
                        (
                            action.return_type.odata_xpp_type
                            if action.return_type
                            else None
                        ),
                        action.field_lookup,
                    ),
                )

                action_id = action_cursor.lastrowid

                # Store action parameters (parameter_order is 1-based list order)
                param_order = 0
                for param in action.parameters:
                    param_order += 1
                    await db.execute(
                        """INSERT INTO action_parameters
                           (action_id, global_version_id, name, type_name,
                            is_collection, odata_xpp_type, parameter_order)
                           VALUES (?, ?, ?, ?, ?, ?, ?)""",
                        (
                            action_id,
                            global_version_id,
                            param.name,
                            param.type.type_name,
                            param.type.is_collection,
                            # NOTE(review): type_name is stored in the
                            # odata_xpp_type column; the read path builds
                            # ActionParameterTypeInfo(odata_xpp_type=...) from it.
                            # Confirm whether param.type.odata_xpp_type was intended.
                            param.type.type_name,
                            param_order,
                        ),
                    )

            # Store property groups
            for group in entity_schema.property_groups:
                group_cursor = await db.execute(
                    """INSERT INTO property_groups
                       (entity_id, global_version_id, name)
                       VALUES (?, ?, ?)""",
                    (entity_id, global_version_id, group.name),
                )

                group_id = group_cursor.lastrowid

                # Store property group members
                for property_name in group.properties:
                    await db.execute(
                        """INSERT INTO property_group_members
                           (property_group_id, global_version_id, property_name)
                           VALUES (?, ?, ?)""",
                        (group_id, global_version_id, property_name),
                    )

            await db.commit()
            logger.debug(f"Stored entity schema for {entity_schema.name}")
|
577
|
+
|
578
|
+
    async def get_public_entity_schema(
        self, entity_name: str, global_version_id: Optional[int] = None
    ) -> Optional[PublicEntityInfo]:
        """Get public entity schema

        Hydrates the full entity graph (properties, navigation properties with
        constraints, property groups, actions with parameters) from the cache.

        Args:
            entity_name: Entity name to retrieve
            global_version_id: Global version ID (uses current if None)

        Returns:
            Public entity schema if found
        """
        if global_version_id is None:
            global_version_id = await self._get_current_global_version_id()
            if global_version_id is None:
                return None

        async with aiosqlite.connect(self.db_path) as db:
            # Get entity
            cursor = await db.execute(
                """SELECT id, name, entity_set_name, label_id, label_text,
                          is_read_only, configuration_enabled
                   FROM public_entities
                   WHERE name = ? AND global_version_id = ?""",
                (entity_name, global_version_id),
            )

            entity_row = await cursor.fetchone()
            if not entity_row:
                return None

            # Child tables are keyed by this surrogate id, so the version
            # filter is not repeated in the queries below.
            entity_id = entity_row[0]

            # Get properties
            cursor = await db.execute(
                """SELECT name, type_name, data_type, odata_xpp_type, label_id,
                          label_text, is_key, is_mandatory, configuration_enabled,
                          allow_edit, allow_edit_on_create, is_dimension,
                          dimension_relation, is_dynamic_dimension,
                          dimension_legal_entity_property, dimension_type_property,
                          property_order
                   FROM entity_properties
                   WHERE entity_id = ?
                   ORDER BY property_order""",
                (entity_id,),
            )

            properties = []
            for prop_row in await cursor.fetchall():
                properties.append(
                    PublicEntityPropertyInfo(
                        name=prop_row[0],
                        type_name=prop_row[1],
                        data_type=prop_row[2],
                        odata_xpp_type=prop_row[3],
                        label_id=prop_row[4],
                        label_text=prop_row[5],
                        is_key=prop_row[6],
                        is_mandatory=prop_row[7],
                        configuration_enabled=prop_row[8],
                        allow_edit=prop_row[9],
                        allow_edit_on_create=prop_row[10],
                        is_dimension=prop_row[11],
                        dimension_relation=prop_row[12],
                        is_dynamic_dimension=prop_row[13],
                        dimension_legal_entity_property=prop_row[14],
                        dimension_type_property=prop_row[15],
                        property_order=prop_row[16],
                    )
                )

            # Get navigation properties
            cursor = await db.execute(
                """SELECT id, name, related_entity, related_relation_name, cardinality
                   FROM navigation_properties
                   WHERE entity_id = ?
                   ORDER BY name""",
                (entity_id,),
            )

            navigation_properties = []
            for nav_row in await cursor.fetchall():
                nav_prop_id = nav_row[0]

                # Get constraints for this navigation property
                constraint_cursor = await db.execute(
                    """SELECT constraint_type, property_name, referenced_property,
                              related_property, fixed_value, fixed_value_str
                       FROM relation_constraints
                       WHERE navigation_property_id = ?
                       ORDER BY constraint_type""",
                    (nav_prop_id,),
                )

                constraints = []
                for constraint_row in await constraint_cursor.fetchall():
                    constraint_type = constraint_row[0]

                    # Each constraint_type maps to a different model class
                    # consuming a different subset of the columns. Rows with
                    # an unrecognized constraint_type are silently dropped.
                    if constraint_type == "Referential":
                        constraints.append(
                            ReferentialConstraintInfo(
                                property=constraint_row[1],
                                referenced_property=constraint_row[2],
                            )
                        )
                    elif constraint_type == "Fixed":
                        constraints.append(
                            FixedConstraintInfo(
                                property=constraint_row[1],
                                value=constraint_row[4],
                                value_str=constraint_row[5],
                            )
                        )
                    elif constraint_type == "RelatedFixed":
                        constraints.append(
                            RelatedFixedConstraintInfo(
                                related_property=constraint_row[3],
                                value=constraint_row[4],
                                value_str=constraint_row[5],
                            )
                        )

                navigation_properties.append(
                    NavigationPropertyInfo(
                        name=nav_row[1],
                        related_entity=nav_row[2],
                        related_relation_name=nav_row[3],
                        # NULL cardinality in the database defaults to SINGLE
                        cardinality=Cardinality(nav_row[4]) if nav_row[4] else Cardinality.SINGLE,
                        constraints=constraints,
                    )
                )

            # Get property groups
            cursor = await db.execute(
                """SELECT id, name
                   FROM property_groups
                   WHERE entity_id = ?
                   ORDER BY name""",
                (entity_id,),
            )

            property_groups = []
            for group_row in await cursor.fetchall():
                group_id = group_row[0]

                # Get property group members
                member_cursor = await db.execute(
                    """SELECT property_name
                       FROM property_group_members
                       WHERE property_group_id = ?
                       ORDER BY property_name""",
                    (group_id,),
                )

                property_names = [row[0] for row in await member_cursor.fetchall()]

                property_groups.append(
                    PropertyGroupInfo(
                        name=group_row[1],
                        properties=property_names,
                    )
                )

            # Get actions
            cursor = await db.execute(
                """SELECT id, name, binding_kind, return_type_name, return_is_collection,
                          return_odata_xpp_type, field_lookup
                   FROM entity_actions
                   WHERE entity_id = ?
                   ORDER BY name""",
                (entity_id,),
            )

            actions = []
            for action_row in await cursor.fetchall():
                action_id = action_row[0]

                # Get action parameters
                param_cursor = await db.execute(
                    """SELECT name, type_name, is_collection, odata_xpp_type, parameter_order
                       FROM action_parameters
                       WHERE action_id = ?
                       ORDER BY parameter_order""",
                    (action_id,),
                )

                parameters = []
                for param_row in await param_cursor.fetchall():
                    parameters.append(
                        ActionParameterInfo(
                            name=param_row[0],
                            type=ActionParameterTypeInfo(
                                type_name=param_row[1],
                                is_collection=param_row[2],
                                odata_xpp_type=param_row[3],
                            ),
                            parameter_order=param_row[4],
                        )
                    )

                # Create return type if present
                return_type = None
                if action_row[3]:  # return_type_name
                    return_type = ActionReturnTypeInfo(
                        type_name=action_row[3],
                        is_collection=action_row[4],
                        odata_xpp_type=action_row[5],
                    )

                actions.append(
                    PublicEntityActionInfo(
                        name=action_row[1],
                        binding_kind=ODataBindingKind(action_row[2]),
                        parameters=parameters,
                        return_type=return_type,
                        field_lookup=action_row[6],
                    )
                )

            return PublicEntityInfo(
                name=entity_row[1],
                entity_set_name=entity_row[2],
                label_id=entity_row[3],
                label_text=entity_row[4],
                is_read_only=entity_row[5],
                configuration_enabled=entity_row[6],
                properties=properties,
                navigation_properties=navigation_properties,
                property_groups=property_groups,
                actions=actions,
            )
|
809
|
+
|
810
|
+
async def store_enumerations(
    self, global_version_id: int, enumerations: List[EnumerationInfo]
):
    """Store enumerations for a global metadata version.

    Performs a full replace: existing rows for the version are deleted,
    then every enumeration and its members are inserted on one
    connection and committed once at the end.

    Args:
        global_version_id: Global version ID
        enumerations: List of enumeration information
    """
    async with aiosqlite.connect(self.db_path) as db:
        # Clear existing enumerations for this version so this call is a
        # replace, not an incremental merge.
        await db.execute(
            "DELETE FROM enumerations WHERE global_version_id = ?",
            (global_version_id,),
        )

        for enum_info in enumerations:
            # Insert enumeration header row
            cursor = await db.execute(
                """INSERT INTO enumerations
                (global_version_id, name, label_id, label_text)
                VALUES (?, ?, ?, ?)""",
                (
                    global_version_id,
                    enum_info.name,
                    enum_info.label_id,
                    enum_info.label_text,
                ),
            )

            # Row id of the just-inserted enumeration, used as FK below
            enum_id = cursor.lastrowid

            # Insert members; enumerate() replaces the hand-maintained
            # counter while keeping the original 1-based member_order.
            for member_order, member in enumerate(enum_info.members, start=1):
                await db.execute(
                    """INSERT INTO enumeration_members
                    (enumeration_id, global_version_id, name, value,
                    label_id, label_text, configuration_enabled, member_order)
                    VALUES (?, ?, ?, ?, ?, ?, ?, ?)""",
                    (
                        enum_id,
                        global_version_id,
                        member.name,
                        member.value,
                        member.label_id,
                        member.label_text,
                        member.configuration_enabled,
                        member_order,
                    ),
                )

        await db.commit()
        logger.info(
            f"Stored {len(enumerations)} enumerations for version {global_version_id}"
        )
|
867
|
+
|
868
|
+
async def get_enumeration_info(
    self, enum_name: str, global_version_id: Optional[int] = None
) -> Optional[EnumerationInfo]:
    """Look up a cached enumeration together with its members.

    Args:
        enum_name: Enumeration name
        global_version_id: Global version ID (uses current if None)

    Returns:
        Enumeration info if found, otherwise None (also None when no
        current version can be resolved)
    """
    version_id = global_version_id
    if version_id is None:
        version_id = await self._get_current_global_version_id()
        if version_id is None:
            return None

    async with aiosqlite.connect(self.db_path) as db:
        # Header row for the enumeration itself
        cursor = await db.execute(
            """SELECT id, name, label_id, label_text
            FROM enumerations
            WHERE name = ? AND global_version_id = ?""",
            (enum_name, version_id),
        )
        header = await cursor.fetchone()
        if header is None:
            return None

        # Member rows, kept in their declared order
        cursor = await db.execute(
            """SELECT name, value, label_id, label_text, configuration_enabled, member_order
            FROM enumeration_members
            WHERE enumeration_id = ?
            ORDER BY member_order""",
            (header[0],),
        )
        member_rows = await cursor.fetchall()

    members = [
        EnumerationMemberInfo(
            name=row[0],
            value=row[1],
            label_id=row[2],
            label_text=row[3],
            configuration_enabled=row[4],
            member_order=row[5],
        )
        for row in member_rows
    ]

    return EnumerationInfo(
        name=header[1],
        label_id=header[2],
        label_text=header[3],
        members=members,
    )
|
928
|
+
|
929
|
+
async def mark_sync_completed(
    self,
    global_version_id: int,
    entity_count: int = 0,
    action_count: int = 0,
    enumeration_count: int = 0,
    label_count: int = 0,
):
    """Record that metadata sync finished for a global version.

    Upserts one row in metadata_versions carrying the completion
    timestamp and the counts of synced artifacts.

    Args:
        global_version_id: Global version ID
        entity_count: Number of entities synced
        action_count: Number of actions synced
        enumeration_count: Number of enumerations synced
        label_count: Number of labels synced
    """
    row = (
        global_version_id,
        entity_count,
        action_count,
        enumeration_count,
        label_count,
    )
    async with aiosqlite.connect(self.db_path) as db:
        await db.execute(
            """INSERT OR REPLACE INTO metadata_versions
            (global_version_id, sync_completed_at, entity_count,
            action_count, enumeration_count, label_count)
            VALUES (?, CURRENT_TIMESTAMP, ?, ?, ?, ?)""",
            row,
        )
        await db.commit()

    logger.info(f"Marked sync completed for version {global_version_id}")
|
963
|
+
|
964
|
+
async def _get_current_global_version_id(self) -> Optional[int]:
    """Resolve and memoize the environment's current global version ID.

    Returns the cached value when one is already known; otherwise asks
    the version manager and caches both the ID and the version info for
    subsequent calls.

    Returns:
        Current global version ID, or None when no environment is set
        or the version manager has no record for it
    """
    cached = self._current_global_version_id
    if cached is not None:
        return cached

    if self._environment_id is None:
        return None

    lookup = await self.version_manager.get_environment_version_info(
        self._environment_id
    )
    if not lookup:
        return None

    version_id, info = lookup
    # Memoize for later calls
    self._current_version_info = info
    self._current_global_version_id = version_id
    return version_id
|
986
|
+
|
987
|
+
# Label Operations
|
988
|
+
|
989
|
+
async def get_label(
    self,
    label_id: str,
    language: str = "en-US",
    global_version_id: Optional[int] = None,
) -> Optional[str]:
    """Get label text from cache

    Looks the label up in labels_cache, skipping expired rows, and on a
    hit bumps the row's hit counter and last-accessed timestamp before
    returning the text.

    Args:
        label_id: Label identifier (e.g., "@SYS13342")
        language: Language code (e.g., "en-US")
        global_version_id: Global version ID (uses current if None, includes temporary entries)

    Returns:
        Label text or None if not found or expired
    """
    # Fall back to the environment's current version; may still be None,
    # in which case the cross-version lookup branch below is used.
    if global_version_id is None:
        global_version_id = await self._get_current_global_version_id()

    async with aiosqlite.connect(self.db_path) as db:
        if global_version_id is not None:
            # Search for specific version.
            # NOTE(review): expiry is a textual comparison against
            # CURRENT_TIMESTAMP — relies on expires_at being stored in a
            # format that collates with "YYYY-MM-DD HH:MM:SS"; confirm
            # against the writers.
            cursor = await db.execute(
                """SELECT label_text, expires_at
                FROM labels_cache
                WHERE global_version_id = ? AND label_id = ? AND language = ?
                AND (expires_at IS NULL OR expires_at > CURRENT_TIMESTAMP)""",
                (global_version_id, label_id, language),
            )
        else:
            # Search across all versions (including temporary entries)
            cursor = await db.execute(
                """SELECT label_text, expires_at
                FROM labels_cache
                WHERE label_id = ? AND language = ?
                AND (expires_at IS NULL OR expires_at > CURRENT_TIMESTAMP)
                ORDER BY global_version_id DESC
                LIMIT 1""",  # Get the highest version (prefer actual versions over temporary)
                (label_id, language),
            )

        row = await cursor.fetchone()
        if row:
            # Update hit count and last accessed time
            if global_version_id is not None:
                await db.execute(
                    """UPDATE labels_cache
                    SET hit_count = hit_count + 1, last_accessed = CURRENT_TIMESTAMP
                    WHERE global_version_id = ? AND label_id = ? AND language = ?""",
                    (global_version_id, label_id, language),
                )
            else:
                # Update for the found entry.
                # NOTE(review): matches by label_text, so every version
                # storing the same text for this label is bumped, not
                # just the row returned — confirm this is intended.
                await db.execute(
                    """UPDATE labels_cache
                    SET hit_count = hit_count + 1, last_accessed = CURRENT_TIMESTAMP
                    WHERE label_id = ? AND language = ? AND label_text = ?""",
                    (label_id, language, row[0]),
                )
            await db.commit()

            logger.debug(f"Label cache hit: {label_id} ({language}) -> {row[0]}")
            return row[0]

        logger.debug(f"Label cache miss: {label_id} ({language})")
        return None
|
1055
|
+
|
1056
|
+
async def set_label(
    self,
    label_id: str,
    label_text: str,
    language: str = "en-US",
    global_version_id: Optional[int] = None,
    ttl_hours: int = 24,
):
    """Set label in cache

    Args:
        label_id: Label identifier
        label_text: Label text value
        language: Language code
        global_version_id: Global version ID (uses current if None;
            falls back to the sentinel -1 for temporary entries)
        ttl_hours: Time to live in hours
    """
    if global_version_id is None:
        global_version_id = await self._get_current_global_version_id()
        if global_version_id is None:
            # Create a temporary version for immediate label caching
            logger.warning(
                "No global version ID available, creating temporary cache entry"
            )
            global_version_id = -1  # Use -1 for temporary entries

    # Calculate expiration time
    from datetime import datetime, timedelta, timezone

    expires_at = datetime.now(timezone.utc) + timedelta(hours=ttl_hours)
    # Store in SQLite's CURRENT_TIMESTAMP format ("YYYY-MM-DD HH:MM:SS",
    # UTC) so the readers' textual comparisons (expires_at >
    # CURRENT_TIMESTAMP) collate correctly. isoformat() would embed "T"
    # and "+00:00", and since 'T' > ' ' a label that expired earlier the
    # same day would still compare as unexpired.
    expires_at_text = expires_at.strftime("%Y-%m-%d %H:%M:%S")

    async with aiosqlite.connect(self.db_path) as db:
        await db.execute(
            """INSERT OR REPLACE INTO labels_cache
            (global_version_id, label_id, language, label_text, expires_at, hit_count, last_accessed)
            VALUES (?, ?, ?, ?, ?, 0, CURRENT_TIMESTAMP)""",
            (
                global_version_id,
                label_id,
                language,
                label_text,
                expires_at_text,
            ),
        )
        await db.commit()

    logger.debug(f"Label cached: {label_id} ({language}) -> {label_text}")
|
1103
|
+
|
1104
|
+
async def set_labels_batch(
    self,
    labels: List[LabelInfo],
    global_version_id: Optional[int] = None,
    ttl_hours: int = 24,
):
    """Set multiple labels in cache efficiently

    Inserts all labels in a single executemany call and one commit.

    Args:
        labels: List of LabelInfo objects
        global_version_id: Global version ID (uses current if None;
            falls back to the sentinel -1 for temporary entries)
        ttl_hours: Time to live in hours
    """
    if not labels:
        return

    if global_version_id is None:
        global_version_id = await self._get_current_global_version_id()
        if global_version_id is None:
            # Create a temporary version for immediate label caching
            logger.warning(
                "No global version ID available, creating temporary cache entries"
            )
            global_version_id = -1  # Use -1 for temporary entries

    # Calculate expiration time
    from datetime import datetime, timedelta, timezone

    expires_at = datetime.now(timezone.utc) + timedelta(hours=ttl_hours)
    # Store in SQLite's CURRENT_TIMESTAMP format ("YYYY-MM-DD HH:MM:SS",
    # UTC) so the readers' textual expiry comparisons collate correctly;
    # isoformat() embeds "T"/"+00:00" and compares incorrectly for
    # same-day expirations.
    expires_at_text = expires_at.strftime("%Y-%m-%d %H:%M:%S")

    # Prepare batch data — one parameter tuple per label
    label_data = [
        (
            global_version_id,
            label.id,
            label.language,
            label.value,
            expires_at_text,
        )
        for label in labels
    ]

    async with aiosqlite.connect(self.db_path) as db:
        await db.executemany(
            """INSERT OR REPLACE INTO labels_cache
            (global_version_id, label_id, language, label_text, expires_at, hit_count, last_accessed)
            VALUES (?, ?, ?, ?, ?, 0, CURRENT_TIMESTAMP)""",
            label_data,
        )
        await db.commit()

    logger.info(
        f"Batch cached {len(labels)} labels for version {global_version_id}"
    )
|
1159
|
+
|
1160
|
+
async def get_labels_batch(
    self,
    label_ids: List[str],
    language: str = "en-US",
    global_version_id: Optional[int] = None,
) -> Dict[str, str]:
    """Get multiple labels from cache efficiently

    Fetches all requested labels with a single IN-clause query, bumping
    hit counters in bulk for versioned lookups.

    Args:
        label_ids: List of label IDs to retrieve
        language: Language code
        global_version_id: Global version ID (uses current if None, includes temporary entries)

    Returns:
        Dictionary mapping label_id to label_text for found labels
    """
    if not label_ids:
        return {}

    if global_version_id is None:
        global_version_id = await self._get_current_global_version_id()

    # Create placeholders for SQL IN clause — only "?" markers are
    # interpolated into the query text; the values themselves are bound,
    # so this is not an injection risk.
    placeholders = ",".join("?" for _ in label_ids)

    async with aiosqlite.connect(self.db_path) as db:
        if global_version_id is not None:
            # Search for specific version
            params = [global_version_id, language] + label_ids
            query = f"""SELECT label_id, label_text
            FROM labels_cache
            WHERE global_version_id = ? AND language = ? AND label_id IN ({placeholders})
            AND (expires_at IS NULL OR expires_at > CURRENT_TIMESTAMP)"""
        else:
            # Search across all versions (including temporary entries)
            params = [language] + label_ids
            query = f"""SELECT label_id, label_text
            FROM labels_cache
            WHERE language = ? AND label_id IN ({placeholders})
            AND (expires_at IS NULL OR expires_at > CURRENT_TIMESTAMP)
            ORDER BY global_version_id DESC"""  # Prefer actual versions over temporary

        cursor = await db.execute(query, params)

        results = {}
        found_ids = []
        # Rows arrive highest-version-first in the cross-version case,
        # so keeping only the first occurrence of a label_id picks the
        # highest version for each label.
        async for row in cursor:
            if row[0] not in results:  # Only use first match (highest version)
                results[row[0]] = row[1]
                found_ids.append(row[0])

        # Update hit counts for found labels — only for versioned
        # lookups; cross-version batch lookups leave counters untouched.
        if found_ids and global_version_id is not None:
            update_placeholders = ",".join("?" for _ in found_ids)
            await db.execute(
                f"""UPDATE labels_cache
                SET hit_count = hit_count + 1, last_accessed = CURRENT_TIMESTAMP
                WHERE global_version_id = ? AND language = ? AND label_id IN ({update_placeholders})""",
                [global_version_id, language] + found_ids,
            )
            await db.commit()

        logger.debug(f"Label batch lookup: {len(results)}/{len(label_ids)} found")
        return results
|
1224
|
+
|
1225
|
+
async def clear_expired_labels(self, global_version_id: Optional[int] = None):
    """Delete expired label rows from the cache.

    Args:
        global_version_id: Global version ID to clear (clears all if None)
    """
    async with aiosqlite.connect(self.db_path) as db:
        if global_version_id is None:
            cursor = await db.execute(
                """DELETE FROM labels_cache
                WHERE expires_at IS NOT NULL AND expires_at <= CURRENT_TIMESTAMP"""
            )
        else:
            cursor = await db.execute(
                """DELETE FROM labels_cache
                WHERE global_version_id = ? AND expires_at IS NOT NULL AND expires_at <= CURRENT_TIMESTAMP""",
                (global_version_id,),
            )

        removed = cursor.rowcount
        await db.commit()

    logger.info(f"Cleared {removed} expired labels")
|
1248
|
+
|
1249
|
+
async def get_label_cache_statistics(
    self, global_version_id: Optional[int] = None
) -> Dict[str, Any]:
    """Get label cache statistics

    Aggregates counts, per-language breakdown and hit statistics from
    labels_cache, optionally restricted to a single global version.

    Args:
        global_version_id: Global version ID to get stats for (all if None)

    Returns:
        Dictionary with label cache statistics (total/expired/active
        label counts, per-language counts, and hit statistics)
    """
    async with aiosqlite.connect(self.db_path) as db:
        stats = {}

        # Base query conditions — when a version filter is present the
        # extra predicates below are joined with AND, otherwise they
        # start their own WHERE clause.
        if global_version_id is not None:
            version_filter = "WHERE global_version_id = ?"
            params = [global_version_id]
        else:
            version_filter = ""
            params = []

        # Total labels
        cursor = await db.execute(
            f"SELECT COUNT(*) FROM labels_cache {version_filter}", params
        )
        stats["total_labels"] = (await cursor.fetchone())[0]

        # Expired labels (textual comparison against CURRENT_TIMESTAMP)
        cursor = await db.execute(
            f"""SELECT COUNT(*) FROM labels_cache
            {version_filter} {'AND' if version_filter else 'WHERE'}
            expires_at IS NOT NULL AND expires_at <= CURRENT_TIMESTAMP""",
            params,
        )
        stats["expired_labels"] = (await cursor.fetchone())[0]

        # Active labels
        stats["active_labels"] = stats["total_labels"] - stats["expired_labels"]

        # Languages — non-expired label counts per language code
        cursor = await db.execute(
            f"""SELECT language, COUNT(*) FROM labels_cache
            {version_filter} {'AND' if version_filter else 'WHERE'}
            (expires_at IS NULL OR expires_at > CURRENT_TIMESTAMP)
            GROUP BY language ORDER BY COUNT(*) DESC""",
            params,
        )
        stats["languages"] = dict(await cursor.fetchall())

        # Hit statistics — restricted to rows that were accessed at
        # least once (hit_count > 0)
        cursor = await db.execute(
            f"""SELECT
            COUNT(*) as accessed_labels,
            SUM(hit_count) as total_hits,
            AVG(hit_count) as avg_hits_per_label,
            MAX(hit_count) as max_hits
            FROM labels_cache
            {version_filter} {'AND' if version_filter else 'WHERE'}
            hit_count > 0""",
            params,
        )
        hit_stats = await cursor.fetchone()
        if hit_stats[0]:  # If there are accessed labels
            # SUM/AVG/MAX can be NULL in degenerate cases, hence "or 0"
            stats["hit_statistics"] = {
                "accessed_labels": hit_stats[0],
                "total_hits": hit_stats[1] or 0,
                "average_hits_per_label": round(hit_stats[2] or 0, 2),
                "max_hits": hit_stats[3] or 0,
            }
        else:
            stats["hit_statistics"] = {
                "accessed_labels": 0,
                "total_hits": 0,
                "average_hits_per_label": 0,
                "max_hits": 0,
            }

        return stats
|
1328
|
+
|
1329
|
+
async def get_cache_statistics(self) -> Dict[str, Any]:
    """Collect aggregate statistics about the metadata cache.

    Combines database-level statistics, version-manager statistics,
    details of the currently active version (when one can be resolved)
    and label-cache statistics.

    Returns:
        Dictionary with cache statistics
    """
    # Start from the raw database statistics
    stats: Dict[str, Any] = {}
    stats.update(await self.database.get_database_statistics())

    # Version statistics
    stats["version_manager"] = await self.version_manager.get_version_statistics()

    # Current version info, when one is available
    current_version = await self._get_current_global_version_id()
    if current_version:
        version_info = await self.version_manager.get_global_version_info(
            current_version
        )
        if version_info:
            stats["current_version"] = {
                "global_version_id": version_info.id,
                "version_hash": version_info.version_hash,
                "modules_count": len(version_info.sample_modules),
                "reference_count": version_info.reference_count,
            }

    # Label cache statistics for the current version (or all versions)
    stats["label_cache"] = await self.get_label_cache_statistics(current_version)

    return stats
|
1364
|
+
|
1365
|
+
def create_search_engine(self):
    """Build a version-aware search engine bound to this cache.

    Returns:
        VersionAwareSearchEngine instance
    """
    # Deferred import, so the search-engine module is only loaded on demand
    from .search_engine_v2 import VersionAwareSearchEngine as engine_cls

    return engine_cls(self)
|