d365fo-client 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51) hide show
  1. d365fo_client/__init__.py +305 -0
  2. d365fo_client/auth.py +93 -0
  3. d365fo_client/cli.py +700 -0
  4. d365fo_client/client.py +1454 -0
  5. d365fo_client/config.py +304 -0
  6. d365fo_client/crud.py +200 -0
  7. d365fo_client/exceptions.py +49 -0
  8. d365fo_client/labels.py +528 -0
  9. d365fo_client/main.py +502 -0
  10. d365fo_client/mcp/__init__.py +16 -0
  11. d365fo_client/mcp/client_manager.py +276 -0
  12. d365fo_client/mcp/main.py +98 -0
  13. d365fo_client/mcp/models.py +371 -0
  14. d365fo_client/mcp/prompts/__init__.py +43 -0
  15. d365fo_client/mcp/prompts/action_execution.py +480 -0
  16. d365fo_client/mcp/prompts/sequence_analysis.py +349 -0
  17. d365fo_client/mcp/resources/__init__.py +15 -0
  18. d365fo_client/mcp/resources/database_handler.py +555 -0
  19. d365fo_client/mcp/resources/entity_handler.py +176 -0
  20. d365fo_client/mcp/resources/environment_handler.py +132 -0
  21. d365fo_client/mcp/resources/metadata_handler.py +283 -0
  22. d365fo_client/mcp/resources/query_handler.py +135 -0
  23. d365fo_client/mcp/server.py +432 -0
  24. d365fo_client/mcp/tools/__init__.py +17 -0
  25. d365fo_client/mcp/tools/connection_tools.py +175 -0
  26. d365fo_client/mcp/tools/crud_tools.py +579 -0
  27. d365fo_client/mcp/tools/database_tools.py +813 -0
  28. d365fo_client/mcp/tools/label_tools.py +189 -0
  29. d365fo_client/mcp/tools/metadata_tools.py +766 -0
  30. d365fo_client/mcp/tools/profile_tools.py +706 -0
  31. d365fo_client/metadata_api.py +793 -0
  32. d365fo_client/metadata_v2/__init__.py +59 -0
  33. d365fo_client/metadata_v2/cache_v2.py +1372 -0
  34. d365fo_client/metadata_v2/database_v2.py +585 -0
  35. d365fo_client/metadata_v2/global_version_manager.py +573 -0
  36. d365fo_client/metadata_v2/search_engine_v2.py +423 -0
  37. d365fo_client/metadata_v2/sync_manager_v2.py +819 -0
  38. d365fo_client/metadata_v2/version_detector.py +439 -0
  39. d365fo_client/models.py +862 -0
  40. d365fo_client/output.py +181 -0
  41. d365fo_client/profile_manager.py +342 -0
  42. d365fo_client/profiles.py +178 -0
  43. d365fo_client/query.py +162 -0
  44. d365fo_client/session.py +60 -0
  45. d365fo_client/utils.py +196 -0
  46. d365fo_client-0.1.0.dist-info/METADATA +1084 -0
  47. d365fo_client-0.1.0.dist-info/RECORD +51 -0
  48. d365fo_client-0.1.0.dist-info/WHEEL +5 -0
  49. d365fo_client-0.1.0.dist-info/entry_points.txt +3 -0
  50. d365fo_client-0.1.0.dist-info/licenses/LICENSE +21 -0
  51. d365fo_client-0.1.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,819 @@
1
+ """Smart sync manager with intelligent metadata synchronization strategies."""
2
+
3
+ import asyncio
4
+ import hashlib
5
+ import json
6
+ import logging
7
+ import time
8
+ from datetime import datetime, timezone
9
+ from pathlib import Path
10
+ from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Set
11
+
12
+ # Use TYPE_CHECKING to avoid circular import
13
+ if TYPE_CHECKING:
14
+ from ..metadata_api import MetadataAPIOperations
15
+
16
+ from ..models import (
17
+ DataEntityInfo,
18
+ EnumerationInfo,
19
+ LabelInfo,
20
+ MetadataVersionInfo,
21
+ PublicEntityInfo,
22
+ QueryOptions,
23
+ SyncProgress,
24
+ SyncResult,
25
+ SyncStrategy,
26
+ )
27
+ from .cache_v2 import MetadataCacheV2
28
+
29
+ logger = logging.getLogger(__name__)
30
+
31
+
32
class SmartSyncManagerV2:
    """Intelligent metadata synchronization with progress tracking and error handling"""

    def __init__(self, cache: MetadataCacheV2, metadata_api: "MetadataAPIOperations"):
        """Initialize smart sync manager

        Args:
            cache: Metadata cache v2 instance
            metadata_api: Metadata API operations instance
        """
        # Collaborators.
        self.cache = cache
        self.metadata_api = metadata_api
        # Version bookkeeping is delegated to the cache's version manager.
        self.version_manager = cache.version_manager

        # Mutable sync state: only one sync may run at a time; observers can
        # subscribe to progress snapshots via registered callbacks.
        self._is_syncing = False
        self._sync_progress: Optional[SyncProgress] = None
        self._progress_callbacks: List[Callable[[SyncProgress], None]] = []
51
+ def add_progress_callback(self, callback: Callable[[SyncProgress], None]):
52
+ """Add progress callback
53
+
54
+ Args:
55
+ callback: Function to call with progress updates
56
+ """
57
+ self._progress_callbacks.append(callback)
58
+
59
+ def remove_progress_callback(self, callback: Callable[[SyncProgress], None]):
60
+ """Remove progress callback
61
+
62
+ Args:
63
+ callback: Function to remove from callbacks
64
+ """
65
+ if callback in self._progress_callbacks:
66
+ self._progress_callbacks.remove(callback)
67
+
68
+ def _update_progress(self, progress: SyncProgress):
69
+ """Update sync progress and notify callbacks
70
+
71
+ Args:
72
+ progress: Current sync progress
73
+ """
74
+ self._sync_progress = progress
75
+ for callback in self._progress_callbacks:
76
+ try:
77
+ callback(progress)
78
+ except Exception as e:
79
+ logger.warning(f"Progress callback error: {e}")
80
+
81
    async def sync_metadata(
        self, global_version_id: int, strategy: SyncStrategy = SyncStrategy.FULL
    ) -> SyncResult:
        """Sync metadata for global version

        Orchestrates one sync run: guards against concurrent syncs, tracks
        progress, dispatches to the strategy-specific implementation, and
        records the final status in the version manager and cache.

        Args:
            global_version_id: Global version ID to sync
            strategy: Sync strategy to use

        Returns:
            Sync result with counts and timing
        """
        # Reject overlapping runs; this manager is single-flight by design.
        if self._is_syncing:
            return SyncResult(
                success=False,
                error="Sync already in progress",
                duration_ms=0,
                entity_count=0,
                action_count=0,
                enumeration_count=0,
                label_count=0,
            )

        self._is_syncing = True
        start_time = time.time()

        try:
            # Initialize progress
            progress = SyncProgress(
                global_version_id=global_version_id,
                strategy=strategy,
                phase="initializing",
                total_steps=self._calculate_total_steps(strategy),
                completed_steps=0,
                current_operation="Starting sync",
                start_time=datetime.now(timezone.utc),
                estimated_completion=None,
            )
            self._update_progress(progress)

            # Mark the version as "syncing" before any work starts.
            await self.version_manager.update_sync_status(
                self.cache._environment_id, global_version_id, "syncing"
            )

            # Execute sync strategy
            if strategy == SyncStrategy.FULL:
                result = await self._sync_full_metadata(global_version_id, progress)
            elif strategy == SyncStrategy.INCREMENTAL:
                result = await self._sync_incremental_metadata(
                    global_version_id, progress
                )
            elif strategy == SyncStrategy.ENTITIES_ONLY:
                result = await self._sync_entities_only(global_version_id, progress)
            elif strategy == SyncStrategy.SHARING_MODE:
                result = await self._sync_sharing_mode(global_version_id, progress)
            else:
                raise ValueError(f"Unknown sync strategy: {strategy}")

            # Strategy implementations leave duration_ms at 0; fill it here.
            duration_ms = int((time.time() - start_time) * 1000)
            result.duration_ms = duration_ms

            # Persist terminal status ("completed"/"failed") for the version.
            if result.success:
                await self.version_manager.update_sync_status(
                    self.cache._environment_id,
                    global_version_id,
                    "completed",
                    duration_ms,
                )

                # Mark cache sync completed
                await self.cache.mark_sync_completed(
                    global_version_id,
                    result.entity_count,
                    result.action_count,
                    result.enumeration_count,
                    result.label_count,
                )
            else:
                await self.version_manager.update_sync_status(
                    self.cache._environment_id, global_version_id, "failed"
                )

            # Final progress update
            progress.phase = "completed" if result.success else "failed"
            progress.completed_steps = progress.total_steps
            progress.current_operation = (
                "Sync completed" if result.success else f"Sync failed: {result.error}"
            )
            progress.estimated_completion = datetime.now(timezone.utc)
            self._update_progress(progress)

            logger.info(f"Sync completed in {duration_ms}ms: {result}")
            return result

        except Exception as e:
            # Unexpected failure outside the strategy's own error handling.
            duration_ms = int((time.time() - start_time) * 1000)
            logger.error(f"Sync failed after {duration_ms}ms: {e}")

            # Update failed status
            await self.version_manager.update_sync_status(
                self.cache._environment_id, global_version_id, "failed"
            )

            return SyncResult(
                success=False,
                error=str(e),
                duration_ms=duration_ms,
                entity_count=0,
                action_count=0,
                enumeration_count=0,
                label_count=0,
            )
        finally:
            # Always release the single-flight guard, even on failure.
            self._is_syncing = False
            # MetadataAPIOperations doesn't need explicit cleanup
201
+ def _calculate_total_steps(self, strategy: SyncStrategy) -> int:
202
+ """Calculate total sync steps for strategy
203
+
204
+ Args:
205
+ strategy: Sync strategy
206
+
207
+ Returns:
208
+ Total number of steps
209
+ """
210
+ if strategy == SyncStrategy.FULL:
211
+ return 10 # entities, schemas, enums, labels, indexing, etc.
212
+ elif strategy == SyncStrategy.INCREMENTAL:
213
+ return 6 # check changes, update entities, update schemas, etc.
214
+ elif strategy == SyncStrategy.ENTITIES_ONLY:
215
+ return 4 # entities, basic schemas, indexing
216
+ elif strategy == SyncStrategy.SHARING_MODE:
217
+ return 3 # copy from compatible version
218
+ else:
219
+ return 5 # default estimate
220
+
221
+ async def _sync_full_metadata(
222
+ self, global_version_id: int, progress: SyncProgress
223
+ ) -> SyncResult:
224
+ """Perform full metadata synchronization
225
+
226
+ Args:
227
+ global_version_id: Global version ID
228
+ progress: Progress tracker
229
+
230
+ Returns:
231
+ Sync result
232
+ """
233
+ entity_count = 0
234
+ action_count = 0
235
+ enumeration_count = 0
236
+ label_count = 0
237
+
238
+ try:
239
+ # Step 1: Sync data entities
240
+ progress.phase = "entities"
241
+ progress.current_operation = "Syncing data entities"
242
+ progress.completed_steps = 1
243
+ self._update_progress(progress)
244
+
245
+ entities = await self._get_data_entities()
246
+ if entities:
247
+ await self.cache.store_data_entities(global_version_id, entities)
248
+ entity_count = len(entities)
249
+ logger.info(f"Synced {entity_count} data entities")
250
+
251
+ # Step 2: Sync public entity schemas (top entities)
252
+ progress.phase = "schemas"
253
+ progress.current_operation = "Syncing entity schemas"
254
+ progress.completed_steps = 2
255
+ self._update_progress(progress)
256
+
257
+ public_entities = await self._get_public_entities()
258
+ for entity in public_entities:
259
+ await self.cache.store_public_entity_schema(global_version_id, entity)
260
+ action_count += len(entity.actions)
261
+
262
+ schema_count = len(public_entities)
263
+
264
+ logger.info(f"Synced {schema_count} entity schemas")
265
+
266
+ # Step 3: Sync enumerations
267
+ progress.phase = "enumerations"
268
+ progress.current_operation = "Syncing enumerations"
269
+ progress.completed_steps = 6
270
+ self._update_progress(progress)
271
+
272
+ try:
273
+ enumerations = await self._get_public_enumerations()
274
+ if enumerations:
275
+ await self.cache.store_enumerations(global_version_id, enumerations)
276
+ enumeration_count = len(enumerations)
277
+ logger.info(f"Synced {enumeration_count} enumerations")
278
+ except Exception as e:
279
+ logger.warning(f"Failed to sync enumerations: {e}")
280
+
281
+ # Step 4: Sync frequently used labels
282
+ progress.phase = "labels"
283
+ progress.current_operation = "Syncing common labels"
284
+ progress.completed_steps = 7
285
+ self._update_progress(progress)
286
+
287
+ try:
288
+ label_count = await self._sync_common_labels(
289
+ global_version_id, entities, public_entities, enumerations
290
+ )
291
+ logger.info(f"Pre-cached {label_count} common labels")
292
+ except Exception as e:
293
+ logger.warning(f"Failed to sync common labels: {e}")
294
+
295
+ # Step 5: Build search index
296
+ progress.phase = "indexing"
297
+ progress.current_operation = "Building search index"
298
+ progress.completed_steps = 9
299
+ self._update_progress(progress)
300
+
301
+ # TODO: Implement search index building
302
+ # await self._build_search_index(global_version_id)
303
+
304
+ # Step 6: Complete
305
+ progress.phase = "completed"
306
+ progress.current_operation = "Finalizing sync"
307
+ progress.completed_steps = 10
308
+ self._update_progress(progress)
309
+
310
+ return SyncResult(
311
+ success=True,
312
+ error=None,
313
+ duration_ms=0, # Will be set by caller
314
+ entity_count=entity_count,
315
+ action_count=action_count,
316
+ enumeration_count=enumeration_count,
317
+ label_count=label_count,
318
+ )
319
+
320
+ except Exception as e:
321
+ logger.error(f"Full sync failed: {e}")
322
+ return SyncResult(
323
+ success=False,
324
+ error=str(e),
325
+ duration_ms=0,
326
+ entity_count=entity_count,
327
+ action_count=action_count,
328
+ enumeration_count=enumeration_count,
329
+ label_count=label_count,
330
+ )
331
+
332
+ async def _sync_incremental_metadata(
333
+ self, global_version_id: int, progress: SyncProgress
334
+ ) -> SyncResult:
335
+ """Perform incremental metadata synchronization
336
+
337
+ Args:
338
+ global_version_id: Global version ID
339
+ progress: Progress tracker
340
+
341
+ Returns:
342
+ Sync result
343
+ """
344
+ # For now, fall back to full sync
345
+ # TODO: Implement true incremental sync logic
346
+ logger.info("Incremental sync not yet implemented, falling back to full sync")
347
+ return await self._sync_full_metadata(global_version_id, progress)
348
+
349
+ async def _sync_entities_only(
350
+ self, global_version_id: int, progress: SyncProgress
351
+ ) -> SyncResult:
352
+ """Sync only data entities (fast mode)
353
+
354
+ Args:
355
+ global_version_id: Global version ID
356
+ progress: Progress tracker
357
+
358
+ Returns:
359
+ Sync result
360
+ """
361
+ try:
362
+ # Step 1: Sync data entities
363
+ progress.phase = "entities"
364
+ progress.current_operation = "Syncing data entities"
365
+ progress.completed_steps = 1
366
+ self._update_progress(progress)
367
+
368
+ entities = await self._get_data_entities()
369
+ entity_count = 0
370
+ if entities:
371
+ await self.cache.store_data_entities(global_version_id, entities)
372
+ entity_count = len(entities)
373
+ logger.info(f"Synced {entity_count} data entities (entities-only mode)")
374
+
375
+ # Step 2: Complete
376
+ progress.phase = "completed"
377
+ progress.current_operation = "Entities sync completed"
378
+ progress.completed_steps = 4
379
+ self._update_progress(progress)
380
+
381
+ return SyncResult(
382
+ success=True,
383
+ error=None,
384
+ duration_ms=0,
385
+ entity_count=entity_count,
386
+ action_count=0,
387
+ enumeration_count=0,
388
+ label_count=0,
389
+ )
390
+
391
+ except Exception as e:
392
+ logger.error(f"Entities-only sync failed: {e}")
393
+ return SyncResult(
394
+ success=False,
395
+ error=str(e),
396
+ duration_ms=0,
397
+ entity_count=0,
398
+ action_count=0,
399
+ enumeration_count=0,
400
+ label_count=0,
401
+ )
402
+
403
    async def _sync_sharing_mode(
        self, global_version_id: int, progress: SyncProgress
    ) -> SyncResult:
        """Sync using cross-environment sharing

        Copies metadata from an already-synced, module-compatible global
        version instead of re-downloading it; falls back to a full sync when
        no such version exists.

        Args:
            global_version_id: Global version ID
            progress: Progress tracker

        Returns:
            Sync result
        """
        try:
            # Step 1: Check if compatible version exists
            progress.phase = "sharing"
            progress.current_operation = "Looking for compatible version"
            progress.completed_steps = 1
            self._update_progress(progress)

            # Get version modules for compatibility check
            version_info = await self.version_manager.get_global_version_info(
                global_version_id
            )
            if not version_info:
                raise ValueError("Global version not found")

            # Find compatible versions
            compatible_versions = await self.version_manager.find_compatible_versions(
                version_info.modules, exact_match=True
            )

            # Filter out current version and find one with complete metadata
            # (the first match wins).
            source_version = None
            for version in compatible_versions:
                if version.global_version_id != global_version_id:
                    if await self.cache._has_complete_metadata(
                        version.global_version_id
                    ):
                        source_version = version
                        break

            if not source_version:
                # No compatible version found, fall back to full sync
                logger.info(
                    "No compatible version found for sharing, falling back to full sync"
                )
                return await self._sync_full_metadata(global_version_id, progress)

            # Step 2: Copy metadata from compatible version
            progress.phase = "copying"
            progress.current_operation = (
                f"Copying from version {source_version.global_version_id}"
            )
            progress.completed_steps = 2
            self._update_progress(progress)

            counts = await self._copy_metadata_between_versions(
                source_version.global_version_id, global_version_id
            )

            # Step 3: Complete
            progress.phase = "completed"
            progress.current_operation = "Sharing sync completed"
            progress.completed_steps = 3
            self._update_progress(progress)

            logger.info(
                f"Shared metadata from version {source_version.global_version_id}"
            )

            # NOTE(review): _copy_metadata_between_versions currently only
            # populates "entities" and "enumerations"; "actions" and "labels"
            # therefore default to 0 here.
            return SyncResult(
                success=True,
                error=None,
                duration_ms=0,
                entity_count=counts.get("entities", 0),
                action_count=counts.get("actions", 0),
                enumeration_count=counts.get("enumerations", 0),
                label_count=counts.get("labels", 0),
            )

        except Exception as e:
            logger.error(f"Sharing sync failed: {e}")
            return SyncResult(
                success=False,
                error=str(e),
                duration_ms=0,
                entity_count=0,
                action_count=0,
                enumeration_count=0,
                label_count=0,
            )
495
+ async def _copy_metadata_between_versions(
496
+ self, source_version_id: int, target_version_id: int
497
+ ) -> Dict[str, int]:
498
+ """Copy metadata between global versions
499
+
500
+ Args:
501
+ source_version_id: Source global version ID
502
+ target_version_id: Target global version ID
503
+
504
+ Returns:
505
+ Dictionary with copy counts
506
+ """
507
+ import aiosqlite
508
+
509
+ counts = {}
510
+
511
+ async with aiosqlite.connect(self.cache.db_path) as db:
512
+ # Copy data entities
513
+ await db.execute(
514
+ """INSERT INTO data_entities
515
+ (global_version_id, name, public_entity_name, public_collection_name,
516
+ label_id, label_text, entity_category, data_service_enabled,
517
+ data_management_enabled, is_read_only)
518
+ SELECT ?, name, public_entity_name, public_collection_name,
519
+ label_id, label_text, entity_category, data_service_enabled,
520
+ data_management_enabled, is_read_only
521
+ FROM data_entities
522
+ WHERE global_version_id = ?""",
523
+ (target_version_id, source_version_id),
524
+ )
525
+ counts["entities"] = db.total_changes
526
+
527
+ # Copy enumerations
528
+ await db.execute(
529
+ """INSERT INTO enumerations
530
+ (global_version_id, name, label_id, label_text)
531
+ SELECT ?, name, label_id, label_text
532
+ FROM enumerations
533
+ WHERE global_version_id = ?""",
534
+ (target_version_id, source_version_id),
535
+ )
536
+ counts["enumerations"] = db.total_changes
537
+
538
+ # Copy other metadata tables as needed...
539
+ # This is a simplified implementation
540
+
541
+ await db.commit()
542
+
543
+ return counts
544
+
545
+ def get_sync_progress(self) -> Optional[SyncProgress]:
546
+ """Get current sync progress
547
+
548
+ Returns:
549
+ Current sync progress if syncing
550
+ """
551
+ return self._sync_progress
552
+
553
+ def is_syncing(self) -> bool:
554
+ """Check if sync is in progress
555
+
556
+ Returns:
557
+ True if sync is in progress
558
+ """
559
+ return self._is_syncing
560
+
561
    async def recommend_sync_strategy(self, global_version_id: int) -> SyncStrategy:
        """Recommend sync strategy based on environment and cache state

        Preference order: INCREMENTAL when metadata already exists locally,
        SHARING_MODE when another fully-synced compatible version exists,
        otherwise FULL. Any failure during the checks falls back to FULL.

        Args:
            global_version_id: Global version ID

        Returns:
            Recommended sync strategy
        """
        try:
            # Check if metadata already exists
            has_metadata = await self.cache._has_complete_metadata(global_version_id)
            if has_metadata:
                return SyncStrategy.INCREMENTAL

            # Get version info
            version_info = await self.version_manager.get_global_version_info(
                global_version_id
            )
            if not version_info:
                return SyncStrategy.FULL

            # Check for compatible versions (sharing opportunity)
            # NOTE(review): this passes version_info.sample_modules while
            # _sync_sharing_mode passes version_info.modules for the same
            # lookup — confirm which attribute is intended here.
            compatible_versions = await self.version_manager.find_compatible_versions(
                version_info.sample_modules, exact_match=True
            )

            for version in compatible_versions:
                if version.global_version_id != global_version_id:
                    if await self.cache._has_complete_metadata(
                        version.global_version_id
                    ):
                        return SyncStrategy.SHARING_MODE

            # Default to full sync for new versions
            return SyncStrategy.FULL

        except Exception as e:
            logger.warning(f"Failed to recommend sync strategy: {e}")
            return SyncStrategy.FULL
602
+ # Metadata API Methods (Using MetadataAPIOperations)
603
+
604
    async def _get_data_entities(self) -> List[DataEntityInfo]:
        """Get data entities using MetadataAPIOperations

        Returns:
            List of data entity information

        Raises:
            Exception: re-raised from the underlying API call after logging
        """
        try:
            # Use existing search method which handles the data extraction and parsing
            entities = await self.metadata_api.search_data_entities()
            return entities

        except Exception as e:
            logger.error(f"Error getting data entities: {e}")
            raise
622
    async def _get_public_entities(self) -> List[PublicEntityInfo]:
        """Get detailed schema for all public entities using MetadataAPIOperations

        Returns:
            List of PublicEntityInfo with full schema; an empty list on failure.
            NOTE(review): unlike _get_data_entities, errors here are swallowed
            rather than re-raised — confirm this asymmetry is intentional.
        """
        try:
            return await self.metadata_api.get_all_public_entities_with_details(
                resolve_labels=False  # We'll handle labels separately if needed
            )

        except Exception as e:
            logger.error(f"Error getting public entities: {e}")
            return []
637
    async def _get_public_enumerations(self) -> List[EnumerationInfo]:
        """Get public enumerations using MetadataAPIOperations

        Returns:
            List of enumeration information

        Raises:
            Exception: re-raised from the underlying API call after logging
        """
        try:
            # Use existing method which handles the data extraction and parsing
            enumerations = (
                await self.metadata_api.get_all_public_enumerations_with_details(
                    resolve_labels=False  # We'll handle labels separately if needed
                )
            )
            return enumerations

        except Exception as e:
            logger.error(f"Error getting public enumerations: {e}")
            raise
659
+ async def _get_current_version(self) -> MetadataVersionInfo:
660
+ """Get current environment version information
661
+
662
+ Returns:
663
+ Current version information
664
+ """
665
+ try:
666
+ # Get version information from D365 F&O using MetadataAPIOperations
667
+ application_version = await self.metadata_api.get_application_version()
668
+ platform_version = await self.metadata_api.get_platform_build_version()
669
+ except Exception as e:
670
+ logger.warning(f"Failed to get version information: {e}, using fallback")
671
+ application_version = "10.0.latest"
672
+ platform_version = "10.0.latest"
673
+
674
+ # Create a version hash based on the actual version information
675
+ version_components = {
676
+ "application_version": application_version,
677
+ "platform_version": platform_version,
678
+ }
679
+
680
+ # Create version hash
681
+ version_str = json.dumps(version_components, sort_keys=True)
682
+ version_hash = hashlib.sha256(version_str.encode()).hexdigest()[:16]
683
+
684
+ return MetadataVersionInfo(
685
+ environment_id=self.cache._environment_id,
686
+ version_hash=version_hash,
687
+ application_version=application_version,
688
+ platform_version=platform_version,
689
+ package_info=[], # Would be populated with actual package info
690
+ created_at=datetime.now(timezone.utc),
691
+ is_active=True,
692
+ )
693
+
694
+ async def needs_sync(self, global_version_id: int) -> bool:
695
+ """Check if metadata synchronization is needed
696
+
697
+ Args:
698
+ global_version_id: Global version ID to check
699
+
700
+ Returns:
701
+ True if sync is needed
702
+ """
703
+ try:
704
+ # Check if metadata exists for this version
705
+ return not await self.cache._has_complete_metadata(global_version_id)
706
+
707
+ except Exception as e:
708
+ logger.warning(f"Could not check sync status: {e}")
709
+ # When in doubt, assume sync is needed
710
+ return True
711
+
712
    async def _sync_common_labels(
        self,
        global_version_id: int,
        entities: List[DataEntityInfo],
        public_entities: List[PublicEntityInfo],
        enumerations: List[EnumerationInfo],
    ) -> int:
        """Sync commonly used labels to improve performance

        Collects label IDs (those starting with "@") from the already-fetched
        entities, entity properties, enumerations, and enum members, then
        fetches and caches their texts in one batch, falling back to
        per-label fetches (capped at 50) if the batch call fails.

        Args:
            global_version_id: Global version ID
            entities: Data entities to extract labels from
            public_entities: Public entities to extract labels from
            enumerations: Enumerations to extract labels from

        Returns:
            Number of labels cached
        """
        label_ids = set()

        # Collect label IDs from data entities
        if entities:
            for entity in entities:
                if entity.label_id and entity.label_id.startswith("@"):
                    label_ids.add(entity.label_id)

        # Collect label IDs from public entities and their properties
        if public_entities:
            for entity in public_entities:
                if entity.label_id and entity.label_id.startswith("@"):
                    label_ids.add(entity.label_id)

                # Collect from properties
                for prop in entity.properties:
                    if prop.label_id and prop.label_id.startswith("@"):
                        label_ids.add(prop.label_id)

        # Collect label IDs from enumerations and their members
        if enumerations:
            for enum in enumerations:
                if enum.label_id and enum.label_id.startswith("@"):
                    label_ids.add(enum.label_id)

                # Collect from members
                for member in enum.members:
                    if member.label_id and member.label_id.startswith("@"):
                        label_ids.add(member.label_id)

        # Remove empty/None labels
        label_ids = {
            label_id for label_id in label_ids if label_id and label_id.strip()
        }

        if not label_ids:
            logger.debug("No label IDs found to pre-cache")
            return 0

        logger.info(f"Pre-caching {len(label_ids)} common labels")

        # Fetch labels from API and cache them
        labels_to_cache = []
        cached_count = 0

        # Use the label operations from metadata API to fetch labels.
        # If label_ops is unavailable, nothing is cached and 0 is returned.
        if hasattr(self.metadata_api, "label_ops") and self.metadata_api.label_ops:
            try:
                # Get labels in batch for efficiency
                label_texts = await self.metadata_api.label_ops.get_labels_batch(
                    list(label_ids)
                )

                for label_id, label_text in label_texts.items():
                    if label_text:  # Only cache labels that have actual text
                        labels_to_cache.append(
                            LabelInfo(id=label_id, language="en-US", value=label_text)
                        )
                        cached_count += 1

                # Batch cache all labels
                if labels_to_cache:
                    await self.cache.set_labels_batch(
                        labels_to_cache, global_version_id
                    )

            except Exception as e:
                logger.warning(f"Failed to batch fetch labels: {e}")
                # NOTE(review): cached_count may already include labels counted
                # before the batch path failed; the fallback below adds to that
                # total, so the returned count can overstate what was stored.
                # Fall back to individual fetching for critical labels
                for label_id in list(label_ids)[
                    :50
                ]:  # Limit to first 50 to avoid timeout
                    try:
                        label_text = await self.metadata_api.label_ops.get_label_text(
                            label_id
                        )
                        if label_text:
                            await self.cache.set_label(
                                label_id,
                                label_text,
                                global_version_id=global_version_id,
                            )
                            cached_count += 1
                    except Exception as e2:
                        logger.debug(
                            f"Failed to fetch individual label {label_id}: {e2}"
                        )

        logger.info(f"Successfully pre-cached {cached_count} labels")
        return cached_count