d365fo-client 0.2.2__py3-none-any.whl → 0.2.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- d365fo_client/auth.py +48 -9
- d365fo_client/client.py +40 -20
- d365fo_client/credential_sources.py +431 -0
- d365fo_client/mcp/client_manager.py +8 -0
- d365fo_client/mcp/main.py +39 -17
- d365fo_client/mcp/server.py +69 -22
- d365fo_client/mcp/tools/__init__.py +2 -0
- d365fo_client/mcp/tools/profile_tools.py +261 -2
- d365fo_client/mcp/tools/sync_tools.py +503 -0
- d365fo_client/metadata_api.py +67 -0
- d365fo_client/metadata_v2/cache_v2.py +11 -9
- d365fo_client/metadata_v2/global_version_manager.py +2 -4
- d365fo_client/metadata_v2/sync_manager_v2.py +1 -1
- d365fo_client/metadata_v2/sync_session_manager.py +1043 -0
- d365fo_client/models.py +22 -3
- d365fo_client/profile_manager.py +7 -1
- d365fo_client/profiles.py +28 -1
- d365fo_client/sync_models.py +181 -0
- {d365fo_client-0.2.2.dist-info → d365fo_client-0.2.4.dist-info}/METADATA +1011 -784
- {d365fo_client-0.2.2.dist-info → d365fo_client-0.2.4.dist-info}/RECORD +24 -20
- {d365fo_client-0.2.2.dist-info → d365fo_client-0.2.4.dist-info}/WHEEL +0 -0
- {d365fo_client-0.2.2.dist-info → d365fo_client-0.2.4.dist-info}/entry_points.txt +0 -0
- {d365fo_client-0.2.2.dist-info → d365fo_client-0.2.4.dist-info}/licenses/LICENSE +0 -0
- {d365fo_client-0.2.2.dist-info → d365fo_client-0.2.4.dist-info}/top_level.txt +0 -0
d365fo_client/metadata_v2/sync_session_manager.py (new file)
@@ -0,0 +1,1043 @@
"""Enhanced sync manager with session-based progress tracking."""

import asyncio
import logging
import time
from datetime import datetime, timezone
from typing import TYPE_CHECKING, Callable, Dict, List, Optional

if TYPE_CHECKING:
    from ..metadata_api import MetadataAPIOperations

from ..models import (
    DataEntityInfo,
    EnumerationInfo,
    LabelInfo,
    PublicEntityInfo,
    SyncResult,
    SyncStrategy,
)
from ..sync_models import (
    SyncActivity,
    SyncPhase,
    SyncSession,
    SyncSessionSummary,
    SyncStatus,
)
from .cache_v2 import MetadataCacheV2

logger = logging.getLogger(__name__)


class SyncSessionManager:
    """Enhanced sync manager with session-based progress tracking."""

    def __init__(self, cache: MetadataCacheV2, metadata_api: "MetadataAPIOperations"):
        """Initialize sync session manager

        Args:
            cache: Metadata cache v2 instance
            metadata_api: Metadata API operations instance
        """
        self.cache = cache
        self.metadata_api = metadata_api
        self.version_manager = cache.version_manager

        # Session management
        self._active_sessions: Dict[str, SyncSession] = {}
        self._session_history: List[SyncSessionSummary] = []
        self._progress_callbacks: Dict[str, List[Callable[[SyncSession], None]]] = {}
        self._max_history = 100  # Keep last 100 sessions in memory

    async def start_sync_session(
        self,
        global_version_id: int,
        strategy: SyncStrategy = SyncStrategy.FULL_WITHOUT_LABELS,
        initiated_by: str = "user"
    ) -> str:
        """Start new sync session and return session ID

        Args:
            global_version_id: Global version ID to sync
            strategy: Sync strategy to use
            initiated_by: Who initiated the sync (user, system, mcp, etc.)

        Returns:
            Session ID for tracking progress

        Raises:
            ValueError: If sync already running for this version
        """
        # Check if sync already running for this version
        for session in self._active_sessions.values():
            if (session.global_version_id == global_version_id and
                session.status == SyncStatus.RUNNING):
                raise ValueError(f"Sync already running for version {global_version_id}")

        # Create new session
        session = SyncSession(
            global_version_id=global_version_id,
            strategy=strategy,
            status=SyncStatus.PENDING,
            start_time=datetime.now(timezone.utc),
            initiated_by=initiated_by
        )

        # Initialize phases based on strategy
        session.phases = self._initialize_phases(strategy)

        # Store session
        self._active_sessions[session.session_id] = session

        # Start background sync
        asyncio.create_task(self._execute_sync_session(session.session_id))

        logger.info(f"Started sync session {session.session_id} for version {global_version_id} with strategy {strategy}")
        return session.session_id

    def _initialize_phases(self, strategy: SyncStrategy) -> Dict[SyncPhase, SyncActivity]:
        """Initialize phases based on sync strategy"""
        phases = {}

        if strategy == SyncStrategy.FULL:
            phase_list = [
                SyncPhase.INITIALIZING,
                SyncPhase.VERSION_CHECK,
                SyncPhase.ENTITIES,
                SyncPhase.SCHEMAS,
                SyncPhase.ENUMERATIONS,
                SyncPhase.LABELS,
                SyncPhase.INDEXING,
                SyncPhase.FINALIZING
            ]
        elif strategy == SyncStrategy.FULL_WITHOUT_LABELS:
            phase_list = [
                SyncPhase.INITIALIZING,
                SyncPhase.VERSION_CHECK,
                SyncPhase.ENTITIES,
                SyncPhase.SCHEMAS,
                SyncPhase.ENUMERATIONS,
                SyncPhase.INDEXING,
                SyncPhase.FINALIZING
            ]
        elif strategy == SyncStrategy.ENTITIES_ONLY:
            phase_list = [
                SyncPhase.INITIALIZING,
                SyncPhase.VERSION_CHECK,
                SyncPhase.ENTITIES,
                SyncPhase.FINALIZING
            ]
        elif strategy == SyncStrategy.LABELS_ONLY:
            phase_list = [
                SyncPhase.INITIALIZING,
                SyncPhase.VERSION_CHECK,
                SyncPhase.LABELS,
                SyncPhase.FINALIZING
            ]
        elif strategy == SyncStrategy.SHARING_MODE:
            phase_list = [
                SyncPhase.INITIALIZING,
                SyncPhase.VERSION_CHECK,
                SyncPhase.FINALIZING
            ]
        else:
            phase_list = [SyncPhase.INITIALIZING, SyncPhase.FINALIZING]

        for phase in phase_list:
            phases[phase] = SyncActivity(
                name=phase.value.replace('_', ' ').title(),
                status=SyncStatus.PENDING
            )

        return phases

    async def _execute_sync_session(self, session_id: str):
        """Execute sync session with detailed progress tracking"""
        session = self._active_sessions.get(session_id)
        if not session:
            return

        try:
            session.status = SyncStatus.RUNNING
            self._notify_progress(session_id)

            # Use enhanced sync logic with detailed progress updates
            result = await self._sync_with_detailed_progress(session)

            session.result = result
            session.status = SyncStatus.COMPLETED if result.success else SyncStatus.FAILED
            session.end_time = datetime.now(timezone.utc)
            session.progress_percent = 100.0

        except Exception as e:
            logger.error(f"Sync session {session_id} failed: {e}")
            session.error = str(e)
            session.status = SyncStatus.FAILED
            session.end_time = datetime.now(timezone.utc)

        finally:
            self._notify_progress(session_id)
            self._archive_session(session_id)

    async def _sync_with_detailed_progress(self, session: SyncSession) -> SyncResult:
        """Execute sync with granular progress updates"""

        # Phase 1: Initializing
        await self._update_phase_progress(session, SyncPhase.INITIALIZING, SyncStatus.RUNNING)
        await asyncio.sleep(0.1)  # Simulate initialization
        await self._complete_phase(session, SyncPhase.INITIALIZING)

        # Phase 2: Version Check
        await self._update_phase_progress(session, SyncPhase.VERSION_CHECK, SyncStatus.RUNNING)

        # Update sync status in database
        await self.version_manager.update_sync_status(
            self.cache._environment_id, session.global_version_id, "syncing"
        )

        await self._complete_phase(session, SyncPhase.VERSION_CHECK)

        if session.strategy == SyncStrategy.FULL:
            # Phase 3: Entities
            await self._sync_entities_with_progress(session)

            # Phase 4: Schemas
            await self._sync_schemas_with_progress(session)

            # Phase 5: Enumerations
            await self._sync_enumerations_with_progress(session)

            # Phase 6: Labels
            await self._sync_labels_with_progress(session)

            # Phase 7: Indexing
            await self._sync_indexing_with_progress(session)

        elif session.strategy == SyncStrategy.FULL_WITHOUT_LABELS:
            # Phase 3: Entities
            await self._sync_entities_with_progress(session)

            # Phase 4: Schemas
            await self._sync_schemas_with_progress(session)

            # Phase 5: Enumerations
            await self._sync_enumerations_with_progress(session)

            # Phase 6: Indexing
            await self._sync_indexing_with_progress(session)

        elif session.strategy == SyncStrategy.ENTITIES_ONLY:
            # Only sync entities
            await self._sync_entities_with_progress(session)

        elif session.strategy == SyncStrategy.LABELS_ONLY:
            # Only sync labels
            await self._sync_labels_only_with_progress(session)

        elif session.strategy == SyncStrategy.SHARING_MODE:
            # Copy from compatible version
            await self._sync_sharing_with_progress(session)

        # Final phase
        await self._update_phase_progress(session, SyncPhase.FINALIZING, SyncStatus.RUNNING)

        # Mark cache sync completed if successful - get counts based on strategy
        entity_count = session.phases.get(SyncPhase.ENTITIES, SyncActivity("", SyncStatus.PENDING)).items_processed
        action_count = session.phases.get(SyncPhase.SCHEMAS, SyncActivity("", SyncStatus.PENDING)).items_processed
        enumeration_count = session.phases.get(SyncPhase.ENUMERATIONS, SyncActivity("", SyncStatus.PENDING)).items_processed
        label_count = session.phases.get(SyncPhase.LABELS, SyncActivity("", SyncStatus.PENDING)).items_processed

        # Override counts to 0 for phases not included in the strategy
        if session.strategy == SyncStrategy.ENTITIES_ONLY:
            action_count = 0
            enumeration_count = 0
            label_count = 0
        elif session.strategy == SyncStrategy.FULL_WITHOUT_LABELS:
            label_count = 0
        elif session.strategy == SyncStrategy.LABELS_ONLY:
            entity_count = 0
            action_count = 0
            enumeration_count = 0
        elif session.strategy == SyncStrategy.SHARING_MODE:
            # For sharing mode, counts come from the copy operation
            pass

        await self.cache.mark_sync_completed(
            session.global_version_id,
            entity_count,
            action_count,
            enumeration_count,
            label_count,
        )

        # Update version manager
        duration_ms = int((datetime.now(timezone.utc) - session.start_time).total_seconds() * 1000)
        await self.version_manager.update_sync_status(
            self.cache._environment_id,
            session.global_version_id,
            "completed",
            duration_ms,
        )

        await self._complete_phase(session, SyncPhase.FINALIZING)

        return SyncResult(
            success=True,
            error=None,
            duration_ms=duration_ms,
            entity_count=entity_count,
            action_count=action_count,
            enumeration_count=enumeration_count,
            label_count=label_count
        )

    async def _sync_entities_with_progress(self, session: SyncSession):
        """Sync entities with detailed progress reporting"""
        phase = SyncPhase.ENTITIES
        await self._update_phase_progress(session, phase, SyncStatus.RUNNING)

        activity = session.phases[phase]
        activity.current_item = "Fetching entity list..."
        self._notify_progress(session.session_id)

        try:
            # Get entities
            entities = await self._get_data_entities()
            activity.items_total = len(entities) if entities else 0

            if entities:
                # Collect label IDs from all entities (only if labels will be synced)
                if self._should_collect_label_ids(session):
                    self._collect_label_ids_from_entities(session, entities)

                for i, entity in enumerate(entities):
                    activity.current_item = f"Processing {entity.name}"
                    activity.items_processed = i + 1
                    activity.progress_percent = ((i + 1) / len(entities)) * 100

                    # Store entity
                    await self.cache.store_data_entities(session.global_version_id, [entity])

                    # Notify progress every 10 entities or at completion
                    if (i + 1) % 10 == 0 or i == len(entities) - 1:
                        self._notify_progress(session.session_id)

            await self._complete_phase(session, phase)

        except Exception as e:
            logger.error(f"Entity sync failed: {e}")
            activity.error = str(e)
            activity.status = SyncStatus.FAILED
            raise

    async def _sync_schemas_with_progress(self, session: SyncSession):
        """Sync schemas with detailed progress reporting"""
        phase = SyncPhase.SCHEMAS
        await self._update_phase_progress(session, phase, SyncStatus.RUNNING)

        activity = session.phases[phase]
        activity.current_item = "Fetching public entities..."
        self._notify_progress(session.session_id)

        try:
            public_entities = await self._get_public_entities()
            activity.items_total = len(public_entities) if public_entities else 0
            action_count = 0

            if public_entities:
                # Collect label IDs from all public entities and their fields/actions (only if labels will be synced)
                if self._should_collect_label_ids(session):
                    self._collect_label_ids_from_public_entities(session, public_entities)

                for i, entity in enumerate(public_entities):
                    activity.current_item = f"Processing schema for {entity.name}"
                    activity.items_processed = i + 1
                    activity.progress_percent = ((i + 1) / len(public_entities)) * 100

                    await self.cache.store_public_entity_schema(session.global_version_id, entity)
                    action_count += len(entity.actions)

                    # Notify progress every 5 entities
                    if (i + 1) % 5 == 0 or i == len(public_entities) - 1:
                        self._notify_progress(session.session_id)

            # Store action count for result
            activity.items_processed = action_count
            await self._complete_phase(session, phase)

        except Exception as e:
            logger.error(f"Schema sync failed: {e}")
            activity.error = str(e)
            activity.status = SyncStatus.FAILED
            raise

    async def _sync_enumerations_with_progress(self, session: SyncSession):
        """Sync enumerations with detailed progress reporting"""
        phase = SyncPhase.ENUMERATIONS
        await self._update_phase_progress(session, phase, SyncStatus.RUNNING)

        activity = session.phases[phase]
        activity.current_item = "Fetching enumerations..."
        self._notify_progress(session.session_id)

        try:
            enumerations = await self._get_public_enumerations()
            activity.items_total = len(enumerations) if enumerations else 0

            if enumerations:
                # Collect label IDs from all enumerations and their members (only if labels will be synced)
                if self._should_collect_label_ids(session):
                    self._collect_label_ids_from_enumerations(session, enumerations)

                await self.cache.store_enumerations(session.global_version_id, enumerations)
                activity.items_processed = len(enumerations)
                activity.progress_percent = 100.0

            self._notify_progress(session.session_id)
            await self._complete_phase(session, phase)

        except Exception as e:
            logger.warning(f"Enumeration sync failed: {e}")
            activity.error = str(e)
            activity.status = SyncStatus.FAILED
            # Don't raise - enumerations are optional

    async def _sync_labels_with_progress(self, session: SyncSession):
        """Sync labels with detailed progress reporting"""

        if not self.metadata_api.label_ops:
            logger.info("Label operations not available, skipping label sync")
            await self._complete_phase(session, SyncPhase.LABELS)
            return

        phase = SyncPhase.LABELS
        await self._update_phase_progress(session, phase, SyncStatus.RUNNING)

        activity = session.phases[phase]
        activity.current_item = "Processing collected labels..."
        self._notify_progress(session.session_id)

        try:
            # Use collected label IDs from previous phases
            label_ids = list(session.collected_label_ids)
            activity.items_total = len(label_ids)

            logger.info(f"Processing {len(label_ids)} collected label IDs for version {session.global_version_id}")

            label_count = 0
            if label_ids:
                # Process labels in batches for better progress reporting
                batch_size = 50
                for i in range(0, len(label_ids), batch_size):
                    labels_to_cache = []
                    batch = label_ids[i:i + batch_size]

                    activity.current_item = f"Fetching labels {i+1}-{min(i+batch_size, len(label_ids))} of {len(label_ids)}"
                    self._notify_progress(session.session_id)

                    # Fetch and store this batch of labels using label operations
                    label_texts = await self.metadata_api.label_ops.get_labels_batch(batch)

                    for label_id, label_text in label_texts.items():
                        if label_text:  # Only cache labels that have actual text
                            labels_to_cache.append(
                                LabelInfo(id=label_id, language="en-US", value=label_text)
                            )
                            label_count += 1

                    # Batch cache labels
                    if labels_to_cache:
                        await self.cache.set_labels_batch(
                            labels_to_cache, session.global_version_id
                        )

                    # Update progress
                    activity.items_processed = min(i + batch_size, len(label_ids))
                    activity.progress_percent = (activity.items_processed / len(label_ids)) * 100
                    self._notify_progress(session.session_id)
            else:
                logger.info(f"No label IDs collected for version {session.global_version_id}")
                activity.items_processed = 0
                activity.progress_percent = 100.0

            logger.info(f"Successfully synced {label_count} labels from {len(label_ids)} collected label IDs")
            await self._complete_phase(session, phase)

        except Exception as e:
            logger.warning(f"Label sync failed: {e}")
            activity.error = str(e)
            activity.status = SyncStatus.FAILED
            # Don't raise - labels are optional

    async def _sync_labels_only_with_progress(self, session: SyncSession):
        """Sync labels only with detailed progress reporting

        This method efficiently collects missing labels from stored metadata using SQL queries,
        then fetches and caches the missing label texts.
        """
        if not self.metadata_api.label_ops:
            logger.info("Label operations not available, skipping label sync")
            await self._complete_phase(session, SyncPhase.LABELS)
            return

        phase = SyncPhase.LABELS
        await self._update_phase_progress(session, phase, SyncStatus.RUNNING)

        activity = session.phases[phase]
        activity.current_item = "Finding missing labels from cached metadata..."
        self._notify_progress(session.session_id)

        try:
            # Use SQL to efficiently find missing labels from stored metadata
            missing_label_ids = await self._get_missing_label_ids_from_database(session.global_version_id)

            if not missing_label_ids:
                logger.info("No missing labels found, trying to collect from fresh metadata fetch...")
                # Fallback to collection from fresh metadata if no stored metadata
                await self._collect_labels_from_fresh_metadata(session)
                missing_label_ids = list(session.collected_label_ids)

            activity.items_total = len(missing_label_ids)
            logger.info(f"Found {len(missing_label_ids)} missing labels for version {session.global_version_id}")

            label_count = 0
            if missing_label_ids:
                # Process labels in batches for better progress reporting
                batch_size = 50
                for i in range(0, len(missing_label_ids), batch_size):
                    labels_to_cache = []
                    batch = missing_label_ids[i:i + batch_size]

                    activity.current_item = f"Fetching labels {i+1}-{min(i+batch_size, len(missing_label_ids))} of {len(missing_label_ids)}"
                    self._notify_progress(session.session_id)

                    # Fetch and store this batch of labels using label operations
                    label_texts = await self.metadata_api.label_ops.get_labels_batch(batch)

                    for label_id, label_text in label_texts.items():
                        if label_text:  # Only cache labels that have actual text
                            labels_to_cache.append(
                                LabelInfo(id=label_id, language="en-US", value=label_text)
                            )
                            label_count += 1

                    # Batch cache labels
                    if labels_to_cache:
                        await self.cache.set_labels_batch(
                            labels_to_cache, session.global_version_id
                        )

                    # Update progress
                    activity.items_processed = min(i + batch_size, len(missing_label_ids))
                    activity.progress_percent = (activity.items_processed / len(missing_label_ids)) * 100
                    self._notify_progress(session.session_id)
            else:
                logger.info(f"No missing labels found for version {session.global_version_id}")
                activity.items_processed = 0
                activity.progress_percent = 100.0

            logger.info(f"Successfully synced {label_count} labels from {len(missing_label_ids)} missing label IDs")
            await self._complete_phase(session, phase)

        except Exception as e:
            logger.error(f"Labels-only sync failed: {e}")
            activity.error = str(e)
            activity.status = SyncStatus.FAILED
            raise

    async def _sync_indexing_with_progress(self, session: SyncSession):
        """Sync indexing with detailed progress reporting"""
        phase = SyncPhase.INDEXING
        await self._update_phase_progress(session, phase, SyncStatus.RUNNING)

        activity = session.phases[phase]
        activity.current_item = "Building search indexes..."
        self._notify_progress(session.session_id)

        try:
            # TODO: Implement search index building
            # For now, just simulate
            await asyncio.sleep(0.5)

            activity.progress_percent = 100.0
            self._notify_progress(session.session_id)
            await self._complete_phase(session, phase)

        except Exception as e:
            logger.warning(f"Indexing failed: {e}")
            activity.error = str(e)
            activity.status = SyncStatus.FAILED
            # Don't raise - indexing is optional

    async def _sync_sharing_with_progress(self, session: SyncSession):
        """Sync using sharing mode with detailed progress reporting"""
        phase = SyncPhase.SCHEMAS  # Reuse schemas phase for sharing
        await self._update_phase_progress(session, phase, SyncStatus.RUNNING)

        activity = session.phases[phase]
        activity.current_item = "Looking for compatible versions..."
        self._notify_progress(session.session_id)

        try:
            # Get version info
            version_info = await self.version_manager.get_global_version_info(
                session.global_version_id
            )
            if not version_info:
                raise ValueError("Global version not found")

            # Find compatible versions
            compatible_versions = await self.version_manager.find_compatible_versions(
                version_info.modules, exact_match=True
            )

            # Find source version
            source_version = None
            for version in compatible_versions:
                if version.global_version_id != session.global_version_id:
                    if await self.cache._has_complete_metadata(version.global_version_id):
                        source_version = version
                        break

            if not source_version:
                # No compatible version, fall back to full sync
                logger.info("No compatible version found for sharing, falling back to full sync")
                await self._sync_entities_with_progress(session)
                return

            activity.current_item = f"Copying from version {source_version.global_version_id}"
            self._notify_progress(session.session_id)

            # Copy metadata
            counts = await self._copy_metadata_between_versions(
                source_version.global_version_id, session.global_version_id
            )

            activity.items_processed = counts.get("entities", 0)
            activity.progress_percent = 100.0
            self._notify_progress(session.session_id)
            await self._complete_phase(session, phase)

        except Exception as e:
            logger.error(f"Sharing sync failed: {e}")
            activity.error = str(e)
            activity.status = SyncStatus.FAILED
            raise

    async def _update_phase_progress(self, session: SyncSession, phase: SyncPhase, status: SyncStatus):
        """Update phase progress"""
        if phase in session.phases:
            activity = session.phases[phase]
            activity.status = status
            if status == SyncStatus.RUNNING:
                activity.start_time = datetime.now(timezone.utc)
                session.current_phase = phase
                session.current_activity = activity.name

            session.progress_percent = session.get_overall_progress()
            self._notify_progress(session.session_id)

    async def _complete_phase(self, session: SyncSession, phase: SyncPhase):
        """Mark phase as completed"""
        if phase in session.phases:
            activity = session.phases[phase]
            activity.status = SyncStatus.COMPLETED
            activity.end_time = datetime.now(timezone.utc)
            activity.progress_percent = 100.0

            session.progress_percent = session.get_overall_progress()
            self._notify_progress(session.session_id)

    def get_sync_session(self, session_id: str) -> Optional[SyncSession]:
        """Get sync session by ID"""
        return self._active_sessions.get(session_id)

    def get_active_sessions(self) -> List[SyncSessionSummary]:
        """Get all active sync sessions"""
        return [
            SyncSessionSummary(
                session_id=session.session_id,
                global_version_id=session.global_version_id,
                strategy=session.strategy,
                status=session.status,
                start_time=session.start_time,
                end_time=session.end_time,
                progress_percent=session.progress_percent,
                current_phase=session.current_phase,
                current_activity=session.current_activity,
                initiated_by=session.initiated_by,
                duration_seconds=int((datetime.now(timezone.utc) - session.start_time).total_seconds()) if session.start_time else None
            )
            for session in self._active_sessions.values()
        ]

    def get_session_history(self, limit: int = 50) -> List[SyncSessionSummary]:
        """Get sync session history"""
        return self._session_history[-limit:]

    async def cancel_sync_session(self, session_id: str) -> bool:
        """Cancel running sync session"""
        session = self._active_sessions.get(session_id)
        if not session or not session.can_cancel:
            return False

        if session.status == SyncStatus.RUNNING:
            session.status = SyncStatus.CANCELLED
            session.end_time = datetime.now(timezone.utc)
            session.error = "Cancelled by user"
            self._notify_progress(session_id)
            self._archive_session(session_id)
            return True

        return False

    def add_progress_callback(self, session_id: str, callback: Callable[[SyncSession], None]):
        """Add progress callback for specific session"""
        if session_id not in self._progress_callbacks:
            self._progress_callbacks[session_id] = []
        self._progress_callbacks[session_id].append(callback)

    def _notify_progress(self, session_id: str):
        """Notify all callbacks for session progress"""
        session = self._active_sessions.get(session_id)
        if not session:
            return

        callbacks = self._progress_callbacks.get(session_id, [])
        for callback in callbacks:
            try:
                callback(session)
            except Exception as e:
                logger.warning(f"Progress callback error: {e}")

    def _archive_session(self, session_id: str):
        """Archive completed session"""
        session = self._active_sessions.pop(session_id, None)
        if session:
            summary = SyncSessionSummary(
                session_id=session.session_id,
                global_version_id=session.global_version_id,
                strategy=session.strategy,
                status=session.status,
                start_time=session.start_time,
                end_time=session.end_time,
                progress_percent=session.progress_percent,
                current_phase=session.current_phase,
                current_activity=session.current_activity,
                initiated_by=session.initiated_by,
                duration_seconds=int((session.end_time - session.start_time).total_seconds()) if session.start_time and session.end_time else None
            )

            self._session_history.append(summary)

            # Limit history size
            if len(self._session_history) > self._max_history:
                self._session_history = self._session_history[-self._max_history:]

            # Clean up callbacks
            self._progress_callbacks.pop(session_id, None)

    async def _get_missing_label_ids_from_database(self, global_version_id: int) -> List[str]:
        """Get missing label IDs from database using efficient SQL query

        This method uses SQL to find all label IDs that exist in metadata tables
        but are missing from the labels_cache table.

        Args:
            global_version_id: Global version ID to check for missing labels

        Returns:
            List of missing label IDs that need to be fetched
        """
        import aiosqlite

        async with aiosqlite.connect(self.cache.db_path) as db:
            # Comprehensive SQL query to find missing labels from all metadata tables with label_id fields
            cursor = await db.execute(
                """
                SELECT DISTINCT T1.label_id FROM (
                    SELECT DISTINCT label_id, label_text FROM data_entities
                    WHERE global_version_id = ? AND label_text IS NULL AND label_id LIKE '@%'
                    UNION ALL
                    SELECT DISTINCT label_id, label_text FROM entity_properties
                    WHERE global_version_id = ? AND label_text IS NULL AND label_id LIKE '@%'
                    UNION ALL
                    SELECT DISTINCT label_id, label_text FROM public_entities
                    WHERE global_version_id = ? AND label_text IS NULL AND label_id LIKE '@%'
                    UNION ALL
                    SELECT DISTINCT label_id, label_text FROM enumerations
                    WHERE global_version_id = ? AND label_text IS NULL AND label_id LIKE '@%'
                    UNION ALL
                    SELECT DISTINCT label_id, label_text FROM enumeration_members
                    WHERE global_version_id = ? AND label_text IS NULL AND label_id LIKE '@%'
                ) T1
                LEFT OUTER JOIN labels_cache T2 ON T1.label_id = T2.label_id AND T2.global_version_id = ?
                WHERE T2.label_id IS NULL
                ORDER BY T1.label_id ASC
                """,
                (global_version_id, global_version_id, global_version_id,
                 global_version_id, global_version_id, global_version_id)
            )

            rows = await cursor.fetchall()
            missing_label_ids = [row[0] for row in rows if row[0]]

            logger.info(f"Found {len(missing_label_ids)} missing labels in database for version {global_version_id}")
            return missing_label_ids

    async def _collect_labels_from_fresh_metadata(self, session: SyncSession):
        """Collect labels from fresh metadata fetch as fallback

        This method is used when no stored metadata is available and we need
        to fetch fresh metadata to collect label IDs.

        Args:
            session: Sync session to collect labels for
        """
        activity = session.phases[SyncPhase.LABELS]

        try:
            activity.current_item = "Fetching entities to collect label IDs..."
            self._notify_progress(session.session_id)

            # Fetch entities and collect their label IDs
            entities = await self._get_data_entities()
            if entities:
                self._collect_label_ids_from_entities(session, entities)
                activity.current_item = f"Collected label IDs from {len(entities)} entities"
                self._notify_progress(session.session_id)

            # Fetch public entities and collect their label IDs
            try:
                activity.current_item = "Fetching public entities to collect label IDs..."
                self._notify_progress(session.session_id)

                public_entities = await self._get_public_entities()
                if public_entities:
                    self._collect_label_ids_from_public_entities(session, public_entities)
                    activity.current_item = f"Collected label IDs from {len(public_entities)} public entities"
                    self._notify_progress(session.session_id)
            except Exception as e:
                logger.warning(f"Error fetching public entities for label collection: {e}")

            # Fetch enumerations and collect their label IDs
            try:
                activity.current_item = "Fetching enumerations to collect label IDs..."
                self._notify_progress(session.session_id)

                enumerations = await self._get_public_enumerations()
                if enumerations:
                    self._collect_label_ids_from_enumerations(session, enumerations)
                    activity.current_item = f"Collected label IDs from {len(enumerations)} enumerations"
                    self._notify_progress(session.session_id)
            except Exception as e:
                logger.warning(f"Error fetching enumerations for label collection: {e}")

        except Exception as e:
            logger.error(f"Error collecting labels from fresh metadata: {e}")
            raise

    def _should_collect_label_ids(self, session: SyncSession) -> bool:
        """Determine if label IDs should be collected based on sync strategy"""
        return session.strategy in [
            SyncStrategy.FULL,
            SyncStrategy.LABELS_ONLY,
            # Don't collect for FULL_WITHOUT_LABELS, ENTITIES_ONLY, etc.
        ]

    def _collect_label_id(self, session: SyncSession, label_id: Optional[str]):
        """Collect a label ID for later processing if it's valid"""
        if label_id and label_id.startswith("@"):
            session.collected_label_ids.add(label_id)

    def _collect_label_ids_from_entities(self, session: SyncSession, entities: List[DataEntityInfo]):
        """Collect label IDs from data entities"""
        for entity in entities:
            self._collect_label_id(session, entity.label_id)

    def _collect_label_ids_from_public_entities(self, session: SyncSession, public_entities: List[PublicEntityInfo]):
        """Collect label IDs from public entities and their fields/actions"""
        for entity in public_entities:
            self._collect_label_id(session, entity.label_id)

            # Collect from properties
            for property in entity.properties:
                self._collect_label_id(session, property.label_id)

    def _collect_label_ids_from_enumerations(self, session: SyncSession, enumerations: List[EnumerationInfo]):
        """Collect label IDs from enumerations and their members"""
        for enum in enumerations:
            self._collect_label_id(session, enum.label_id)

            # Collect from members
            for member in enum.members:
                self._collect_label_id(session, member.label_id)

    # Delegate to metadata API operations and cache methods
    async def _get_data_entities(self) -> List[DataEntityInfo]:
        """Get data entities using MetadataAPIOperations"""
        try:
            entities = await self.metadata_api.get_all_data_entities()
            return entities
        except Exception as e:
            logger.error(f"Error getting data entities: {e}")
            raise

    async def _get_public_entities(self) -> List[PublicEntityInfo]:
        """Get public entities with details"""
        try:
            return await self.metadata_api.get_all_public_entities_with_details(
                resolve_labels=False
            )
        except Exception as e:
            logger.error(f"Error getting public entities: {e}")
            return []

    async def _get_public_enumerations(self) -> List[EnumerationInfo]:
        """Get public enumerations with details"""
        try:
            enumerations = await self.metadata_api.get_all_public_enumerations_with_details(
                resolve_labels=False
            )
            return enumerations
        except Exception as e:
            logger.error(f"Error getting public enumerations: {e}")
            raise

    async def _sync_common_labels(
        self,
        global_version_id: int,
        entities: List[DataEntityInfo],
        public_entities: List[PublicEntityInfo],
        enumerations: List[EnumerationInfo],
    ) -> int:
        """Sync common labels from entities, schemas, and enumerations"""
        try:
            # Extract label IDs from entities, schemas, and enumerations
            label_ids = set()

            # From entities
            for entity in entities:
                if entity.label_id:
                    label_ids.add(entity.label_id)

            # From public entities (schemas)
            for entity in public_entities:
                if entity.label_id:
                    label_ids.add(entity.label_id)
                for property in entity.properties:
                    if property.label_id:
                        label_ids.add(property.label_id)

            # From enumerations
            for enum in enumerations:
                if enum.label_id:
                    label_ids.add(enum.label_id)
                for member in enum.members:
                    if member.label_id:
                        label_ids.add(member.label_id)

            # Fetch and store labels
            label_count = 0
            if label_ids:
                labels = await self.metadata_api.get_labels(list(label_ids))
                if labels:
                    await self.cache.store_labels(global_version_id, labels)
                    label_count = len(labels)

            return label_count

        except Exception as e:
            logger.warning(f"Error syncing common labels: {e}")
            return 0

    async def _copy_metadata_between_versions(self, source_version_id: int, target_version_id: int) -> Dict[str, int]:
        """Copy metadata between global versions"""
        import aiosqlite

        counts = {}

        async with aiosqlite.connect(self.cache.db_path) as db:
            # Copy data entities
            await db.execute(
                """INSERT INTO data_entities
                (global_version_id, name, public_entity_name, public_collection_name,
                 label_id, label_text, entity_category, data_service_enabled,
                 data_management_enabled, is_read_only)
                SELECT ?, name, public_entity_name, public_collection_name,
                       label_id, label_text, entity_category, data_service_enabled,
                       data_management_enabled, is_read_only
                FROM data_entities
                WHERE global_version_id = ?""",
                (target_version_id, source_version_id),
            )
            counts["entities"] = db.total_changes

            # Copy public entities
            await db.execute(
                """INSERT INTO public_entities
                (global_version_id, name, public_collection_name, label_id, label_text,
                 entity_category, is_read_only, data_source_name)
                SELECT ?, name, public_collection_name, label_id, label_text,
                       entity_category, is_read_only, data_source_name
                FROM public_entities
                WHERE global_version_id = ?""",
                (target_version_id, source_version_id),
            )

            # Copy entity fields
            await db.execute(
                """INSERT INTO entity_fields
                (global_version_id, entity_name, field_name, odata_type, xpp_type,
                 is_key, is_nullable, max_length, scale, precision, label_id, label_text)
                SELECT ?, entity_name, field_name, odata_type, xpp_type,
                       is_key, is_nullable, max_length, scale, precision, label_id, label_text
                FROM entity_fields
                WHERE global_version_id = ?""",
                (target_version_id, source_version_id),
            )

            # Copy entity actions
            await db.execute(
                """INSERT INTO entity_actions
                (global_version_id, entity_name, action_name, is_function, label_id, label_text)
                SELECT ?, entity_name, action_name, is_function, label_id, label_text
                FROM entity_actions
                WHERE global_version_id = ?""",
                (target_version_id, source_version_id),
            )

            # Copy enumerations
            await db.execute(
                """INSERT INTO enumerations
                (global_version_id, name, label_id, label_text, is_flags)
                SELECT ?, name, label_id, label_text, is_flags
                FROM enumerations
                WHERE global_version_id = ?""",
                (target_version_id, source_version_id),
            )

            # Copy enumeration members
            await db.execute(
                """INSERT INTO enumeration_members
                (global_version_id, enumeration_name, member_name, member_value, label_id, label_text)
                SELECT ?, enumeration_name, member_name, member_value, label_id, label_text
                FROM enumeration_members
                WHERE global_version_id = ?""",
                (target_version_id, source_version_id),
            )

            # Copy labels
            await db.execute(
                """INSERT INTO labels
                (global_version_id, label_id, label_text)
                SELECT ?, label_id, label_text
                FROM labels
                WHERE global_version_id = ?""",
                (target_version_id, source_version_id),
            )

            await db.commit()

            return counts