vector-inspector 0.3.9__py3-none-any.whl → 0.3.12__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33) hide show
  1. vector_inspector/__init__.py +10 -1
  2. vector_inspector/core/connection_manager.py +91 -19
  3. vector_inspector/core/connections/base_connection.py +43 -43
  4. vector_inspector/core/connections/chroma_connection.py +1 -1
  5. vector_inspector/core/connections/pgvector_connection.py +12 -172
  6. vector_inspector/core/connections/pinecone_connection.py +596 -99
  7. vector_inspector/core/connections/qdrant_connection.py +35 -44
  8. vector_inspector/core/embedding_utils.py +14 -5
  9. vector_inspector/core/logging.py +3 -1
  10. vector_inspector/extensions/__init__.py +6 -0
  11. vector_inspector/extensions/telemetry_settings_panel.py +25 -0
  12. vector_inspector/main.py +45 -2
  13. vector_inspector/services/backup_restore_service.py +228 -15
  14. vector_inspector/services/settings_service.py +79 -19
  15. vector_inspector/services/telemetry_service.py +88 -0
  16. vector_inspector/ui/components/backup_restore_dialog.py +215 -101
  17. vector_inspector/ui/components/connection_manager_panel.py +155 -14
  18. vector_inspector/ui/dialogs/cross_db_migration.py +126 -99
  19. vector_inspector/ui/dialogs/settings_dialog.py +13 -6
  20. vector_inspector/ui/loading_screen.py +169 -0
  21. vector_inspector/ui/main_window.py +44 -19
  22. vector_inspector/ui/services/dialog_service.py +1 -0
  23. vector_inspector/ui/views/collection_browser.py +36 -34
  24. vector_inspector/ui/views/connection_view.py +7 -1
  25. vector_inspector/ui/views/info_panel.py +118 -52
  26. vector_inspector/ui/views/metadata_view.py +30 -31
  27. vector_inspector/ui/views/search_view.py +20 -19
  28. vector_inspector/ui/views/visualization_view.py +18 -15
  29. {vector_inspector-0.3.9.dist-info → vector_inspector-0.3.12.dist-info}/METADATA +19 -37
  30. {vector_inspector-0.3.9.dist-info → vector_inspector-0.3.12.dist-info}/RECORD +33 -29
  31. {vector_inspector-0.3.9.dist-info → vector_inspector-0.3.12.dist-info}/WHEEL +1 -1
  32. vector_inspector-0.3.12.dist-info/licenses/LICENSE +1 -0
  33. {vector_inspector-0.3.9.dist-info → vector_inspector-0.3.12.dist-info}/entry_points.txt +0 -0
@@ -1,13 +1,12 @@
1
1
  """Service for backing up and restoring collections."""
2
2
 
3
- import json
4
- from typing import Dict, Any, Optional
3
+ from datetime import datetime, timezone
5
4
  from pathlib import Path
6
- from datetime import datetime
7
- import shutil
5
+ from typing import Optional
8
6
 
9
- from vector_inspector.core.logging import log_info, log_error, log_debug
10
- from .backup_helpers import write_backup_zip, read_backup_zip, normalize_embeddings
7
+ from vector_inspector.core.logging import log_debug, log_error, log_info
8
+
9
+ from .backup_helpers import normalize_embeddings, read_backup_zip, write_backup_zip
11
10
 
12
11
 
13
12
  class BackupRestoreService:
@@ -15,7 +14,11 @@ class BackupRestoreService:
15
14
 
16
15
  @staticmethod
17
16
  def backup_collection(
18
- connection, collection_name: str, backup_dir: str, include_embeddings: bool = True
17
+ connection,
18
+ collection_name: str,
19
+ backup_dir: str,
20
+ include_embeddings: bool = True,
21
+ profile_name: Optional[str] = None,
19
22
  ) -> Optional[str]:
20
23
  """
21
24
  Backup a collection to a directory.
@@ -25,6 +28,7 @@ class BackupRestoreService:
25
28
  collection_name: Name of collection to backup
26
29
  backup_dir: Directory to store backups
27
30
  include_embeddings: Whether to include embedding vectors
31
+ profile_name: Optional profile/connection name for retrieving model config from settings
28
32
 
29
33
  Returns:
30
34
  Path to backup file or None if failed
@@ -50,13 +54,51 @@ class BackupRestoreService:
50
54
 
51
55
  backup_metadata = {
52
56
  "collection_name": collection_name,
53
- "backup_timestamp": datetime.now().isoformat(),
57
+ "backup_timestamp": datetime.now(tz=timezone.utc).isoformat(),
54
58
  "item_count": len(all_data["ids"]),
55
59
  "collection_info": collection_info,
56
60
  "include_embeddings": include_embeddings,
57
61
  }
58
-
59
- timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
62
+ # Include embedding model info when available to assist accurate restores
63
+ try:
64
+ embed_model = None
65
+ embed_model_type = None
66
+ # Prefer explicit collection_info entries
67
+ if collection_info and collection_info.get("embedding_model"):
68
+ embed_model = collection_info.get("embedding_model")
69
+ embed_model_type = collection_info.get("embedding_model_type")
70
+ else:
71
+ # Ask connection for a model hint (may consult settings/service)
72
+ try:
73
+ embed_model = connection.get_embedding_model(collection_name)
74
+ except Exception:
75
+ embed_model = None
76
+
77
+ # If not found yet, check app settings as a fallback
78
+ if not embed_model and profile_name:
79
+ try:
80
+ from vector_inspector.services.settings_service import SettingsService
81
+
82
+ settings = SettingsService()
83
+ model_info = settings.get_embedding_model(
84
+ profile_name,
85
+ collection_name,
86
+ )
87
+ if model_info:
88
+ embed_model = model_info.get("model")
89
+ embed_model_type = model_info.get("type", "sentence-transformer")
90
+ except Exception:
91
+ pass
92
+
93
+ if embed_model:
94
+ backup_metadata["embedding_model"] = embed_model
95
+ if embed_model_type:
96
+ backup_metadata["embedding_model_type"] = embed_model_type
97
+ except Exception as e:
98
+ # Embedding metadata is optional; log failure but do not abort backup.
99
+ log_debug("Failed to populate embedding metadata for %s: %s", collection_name, e)
100
+
101
+ timestamp = datetime.now(tz=timezone.utc).strftime("%Y%m%d_%H%M%S")
60
102
  backup_filename = f"{collection_name}_backup_{timestamp}.zip"
61
103
  backup_path = Path(backup_dir) / backup_filename
62
104
 
@@ -67,9 +109,14 @@ class BackupRestoreService:
67
109
  log_error("Backup failed: %s", e)
68
110
  return None
69
111
 
70
- @staticmethod
71
112
  def restore_collection(
72
- connection, backup_file: str, collection_name: Optional[str] = None, overwrite: bool = False
113
+ self,
114
+ connection,
115
+ backup_file: str,
116
+ collection_name: Optional[str] = None,
117
+ overwrite: bool = False,
118
+ recompute_embeddings: Optional[bool] = None,
119
+ profile_name: Optional[str] = None,
73
120
  ) -> bool:
74
121
  """
75
122
  Restore a collection from a backup file.
@@ -79,6 +126,11 @@ class BackupRestoreService:
79
126
  backup_file: Path to backup zip file
80
127
  collection_name: Optional new name for restored collection
81
128
  overwrite: Whether to overwrite existing collection
129
+ recompute_embeddings: How to handle embeddings during restore:
130
+ - None (default): Use stored embeddings as-is from backup (safest, fastest)
131
+ - True: Force recompute embeddings from documents using model metadata
132
+ - False: Omit embeddings entirely (documents/metadata only)
133
+ profile_name: Optional profile/connection name for saving model config to app settings
82
134
 
83
135
  Returns:
84
136
  True if successful, False otherwise
@@ -96,8 +148,48 @@ class BackupRestoreService:
96
148
  restore_collection_name,
97
149
  )
98
150
  return False
99
- else:
100
- connection.delete_collection(restore_collection_name)
151
+ connection.delete_collection(restore_collection_name)
152
+ else:
153
+ # Collection does not exist on target; attempt to create it.
154
+ # Try to infer vector size from metadata or embedded vectors in backup.
155
+ try:
156
+ inferred_size = None
157
+ col_info = metadata.get("collection_info") if metadata else None
158
+ if (
159
+ col_info
160
+ and col_info.get("vector_dimension")
161
+ and isinstance(col_info.get("vector_dimension"), int)
162
+ ):
163
+ inferred_size = int(col_info.get("vector_dimension"))
164
+
165
+ # Fallback: inspect embeddings in backup data
166
+ if inferred_size is None and data and data.get("embeddings"):
167
+ first_emb = data.get("embeddings")[0]
168
+ if first_emb is not None:
169
+ inferred_size = len(first_emb)
170
+
171
+ # Final fallback: common default
172
+ if inferred_size is None:
173
+ log_error(
174
+ "Unable to infer vector dimension for collection %s from metadata or backup data; restore aborted.",
175
+ restore_collection_name,
176
+ )
177
+ return False
178
+
179
+ created = True
180
+ if hasattr(connection, "create_collection"):
181
+ created = connection.create_collection(
182
+ restore_collection_name, inferred_size
183
+ )
184
+
185
+ if not created:
186
+ log_error(
187
+ "Failed to create collection %s before restore", restore_collection_name
188
+ )
189
+ return False
190
+ except Exception as e:
191
+ log_error("Error while creating collection %s: %s", restore_collection_name, e)
192
+ return False
101
193
 
102
194
  # Provider-specific preparation hook
103
195
  if hasattr(connection, "prepare_restore"):
@@ -109,17 +201,138 @@ class BackupRestoreService:
109
201
  # Ensure embeddings normalized
110
202
  data = normalize_embeddings(data)
111
203
 
204
+ # Decide how to handle embeddings based on user choice
205
+ embeddings_to_use = None
206
+ stored_embeddings = data.get("embeddings")
207
+
208
+ if recompute_embeddings is False:
209
+ # User explicitly chose to omit embeddings
210
+ log_info("Restoring without embeddings (user choice)")
211
+ embeddings_to_use = None
212
+
213
+ elif recompute_embeddings is True:
214
+ # User explicitly chose to recompute embeddings
215
+ log_info("Recomputing embeddings from documents")
216
+ try:
217
+ from vector_inspector.core.embedding_utils import (
218
+ encode_text,
219
+ load_embedding_model,
220
+ )
221
+
222
+ model_name = metadata.get("embedding_model") if metadata else None
223
+ docs = data.get("documents", [])
224
+
225
+ if not model_name:
226
+ log_error(
227
+ "Cannot recompute: No embedding model available in backup metadata"
228
+ )
229
+ embeddings_to_use = None
230
+ elif not docs:
231
+ log_error("Cannot recompute: No documents available in backup")
232
+ embeddings_to_use = None
233
+ else:
234
+ model_type = metadata.get("embedding_model_type", "sentence-transformer")
235
+ log_info("Loading embedding model: %s (%s)", model_name, model_type)
236
+ model = load_embedding_model(model_name, model_type)
237
+ new_embeddings = []
238
+ if model_type == "clip":
239
+ # CLIP: encode per-document
240
+ for d in docs:
241
+ new_embeddings.append(encode_text(d, model, model_type))
242
+ else:
243
+ # sentence-transformer supports batch encode
244
+ new_embeddings = model.encode(docs, show_progress_bar=False).tolist()
245
+
246
+ embeddings_to_use = new_embeddings
247
+ log_info("Successfully recomputed %d embeddings", len(new_embeddings))
248
+ except Exception as e:
249
+ log_error("Failed to recompute embeddings: %s", e)
250
+ embeddings_to_use = None
251
+
252
+ else:
253
+ # Default (None): Use stored embeddings as-is if available
254
+ if stored_embeddings:
255
+ # Check dimension compatibility with target collection
256
+ try:
257
+ if stored_embeddings and len(stored_embeddings) > 0:
258
+ stored_dim = len(stored_embeddings[0])
259
+ target_dim = inferred_size # We already calculated this above
260
+
261
+ if stored_dim == target_dim:
262
+ log_info(
263
+ "Using stored embeddings from backup (dimension: %d)",
264
+ stored_dim,
265
+ )
266
+ embeddings_to_use = stored_embeddings
267
+ else:
268
+ log_error(
269
+ "Dimension mismatch: backup has %d, target needs %d. Omitting embeddings.",
270
+ stored_dim,
271
+ target_dim,
272
+ )
273
+ embeddings_to_use = None
274
+ else:
275
+ embeddings_to_use = stored_embeddings
276
+ except Exception as e:
277
+ log_error("Error checking embedding dimensions: %s", e)
278
+ # Try to use them anyway
279
+ embeddings_to_use = stored_embeddings
280
+ else:
281
+ log_info("No embeddings in backup to restore")
282
+ embeddings_to_use = None
283
+
112
284
  success = connection.add_items(
113
285
  restore_collection_name,
114
286
  documents=data.get("documents", []),
115
287
  metadatas=data.get("metadatas"),
116
288
  ids=data.get("ids"),
117
- embeddings=data.get("embeddings"),
289
+ embeddings=embeddings_to_use,
118
290
  )
119
291
 
120
292
  if success:
121
293
  log_info("Collection '%s' restored from backup", restore_collection_name)
122
294
  log_info("Restored %d items", len(data.get("ids", [])))
295
+
296
+ # Save model config to app settings if available
297
+ if profile_name and restore_collection_name and metadata:
298
+ try:
299
+ embed_model = metadata.get("embedding_model")
300
+ embed_model_type = metadata.get(
301
+ "embedding_model_type", "sentence-transformer"
302
+ )
303
+ if embed_model:
304
+ from vector_inspector.services.settings_service import SettingsService
305
+
306
+ settings = SettingsService()
307
+ settings.save_embedding_model(
308
+ profile_name,
309
+ restore_collection_name,
310
+ embed_model,
311
+ embed_model_type,
312
+ )
313
+ log_info(
314
+ "Saved model config to settings: %s (%s)",
315
+ embed_model,
316
+ embed_model_type,
317
+ )
318
+ except Exception as e:
319
+ log_error("Failed to save model config to settings: %s", e)
320
+
321
+ # Clear the cache for this collection so the info panel gets fresh data
322
+ if profile_name and restore_collection_name:
323
+ try:
324
+ from vector_inspector.core.cache_manager import get_cache_manager
325
+
326
+ cache = get_cache_manager()
327
+ # Use profile_name as the database identifier for cache
328
+ cache.invalidate(profile_name, restore_collection_name)
329
+ log_info(
330
+ "Cleared cache for restored collection: %s",
331
+ restore_collection_name,
332
+ )
333
+ except Exception as e:
334
+ log_error("Failed to clear cache after restore: %s", e)
335
+
123
336
  return True
124
337
 
125
338
  # Failure: attempt cleanup
@@ -1,19 +1,37 @@
1
1
  """Service for persisting application settings."""
2
2
 
3
- import json
4
3
  import base64
5
- from PySide6.QtCore import QObject, Signal
4
+ import json
6
5
  from pathlib import Path
7
- from typing import Dict, Any, Optional, List
6
+ from typing import Any, Optional
7
+
8
+ from PySide6.QtCore import QObject, Signal
9
+
8
10
  from vector_inspector.core.cache_manager import invalidate_cache_on_settings_change
9
11
  from vector_inspector.core.logging import log_error
10
12
 
11
13
 
12
14
  class SettingsService:
13
- """Handles loading and saving application settings."""
15
+ """Handles loading and saving application settings.
16
+
17
+ This is a singleton - all instances share the same settings data.
18
+ """
19
+
20
+ _instance = None
21
+ _initialized = False
22
+
23
+ def __new__(cls):
24
+ """Ensure only one instance exists (singleton pattern)."""
25
+ if cls._instance is None:
26
+ cls._instance = super().__new__(cls)
27
+ return cls._instance
14
28
 
15
29
  def __init__(self):
16
30
  """Initialize settings service."""
31
+ # Only initialize once
32
+ if SettingsService._initialized:
33
+ return
34
+ SettingsService._initialized = True
17
35
 
18
36
  # Expose a shared QObject-based signal emitter so UI can react to
19
37
  # settings changes without polling.
@@ -28,14 +46,14 @@ class SettingsService:
28
46
 
29
47
  self.settings_dir = Path.home() / ".vector-inspector"
30
48
  self.settings_file = self.settings_dir / "settings.json"
31
- self.settings: Dict[str, Any] = {}
49
+ self.settings: dict[str, Any] = {}
32
50
  self._load_settings()
33
51
 
34
52
  def _load_settings(self):
35
53
  """Load settings from file."""
36
54
  try:
37
55
  if self.settings_file.exists():
38
- with open(self.settings_file, "r", encoding="utf-8") as f:
56
+ with open(self.settings_file, encoding="utf-8") as f:
39
57
  self.settings = json.load(f)
40
58
  except Exception as e:
41
59
  log_error("Failed to load settings: %s", e)
@@ -52,11 +70,11 @@ class SettingsService:
52
70
  except Exception as e:
53
71
  log_error("Failed to save settings: %s", e)
54
72
 
55
- def get_last_connection(self) -> Optional[Dict[str, Any]]:
73
+ def get_last_connection(self) -> Optional[dict[str, Any]]:
56
74
  """Get the last connection configuration."""
57
75
  return self.settings.get("last_connection")
58
76
 
59
- def save_last_connection(self, config: Dict[str, Any]):
77
+ def save_last_connection(self, config: dict[str, Any]):
60
78
  """Save the last connection configuration."""
61
79
  self.settings["last_connection"] = config
62
80
  self._save_settings()
@@ -137,6 +155,14 @@ class SettingsService:
137
155
  else:
138
156
  cache.disable()
139
157
 
158
+ def get_telemetry_enabled(self) -> bool:
159
+ """Get whether telemetry is enabled (default: True)."""
160
+ return bool(self.settings.get("telemetry.enabled", True))
161
+
162
+ def set_telemetry_enabled(self, enabled: bool):
163
+ """Set whether telemetry is enabled."""
164
+ self.set("telemetry.enabled", bool(enabled))
165
+
140
166
  def set(self, key: str, value: Any):
141
167
  """Set a setting value."""
142
168
  self.settings[key] = value
@@ -158,7 +184,7 @@ class SettingsService:
158
184
 
159
185
  def save_embedding_model(
160
186
  self,
161
- connection_id: str,
187
+ profile_name: str,
162
188
  collection_name: str,
163
189
  model_name: str,
164
190
  model_type: str = "user-configured",
@@ -166,7 +192,7 @@ class SettingsService:
166
192
  """Save embedding model mapping for a collection.
167
193
 
168
194
  Args:
169
- connection_id: Connection identifier
195
+ profile_name: Profile/connection name
170
196
  collection_name: Collection name
171
197
  model_name: Embedding model name (e.g., 'sentence-transformers/all-MiniLM-L6-v2')
172
198
  model_type: Type of configuration ('user-configured', 'auto-detected', 'stored')
@@ -174,7 +200,7 @@ class SettingsService:
174
200
  if "collection_embedding_models" not in self.settings:
175
201
  self.settings["collection_embedding_models"] = {}
176
202
 
177
- collection_key = f"{connection_id}:{collection_name}"
203
+ collection_key = f"{profile_name}:{collection_name}"
178
204
  self.settings["collection_embedding_models"][collection_key] = {
179
205
  "model": model_name,
180
206
  "type": model_type,
@@ -183,35 +209,69 @@ class SettingsService:
183
209
  self._save_settings()
184
210
 
185
211
  def get_embedding_model(
186
- self, connection_id: str, collection_name: str
187
- ) -> Optional[Dict[str, Any]]:
212
+ self,
213
+ profile_name: str,
214
+ collection_name: str,
215
+ ) -> Optional[dict[str, Any]]:
188
216
  """Get embedding model mapping for a collection.
189
217
 
190
218
  Args:
191
- connection_id: Connection identifier
219
+ profile_name: Profile/connection name
192
220
  collection_name: Collection name
193
221
 
194
222
  Returns:
195
223
  Dictionary with 'model', 'type', and 'timestamp' or None
196
224
  """
197
225
  collection_models = self.settings.get("collection_embedding_models", {})
198
- collection_key = f"{connection_id}:{collection_name}"
226
+ collection_key = f"{profile_name}:{collection_name}"
199
227
  return collection_models.get(collection_key)
200
228
 
201
- def remove_embedding_model(self, connection_id: str, collection_name: str):
229
+ def remove_embedding_model(
230
+ self,
231
+ profile_name: str,
232
+ collection_name: str,
233
+ ):
202
234
  """Remove embedding model mapping for a collection.
203
235
 
204
236
  Args:
205
- connection_id: Connection identifier
237
+ profile_name: Profile/connection name
206
238
  collection_name: Collection name
207
239
  """
208
240
  if "collection_embedding_models" not in self.settings:
209
241
  return
210
242
 
211
- collection_key = f"{connection_id}:{collection_name}"
243
+ collection_key = f"{profile_name}:{collection_name}"
212
244
  self.settings["collection_embedding_models"].pop(collection_key, None)
213
245
  self._save_settings()
214
246
 
247
+ def remove_profile_settings(self, profile_name: str):
248
+ """Remove all settings for a profile (e.g., when profile is deleted).
249
+
250
+ Args:
251
+ profile_name: Profile/connection name
252
+ """
253
+ if "collection_embedding_models" not in self.settings:
254
+ return
255
+
256
+ # Remove all keys that start with profile_name:
257
+ prefix = f"{profile_name}:"
258
+ keys_to_remove = [
259
+ key for key in self.settings["collection_embedding_models"] if key.startswith(prefix)
260
+ ]
261
+
262
+ for key in keys_to_remove:
263
+ self.settings["collection_embedding_models"].pop(key, None)
264
+
265
+ if keys_to_remove:
266
+ self._save_settings()
267
+ from vector_inspector.core.logging import log_info
268
+
269
+ log_info(
270
+ "Removed %d embedding model settings for profile: %s",
271
+ len(keys_to_remove),
272
+ profile_name,
273
+ )
274
+
215
275
  def _get_timestamp(self) -> str:
216
276
  """Get current timestamp as ISO string."""
217
277
  from datetime import datetime
@@ -259,7 +319,7 @@ class SettingsService:
259
319
  )
260
320
  self._save_settings()
261
321
 
262
- def get_custom_embedding_models(self, dimension: Optional[int] = None) -> List[Dict[str, Any]]:
322
+ def get_custom_embedding_models(self, dimension: Optional[int] = None) -> list[dict[str, Any]]:
263
323
  """Get list of custom embedding models.
264
324
 
265
325
  Args:
@@ -0,0 +1,88 @@
1
+ import json
2
+ import platform
3
+ import uuid
4
+ import requests
5
+ from pathlib import Path
6
+ from vector_inspector.services.settings_service import SettingsService
7
+ from vector_inspector.core.logging import log_info, log_error
8
+
9
+ TELEMETRY_ENDPOINT = "https://api.divinedevops.com/api/v1/telemetry"
10
+
11
+
12
class TelemetryService:
    """Best-effort anonymous usage telemetry.

    Events are appended to a persistent on-disk queue
    (``~/.vector-inspector/telemetry_queue.json``) so they survive restarts,
    then sent in batches to ``TELEMETRY_ENDPOINT``. Delivery failures leave
    events queued for a later attempt. Everything here is best-effort:
    telemetry must never crash the application.
    """

    def __init__(self, settings_service=None):
        """Initialize the service.

        Args:
            settings_service: Optional settings provider; defaults to the
                application's shared SettingsService singleton.
        """
        self.settings = settings_service or SettingsService()
        self.queue_file = Path.home() / ".vector-inspector" / "telemetry_queue.json"
        self._load_queue()

    def _load_queue(self):
        """Load the pending-event queue from disk; fall back to empty on any error."""
        self.queue = []
        if not self.queue_file.exists():
            return
        try:
            with open(self.queue_file, encoding="utf-8") as f:
                loaded = json.load(f)
            # Guard against a corrupted or hand-edited file: the queue must
            # be a JSON array, otherwise start fresh.
            if isinstance(loaded, list):
                self.queue = loaded
        except Exception:
            self.queue = []

    def _save_queue(self):
        """Persist the queue to disk.

        Telemetry is best-effort: a disk failure is logged, never raised,
        so callers (e.g. app launch) cannot be crashed by a full disk or
        permission problem.
        """
        try:
            self.queue_file.parent.mkdir(parents=True, exist_ok=True)
            with open(self.queue_file, "w", encoding="utf-8") as f:
                json.dump(self.queue, f, indent=2)
        except Exception as e:
            log_error(f"[Telemetry] Failed to persist queue: {e}")

    def is_enabled(self):
        """Return True when telemetry is enabled in settings (default: True)."""
        return bool(self.settings.get("telemetry.enabled", True))

    def get_hwid(self):
        """Return a persistent random client ID, creating one on first use.

        This is a random UUID stored in settings, not derived from hardware,
        so it identifies an installation without exposing machine details.
        """
        hwid = self.settings.get("telemetry.hwid")
        if not hwid:
            hwid = str(uuid.uuid4())
            self.settings.set("telemetry.hwid", hwid)
        return hwid

    def queue_event(self, event):
        """Append an event dict to the queue and persist it immediately."""
        self.queue.append(event)
        self._save_queue()

    def send_batch(self):
        """Attempt to deliver every queued event, keeping the ones that fail.

        Each event is tracked individually rather than with value-membership
        removal (`e not in sent`), which was O(n^2) and silently dropped
        *duplicate* queued events when only one copy had been delivered.
        """
        if not self.is_enabled() or not self.queue:
            return
        remaining = []
        for event in self.queue:
            delivered = False
            try:
                log_info(
                    f"[Telemetry] Sending to {TELEMETRY_ENDPOINT}\nPayload: {json.dumps(event, indent=2)}"
                )
                resp = requests.post(TELEMETRY_ENDPOINT, json=event, timeout=5)
                log_info(f"[Telemetry] Response: {resp.status_code} {resp.text}")
                delivered = resp.status_code in (200, 201)
            except Exception as e:
                log_error(f"[Telemetry] Exception: {e}")
            if not delivered:
                remaining.append(event)
        self.queue = remaining
        self._save_queue()

    def send_launch_ping(self, app_version, client_type="vector-inspector"):
        """Queue and immediately try to send an ``app_launch`` event.

        Args:
            app_version: Version string of the running application.
            client_type: Client identifier included in the payload.
        """
        log_info("[Telemetry] send_launch_ping called")
        if not self.is_enabled():
            log_info("[Telemetry] Telemetry is not enabled; skipping launch ping.")
            return
        event = {
            "hwid": self.get_hwid(),
            "event_name": "app_launch",
            "app_version": app_version,
            "client_type": client_type,
            "metadata": {"os": platform.system() + "-" + platform.release()},
        }
        log_info(f"[Telemetry] Launch event payload: {json.dumps(event, indent=2)}")
        self.queue_event(event)
        self.send_batch()

    def purge(self):
        """Discard all queued events and persist the (now empty) queue."""
        self.queue = []
        self._save_queue()

    def get_queue(self):
        """Return a shallow copy of the pending-event queue."""
        return list(self.queue)