superlocalmemory 3.4.1 → 3.4.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/pyproject.toml +11 -2
- package/scripts/postinstall.js +26 -7
- package/src/superlocalmemory/cli/commands.py +42 -60
- package/src/superlocalmemory/cli/daemon.py +107 -47
- package/src/superlocalmemory/cli/main.py +10 -0
- package/src/superlocalmemory/cli/setup_wizard.py +137 -9
- package/src/superlocalmemory/core/config.py +28 -0
- package/src/superlocalmemory/core/consolidation_engine.py +38 -1
- package/src/superlocalmemory/core/engine.py +9 -0
- package/src/superlocalmemory/core/health_monitor.py +313 -0
- package/src/superlocalmemory/core/reranker_worker.py +19 -5
- package/src/superlocalmemory/ingestion/__init__.py +13 -0
- package/src/superlocalmemory/ingestion/adapter_manager.py +234 -0
- package/src/superlocalmemory/ingestion/base_adapter.py +177 -0
- package/src/superlocalmemory/ingestion/calendar_adapter.py +340 -0
- package/src/superlocalmemory/ingestion/credentials.py +118 -0
- package/src/superlocalmemory/ingestion/gmail_adapter.py +369 -0
- package/src/superlocalmemory/ingestion/parsers.py +100 -0
- package/src/superlocalmemory/ingestion/transcript_adapter.py +156 -0
- package/src/superlocalmemory/learning/consolidation_worker.py +47 -1
- package/src/superlocalmemory/learning/entity_compiler.py +377 -0
- package/src/superlocalmemory/mesh/__init__.py +12 -0
- package/src/superlocalmemory/mesh/broker.py +344 -0
- package/src/superlocalmemory/retrieval/entity_channel.py +12 -6
- package/src/superlocalmemory/server/api.py +6 -7
- package/src/superlocalmemory/server/routes/entity.py +95 -0
- package/src/superlocalmemory/server/routes/ingest.py +110 -0
- package/src/superlocalmemory/server/routes/mesh.py +186 -0
- package/src/superlocalmemory/server/unified_daemon.py +691 -0
- package/src/superlocalmemory/storage/schema_v343.py +229 -0
- package/src/superlocalmemory.egg-info/PKG-INFO +0 -597
- package/src/superlocalmemory.egg-info/SOURCES.txt +0 -287
- package/src/superlocalmemory.egg-info/dependency_links.txt +0 -1
- package/src/superlocalmemory.egg-info/entry_points.txt +0 -2
- package/src/superlocalmemory.egg-info/requires.txt +0 -47
- package/src/superlocalmemory.egg-info/top_level.txt +0 -1
|
@@ -0,0 +1,340 @@
|
|
|
1
|
+
# Copyright (c) 2026 Varun Pratap Bhardwaj / Qualixar
|
|
2
|
+
# Licensed under the Elastic License 2.0 - see LICENSE file
|
|
3
|
+
# Part of SuperLocalMemory V3 | https://qualixar.com | https://varunpratap.com
|
|
4
|
+
|
|
5
|
+
"""Google Calendar ingestion adapter — 2 tiers.
|
|
6
|
+
|
|
7
|
+
Tier 1: ICS file import — zero setup
|
|
8
|
+
Tier 2: Google Calendar API with OAuth polling — shares Gmail OAuth credentials
|
|
9
|
+
|
|
10
|
+
OPT-IN only. Enabled via: slm adapters enable calendar
|
|
11
|
+
|
|
12
|
+
Part of Qualixar | Author: Varun Pratap Bhardwaj
|
|
13
|
+
License: Elastic-2.0
|
|
14
|
+
"""
|
|
15
|
+
|
|
16
|
+
from __future__ import annotations
|
|
17
|
+
|
|
18
|
+
import json
|
|
19
|
+
import logging
|
|
20
|
+
import sys
|
|
21
|
+
from datetime import datetime, timezone
|
|
22
|
+
from pathlib import Path
|
|
23
|
+
|
|
24
|
+
from superlocalmemory.ingestion.base_adapter import BaseAdapter, AdapterConfig, IngestItem
|
|
25
|
+
|
|
26
|
+
logger = logging.getLogger("superlocalmemory.ingestion.calendar")
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class CalendarAdapter(BaseAdapter):
    """Google Calendar ingestion with automatic tier detection.

    Tier 1 ("ics")  : one-shot import of a local ICS file — zero setup.
    Tier 2 ("oauth"): incremental polling of the Google Calendar API,
                      reusing the Gmail OAuth refresh token.
    """

    source_type = "calendar"

    def __init__(self, config: AdapterConfig | None = None, tier: str = "auto"):
        super().__init__(config)
        self._tier = tier                    # "auto" | "ics" | "oauth"
        self._ics_path: str | None = None    # resolved ICS file (tier 1)
        self._ics_processed = False          # one-shot guard for tier 1
        self._sync_token: str | None = None  # Calendar API incremental token
        self._poll_interval = 900  # 15 min

    def run(self) -> None:
        """Resolve the active tier, then delegate to the BaseAdapter loop."""
        self._detect_tier()
        logger.info("Calendar adapter starting (tier=%s)", self._tier)
        super().run()

    def fetch_items(self) -> list[IngestItem]:
        """Fetch one batch of events for the active tier."""
        if self._tier == "ics":
            return self._fetch_ics()
        elif self._tier == "oauth":
            return self._fetch_oauth()
        return []

    def wait_for_next_cycle(self) -> None:
        """Stop after a completed one-shot ICS import; otherwise sleep."""
        if self._tier == "ics" and self._ics_processed:
            logger.info("ICS import complete, adapter stopping")
            self.stop()
            return
        self._stop_event.wait(self._poll_interval)

    def _detect_tier(self) -> None:
        """Pick a tier: explicit config > shared Gmail OAuth > local ICS files."""
        if self._tier != "auto":
            return

        adapters_path = Path.home() / ".superlocalmemory" / "adapters.json"
        cfg = {}
        if adapters_path.exists():
            # FIX: a corrupt adapters.json previously raised out of
            # json.loads and crashed the adapter during startup.
            try:
                cfg = json.loads(adapters_path.read_text()).get("calendar", {})
            except (json.JSONDecodeError, OSError) as exc:
                logger.warning("Could not read %s: %s", adapters_path, exc)

        if cfg.get("tier") == "ics" or cfg.get("ics_path"):
            self._tier = "ics"
            self._ics_path = cfg.get("ics_path", "")
            return

        # Check for OAuth (shares Gmail credentials)
        from superlocalmemory.ingestion.credentials import has_credential
        if has_credential("gmail", "refresh_token"):
            self._tier = "oauth"
            return

        # Look for ICS files dropped into the import directory
        import_dir = Path.home() / ".superlocalmemory" / "import"
        ics_files = list(import_dir.glob("*.ics")) if import_dir.exists() else []
        if ics_files:
            self._tier = "ics"
            self._ics_path = str(ics_files[0])
            return

        # Nothing configured: default to tier 1 with no path; _fetch_ics
        # marks itself processed so the adapter stops cleanly.
        self._tier = "ics"

    # -- Tier 1: ICS file import --

    def _fetch_ics(self) -> list[IngestItem]:
        """One-shot parse of the configured ICS file into IngestItems."""
        if self._ics_processed:
            return []
        if not self._ics_path:
            # FIX: previously this returned without marking the import done,
            # leaving the adapter in an eternal 15-minute no-op poll loop.
            logger.warning("Calendar tier is 'ics' but no ICS file configured")
            self._ics_processed = True
            return []

        path = Path(self._ics_path)
        if not path.exists():
            logger.warning("ICS file not found: %s", path)
            self._ics_processed = True
            return []

        items = []

        try:
            # Try icalendar library first
            from icalendar import Calendar
            cal = Calendar.from_ical(path.read_bytes())
            events = [c for c in cal.walk() if c.name == "VEVENT"]
            logger.info("Parsing ICS: %d events", len(events))

            for event in events:
                if self._stop_event.is_set():
                    break
                try:
                    summary = str(event.get("SUMMARY", "(no title)"))
                    dtstart = event.get("DTSTART")
                    dtend = event.get("DTEND")
                    description = str(event.get("DESCRIPTION", ""))
                    location = str(event.get("LOCATION", ""))
                    uid = str(event.get("UID", ""))

                    start_str = dtstart.dt.isoformat() if dtstart else ""
                    end_str = dtend.dt.isoformat() if dtend else ""

                    # Extract attendees (local part of the mailto: address)
                    attendees = []
                    att_list = event.get("ATTENDEE", [])
                    if not isinstance(att_list, list):
                        att_list = [att_list]
                    for att in att_list:
                        email = str(att).replace("mailto:", "").strip()
                        if email and "@" in email:
                            attendees.append(email.split("@")[0])  # Use name part

                    content = (
                        f"Calendar Event: {summary}\n"
                        f"When: {start_str} to {end_str}\n"
                    )
                    if location:
                        content += f"Where: {location}\n"
                    if attendees:
                        content += f"Attendees: {', '.join(attendees)}\n"
                    if description:
                        content += f"\n{description[:2000]}"

                    # UID+start uniquely identifies a (possibly recurring) event
                    dedup_key = f"{uid}-{start_str}" if uid else f"ics-{summary}-{start_str}"

                    items.append(IngestItem(
                        content=content,
                        dedup_key=dedup_key,
                        metadata={
                            "summary": summary,
                            "start": start_str,
                            "end": end_str,
                            "attendees": attendees,
                            "source": "ics_import",
                        },
                    ))

                    # Entity propagation for attendees
                    for attendee in attendees:
                        items.append(IngestItem(
                            content=f"{attendee} attended meeting: {summary} on {start_str}",
                            dedup_key=f"attendee-{attendee}-{dedup_key}",
                            metadata={
                                "entity_name": attendee,
                                "event": summary,
                                "source": "entity_propagation",
                            },
                        ))

                except Exception as exc:
                    logger.debug("Failed to parse event: %s", exc)

        except ImportError:
            # Fallback: basic ICS parsing without icalendar library
            logger.info("icalendar not installed, using basic parser")
            items = self._parse_ics_basic(path)
        except Exception as exc:
            # FIX: a malformed ICS file raised a non-ImportError out of
            # from_ical() and aborted the import; fall back to basic parsing.
            logger.warning("icalendar failed on %s (%s), using basic parser", path, exc)
            items = self._parse_ics_basic(path)

        self._ics_processed = True
        logger.info("ICS import: %d items", len(items))
        return items

    def _parse_ics_basic(self, path: Path) -> list[IngestItem]:
        """Basic ICS parser without icalendar library.

        Splits on BEGIN:VEVENT and reads simple KEY:VALUE properties only;
        folded (continuation) lines and parameters are ignored.
        """
        content = path.read_text(encoding="utf-8", errors="replace")
        items = []
        events = content.split("BEGIN:VEVENT")

        for i, block in enumerate(events[1:]):  # Skip first (before any VEVENT)
            try:
                lines = block.split("\n")
                props = {}
                for line in lines:
                    # Continuation lines start with a space — skip them
                    if ":" in line and not line.startswith(" "):
                        key, _, val = line.partition(":")
                        key = key.split(";")[0].strip()  # drop property params
                        props[key] = val.strip()

                summary = props.get("SUMMARY", "(no title)")
                dtstart = props.get("DTSTART", "")
                uid = props.get("UID", f"basic-{i}")

                content_text = f"Calendar Event: {summary}\nWhen: {dtstart}"
                items.append(IngestItem(
                    content=content_text,
                    dedup_key=f"{uid}-{dtstart}",
                    metadata={"summary": summary, "start": dtstart, "source": "ics_basic"},
                ))
            except Exception:
                pass

        return items

    # -- Tier 2: OAuth API polling --

    def _fetch_oauth(self) -> list[IngestItem]:
        """Poll Google Calendar API with OAuth.

        Uses the Gmail refresh token and incremental sync tokens; on a
        410 (expired sync token) it performs one full re-sync.
        """
        try:
            from superlocalmemory.ingestion.credentials import load_credential
            from google.oauth2.credentials import Credentials
            from googleapiclient.discovery import build

            refresh_token = load_credential("gmail", "refresh_token")
            client_id = load_credential("gmail", "client_id")
            client_secret = load_credential("gmail", "client_secret")

            if not all([refresh_token, client_id, client_secret]):
                logger.warning("Calendar OAuth credentials incomplete")
                return []

            creds = Credentials(
                token=None,
                refresh_token=refresh_token,
                client_id=client_id,
                client_secret=client_secret,
                token_uri="https://oauth2.googleapis.com/token",
            )

            service = build("calendar", "v3", credentials=creds)

            # Incremental sync
            kwargs = {"calendarId": "primary", "singleEvents": True, "maxResults": 50}
            if self._sync_token:
                kwargs["syncToken"] = self._sync_token
            else:
                # Initial sync: last 30 days
                from datetime import timedelta
                time_min = (datetime.now(timezone.utc) - timedelta(days=30)).isoformat()
                kwargs["timeMin"] = time_min

            try:
                results = service.events().list(**kwargs).execute()
            except Exception as api_err:
                if "410" in str(api_err):
                    # Sync token expired — full re-sync (recursion is bounded:
                    # the retry takes the timeMin branch above)
                    logger.info("Calendar sync token expired, doing full re-sync")
                    self._sync_token = None
                    return self._fetch_oauth()
                raise

            self._sync_token = results.get("nextSyncToken")
            events = results.get("items", [])

            items = []
            for event in events:
                if self._stop_event.is_set():
                    break
                try:
                    summary = event.get("summary", "(no title)")
                    # All-day events carry "date" instead of "dateTime"
                    start = event.get("start", {}).get("dateTime", event.get("start", {}).get("date", ""))
                    end = event.get("end", {}).get("dateTime", event.get("end", {}).get("date", ""))
                    description = event.get("description", "")
                    location = event.get("location", "")
                    event_id = event.get("id", "")
                    updated = event.get("updated", "")

                    attendees = []
                    for att in event.get("attendees", []):
                        name = att.get("displayName") or att.get("email", "").split("@")[0]
                        if name:
                            attendees.append(name)

                    content = f"Calendar Event: {summary}\nWhen: {start} to {end}\n"
                    if location:
                        content += f"Where: {location}\n"
                    if attendees:
                        content += f"Attendees: {', '.join(attendees)}\n"
                    if description:
                        content += f"\n{description[:2000]}"

                    # id+updated: re-ingest an event when it changes
                    dedup_key = f"{event_id}-{updated}"

                    items.append(IngestItem(
                        content=content,
                        dedup_key=dedup_key,
                        metadata={
                            "summary": summary, "start": start,
                            "attendees": attendees, "source": "oauth",
                        },
                    ))

                    # Entity propagation
                    for attendee in attendees:
                        items.append(IngestItem(
                            content=f"{attendee} attended: {summary} on {start}",
                            dedup_key=f"cal-attendee-{attendee}-{event_id}",
                            metadata={"entity_name": attendee, "event": summary, "source": "entity_propagation"},
                        ))

                except Exception as exc:
                    logger.debug("Calendar event parse error: %s", exc)

            return items

        except ImportError:
            logger.warning("Calendar OAuth requires: pip install 'superlocalmemory[ingestion]'")
            return []
        except Exception as exc:
            logger.warning("Calendar OAuth failed: %s", exc)
            return []
|
|
323
|
+
|
|
324
|
+
|
|
325
|
+
# ---------------------------------------------------------------------------
|
|
326
|
+
# CLI entry point
|
|
327
|
+
# ---------------------------------------------------------------------------
|
|
328
|
+
|
|
329
|
+
if __name__ == "__main__":
    import logging as _logging
    _logging.basicConfig(level=_logging.INFO, format="%(asctime)s %(message)s")

    # Read the configured tier; fall back to auto-detection when the config
    # file is missing or unreadable (FIX: a corrupt adapters.json previously
    # raised out of json.loads and crashed the CLI entry point).
    adapters_path = Path.home() / ".superlocalmemory" / "adapters.json"
    tier = "auto"
    if adapters_path.exists():
        try:
            cfg = json.loads(adapters_path.read_text()).get("calendar", {})
            tier = cfg.get("tier", "auto")
        except (json.JSONDecodeError, OSError):
            pass

    adapter = CalendarAdapter(tier=tier)
    adapter.run()
|
|
@@ -0,0 +1,118 @@
|
|
|
1
|
+
# Copyright (c) 2026 Varun Pratap Bhardwaj / Qualixar
|
|
2
|
+
# Licensed under the Elastic License 2.0 - see LICENSE file
|
|
3
|
+
# Part of SuperLocalMemory V3 | https://qualixar.com | https://varunpratap.com
|
|
4
|
+
|
|
5
|
+
"""Cross-platform credential storage for ingestion adapters.
|
|
6
|
+
|
|
7
|
+
Uses OS keychain via keyring library (macOS Keychain, Windows Credential Locker,
|
|
8
|
+
Linux SecretService). Falls back to file-based storage with restricted permissions.
|
|
9
|
+
|
|
10
|
+
Part of Qualixar | Author: Varun Pratap Bhardwaj
|
|
11
|
+
License: Elastic-2.0
|
|
12
|
+
"""
|
|
13
|
+
|
|
14
|
+
from __future__ import annotations
|
|
15
|
+
|
|
16
|
+
import json
|
|
17
|
+
import logging
|
|
18
|
+
import os
|
|
19
|
+
import sys
|
|
20
|
+
from pathlib import Path
|
|
21
|
+
|
|
22
|
+
logger = logging.getLogger("superlocalmemory.ingestion.credentials")
|
|
23
|
+
|
|
24
|
+
_CRED_DIR = Path.home() / ".superlocalmemory" / "credentials"
|
|
25
|
+
_SERVICE_PREFIX = "slm"
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def store_credential(service: str, key: str, value: str) -> bool:
    """Store a credential securely. Returns True on success.

    Prefers the OS keychain (via keyring); falls back to a plain JSON file
    under ~/.superlocalmemory/credentials with restricted permissions.
    """
    # Try OS keychain first
    try:
        import keyring
        keyring.set_password(f"{_SERVICE_PREFIX}-{service}", key, value)
        logger.debug("Stored %s/%s in OS keychain", service, key)
        return True
    except Exception:
        pass

    # Fallback: plain-text JSON file with restricted permissions
    # (NOTE: not encrypted — protection is file-system permissions only)
    try:
        _CRED_DIR.mkdir(parents=True, exist_ok=True)
        cred_file = _CRED_DIR / f"{service}.json"

        # FIX: lock down permissions BEFORE writing the secret. Previously
        # the file was written with the default umask and only chmod'ed
        # afterwards, leaving a window where it could be world-readable.
        # (Unix only — Windows skipped)
        if sys.platform != "win32":
            os.chmod(_CRED_DIR, 0o700)
            cred_file.touch(exist_ok=True)
            os.chmod(cred_file, 0o600)

        existing = {}
        if cred_file.exists():
            try:
                existing = json.loads(cred_file.read_text())
            except (json.JSONDecodeError, OSError):
                pass

        existing[key] = value
        cred_file.write_text(json.dumps(existing, indent=2))

        logger.debug("Stored %s/%s in file (keychain unavailable)", service, key)
        return True
    except Exception as exc:
        logger.error("Failed to store credential %s/%s: %s", service, key, exc)
        return False
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
def load_credential(service: str, key: str) -> str | None:
    """Return the stored value for *service*/*key*, or None if absent.

    Lookup order mirrors store_credential: OS keychain first, then the
    JSON fallback file under the credentials directory.
    """
    # 1) OS keychain
    secret = None
    try:
        import keyring
        secret = keyring.get_password(f"{_SERVICE_PREFIX}-{service}", key)
    except Exception:
        secret = None
    if secret:
        return secret

    # 2) Fallback JSON file
    cred_file = _CRED_DIR / f"{service}.json"
    try:
        if cred_file.exists():
            return json.loads(cred_file.read_text()).get(key)
    except Exception:
        pass

    return None
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
def delete_credential(service: str, key: str) -> bool:
    """Remove *service*/*key* from both the keychain and the fallback file.

    Returns True if the credential was removed from at least one store.
    """
    removed_any = False

    # Keychain entry (a missing entry raises; treated as "not removed").
    try:
        import keyring
        keyring.delete_password(f"{_SERVICE_PREFIX}-{service}", key)
        removed_any = True
    except Exception:
        pass

    # Fallback file entry.
    try:
        cred_file = _CRED_DIR / f"{service}.json"
        if cred_file.exists():
            entries = json.loads(cred_file.read_text())
            if key in entries:
                entries.pop(key)
                cred_file.write_text(json.dumps(entries, indent=2))
                removed_any = True
    except Exception:
        pass

    return removed_any
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
def has_credential(service: str, key: str) -> bool:
    """Return True when a value is stored for *service*/*key*."""
    stored = load_credential(service, key)
    return stored is not None
|