superlocalmemory 3.4.0 → 3.4.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +7 -8
- package/docs/screenshots/01-dashboard-main.png +0 -0
- package/docs/screenshots/02-knowledge-graph.png +0 -0
- package/docs/screenshots/03-patterns-learning.png +0 -0
- package/docs/screenshots/04-learning-dashboard.png +0 -0
- package/docs/screenshots/05-behavioral-analysis.png +0 -0
- package/docs/screenshots/06-graph-communities.png +0 -0
- package/package.json +2 -2
- package/pyproject.toml +11 -2
- package/scripts/postinstall.js +26 -7
- package/src/superlocalmemory/cli/commands.py +42 -60
- package/src/superlocalmemory/cli/daemon.py +107 -47
- package/src/superlocalmemory/cli/main.py +10 -0
- package/src/superlocalmemory/cli/setup_wizard.py +137 -9
- package/src/superlocalmemory/core/config.py +28 -0
- package/src/superlocalmemory/core/consolidation_engine.py +38 -1
- package/src/superlocalmemory/core/engine.py +9 -0
- package/src/superlocalmemory/core/engine_wiring.py +5 -1
- package/src/superlocalmemory/core/graph_analyzer.py +254 -12
- package/src/superlocalmemory/core/health_monitor.py +313 -0
- package/src/superlocalmemory/core/reranker_worker.py +19 -5
- package/src/superlocalmemory/ingestion/__init__.py +13 -0
- package/src/superlocalmemory/ingestion/adapter_manager.py +234 -0
- package/src/superlocalmemory/ingestion/base_adapter.py +177 -0
- package/src/superlocalmemory/ingestion/calendar_adapter.py +340 -0
- package/src/superlocalmemory/ingestion/credentials.py +118 -0
- package/src/superlocalmemory/ingestion/gmail_adapter.py +369 -0
- package/src/superlocalmemory/ingestion/parsers.py +100 -0
- package/src/superlocalmemory/ingestion/transcript_adapter.py +156 -0
- package/src/superlocalmemory/learning/consolidation_worker.py +287 -53
- package/src/superlocalmemory/learning/entity_compiler.py +377 -0
- package/src/superlocalmemory/mesh/__init__.py +12 -0
- package/src/superlocalmemory/mesh/broker.py +344 -0
- package/src/superlocalmemory/retrieval/entity_channel.py +141 -4
- package/src/superlocalmemory/retrieval/spreading_activation.py +45 -0
- package/src/superlocalmemory/server/api.py +15 -8
- package/src/superlocalmemory/server/routes/behavioral.py +8 -4
- package/src/superlocalmemory/server/routes/chat.py +320 -0
- package/src/superlocalmemory/server/routes/entity.py +95 -0
- package/src/superlocalmemory/server/routes/ingest.py +110 -0
- package/src/superlocalmemory/server/routes/insights.py +368 -0
- package/src/superlocalmemory/server/routes/learning.py +106 -6
- package/src/superlocalmemory/server/routes/memories.py +20 -9
- package/src/superlocalmemory/server/routes/mesh.py +186 -0
- package/src/superlocalmemory/server/routes/stats.py +25 -3
- package/src/superlocalmemory/server/routes/timeline.py +252 -0
- package/src/superlocalmemory/server/routes/v3_api.py +161 -0
- package/src/superlocalmemory/server/ui.py +8 -0
- package/src/superlocalmemory/server/unified_daemon.py +691 -0
- package/src/superlocalmemory/storage/schema_v343.py +229 -0
- package/src/superlocalmemory/ui/index.html +168 -58
- package/src/superlocalmemory/ui/js/graph-event-bus.js +83 -0
- package/src/superlocalmemory/ui/js/graph-filters.js +1 -1
- package/src/superlocalmemory/ui/js/knowledge-graph.js +942 -0
- package/src/superlocalmemory/ui/js/memory-chat.js +344 -0
- package/src/superlocalmemory/ui/js/memory-timeline.js +265 -0
- package/src/superlocalmemory/ui/js/quick-actions.js +334 -0
- package/src/superlocalmemory.egg-info/PKG-INFO +0 -594
- package/src/superlocalmemory.egg-info/SOURCES.txt +0 -279
- package/src/superlocalmemory.egg-info/dependency_links.txt +0 -1
- package/src/superlocalmemory.egg-info/entry_points.txt +0 -2
- package/src/superlocalmemory.egg-info/requires.txt +0 -47
- package/src/superlocalmemory.egg-info/top_level.txt +0 -1
|
@@ -0,0 +1,177 @@
|
|
|
1
|
+
# Copyright (c) 2026 Varun Pratap Bhardwaj / Qualixar
|
|
2
|
+
# Licensed under the Elastic License 2.0 - see LICENSE file
|
|
3
|
+
# Part of SuperLocalMemory V3 | https://qualixar.com | https://varunpratap.com
|
|
4
|
+
|
|
5
|
+
"""Base adapter class for all ingestion adapters.
|
|
6
|
+
|
|
7
|
+
All adapters inherit this. Enforces stateless, safe, cross-platform operation:
|
|
8
|
+
- Clean shutdown via stop event + parent PID watchdog
|
|
9
|
+
- Rate limiting per hour
|
|
10
|
+
- Batch throttling with interruptible delays
|
|
11
|
+
- Retry on 429 responses
|
|
12
|
+
- Structured logging
|
|
13
|
+
|
|
14
|
+
Part of Qualixar | Author: Varun Pratap Bhardwaj
|
|
15
|
+
License: Elastic-2.0
|
|
16
|
+
"""
|
|
17
|
+
|
|
18
|
+
from __future__ import annotations
|
|
19
|
+
|
|
20
|
+
import json
|
|
21
|
+
import logging
|
|
22
|
+
import os
|
|
23
|
+
import signal
|
|
24
|
+
import sys
|
|
25
|
+
import threading
|
|
26
|
+
import time
|
|
27
|
+
from dataclasses import dataclass, field
|
|
28
|
+
from pathlib import Path
|
|
29
|
+
from typing import NamedTuple
|
|
30
|
+
|
|
31
|
+
logger = logging.getLogger("superlocalmemory.ingestion")
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
@dataclass
class AdapterConfig:
    """Tunables shared by every ingestion adapter.

    Passed to BaseAdapter.__init__; all fields have conservative defaults
    so adapters can be constructed with no arguments.
    """

    daemon_port: int = 8765          # local daemon HTTP port (POST /ingest target)
    batch_size: int = 50             # items sent per batch before the batch delay
    batch_delay_sec: float = 5.0     # interruptible pause between batches
    rate_limit_per_hour: int = 100   # max items ingested per rolling hour
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
class IngestItem(NamedTuple):
    """One unit of content to be POSTed to the daemon's /ingest endpoint.

    content: human-readable text that becomes the memory body.
    dedup_key: stable identifier the daemon uses to skip duplicates.
    metadata: free-form dict forwarded alongside the content.
    """

    content: str
    dedup_key: str
    # NOTE(review): NamedTuple supports no default_factory, so this single
    # empty dict is shared by every instance created without metadata. Safe
    # only while no caller mutates item.metadata in place — confirm before
    # relying on it.
    metadata: dict = {}
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
class BaseAdapter:
    """All ingestion adapters inherit this class.

    Provides: run loop, rate limiting, retry, shutdown, parent watchdog.
    Subclasses implement: fetch_items(), wait_for_next_cycle(), source_type.
    """

    # Overridden by each subclass (e.g. "gmail", "calendar").
    source_type: str = "unknown"

    def __init__(self, config: AdapterConfig | None = None):
        self.config = config or AdapterConfig()
        self.daemon_url = f"http://127.0.0.1:{self.config.daemon_port}"
        self._items_this_hour = 0          # counter for the rolling-hour budget
        self._hour_start = time.time()     # start of the current budget window
        self._stop_event = threading.Event()
        # PID of the daemon that spawned us; used as a liveness watchdog.
        self._parent_pid = os.getppid()
        self._total_ingested = 0

    def run(self) -> None:
        """Main adapter loop. Subclasses don't override this."""
        self._setup_signals()
        logger.info("%s adapter started (PID %d)", self.source_type, os.getpid())

        while not self._stop_event.is_set():
            # Parent watchdog: exit if daemon died
            if not self._parent_alive():
                logger.info("Parent daemon died, adapter exiting")
                break

            try:
                items = self.fetch_items()
            except Exception as exc:
                logger.warning("fetch_items failed: %s", exc)
                self._stop_event.wait(30)
                continue

            if not items:
                self.wait_for_next_cycle()
                continue

            # Process in batches so a large backlog doesn't flood the daemon.
            for i in range(0, len(items), self.config.batch_size):
                if self._stop_event.is_set():
                    break
                batch = items[i:i + self.config.batch_size]
                for item in batch:
                    if self._stop_event.is_set():
                        break
                    # Block (interruptibly) until the hourly budget frees up.
                    # Fix: the previous `continue` here silently dropped the
                    # current item whenever the rate limit was hit.
                    while self._rate_limited() and not self._stop_event.is_set():
                        logger.info("Rate limit reached (%d/hr), waiting",
                                    self.config.rate_limit_per_hour)
                        self._stop_event.wait(60)
                    if self._stop_event.is_set():
                        break
                    self._ingest(item)
                # Interruptible batch delay
                self._stop_event.wait(self.config.batch_delay_sec)

            self.wait_for_next_cycle()

        logger.info("%s adapter stopped (total ingested: %d)",
                    self.source_type, self._total_ingested)

    def stop(self) -> None:
        """Request clean shutdown; every internal wait is interruptible."""
        self._stop_event.set()

    # -- Subclass interface --

    def fetch_items(self) -> list[IngestItem]:
        """Fetch items from the source. Subclass MUST implement."""
        raise NotImplementedError

    def wait_for_next_cycle(self) -> None:
        """Wait before next fetch cycle. Default: 5 min interruptible."""
        self._stop_event.wait(300)

    # -- Internal --

    def _parent_alive(self) -> bool:
        """Return True while the spawning daemon process still exists.

        Prefers psutil when available; falls back to os.kill(pid, 0)
        (signal 0 is an existence probe only, nothing is delivered).
        """
        try:
            import psutil
            return psutil.pid_exists(self._parent_pid)
        except ImportError:
            try:
                os.kill(self._parent_pid, 0)
                return True
            except (ProcessLookupError, PermissionError):
                # Treated as gone, matching prior behavior. NOTE(review):
                # PermissionError actually implies the PID exists — confirm
                # whether exiting in that case is intended.
                return False

    def _ingest(self, item: IngestItem) -> bool:
        """POST to daemon /ingest endpoint. Returns True on success.

        Retries exactly once (after an interruptible 5s back-off) when the
        daemon answers 429; any other failure is logged at debug level and
        swallowed — ingestion is best-effort.
        """
        import urllib.request

        payload = json.dumps({
            "content": item.content,
            "source_type": self.source_type,
            "dedup_key": item.dedup_key,
            "metadata": item.metadata if item.metadata else {},
        }).encode()

        # Fix: the previous implementation retried via unbounded recursion
        # (despite its "Retry once" comment) and never closed the response.
        for attempt in range(2):  # initial try + at most one 429 retry
            try:
                req = urllib.request.Request(
                    f"{self.daemon_url}/ingest",
                    data=payload,
                    headers={"Content-Type": "application/json"},
                    method="POST",
                )
                with urllib.request.urlopen(req, timeout=30) as resp:
                    data = json.loads(resp.read().decode())
                if data.get("ingested"):
                    self._items_this_hour += 1
                    self._total_ingested += 1
                    return True
                return False  # Already ingested (dedup)
            except Exception as exc:
                if ("429" in str(exc) and attempt == 0
                        and not self._stop_event.is_set()):
                    logger.info("Daemon returned 429, backing off 5s")
                    self._stop_event.wait(5)
                    continue
                logger.debug("Ingest failed: %s", exc)
                return False
        return False

    def _rate_limited(self) -> bool:
        """True when this hour's ingest budget is spent.

        Resets the counter once the rolling one-hour window has elapsed.
        """
        if time.time() - self._hour_start > 3600:
            self._items_this_hour = 0
            self._hour_start = time.time()
        return self._items_this_hour >= self.config.rate_limit_per_hour

    def _setup_signals(self) -> None:
        """Set up clean shutdown on SIGTERM (and SIGINT off Windows)."""
        def _handler(sig, frame):
            self.stop()
        signal.signal(signal.SIGTERM, _handler)
        if sys.platform != "win32":
            signal.signal(signal.SIGINT, _handler)
|
|
@@ -0,0 +1,340 @@
|
|
|
1
|
+
# Copyright (c) 2026 Varun Pratap Bhardwaj / Qualixar
|
|
2
|
+
# Licensed under the Elastic License 2.0 - see LICENSE file
|
|
3
|
+
# Part of SuperLocalMemory V3 | https://qualixar.com | https://varunpratap.com
|
|
4
|
+
|
|
5
|
+
"""Google Calendar ingestion adapter — 2 tiers.
|
|
6
|
+
|
|
7
|
+
Tier 1: ICS file import — zero setup
|
|
8
|
+
Tier 2: Google Calendar API with OAuth polling — shares Gmail OAuth credentials
|
|
9
|
+
|
|
10
|
+
OPT-IN only. Enabled via: slm adapters enable calendar
|
|
11
|
+
|
|
12
|
+
Part of Qualixar | Author: Varun Pratap Bhardwaj
|
|
13
|
+
License: Elastic-2.0
|
|
14
|
+
"""
|
|
15
|
+
|
|
16
|
+
from __future__ import annotations
|
|
17
|
+
|
|
18
|
+
import json
|
|
19
|
+
import logging
|
|
20
|
+
import sys
|
|
21
|
+
from datetime import datetime, timezone
|
|
22
|
+
from pathlib import Path
|
|
23
|
+
|
|
24
|
+
from superlocalmemory.ingestion.base_adapter import BaseAdapter, AdapterConfig, IngestItem
|
|
25
|
+
|
|
26
|
+
logger = logging.getLogger("superlocalmemory.ingestion.calendar")
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class CalendarAdapter(BaseAdapter):
    """Google Calendar ingestion with automatic tier detection.

    Tier "ics": one-shot import of a local .ics file (zero setup).
    Tier "oauth": incremental Google Calendar API polling, reusing the
    Gmail OAuth credentials.
    """

    source_type = "calendar"

    def __init__(self, config: AdapterConfig | None = None, tier: str = "auto"):
        super().__init__(config)
        self._tier = tier                    # "auto" | "ics" | "oauth"
        self._ics_path: str | None = None    # resolved ICS file for tier "ics"
        self._ics_processed = False          # ICS import is one-shot
        self._sync_token: str | None = None  # Calendar API incremental-sync cursor
        self._poll_interval = 900  # 15 min

    def run(self) -> None:
        """Resolve the tier, then hand off to the BaseAdapter run loop."""
        self._detect_tier()
        logger.info("Calendar adapter starting (tier=%s)", self._tier)
        super().run()

    def fetch_items(self) -> list[IngestItem]:
        """Dispatch to the fetcher matching the active tier."""
        if self._tier == "ics":
            return self._fetch_ics()
        elif self._tier == "oauth":
            return self._fetch_oauth()
        return []

    def wait_for_next_cycle(self) -> None:
        """Stop once a one-shot ICS import is done; otherwise poll."""
        if self._tier == "ics" and self._ics_processed:
            logger.info("ICS import complete, adapter stopping")
            self.stop()
            return
        self._stop_event.wait(self._poll_interval)

    def _detect_tier(self) -> None:
        """Resolve tier "auto" into "ics" or "oauth".

        Precedence: explicit adapters.json config, then existing Gmail
        OAuth credentials, then any *.ics file dropped in
        ~/.superlocalmemory/import. Defaults to "ics" (a harmless no-op
        when no path was found).
        """
        if self._tier != "auto":
            return

        adapters_path = Path.home() / ".superlocalmemory" / "adapters.json"
        cfg = {}
        if adapters_path.exists():
            # Fix: a corrupt adapters.json previously crashed the adapter
            # at startup; treat it as "no config" instead.
            try:
                cfg = json.loads(adapters_path.read_text()).get("calendar", {})
            except (json.JSONDecodeError, OSError) as exc:
                logger.warning("Could not read adapters.json: %s", exc)

        if cfg.get("tier") == "ics" or cfg.get("ics_path"):
            self._tier = "ics"
            self._ics_path = cfg.get("ics_path", "")
            return

        # Check for OAuth (shares Gmail credentials)
        from superlocalmemory.ingestion.credentials import has_credential
        if has_credential("gmail", "refresh_token"):
            self._tier = "oauth"
            return

        # Look for ICS files
        import_dir = Path.home() / ".superlocalmemory" / "import"
        ics_files = list(import_dir.glob("*.ics")) if import_dir.exists() else []
        if ics_files:
            self._tier = "ics"
            self._ics_path = str(ics_files[0])
            return

        self._tier = "ics"

    # -- Tier 1: ICS file import --

    def _fetch_ics(self) -> list[IngestItem]:
        """One-shot import of the configured ICS file.

        Returns one IngestItem per event, plus one extra item per attendee
        ("entity propagation") so attendees become graph entities downstream.
        Marks the import as processed even when the file is missing, so
        wait_for_next_cycle() can stop the adapter.
        """
        if self._ics_processed or not self._ics_path:
            return []

        path = Path(self._ics_path)
        if not path.exists():
            logger.warning("ICS file not found: %s", path)
            self._ics_processed = True
            return []

        items = []

        try:
            # Try icalendar library first
            from icalendar import Calendar
            cal = Calendar.from_ical(path.read_bytes())
            events = [c for c in cal.walk() if c.name == "VEVENT"]
            logger.info("Parsing ICS: %d events", len(events))

            for event in events:
                if self._stop_event.is_set():
                    break
                try:
                    summary = str(event.get("SUMMARY", "(no title)"))
                    dtstart = event.get("DTSTART")
                    dtend = event.get("DTEND")
                    description = str(event.get("DESCRIPTION", ""))
                    location = str(event.get("LOCATION", ""))
                    uid = str(event.get("UID", ""))

                    start_str = dtstart.dt.isoformat() if dtstart else ""
                    end_str = dtend.dt.isoformat() if dtend else ""

                    # Extract attendees (a single ATTENDEE is not wrapped
                    # in a list by icalendar)
                    attendees = []
                    att_list = event.get("ATTENDEE", [])
                    if not isinstance(att_list, list):
                        att_list = [att_list]
                    for att in att_list:
                        email = str(att).replace("mailto:", "").strip()
                        if email and "@" in email:
                            attendees.append(email.split("@")[0])  # Use name part

                    content = (
                        f"Calendar Event: {summary}\n"
                        f"When: {start_str} to {end_str}\n"
                    )
                    if location:
                        content += f"Where: {location}\n"
                    if attendees:
                        content += f"Attendees: {', '.join(attendees)}\n"
                    if description:
                        # Cap description to keep memory bodies bounded
                        content += f"\n{description[:2000]}"

                    dedup_key = f"{uid}-{start_str}" if uid else f"ics-{summary}-{start_str}"

                    items.append(IngestItem(
                        content=content,
                        dedup_key=dedup_key,
                        metadata={
                            "summary": summary,
                            "start": start_str,
                            "end": end_str,
                            "attendees": attendees,
                            "source": "ics_import",
                        },
                    ))

                    # Entity propagation for attendees
                    for attendee in attendees:
                        items.append(IngestItem(
                            content=f"{attendee} attended meeting: {summary} on {start_str}",
                            dedup_key=f"attendee-{attendee}-{dedup_key}",
                            metadata={
                                "entity_name": attendee,
                                "event": summary,
                                "source": "entity_propagation",
                            },
                        ))

                except Exception as exc:
                    # Best-effort: a malformed event must not abort the import
                    logger.debug("Failed to parse event: %s", exc)

        except ImportError:
            # Fallback: basic ICS parsing without icalendar library
            logger.info("icalendar not installed, using basic parser")
            items = self._parse_ics_basic(path)

        self._ics_processed = True
        logger.info("ICS import: %d items", len(items))
        return items

    def _parse_ics_basic(self, path: Path) -> list[IngestItem]:
        """Basic ICS parser without icalendar library.

        Splits on BEGIN:VEVENT and reads simple KEY:VALUE properties only
        (folded continuation lines starting with a space are ignored).
        """
        content = path.read_text(encoding="utf-8", errors="replace")
        items = []
        events = content.split("BEGIN:VEVENT")

        for i, block in enumerate(events[1:]):  # Skip first (before any VEVENT)
            try:
                lines = block.split("\n")
                props = {}
                for line in lines:
                    if ":" in line and not line.startswith(" "):
                        key, _, val = line.partition(":")
                        # Strip property parameters, e.g. DTSTART;TZID=...
                        key = key.split(";")[0].strip()
                        props[key] = val.strip()

                summary = props.get("SUMMARY", "(no title)")
                dtstart = props.get("DTSTART", "")
                uid = props.get("UID", f"basic-{i}")

                content_text = f"Calendar Event: {summary}\nWhen: {dtstart}"
                items.append(IngestItem(
                    content=content_text,
                    dedup_key=f"{uid}-{dtstart}",
                    metadata={"summary": summary, "start": dtstart, "source": "ics_basic"},
                ))
            except Exception:
                pass  # best-effort: skip malformed event blocks

        return items

    # -- Tier 2: OAuth API polling --

    def _fetch_oauth(self) -> list[IngestItem]:
        """Poll Google Calendar API with OAuth.

        Uses nextSyncToken for incremental sync; the first call fetches the
        last 30 days. A 410 response means the sync token expired, in which
        case a single full re-sync is performed (token cleared first, so
        the recursion is bounded to one level).
        """
        try:
            from superlocalmemory.ingestion.credentials import load_credential
            from google.oauth2.credentials import Credentials
            from googleapiclient.discovery import build

            refresh_token = load_credential("gmail", "refresh_token")
            client_id = load_credential("gmail", "client_id")
            client_secret = load_credential("gmail", "client_secret")

            if not all([refresh_token, client_id, client_secret]):
                logger.warning("Calendar OAuth credentials incomplete")
                return []

            creds = Credentials(
                token=None,  # access token is minted from the refresh token
                refresh_token=refresh_token,
                client_id=client_id,
                client_secret=client_secret,
                token_uri="https://oauth2.googleapis.com/token",
            )

            service = build("calendar", "v3", credentials=creds)

            # Incremental sync
            kwargs = {"calendarId": "primary", "singleEvents": True, "maxResults": 50}
            if self._sync_token:
                kwargs["syncToken"] = self._sync_token
            else:
                # Initial sync: last 30 days
                from datetime import timedelta
                time_min = (datetime.now(timezone.utc) - timedelta(days=30)).isoformat()
                kwargs["timeMin"] = time_min

            try:
                results = service.events().list(**kwargs).execute()
            except Exception as api_err:
                if "410" in str(api_err):
                    # Sync token expired — full re-sync
                    logger.info("Calendar sync token expired, doing full re-sync")
                    self._sync_token = None
                    return self._fetch_oauth()
                raise

            self._sync_token = results.get("nextSyncToken")
            events = results.get("items", [])

            items = []
            for event in events:
                if self._stop_event.is_set():
                    break
                try:
                    summary = event.get("summary", "(no title)")
                    # All-day events carry "date" instead of "dateTime"
                    start = event.get("start", {}).get("dateTime", event.get("start", {}).get("date", ""))
                    end = event.get("end", {}).get("dateTime", event.get("end", {}).get("date", ""))
                    description = event.get("description", "")
                    location = event.get("location", "")
                    event_id = event.get("id", "")
                    updated = event.get("updated", "")

                    attendees = []
                    for att in event.get("attendees", []):
                        name = att.get("displayName") or att.get("email", "").split("@")[0]
                        if name:
                            attendees.append(name)

                    content = f"Calendar Event: {summary}\nWhen: {start} to {end}\n"
                    if location:
                        content += f"Where: {location}\n"
                    if attendees:
                        content += f"Attendees: {', '.join(attendees)}\n"
                    if description:
                        content += f"\n{description[:2000]}"

                    # "updated" in the key means edited events re-ingest
                    dedup_key = f"{event_id}-{updated}"

                    items.append(IngestItem(
                        content=content,
                        dedup_key=dedup_key,
                        metadata={
                            "summary": summary, "start": start,
                            "attendees": attendees, "source": "oauth",
                        },
                    ))

                    # Entity propagation
                    for attendee in attendees:
                        items.append(IngestItem(
                            content=f"{attendee} attended: {summary} on {start}",
                            dedup_key=f"cal-attendee-{attendee}-{event_id}",
                            metadata={"entity_name": attendee, "event": summary, "source": "entity_propagation"},
                        ))

                except Exception as exc:
                    logger.debug("Calendar event parse error: %s", exc)

            return items

        except ImportError:
            logger.warning("Calendar OAuth requires: pip install 'superlocalmemory[ingestion]'")
            return []
        except Exception as exc:
            logger.warning("Calendar OAuth failed: %s", exc)
            return []
|
|
323
|
+
|
|
324
|
+
|
|
325
|
+
# ---------------------------------------------------------------------------
# CLI entry point
# ---------------------------------------------------------------------------

if __name__ == "__main__":
    import logging as _logging
    _logging.basicConfig(level=_logging.INFO, format="%(asctime)s %(message)s")

    # Read the configured tier; a missing or corrupt adapters.json falls
    # back to "auto" so a bad config file cannot prevent startup.
    adapters_path = Path.home() / ".superlocalmemory" / "adapters.json"
    tier = "auto"
    if adapters_path.exists():
        try:
            cfg = json.loads(adapters_path.read_text()).get("calendar", {})
            tier = cfg.get("tier", "auto")
        except (json.JSONDecodeError, OSError):
            pass

    adapter = CalendarAdapter(tier=tier)
    adapter.run()
|
|
@@ -0,0 +1,118 @@
|
|
|
1
|
+
# Copyright (c) 2026 Varun Pratap Bhardwaj / Qualixar
|
|
2
|
+
# Licensed under the Elastic License 2.0 - see LICENSE file
|
|
3
|
+
# Part of SuperLocalMemory V3 | https://qualixar.com | https://varunpratap.com
|
|
4
|
+
|
|
5
|
+
"""Cross-platform credential storage for ingestion adapters.
|
|
6
|
+
|
|
7
|
+
Uses OS keychain via keyring library (macOS Keychain, Windows Credential Locker,
|
|
8
|
+
Linux SecretService). Falls back to file-based storage with restricted permissions.
|
|
9
|
+
|
|
10
|
+
Part of Qualixar | Author: Varun Pratap Bhardwaj
|
|
11
|
+
License: Elastic-2.0
|
|
12
|
+
"""
|
|
13
|
+
|
|
14
|
+
from __future__ import annotations
|
|
15
|
+
|
|
16
|
+
import json
|
|
17
|
+
import logging
|
|
18
|
+
import os
|
|
19
|
+
import sys
|
|
20
|
+
from pathlib import Path
|
|
21
|
+
|
|
22
|
+
logger = logging.getLogger("superlocalmemory.ingestion.credentials")
|
|
23
|
+
|
|
24
|
+
_CRED_DIR = Path.home() / ".superlocalmemory" / "credentials"
|
|
25
|
+
_SERVICE_PREFIX = "slm"
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def store_credential(service: str, key: str, value: str) -> bool:
    """Store a credential securely. Returns True on success.

    Prefers the OS keychain via the keyring library. When keyring is
    unavailable (or errors), falls back to a plain JSON file — NOT
    encrypted, only protected by owner-only permissions on Unix
    (no permission tightening happens on Windows).
    """
    # Preferred path: OS keychain.
    try:
        import keyring
        keyring.set_password(f"{_SERVICE_PREFIX}-{service}", key, value)
        logger.debug("Stored %s/%s in OS keychain", service, key)
        return True
    except Exception:
        pass  # fall through to the file store

    # Fallback path: per-service JSON file with restricted permissions.
    try:
        _CRED_DIR.mkdir(parents=True, exist_ok=True)
        cred_file = _CRED_DIR / f"{service}.json"

        stored: dict = {}
        if cred_file.exists():
            try:
                stored = json.loads(cred_file.read_text())
            except (json.JSONDecodeError, OSError):
                # Unreadable store: start fresh rather than fail the write.
                stored = {}

        stored[key] = value
        cred_file.write_text(json.dumps(stored, indent=2))

        if sys.platform != "win32":
            os.chmod(cred_file, 0o600)  # owner read/write only
            os.chmod(_CRED_DIR, 0o700)  # owner-only directory access

        logger.debug("Stored %s/%s in file (keychain unavailable)", service, key)
        return True
    except Exception as exc:
        logger.error("Failed to store credential %s/%s: %s", service, key, exc)
        return False
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
def load_credential(service: str, key: str) -> str | None:
    """Load a credential. Tries keychain first, then file.

    Returns None when the credential is absent from both stores or when
    either store is unreadable (best-effort lookup).
    """
    # First choice: the OS keychain.
    stored = None
    try:
        import keyring
        stored = keyring.get_password(f"{_SERVICE_PREFIX}-{service}", key)
    except Exception:
        stored = None
    if stored:
        return stored

    # Second choice: the per-service JSON file.
    cred_file = _CRED_DIR / f"{service}.json"
    try:
        if cred_file.exists():
            return json.loads(cred_file.read_text()).get(key)
    except Exception:
        pass

    return None
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
def delete_credential(service: str, key: str) -> bool:
    """Delete a credential from both keychain and file.

    Both stores are attempted regardless of individual failures; returns
    True if the key was removed from at least one of them.
    """
    removed_any = False

    # Keychain removal (no-op when keyring is unavailable or key absent).
    try:
        import keyring
        keyring.delete_password(f"{_SERVICE_PREFIX}-{service}", key)
        removed_any = True
    except Exception:
        pass

    # File-store removal.
    cred_file = _CRED_DIR / f"{service}.json"
    try:
        if cred_file.exists():
            stored = json.loads(cred_file.read_text())
            if key in stored:
                stored.pop(key)
                cred_file.write_text(json.dumps(stored, indent=2))
                removed_any = True
    except Exception:
        pass

    return removed_any
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
def has_credential(service: str, key: str) -> bool:
    """Check if a credential exists."""
    stored = load_credential(service, key)
    return stored is not None
|