memex_graph-0.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- memex/__init__.py +14 -0
- memex/app.py +181 -0
- memex/binaries.py +236 -0
- memex/chat.py +150 -0
- memex/onboarding.py +23 -0
- memex/provider.py +130 -0
- memex/services.py +79 -0
- memex/stack.py +236 -0
- memex/tools.py +383 -0
- memex_graph-0.1.0.dist-info/METADATA +16 -0
- memex_graph-0.1.0.dist-info/RECORD +15 -0
- memex_graph-0.1.0.dist-info/WHEEL +5 -0
- memex_graph-0.1.0.dist-info/entry_points.txt +3 -0
- memex_graph-0.1.0.dist-info/licenses/LICENSE +29 -0
- memex_graph-0.1.0.dist-info/top_level.txt +1 -0
memex/provider.py
ADDED
@@ -0,0 +1,130 @@
+"""Chat provider for Memex."""
+
+import os
+import json
+from typing import Generator
+from dataclasses import dataclass
+
+from dotenv import load_dotenv
+
+load_dotenv()
+
+
+@dataclass
+class ToolCall:
+    """A tool call request from the model."""
+
+    id: str
+    name: str
+    arguments: dict
+
+
+@dataclass
+class Chunk:
+    """Stream chunk from model."""
+
+    type: str  # "text" | "tool_call" | "done" | "error"
+    text: str = ""
+    tool_call: ToolCall | None = None
+    error: str = ""
+
+
+class ChatProvider:
+    """OpenAI-compatible chat provider with streaming and tool support."""
+
+    def __init__(self, model: str | None = None):
+        self._client = None
+        self.model = model or os.getenv("OPENAI_MODEL", "gpt-4o")
+
+    @property
+    def client(self):
+        """Lazy-initialize OpenAI client."""
+        if self._client is None:
+            from openai import OpenAI
+
+            self._client = OpenAI()
+        return self._client
+
+    def stream(
+        self,
+        system: str,
+        messages: list[dict],
+        tools: list[dict] | None = None,
+    ) -> Generator[Chunk, None, None]:
+        """Stream response with tool support.
+
+        Args:
+            system: System prompt
+            messages: Conversation messages
+            tools: OpenAI-format tool definitions
+
+        Yields:
+            Chunk objects with text, tool_calls, or completion status
+        """
+        api_messages = []
+        if system:
+            api_messages.append({"role": "system", "content": system})
+        api_messages.extend(messages)
+
+        try:
+            kwargs = {
+                "model": self.model,
+                "messages": api_messages,
+                "stream": True,
+            }
+            if tools:
+                kwargs["tools"] = tools
+
+            stream = self.client.chat.completions.create(**kwargs)
+
+            # Accumulate tool calls across chunks
+            tool_calls: dict[int, dict] = {}
+
+            for chunk in stream:
+                if not chunk.choices:
+                    continue
+
+                delta = chunk.choices[0].delta
+                finish_reason = chunk.choices[0].finish_reason
+
+                # Text content
+                if delta.content:
+                    yield Chunk(type="text", text=delta.content)
+
+                # Tool calls (accumulate across chunks)
+                if delta.tool_calls:
+                    for tc in delta.tool_calls:
+                        idx = tc.index
+                        if idx not in tool_calls:
+                            tool_calls[idx] = {"id": "", "name": "", "args": ""}
+                        if tc.id:
+                            tool_calls[idx]["id"] = tc.id
+                        if tc.function:
+                            if tc.function.name:
+                                tool_calls[idx]["name"] = tc.function.name
+                            if tc.function.arguments:
+                                tool_calls[idx]["args"] += tc.function.arguments
+
+                # Finish with tool calls
+                if finish_reason == "tool_calls":
+                    for idx in sorted(tool_calls.keys()):
+                        tc = tool_calls[idx]
+                        try:
+                            args = json.loads(tc["args"]) if tc["args"] else {}
+                            yield Chunk(
+                                type="tool_call",
+                                tool_call=ToolCall(tc["id"], tc["name"], args),
+                            )
+                        except json.JSONDecodeError:
+                            yield Chunk(
+                                type="error",
+                                error=f"Failed to parse tool args: {tc['args']}",
+                            )
+                    tool_calls.clear()
+
+                # Normal completion
+                elif finish_reason == "stop":
+                    yield Chunk(type="done")
+
+        except Exception as e:
+            yield Chunk(type="error", error=str(e))
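The sketch below shows one way a caller might consume ChatProvider.stream(), assuming OPENAI_API_KEY is set and the openai and python-dotenv packages are installed. The tool definition and run_search helper are illustrative stand-ins, not part of the package; the real agent loop lives in memex/chat.py, which is not expanded in this section.

    from memex.provider import ChatProvider

    # Hypothetical OpenAI-format tool definition for illustration only.
    search_tool = {
        "type": "function",
        "function": {
            "name": "search_nodes",
            "description": "Search the knowledge graph",
            "parameters": {
                "type": "object",
                "properties": {"query": {"type": "string"}},
                "required": ["query"],
            },
        },
    }

    def run_search(arguments: dict) -> str:
        # Placeholder tool implementation.
        return f"results for {arguments.get('query', '')}"

    provider = ChatProvider()
    messages = [{"role": "user", "content": "What do I know about IPFS?"}]

    for chunk in provider.stream(system="You are Memex.", messages=messages, tools=[search_tool]):
        if chunk.type == "text":
            print(chunk.text, end="", flush=True)
        elif chunk.type == "tool_call":
            # A real loop would append a tool-result message and call stream()
            # again (as memex/chat.py presumably does); here we only print.
            result = run_search(chunk.tool_call.arguments)
            print(f"\n[tool {chunk.tool_call.name} -> {result}]")
        elif chunk.type == "error":
            print(f"\n[error] {chunk.error}")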
memex/services.py
ADDED
@@ -0,0 +1,79 @@
+"""Manage IPFS daemon lifecycle and dagit identity."""
+
+import subprocess
+import time
+from pathlib import Path
+
+import httpx
+
+
+def is_ipfs_running() -> bool:
+    """Check if IPFS daemon is already running."""
+    try:
+        resp = httpx.post("http://localhost:5001/api/v0/id", timeout=2)
+        return resp.status_code == 200
+    except httpx.RequestError:
+        return False
+
+
+def ensure_ipfs_repo(ipfs_bin: str) -> None:
+    """Initialize IPFS repo if it doesn't exist."""
+    if (Path.home() / ".ipfs").exists():
+        return
+
+    print("\033[0;32m[memex]\033[0m Initializing IPFS repository...")
+    result = subprocess.run(
+        [ipfs_bin, "init"],
+        capture_output=True,
+        text=True,
+    )
+    if result.returncode != 0:
+        print(f"\033[0;31m[memex]\033[0m IPFS init failed: {result.stderr}")
+        raise SystemExit(1)
+    print("\033[0;32m[memex]\033[0m IPFS repository initialized")
+
+
+def start_ipfs_daemon(ipfs_bin: str) -> subprocess.Popen:
+    """Start IPFS daemon as a background process."""
+    print("\033[0;32m[memex]\033[0m Starting IPFS daemon...")
+    proc = subprocess.Popen(
+        [ipfs_bin, "daemon"],
+        stdout=subprocess.DEVNULL,
+        stderr=subprocess.DEVNULL,
+    )
+    return proc
+
+
+def wait_for_ipfs(timeout: float = 30) -> bool:
+    """Wait for IPFS daemon API to become available."""
+    deadline = time.time() + timeout
+    while time.time() < deadline:
+        if is_ipfs_running():
+            return True
+        time.sleep(0.5)
+    return False
+
+
+def ensure_dagit_identity() -> str:
+    """Create dagit identity if missing, return DID.
+
+    Returns:
+        The user's DID string.
+    """
+    identity_path = Path.home() / ".dagit" / "identity.json"
+
+    if identity_path.exists():
+        import json
+        data = json.loads(identity_path.read_text())
+        return data.get("did", "unknown")
+
+    print("\033[0;32m[memex]\033[0m Creating dagit identity...")
+    try:
+        from dagit.identity import create
+        identity = create()
+        did = identity.get("did", "unknown") if isinstance(identity, dict) else str(identity)
+        print(f"\033[0;32m[memex]\033[0m Identity created: {did}")
+        return did
+    except Exception as e:
+        print(f"\033[1;33m[memex]\033[0m Could not create dagit identity: {e}")
+        return "unknown"
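A minimal sketch of wiring these helpers together by hand; the memex-stack launcher in memex/stack.py (below) does the same with extra checks. It assumes a Kubo ipfs binary is already on PATH and that the dagit package is importable.

    from memex.services import (
        ensure_dagit_identity,
        ensure_ipfs_repo,
        is_ipfs_running,
        start_ipfs_daemon,
        wait_for_ipfs,
    )

    ipfs_bin = "ipfs"                  # assumption: Kubo binary resolvable on PATH
    ensure_ipfs_repo(ipfs_bin)         # runs `ipfs init` only if ~/.ipfs is missing
    if not is_ipfs_running():
        proc = start_ipfs_daemon(ipfs_bin)
        if not wait_for_ipfs(timeout=30):
            proc.terminate()           # daemon never answered on port 5001 in time
    did = ensure_dagit_identity()      # reads or creates ~/.dagit/identity.json
    print(f"dagit identity: {did}")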
memex/stack.py
ADDED
@@ -0,0 +1,236 @@
+"""memex-stack - One command to download, start, and launch everything."""
+
+import os
+import signal
+import subprocess
+import sys
+import time
+from pathlib import Path
+
+import httpx
+
+
+def log(msg: str) -> None:
+    print(f"\033[0;32m[memex]\033[0m {msg}")
+
+
+def warn(msg: str) -> None:
+    print(f"\033[1;33m[memex]\033[0m {msg}")
+
+
+def error(msg: str) -> None:
+    print(f"\033[0;31m[memex]\033[0m {msg}", file=sys.stderr)
+
+
+def wait_for_server(url: str, timeout: float = 10.0) -> bool:
+    """Wait for memex-server to be ready."""
+    deadline = time.time() + timeout
+    while time.time() < deadline:
+        try:
+            resp = httpx.get(f"{url}/health", timeout=0.5)
+            if resp.status_code == 200:
+                return True
+        except httpx.RequestError:
+            pass
+        time.sleep(0.2)
+    return False
+
+
+def is_graph_empty(server_url: str) -> bool:
+    """Check if the knowledge graph has any nodes."""
+    try:
+        resp = httpx.get(f"{server_url}/api/nodes", params={"limit": 1}, timeout=5)
+        if resp.status_code == 200:
+            data = resp.json()
+            nodes = data if isinstance(data, list) else data.get("nodes", [])
+            return len(nodes) == 0
+    except Exception:
+        pass
+    return True
+
+
+def main() -> int:
+    """Entry point for memex-stack command."""
+    import argparse
+
+    parser = argparse.ArgumentParser(
+        description="Launch memex: downloads binaries, starts services, opens TUI"
+    )
+    parser.add_argument(
+        "--server-only",
+        action="store_true",
+        help="Start server without TUI",
+    )
+    parser.add_argument(
+        "--port",
+        type=int,
+        default=int(os.environ.get("PORT", "8080")),
+        help="Server port (default: 8080)",
+    )
+    parser.add_argument(
+        "--backend",
+        choices=["sqlite", "neo4j"],
+        default=os.environ.get("MEMEX_BACKEND", "sqlite"),
+        help="Storage backend (default: sqlite)",
+    )
+    parser.add_argument(
+        "--db-path",
+        type=str,
+        default=os.environ.get("SQLITE_PATH", str(Path.home() / ".memex" / "memex.db")),
+        help="SQLite database path",
+    )
+    parser.add_argument(
+        "--skip-ipfs",
+        action="store_true",
+        help="Skip IPFS daemon setup",
+    )
+    parser.add_argument(
+        "--skip-download",
+        action="store_true",
+        help="Skip automatic binary downloads (use only local binaries)",
+    )
+    args = parser.parse_args()
+
+    # Track all subprocesses for cleanup
+    procs: list[subprocess.Popen] = []
+
+    def cleanup(signum=None, frame=None):
+        for proc in reversed(procs):
+            if proc.poll() is None:
+                log(f"Stopping process (PID {proc.pid})...")
+                proc.terminate()
+                try:
+                    proc.wait(timeout=5)
+                except subprocess.TimeoutExpired:
+                    proc.kill()
+
+    signal.signal(signal.SIGINT, cleanup)
+    signal.signal(signal.SIGTERM, cleanup)
+
+    # --- Step 1: Check OPENAI_API_KEY ---
+    if not os.environ.get("OPENAI_API_KEY"):
+        error("OPENAI_API_KEY environment variable is not set.")
+        error("Get your API key from: https://platform.openai.com/api-keys")
+        error("Then run: export OPENAI_API_KEY=sk-...")
+        return 1
+
+    # --- Step 2: Ensure memex-server binary ---
+    if args.skip_download:
+        import shutil
+        server_bin = os.environ.get("MEMEX_SERVER") or shutil.which("memex-server")
+        if not server_bin:
+            cached = Path.home() / ".memex" / "bin" / "memex-server"
+            if cached.is_file():
+                server_bin = str(cached)
+            else:
+                error("memex-server not found (--skip-download active)")
+                error("Install from: https://github.com/systemshift/memex-server/releases")
+                return 1
+    else:
+        from .binaries import ensure_memex_server
+        server_bin = ensure_memex_server()
+    log(f"memex-server: {server_bin}")
+
+    # --- Step 3: Ensure IPFS binary ---
+    ipfs_bin = None
+    if not args.skip_ipfs:
+        if args.skip_download:
+            import shutil
+            ipfs_bin = shutil.which("ipfs")
+            if not ipfs_bin:
+                cached = Path.home() / ".memex" / "bin" / "ipfs"
+                if cached.is_file():
+                    ipfs_bin = str(cached)
+                else:
+                    warn("IPFS not found (--skip-download active), skipping IPFS")
+        else:
+            from .binaries import ensure_ipfs
+            ipfs_bin = ensure_ipfs()
+        log(f"IPFS: {ipfs_bin}")
+
+    # --- Step 4: Ensure IPFS repo ---
+    if ipfs_bin and not args.skip_ipfs:
+        from .services import ensure_ipfs_repo
+        ensure_ipfs_repo(ipfs_bin)
+
+    # --- Step 5: Start IPFS daemon if needed ---
+    if ipfs_bin and not args.skip_ipfs:
+        from .services import is_ipfs_running, start_ipfs_daemon, wait_for_ipfs
+        if is_ipfs_running():
+            log("IPFS daemon already running")
+        else:
+            ipfs_proc = start_ipfs_daemon(ipfs_bin)
+            procs.append(ipfs_proc)
+            if wait_for_ipfs():
+                log("IPFS daemon ready")
+            else:
+                warn("IPFS daemon did not start in time, continuing without it")
+
+    # --- Step 6: Ensure dagit identity ---
+    from .services import ensure_dagit_identity
+    did = ensure_dagit_identity()
+    if did != "unknown":
+        log(f"Identity: {did}")
+
+    # --- Step 7: Start memex-server ---
+    db_path = Path(args.db_path)
+    db_path.parent.mkdir(parents=True, exist_ok=True)
+
+    log(f"Starting memex-server on port {args.port} ({args.backend} backend)...")
+
+    env = os.environ.copy()
+    env["PORT"] = str(args.port)
+    env["MEMEX_BACKEND"] = args.backend
+    env["SQLITE_PATH"] = str(db_path)
+
+    server_proc = subprocess.Popen(
+        [server_bin],
+        env=env,
+        stdout=subprocess.DEVNULL,
+        stderr=subprocess.DEVNULL,
+    )
+    procs.append(server_proc)
+
+    # --- Step 8: Wait for server ---
+    server_url = f"http://localhost:{args.port}"
+    if not wait_for_server(server_url):
+        error("Server failed to start")
+        cleanup()
+        return 1
+    log("Server ready")
+
+    if args.server_only:
+        log("Server running. Press Ctrl+C to stop.")
+        try:
+            server_proc.wait()
+        except KeyboardInterrupt:
+            pass
+        finally:
+            cleanup()
+        return 0
+
+    # Set server URL for tools module
+    os.environ["MEMEX_URL"] = server_url
+
+    # --- Step 9: Check if graph is empty ---
+    first_run = is_graph_empty(server_url)
+    if first_run:
+        log("Empty graph detected — starting onboarding")
+
+    # --- Step 10: Launch TUI ---
+    log("Launching memex...")
+    try:
+        from .app import MemexApp
+        app = MemexApp(first_run=first_run)
+        app.run()
+    except KeyboardInterrupt:
+        pass
+    finally:
+        # --- Step 11: Cleanup ---
+        cleanup()
+
+    return 0
+
+
+if __name__ == "__main__":
+    sys.exit(main())