@memograph/cli 0.1.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +402 -0
- package/dist/cli.d.ts +3 -0
- package/dist/cli.d.ts.map +1 -0
- package/dist/cli.js +97 -0
- package/dist/cli.js.map +1 -0
- package/dist/core/detect.d.ts +30 -0
- package/dist/core/detect.d.ts.map +1 -0
- package/dist/core/detect.js +212 -0
- package/dist/core/detect.js.map +1 -0
- package/dist/core/extract.d.ts +6 -0
- package/dist/core/extract.d.ts.map +1 -0
- package/dist/core/extract.js +104 -0
- package/dist/core/extract.js.map +1 -0
- package/dist/core/inspect.d.ts +7 -0
- package/dist/core/inspect.d.ts.map +1 -0
- package/dist/core/inspect.js +98 -0
- package/dist/core/inspect.js.map +1 -0
- package/dist/core/llm/client.d.ts +55 -0
- package/dist/core/llm/client.d.ts.map +1 -0
- package/dist/core/llm/client.js +199 -0
- package/dist/core/llm/client.js.map +1 -0
- package/dist/core/llm/detect-llm.d.ts +28 -0
- package/dist/core/llm/detect-llm.d.ts.map +1 -0
- package/dist/core/llm/detect-llm.js +212 -0
- package/dist/core/llm/detect-llm.js.map +1 -0
- package/dist/core/llm/extract-llm.d.ts +27 -0
- package/dist/core/llm/extract-llm.d.ts.map +1 -0
- package/dist/core/llm/extract-llm.js +151 -0
- package/dist/core/llm/extract-llm.js.map +1 -0
- package/dist/core/llm/prompts.d.ts +28 -0
- package/dist/core/llm/prompts.d.ts.map +1 -0
- package/dist/core/llm/prompts.js +172 -0
- package/dist/core/llm/prompts.js.map +1 -0
- package/dist/core/llm/providers.d.ts +34 -0
- package/dist/core/llm/providers.d.ts.map +1 -0
- package/dist/core/llm/providers.js +169 -0
- package/dist/core/llm/providers.js.map +1 -0
- package/dist/core/load.d.ts +10 -0
- package/dist/core/load.d.ts.map +1 -0
- package/dist/core/load.js +106 -0
- package/dist/core/load.js.map +1 -0
- package/dist/core/normalize.d.ts +30 -0
- package/dist/core/normalize.d.ts.map +1 -0
- package/dist/core/normalize.js +63 -0
- package/dist/core/normalize.js.map +1 -0
- package/dist/core/render.d.ts +10 -0
- package/dist/core/render.d.ts.map +1 -0
- package/dist/core/render.js +60 -0
- package/dist/core/render.js.map +1 -0
- package/dist/core/score.d.ts +27 -0
- package/dist/core/score.d.ts.map +1 -0
- package/dist/core/score.js +59 -0
- package/dist/core/score.js.map +1 -0
- package/dist/core/types.d.ts +162 -0
- package/dist/core/types.d.ts.map +1 -0
- package/dist/core/types.js +6 -0
- package/dist/core/types.js.map +1 -0
- package/dist/interactive/index.d.ts +67 -0
- package/dist/interactive/index.d.ts.map +1 -0
- package/dist/interactive/index.js +794 -0
- package/dist/interactive/index.js.map +1 -0
- package/dist/interactive/settings.d.ts +36 -0
- package/dist/interactive/settings.d.ts.map +1 -0
- package/dist/interactive/settings.js +174 -0
- package/dist/interactive/settings.js.map +1 -0
- package/dist/interactive/wizard.d.ts +10 -0
- package/dist/interactive/wizard.d.ts.map +1 -0
- package/dist/interactive/wizard.js +249 -0
- package/dist/interactive/wizard.js.map +1 -0
- package/package.json +49 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 Memograph CLI contributors
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,402 @@
|
|
|
1
|
+
# Memograph CLI
|
|
2
|
+
|
|
3
|
+
**Memory Drift Inspector for Conversational AI**
|
|
4
|
+
|
|
5
|
+
Analyze conversation transcripts and detect when AI assistants lose context. Get a drift score, identify repetitions, forgotten preferences, and contradictions using AI-powered semantic analysis.
|
|
6
|
+
|
|
7
|
+
---
|
|
8
|
+
|
|
9
|
+
## Table of Contents
|
|
10
|
+
|
|
11
|
+
- [What it does](#what-it-does)
|
|
12
|
+
- [Why this exists](#why-this-exists)
|
|
13
|
+
- [Try it now](#try-it-now)
|
|
14
|
+
- [Install](#install)
|
|
15
|
+
- [Quickstart](#quickstart)
|
|
16
|
+
- [Using Memograph](#using-memograph)
|
|
17
|
+
- [Interactive Mode](#interactive-mode-recommended)
|
|
18
|
+
- [CLI Mode](#cli-mode-for-scripts--automation)
|
|
19
|
+
- [Configuration](#configuration)
|
|
20
|
+
- [Input Format](#input-format)
|
|
21
|
+
- [Output](#output)
|
|
22
|
+
- [Privacy & Security](#privacy--security)
|
|
23
|
+
- [For Developers & Contributors](#for-developers--contributors)
|
|
24
|
+
- [Troubleshooting](#troubleshooting)
|
|
25
|
+
- [License](#license)
|
|
26
|
+
|
|
27
|
+
---
|
|
28
|
+
|
|
29
|
+
## What it does
|
|
30
|
+
|
|
31
|
+
Memograph analyzes conversation transcripts to detect when AI assistants lose context or "forget" information:
|
|
32
|
+
|
|
33
|
+
- **Detects repetitions**: User forced to repeat themselves
|
|
34
|
+
- **Finds session resets**: Assistant language suggesting it forgot context
|
|
35
|
+
- **Identifies forgotten preferences**: User restating preferences
|
|
36
|
+
- **Spots contradictions**: Conflicting facts over time
|
|
37
|
+
- **Calculates drift score** (0-100) and token waste percentage
|
|
38
|
+
|
|
39
|
+
---
|
|
40
|
+
|
|
41
|
+
## Why this exists
|
|
42
|
+
|
|
43
|
+
When building conversational apps, memory failures often look like:
|
|
44
|
+
|
|
45
|
+
- Users repeating preferences: "I already said I want Bangla…"
|
|
46
|
+
- The assistant resets context: "Let's start over…"
|
|
47
|
+
- The same question is asked multiple times because the assistant doesn't converge
|
|
48
|
+
- Contradictory facts creep in
|
|
49
|
+
|
|
50
|
+
Memograph CLI gives you a **quick, local diagnostic** before you rebuild prompts, memory layers, or retrieval logic.
|
|
51
|
+
|
|
52
|
+
---
|
|
53
|
+
|
|
54
|
+
## Try it now
|
|
55
|
+
|
|
56
|
+
Get started in one command:
|
|
57
|
+
|
|
58
|
+
```bash
|
|
59
|
+
npx memograph-cli
|
|
60
|
+
```
|
|
61
|
+
|
|
62
|
+
This launches interactive mode with:
|
|
63
|
+
- Visual menu (arrow keys + Enter)
|
|
64
|
+
- Setup wizard for AI configuration
|
|
65
|
+
- Settings that persist across sessions
|
|
66
|
+
- Real-time progress indicators
|
|
67
|
+
|
|
68
|
+
---
|
|
69
|
+
|
|
70
|
+
## Using Memograph
|
|
71
|
+
|
|
72
|
+
### Interactive Mode (Recommended)
|
|
73
|
+
|
|
74
|
+
Run without arguments for a guided experience with arrow key navigation:
|
|
75
|
+
|
|
76
|
+
```bash
|
|
77
|
+
npx memograph-cli
|
|
78
|
+
```
|
|
79
|
+
|
|
80
|
+
**Main features:**
|
|
81
|
+
- **Visual menu** with ↑/↓ arrow key navigation
|
|
82
|
+
- **Inspect transcripts**: Enter file path → Choose output format → View results
|
|
83
|
+
- **Manage settings**: Configure once, settings persist in `~/.memograph/config.json`
|
|
84
|
+
- **Setup wizard**: 5-step guided configuration for AI providers
|
|
85
|
+
|
|
86
|
+
**Quick setup wizard:**
|
|
87
|
+
1. Select provider category (Cloud/Aggregators/Local)
|
|
88
|
+
2. Choose specific provider (OpenAI, Anthropic, Ollama, etc.)
|
|
89
|
+
3. Configure base URL (if needed)
|
|
90
|
+
4. Enter API key (if required)
|
|
91
|
+
5. Select model
|
|
92
|
+
|
|
93
|
+
**Keyboard shortcuts:**
|
|
94
|
+
- `↑` / `↓` - Navigate options
|
|
95
|
+
- `Enter` - Select/confirm
|
|
96
|
+
- `Ctrl+C` - Exit
|
|
97
|
+
|
|
98
|
+
---
|
|
99
|
+
|
|
100
|
+
## Install
|
|
101
|
+
|
|
102
|
+
### Option A: Try instantly (no installation) ⚡
|
|
103
|
+
|
|
104
|
+
**Recommended for first-time users and quick analysis:**
|
|
105
|
+
|
|
106
|
+
```bash
|
|
107
|
+
npx memograph-cli
|
|
108
|
+
```
|
|
109
|
+
|
|
110
|
+
Launches the interactive mode immediately. Configure your AI model on first run, and you're ready to analyze transcripts!
|
|
111
|
+
|
|
112
|
+
### Option B: Install globally 📦
|
|
113
|
+
|
|
114
|
+
**Best for regular use:**
|
|
115
|
+
|
|
116
|
+
```bash
|
|
117
|
+
npm i -g memograph-cli
|
|
118
|
+
```
|
|
119
|
+
|
|
120
|
+
After installation, run from anywhere:
|
|
121
|
+
|
|
122
|
+
```bash
|
|
123
|
+
# Interactive mode
|
|
124
|
+
memograph
|
|
125
|
+
|
|
126
|
+
# Or CLI mode
|
|
127
|
+
memograph inspect -i ./transcript.json
|
|
128
|
+
```
|
|
129
|
+
|
|
130
|
+
The package name is `memograph-cli` and the command is `memograph`.
|
|
131
|
+
|
|
132
|
+
### Option C: Local development 🛠️
|
|
133
|
+
|
|
134
|
+
**For contributors and local testing:**
|
|
135
|
+
|
|
136
|
+
```bash
|
|
137
|
+
git clone https://github.com/yourusername/memograph-cli
|
|
138
|
+
cd memograph-cli
|
|
139
|
+
npm install
|
|
140
|
+
npm run build
|
|
141
|
+
|
|
142
|
+
# Run directly
|
|
143
|
+
node dist/cli.js
|
|
144
|
+
|
|
145
|
+
# Or use npm scripts
|
|
146
|
+
npm start
|
|
147
|
+
```
|
|
148
|
+
|
|
149
|
+
---
|
|
150
|
+
|
|
151
|
+
## Quickstart
|
|
152
|
+
|
|
153
|
+
### Interactive Mode (Recommended)
|
|
154
|
+
|
|
155
|
+
**Get started in 3 steps:**
|
|
156
|
+
|
|
157
|
+
```bash
|
|
158
|
+
# 1. Launch interactive mode
|
|
159
|
+
npx memograph-cli
|
|
160
|
+
|
|
161
|
+
# 2. First time? Run the setup wizard
|
|
162
|
+
# - Select your AI provider (OpenAI, Anthropic, Ollama, etc.)
|
|
163
|
+
# - Enter API key (if required)
|
|
164
|
+
# - Choose a model
|
|
165
|
+
# Settings are saved to ~/.memograph/config.json
|
|
166
|
+
|
|
167
|
+
# 3. Select "Inspect a transcript"
|
|
168
|
+
# - Enter path: ./transcript.json
|
|
169
|
+
# - Choose format: Text or JSON
|
|
170
|
+
# - View your drift analysis!
|
|
171
|
+
```
|
|
172
|
+
|
|
173
|
+
### CLI Mode (For Scripts & Power Users)
|
|
174
|
+
|
|
175
|
+
**Quick example:**
|
|
176
|
+
|
|
177
|
+
1. Create a transcript file:
|
|
178
|
+
|
|
179
|
+
**transcript.json**
|
|
180
|
+
|
|
181
|
+
```json
|
|
182
|
+
{
|
|
183
|
+
"schema_version": "1.0",
|
|
184
|
+
"messages": [
|
|
185
|
+
{ "idx": 0, "role": "user", "content": "My name is Tusher" },
|
|
186
|
+
{ "idx": 1, "role": "assistant", "content": "Nice to meet you!" },
|
|
187
|
+
{ "idx": 2, "role": "user", "content": "Please reply in Bangla from now on" },
|
|
188
|
+
{ "idx": 3, "role": "assistant", "content": "Sure." },
|
|
189
|
+
{ "idx": 4, "role": "user", "content": "Reply in Bangla please (I told you before)" }
|
|
190
|
+
]
|
|
191
|
+
}
|
|
192
|
+
```
|
|
193
|
+
|
|
194
|
+
2. Run inspect with flags:
|
|
195
|
+
|
|
196
|
+
```bash
|
|
197
|
+
# Text output (uses settings from interactive mode or env vars)
|
|
198
|
+
memograph inspect -i transcript.json
|
|
199
|
+
|
|
200
|
+
# Or specify all options via CLI flags
|
|
201
|
+
memograph inspect -i transcript.json \
|
|
202
|
+
--llm-provider openai \
|
|
203
|
+
--llm-model gpt-4o-mini \
|
|
204
|
+
--llm-api-key sk-...
|
|
205
|
+
|
|
206
|
+
# JSON output for CI/scripts
|
|
207
|
+
memograph inspect -i transcript.json --json
|
|
208
|
+
```
|
|
209
|
+
|
|
210
|
+
**Note:** If you've configured settings in interactive mode, CLI commands automatically use those settings. You can override any setting with CLI flags.
|
|
211
|
+
|
|
212
|
+
---
|
|
213
|
+
|
|
214
|
+
### CLI Mode (For Scripts & Automation)
|
|
215
|
+
|
|
216
|
+
For scripting and automation, use the `inspect` command directly:
|
|
217
|
+
|
|
218
|
+
```bash
|
|
219
|
+
memograph inspect -i transcript.json
|
|
220
|
+
```
|
|
221
|
+
|
|
222
|
+
**When to use CLI mode:**
|
|
223
|
+
- Automation scripts and CI/CD pipelines
|
|
224
|
+
- Batch processing multiple files
|
|
225
|
+
- When you already know your settings
|
|
226
|
+
|
|
227
|
+
**Pro tip:** Configure settings once in interactive mode, then use CLI mode for automated workflows!
|
|
228
|
+
|
|
229
|
+
---
|
|
230
|
+
|
|
231
|
+
**CLI inspect command:**
|
|
232
|
+
|
|
233
|
+
```bash
|
|
234
|
+
memograph inspect -i <path> [--json] [--llm-model <model>]
|
|
235
|
+
```
|
|
236
|
+
|
|
237
|
+
**Common options:**
|
|
238
|
+
- `-i, --input <path>` - Transcript file (required)
|
|
239
|
+
- `--json` - Output JSON instead of text
|
|
240
|
+
- `--llm-model <model>` - Override model (e.g., gpt-4o)
|
|
241
|
+
- `--llm-provider <provider>` - Override provider (openai, anthropic)
|
|
242
|
+
- `--max-messages <n>` - Limit messages processed
|
|
243
|
+
|
|
244
|
+
**Examples:**
|
|
245
|
+
|
|
246
|
+
```bash
|
|
247
|
+
# Basic usage (uses saved settings)
|
|
248
|
+
memograph inspect -i transcript.json
|
|
249
|
+
|
|
250
|
+
# JSON output for scripts
|
|
251
|
+
memograph inspect -i transcript.json --json
|
|
252
|
+
|
|
253
|
+
# Use different model
|
|
254
|
+
memograph inspect -i transcript.json --llm-model gpt-4o
|
|
255
|
+
```
|
|
256
|
+
|
|
257
|
+
For all options, run: `memograph inspect --help`
|
|
258
|
+
|
|
259
|
+
---
|
|
260
|
+
|
|
261
|
+
## Configuration
|
|
262
|
+
|
|
263
|
+
**Easiest: Interactive Setup**
|
|
264
|
+
|
|
265
|
+
```bash
|
|
266
|
+
npx memograph-cli
|
|
267
|
+
# Select "Manage settings" → Follow wizard
|
|
268
|
+
# Settings saved to ~/.memograph/config.json
|
|
269
|
+
```
|
|
270
|
+
|
|
271
|
+
**Alternative: Environment Variables**
|
|
272
|
+
|
|
273
|
+
```bash
|
|
274
|
+
# Create .env file
|
|
275
|
+
OPENAI_API_KEY=sk-your-key-here
|
|
276
|
+
LLM_MODEL=gpt-4o-mini
|
|
277
|
+
```
|
|
278
|
+
|
|
279
|
+
**Using Local Models (Ollama)**
|
|
280
|
+
|
|
281
|
+
```bash
|
|
282
|
+
# Install and start Ollama
|
|
283
|
+
brew install ollama
|
|
284
|
+
ollama pull llama3.2
|
|
285
|
+
ollama serve
|
|
286
|
+
|
|
287
|
+
# Configure in interactive mode or use CLI flags
|
|
288
|
+
```
|
|
289
|
+
|
|
290
|
+
Settings priority: CLI flags > Environment variables > Config file
|
|
291
|
+
|
|
292
|
+
---
|
|
293
|
+
|
|
294
|
+
## Input Format
|
|
295
|
+
|
|
296
|
+
Provide a JSON file with conversation messages:
|
|
297
|
+
|
|
298
|
+
```json
|
|
299
|
+
{
|
|
300
|
+
"schema_version": "1.0",
|
|
301
|
+
"messages": [
|
|
302
|
+
{ "idx": 0, "role": "user", "content": "Hello" },
|
|
303
|
+
{ "idx": 1, "role": "assistant", "content": "Hi!" }
|
|
304
|
+
]
|
|
305
|
+
}
|
|
306
|
+
```
|
|
307
|
+
|
|
308
|
+
**Required fields:**
|
|
309
|
+
- `role`: "user", "assistant", "system", or "tool"
|
|
310
|
+
- `content`: Message text
|
|
311
|
+
|
|
312
|
+
**Optional fields:**
|
|
313
|
+
- `idx`: Message index (auto-assigned if missing)
|
|
314
|
+
- `ts`: ISO timestamp
|
|
315
|
+
- `tokens`: Token count (estimated if missing)
|
|
316
|
+
|
|
317
|
+
---
|
|
318
|
+
|
|
319
|
+
## Output
|
|
320
|
+
|
|
321
|
+
**Text output** (default): Human-readable report with drift score, events, and extracted facts.
|
|
322
|
+
|
|
323
|
+
**JSON output** (`--json` flag): Machine-readable format for scripts and CI/CD.
|
|
324
|
+
|
|
325
|
+
```json
|
|
326
|
+
{
|
|
327
|
+
"drift_score": 25,
|
|
328
|
+
"token_waste_pct": 7.1,
|
|
329
|
+
"events": [...],
|
|
330
|
+
"should_have_been_memory": [...]
|
|
331
|
+
}
|
|
332
|
+
```
|
|
333
|
+
|
|
334
|
+
---
|
|
335
|
+
|
|
336
|
+
|
|
337
|
+
|
|
338
|
+
## Privacy & Security
|
|
339
|
+
|
|
340
|
+
**Your data stays local:**
|
|
341
|
+
- Memograph reads transcript files from your local filesystem
|
|
342
|
+
- Only sends data to LLM APIs for analysis (or uses local models)
|
|
343
|
+
- No data is stored or transmitted elsewhere
|
|
344
|
+
|
|
345
|
+
**API Key Safety:**
|
|
346
|
+
- Keys are stored in `~/.memograph/config.json` or environment variables
|
|
347
|
+
- Never commit API keys to git (add `.env` to `.gitignore`)
|
|
348
|
+
- Use local models (Ollama) to avoid sending data to external APIs
|
|
349
|
+
|
|
350
|
+
---
|
|
351
|
+
|
|
352
|
+
|
|
353
|
+
|
|
354
|
+
## For Developers & Contributors
|
|
355
|
+
|
|
356
|
+
Interested in contributing or understanding how Memograph works? Check out [CONTRIBUTING.md](CONTRIBUTING.md) for:
|
|
357
|
+
|
|
358
|
+
- **How it works**: Detection algorithms, scoring, performance optimizations
|
|
359
|
+
- **Development setup**: Local environment, project structure, testing
|
|
360
|
+
- **Roadmap**: Planned features and improvements
|
|
361
|
+
- **Publishing**: Guidelines for releasing new versions
|
|
362
|
+
|
|
363
|
+
---
|
|
364
|
+
|
|
365
|
+
## Troubleshooting
|
|
366
|
+
|
|
367
|
+
### Common Issues
|
|
368
|
+
|
|
369
|
+
**"API key not found"**
|
|
370
|
+
- Run `npx memograph-cli` and use "Manage settings" → "Set/Update API Key"
|
|
371
|
+
- Or set environment variable: `export OPENAI_API_KEY=sk-...`
|
|
372
|
+
|
|
373
|
+
**Interactive mode doesn't start**
|
|
374
|
+
- Don't pass any arguments (they trigger CLI mode)
|
|
375
|
+
- Ensure terminal supports ANSI colors and arrow keys
|
|
376
|
+
|
|
377
|
+
**Settings not saving**
|
|
378
|
+
- Settings are in `~/.memograph/config.json`
|
|
379
|
+
- Reset with: `rm ~/.memograph/config.json && npx memograph-cli`
|
|
380
|
+
|
|
381
|
+
**Ollama not working**
|
|
382
|
+
- Ensure Ollama is running: `ollama serve`
|
|
383
|
+
- Use correct URL: `http://localhost:11434/v1`
|
|
384
|
+
- Install model: `ollama pull llama3.2`
|
|
385
|
+
|
|
386
|
+
**Network/API errors**
|
|
387
|
+
- Check internet connection
|
|
388
|
+
- Verify API status (status.openai.com / status.anthropic.com)
|
|
389
|
+
- Try a different model or use local models
|
|
390
|
+
|
|
391
|
+
**Where are settings stored?**
|
|
392
|
+
- Location: `~/.memograph/config.json`
|
|
393
|
+
- View: `cat ~/.memograph/config.json`
|
|
394
|
+
- Edit via interactive mode: "Manage settings" → "Show raw config"
|
|
395
|
+
|
|
396
|
+
**Settings priority:** CLI flags > Environment variables > Config file
|
|
397
|
+
|
|
398
|
+
---
|
|
399
|
+
|
|
400
|
+
## License
|
|
401
|
+
|
|
402
|
+
MIT License - see LICENSE file for details.
|
package/dist/cli.d.ts
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"cli.d.ts","sourceRoot":"","sources":["../src/cli.ts"],"names":[],"mappings":";AAEA,OAAO,eAAe,CAAC"}
|
package/dist/cli.js
ADDED
|
@@ -0,0 +1,97 @@
|
|
|
1
|
+
#!/usr/bin/env node
"use strict";
// Compiled CLI entry point for memograph.
// Launches interactive mode when invoked with no recognized command,
// otherwise delegates argument parsing to commander.
Object.defineProperty(exports, "__esModule", { value: true });
require("dotenv/config");
const commander_1 = require("commander");
const load_js_1 = require("./core/load.js");
const inspect_js_1 = require("./core/inspect.js");
const render_js_1 = require("./core/render.js");
const index_js_1 = require("./interactive/index.js");
/**
 * Print a user-facing error message and terminate with exit code 1.
 * Shared by both command actions so failures are reported consistently
 * (previously this catch body was duplicated verbatim in each action).
 */
function reportErrorAndExit(error) {
    if (error instanceof Error) {
        console.error(`Error: ${error.message}`);
    }
    else {
        console.error('Unknown error occurred');
    }
    process.exit(1);
}
const program = new commander_1.Command();
program
    .name('memograph')
    // Keep in sync with package.json — this release is 0.1.4 (was stale at '0.1.0',
    // which made `memograph --version` report the wrong version).
    .version('0.1.4')
    .description('LLM-powered CLI tool for analyzing conversation transcripts and detecting memory drift');
program
    .command('inspect')
    .description('Analyze a transcript for drift and repetition using LLM-based semantic analysis')
    .requiredOption('-i, --input <path>', 'path to transcript JSON file')
    .option('--json', 'output JSON (machine-readable)', false)
    .option('--max-messages <n>', 'cap number of messages processed', (val) => parseInt(val, 10), 2000)
    .option('--llm-provider <provider>', 'LLM provider: openai or anthropic', 'openai')
    .option('--llm-model <model>', 'LLM model to use (e.g., gpt-4o-mini, claude-3-5-sonnet-20241022)')
    .option('--llm-api-key <key>', 'LLM API key (or set OPENAI_API_KEY or ANTHROPIC_API_KEY env var)')
    .option('--llm-base-url <url>', 'Custom base URL for LLM API (useful for local models like Ollama)')
    .option('--llm-temperature <temp>', 'LLM temperature (0.0-1.0)', (val) => parseFloat(val), 0.3)
    .option('--llm-max-tokens <tokens>', 'Maximum tokens for LLM response', (val) => parseInt(val, 10), 4096)
    .action(async (options) => {
    try {
        // Load the transcript from disk, capped at --max-messages entries.
        const transcript = await (0, load_js_1.loadTranscript)(options.input, options.maxMessages);
        // Build inspection config (LLM-only mode).
        const config = {
            max_messages: options.maxMessages,
            llm: {
                provider: options.llmProvider,
                apiKey: options.llmApiKey,
                model: options.llmModel,
                temperature: options.llmTemperature,
                maxTokens: options.llmMaxTokens,
                baseUrl: options.llmBaseUrl,
            },
        };
        // Run inspection (LLM-based, async).
        const result = await (0, inspect_js_1.inspectTranscript)(transcript, config);
        // Render output in the requested format (--json selects machine-readable).
        const output = options.json
            ? (0, render_js_1.renderJsonReport)(result)
            : (0, render_js_1.renderTextReport)(result);
        console.log(output);
        process.exit(0);
    }
    catch (error) {
        reportErrorAndExit(error);
    }
});
program
    .command('interactive', { hidden: true })
    .description('Launch interactive mode (automatically used when no arguments provided)')
    .action(async () => {
    try {
        await (0, index_js_1.runInteractiveMode)();
        process.exit(0);
    }
    catch (error) {
        reportErrorAndExit(error);
    }
});
// Decide between interactive mode and commander-based CLI parsing.
const args = process.argv.slice(2);
// A lone help/version flag must still be handled by commander.
const isHelpOrVersion = args.length === 1 && (args[0] === '-h' || args[0] === '--help' || args[0] === '-V' || args[0] === '--version');
// Check whether the first argument names a known command (inspect, interactive, help).
const hasCommand = args.length > 0 && (args[0] === 'inspect' || args[0] === 'interactive' || args[0] === 'help');
if (!hasCommand && !isHelpOrVersion) {
    // No recognized command: launch interactive mode.
    // NOTE(review): any other unrecognized arguments are silently ignored here
    // rather than rejected — confirm that matches the documented behavior.
    (0, index_js_1.runInteractiveMode)().catch((error) => {
        console.error('Error:', error instanceof Error ? error.message : 'Unknown error');
        process.exit(1);
    });
}
else {
    // Parse command line arguments for CLI commands.
    program.parse(process.argv);
}
//# sourceMappingURL=cli.js.map
|
package/dist/cli.js.map
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"cli.js","sourceRoot":"","sources":["../src/cli.ts"],"names":[],"mappings":";;;AAEA,yBAAuB;AACvB,yCAAoC;AACpC,4CAAgD;AAChD,kDAAsD;AACtD,gDAAsE;AACtE,qDAA4D;AAE5D,MAAM,OAAO,GAAG,IAAI,mBAAO,EAAE,CAAC;AAE9B,OAAO;KACJ,IAAI,CAAC,WAAW,CAAC;KACjB,OAAO,CAAC,OAAO,CAAC;KAChB,WAAW,CAAC,wFAAwF,CAAC,CAAC;AAEzG,OAAO;KACJ,OAAO,CAAC,SAAS,CAAC;KAClB,WAAW,CAAC,iFAAiF,CAAC;KAC9F,cAAc,CAAC,oBAAoB,EAAE,8BAA8B,CAAC;KACpE,MAAM,CAAC,QAAQ,EAAE,gCAAgC,EAAE,KAAK,CAAC;KACzD,MAAM,CAAC,oBAAoB,EAAE,kCAAkC,EAAE,CAAC,GAAG,EAAE,EAAE,CAAC,QAAQ,CAAC,GAAG,EAAE,EAAE,CAAC,EAAE,IAAI,CAAC;KAClG,MAAM,CAAC,2BAA2B,EAAE,mCAAmC,EAAE,QAAQ,CAAC;KAClF,MAAM,CAAC,qBAAqB,EAAE,kEAAkE,CAAC;KACjG,MAAM,CAAC,qBAAqB,EAAE,kEAAkE,CAAC;KACjG,MAAM,CAAC,sBAAsB,EAAE,mEAAmE,CAAC;KACnG,MAAM,CAAC,0BAA0B,EAAE,2BAA2B,EAAE,CAAC,GAAG,EAAE,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE,GAAG,CAAC;KAC9F,MAAM,CAAC,2BAA2B,EAAE,iCAAiC,EAAE,CAAC,GAAG,EAAE,EAAE,CAAC,QAAQ,CAAC,GAAG,EAAE,EAAE,CAAC,EAAE,IAAI,CAAC;KACxG,MAAM,CAAC,KAAK,EAAE,OAAO,EAAE,EAAE;IACxB,IAAI,CAAC;QACH,kBAAkB;QAClB,MAAM,UAAU,GAAG,MAAM,IAAA,wBAAc,EAAC,OAAO,CAAC,KAAK,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC;QAE5E,0CAA0C;QAC1C,MAAM,MAAM,GAAQ;YAClB,YAAY,EAAE,OAAO,CAAC,WAAW;YACjC,GAAG,EAAE;gBACH,QAAQ,EAAE,OAAO,CAAC,WAAW;gBAC7B,MAAM,EAAE,OAAO,CAAC,SAAS;gBACzB,KAAK,EAAE,OAAO,CAAC,QAAQ;gBACvB,WAAW,EAAE,OAAO,CAAC,cAAc;gBACnC,SAAS,EAAE,OAAO,CAAC,YAAY;gBAC/B,OAAO,EAAE,OAAO,CAAC,UAAU;aAC5B;SACF,CAAC;QAEF,oCAAoC;QACpC,MAAM,MAAM,GAAG,MAAM,IAAA,8BAAiB,EAAC,UAAU,EAAE,MAAM,CAAC,CAAC;QAE3D,gBAAgB;QAChB,MAAM,MAAM,GAAG,OAAO,CAAC,IAAI;YACzB,CAAC,CAAC,IAAA,4BAAgB,EAAC,MAAM,CAAC;YAC1B,CAAC,CAAC,IAAA,4BAAgB,EAAC,MAAM,CAAC,CAAC;QAE7B,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;QACpB,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IAClB,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,IAAI,KAAK,YAAY,KAAK,EAAE,CAAC;YAC3B,OAAO,CAAC,KAAK,CAAC,UAAU,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC;QAC3C,CAAC;aAAM,CAAC;YACN,OAAO,CAAC,KAAK,CAAC,wBAAwB,CAAC,CAAC;QAC1C,CAAC;QACD,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IAClB,CAAC;AACH,CAAC,CAAC,CAAC;AAEL,OAAO;KACJ,OAAO,CA
AC,aAAa,EAAE,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC;KACxC,WAAW,CAAC,yEAAyE,CAAC;KACtF,MAAM,CAAC,KAAK,IAAI,EAAE;IACjB,IAAI,CAAC;QACH,MAAM,IAAA,6BAAkB,GAAE,CAAC;QAC3B,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IAClB,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,IAAI,KAAK,YAAY,KAAK,EAAE,CAAC;YAC3B,OAAO,CAAC,KAAK,CAAC,UAAU,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC;QAC3C,CAAC;aAAM,CAAC;YACN,OAAO,CAAC,KAAK,CAAC,wBAAwB,CAAC,CAAC;QAC1C,CAAC;QACD,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IAClB,CAAC;AACH,CAAC,CAAC,CAAC;AAEL,6CAA6C;AAC7C,MAAM,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;AAEnC,kCAAkC;AAClC,MAAM,eAAe,GAAG,IAAI,CAAC,MAAM,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,IAAI,IAAI,IAAI,CAAC,CAAC,CAAC,KAAK,QAAQ,IAAI,IAAI,CAAC,CAAC,CAAC,KAAK,IAAI,IAAI,IAAI,CAAC,CAAC,CAAC,KAAK,WAAW,CAAC,CAAC;AAEvI,oDAAoD;AACpD,MAAM,UAAU,GAAG,IAAI,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,SAAS,IAAI,IAAI,CAAC,CAAC,CAAC,KAAK,aAAa,IAAI,IAAI,CAAC,CAAC,CAAC,KAAK,MAAM,CAAC,CAAC;AAEjH,IAAI,CAAC,UAAU,IAAI,CAAC,eAAe,EAAE,CAAC;IACpC,+CAA+C;IAC/C,IAAA,6BAAkB,GAAE,CAAC,KAAK,CAAC,CAAC,KAAK,EAAE,EAAE;QACnC,OAAO,CAAC,KAAK,CAAC,QAAQ,EAAE,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC;QAClF,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IAClB,CAAC,CAAC,CAAC;AACL,CAAC;KAAM,CAAC;IACN,gDAAgD;IAChD,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;AAC9B,CAAC"}
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
import { TranscriptMessage, ExtractedFact, RepetitionCluster, SessionReset, PreferenceForgotten, Contradiction } from './types.js';
|
|
2
|
+
/**
|
|
3
|
+
* Detect repetition clusters in user messages
|
|
4
|
+
*/
|
|
5
|
+
export declare function detectRepetitionClusters(messages: TranscriptMessage[], similarityThreshold?: number): {
|
|
6
|
+
events: RepetitionCluster[];
|
|
7
|
+
timing_ms: number;
|
|
8
|
+
};
|
|
9
|
+
/**
|
|
10
|
+
* Detect session reset signals in assistant messages
|
|
11
|
+
*/
|
|
12
|
+
export declare function detectSessionReset(messages: TranscriptMessage[]): {
|
|
13
|
+
events: SessionReset[];
|
|
14
|
+
timing_ms: number;
|
|
15
|
+
};
|
|
16
|
+
/**
|
|
17
|
+
* Detect when user restates preferences (suggesting they were forgotten)
|
|
18
|
+
*/
|
|
19
|
+
export declare function detectPreferenceForgotten(messages: TranscriptMessage[], facts: ExtractedFact[], gapThreshold?: number): {
|
|
20
|
+
events: PreferenceForgotten[];
|
|
21
|
+
timing_ms: number;
|
|
22
|
+
};
|
|
23
|
+
/**
|
|
24
|
+
* Detect contradictions in extracted facts
|
|
25
|
+
*/
|
|
26
|
+
export declare function detectContradictions(facts: ExtractedFact[]): {
|
|
27
|
+
events: Contradiction[];
|
|
28
|
+
timing_ms: number;
|
|
29
|
+
};
|
|
30
|
+
//# sourceMappingURL=detect.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"detect.d.ts","sourceRoot":"","sources":["../../src/core/detect.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,iBAAiB,EACjB,YAAY,EACZ,mBAAmB,EACnB,aAAa,EACd,MAAM,YAAY,CAAC;AAGpB;;GAEG;AAEH,wBAAgB,wBAAwB,CACtC,QAAQ,EAAE,iBAAiB,EAAE,EAC7B,mBAAmB,SAAO,GACzB;IAAE,MAAM,EAAE,iBAAiB,EAAE,CAAC;IAAC,SAAS,EAAE,MAAM,CAAA;CAAE,CA+EpD;AAwBD;;GAEG;AACH,wBAAgB,kBAAkB,CAChC,QAAQ,EAAE,iBAAiB,EAAE,GAC5B;IAAE,MAAM,EAAE,YAAY,EAAE,CAAC;IAAC,SAAS,EAAE,MAAM,CAAA;CAAE,CAuC/C;AAED;;GAEG;AACH,wBAAgB,yBAAyB,CACvC,QAAQ,EAAE,iBAAiB,EAAE,EAC7B,KAAK,EAAE,aAAa,EAAE,EACtB,YAAY,SAAI,GACf;IAAE,MAAM,EAAE,mBAAmB,EAAE,CAAC;IAAC,SAAS,EAAE,MAAM,CAAA;CAAE,CAyDtD;AAED;;GAEG;AACH,wBAAgB,oBAAoB,CAClC,KAAK,EAAE,aAAa,EAAE,GACrB;IAAE,MAAM,EAAE,aAAa,EAAE,CAAC;IAAC,SAAS,EAAE,MAAM,CAAA;CAAE,CAmChD"}
|