@telvok/librarian-mcp 1.5.3 → 2.0.0
This diff shows the contents of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
- package/dist/library/errors.d.ts +48 -0
- package/dist/library/errors.js +80 -0
- package/dist/library/parsers/jsonl.d.ts +9 -4
- package/dist/library/parsers/jsonl.js +52 -20
- package/dist/library/schemas.d.ts +6 -6
- package/dist/library/storage.d.ts +2 -2
- package/dist/library/storage.js +2 -2
- package/dist/library 2/embeddings.d.ts +21 -0
- package/dist/library 2/embeddings.js +86 -0
- package/dist/library 2/manager.d.ts +42 -0
- package/dist/library 2/manager.js +218 -0
- package/dist/library 2/parsers/cursor.d.ts +15 -0
- package/dist/library 2/parsers/cursor.js +168 -0
- package/dist/library 2/parsers/index.d.ts +6 -0
- package/dist/library 2/parsers/index.js +5 -0
- package/dist/library 2/parsers/json.d.ts +11 -0
- package/dist/library 2/parsers/json.js +95 -0
- package/dist/library 2/parsers/jsonl.d.ts +14 -0
- package/dist/library 2/parsers/jsonl.js +85 -0
- package/dist/library 2/parsers/markdown.d.ts +15 -0
- package/dist/library 2/parsers/markdown.js +77 -0
- package/dist/library 2/parsers/sqlite.d.ts +8 -0
- package/dist/library 2/parsers/sqlite.js +123 -0
- package/dist/library 2/parsers/types.d.ts +21 -0
- package/dist/library 2/parsers/types.js +4 -0
- package/dist/library 2/query.d.ts +26 -0
- package/dist/library 2/query.js +104 -0
- package/dist/library 2/schemas.d.ts +324 -0
- package/dist/library 2/schemas.js +79 -0
- package/dist/library 2/storage.d.ts +22 -0
- package/dist/library 2/storage.js +36 -0
- package/dist/library 2/vector-index.d.ts +55 -0
- package/dist/library 2/vector-index.js +160 -0
- package/dist/server 2.js +199 -0
- package/dist/server.d 2.ts +2 -0
- package/dist/server.js +102 -54
- package/dist/tools/adopt.d.ts +1 -0
- package/dist/tools/adopt.js +37 -10
- package/dist/tools/auth.d.ts +69 -0
- package/dist/tools/auth.js +379 -0
- package/dist/tools/bounty-claim.d.ts +28 -0
- package/dist/tools/bounty-claim.js +92 -0
- package/dist/tools/bounty-create.d.ts +47 -0
- package/dist/tools/bounty-create.js +118 -0
- package/dist/tools/bounty-list.d.ts +50 -0
- package/dist/tools/bounty-list.js +116 -0
- package/dist/tools/bounty-submit.d.ts +34 -0
- package/dist/tools/bounty-submit.js +94 -0
- package/dist/tools/brief.d.ts +94 -0
- package/dist/tools/brief.js +234 -15
- package/dist/tools/delete.d.ts +87 -0
- package/dist/tools/delete.js +266 -0
- package/dist/tools/feedback.d.ts +27 -0
- package/dist/tools/feedback.js +98 -0
- package/dist/tools/help.d.ts +22 -0
- package/dist/tools/help.js +482 -0
- package/dist/tools/import-memories.d.ts +1 -0
- package/dist/tools/import-memories.js +18 -13
- package/dist/tools/index.d.ts +11 -0
- package/dist/tools/index.js +12 -0
- package/dist/tools/library-buy.d.ts +31 -0
- package/dist/tools/library-buy.js +104 -0
- package/dist/tools/library-download.d.ts +27 -0
- package/dist/tools/library-download.js +177 -0
- package/dist/tools/library-publish.d.ts +112 -0
- package/dist/tools/library-publish.js +387 -0
- package/dist/tools/library-search.d.ts +110 -0
- package/dist/tools/library-search.js +132 -0
- package/dist/tools/mark-hit.d.ts +1 -0
- package/dist/tools/mark-hit.js +83 -5
- package/dist/tools/my-books.d.ts +51 -0
- package/dist/tools/my-books.js +115 -0
- package/dist/tools/my-bounties.d.ts +43 -0
- package/dist/tools/my-bounties.js +126 -0
- package/dist/tools/rate-book.d.ts +40 -0
- package/dist/tools/rate-book.js +147 -0
- package/dist/tools/rebuild-index.d.ts +1 -0
- package/dist/tools/rebuild-index.js +40 -8
- package/dist/tools/record.d.ts +18 -0
- package/dist/tools/record.js +30 -26
- package/dist/tools/seller-analytics.d.ts +53 -0
- package/dist/tools/seller-analytics.js +180 -0
- package/dist/tools/sync.d.ts +55 -0
- package/dist/tools/sync.js +304 -0
- package/dist/tools/unsubscribe.d.ts +48 -0
- package/dist/tools/unsubscribe.js +120 -0
- package/dist/tools 2/adopt.d.ts +24 -0
- package/dist/tools 2/adopt.js +154 -0
- package/dist/tools 2/auth.d.ts +35 -0
- package/dist/tools 2/auth.js +229 -0
- package/dist/tools 2/brief.d.ts +56 -0
- package/dist/tools 2/brief.js +414 -0
- package/dist/tools 2/help.d.ts +21 -0
- package/dist/tools 2/help.js +267 -0
- package/dist/tools 2/import-memories.d.ts +32 -0
- package/dist/tools 2/import-memories.js +231 -0
- package/dist/tools 2/index.d.ts +12 -0
- package/dist/tools 2/index.js +12 -0
- package/dist/tools 2/mark-hit.d.ts +20 -0
- package/dist/tools 2/mark-hit.js +71 -0
- package/dist/tools 2/marketplace-buy.d.ts +30 -0
- package/dist/tools 2/marketplace-buy.js +97 -0
- package/dist/tools 2/marketplace-download.d.ts +26 -0
- package/dist/tools 2/marketplace-download.js +160 -0
- package/dist/tools 2/marketplace-publish.d.ts +111 -0
- package/dist/tools 2/marketplace-publish.js +377 -0
- package/dist/tools 2/marketplace-search.d.ts +57 -0
- package/dist/tools 2/marketplace-search.js +96 -0
- package/dist/tools 2/my-books.d.ts +50 -0
- package/dist/tools 2/my-books.js +107 -0
- package/dist/tools 2/rate-book.d.ts +39 -0
- package/dist/tools 2/rate-book.js +139 -0
- package/dist/tools 2/rebuild-index.d.ts +23 -0
- package/dist/tools 2/rebuild-index.js +107 -0
- package/dist/tools 2/record.d.ts +40 -0
- package/dist/tools 2/record.js +205 -0
- package/dist/tools 2/seller-analytics.d.ts +35 -0
- package/dist/tools 2/seller-analytics.js +102 -0
- package/dist/tools 2/sync.d.ts +54 -0
- package/dist/tools 2/sync.js +298 -0
- package/package.json +1 -1

package/dist/library 2/manager.js
@@ -0,0 +1,218 @@
+import * as fs from 'fs/promises';
+import * as path from 'path';
+import matter from 'gray-matter';
+import { glob } from 'glob';
+import { v4 as uuidv4 } from 'uuid';
+import { getLibraryPath, getLocalPath, getImportedPath, getArchivedPath, } from './storage.js';
+// ============================================================================
+// Library Manager
+// ============================================================================
+export class LibraryManager {
+    libraryPath;
+    constructor() {
+        this.libraryPath = getLibraryPath();
+    }
+    /**
+     * Initialize the library directory structure.
+     */
+    async initialize() {
+        const dirs = [
+            getLocalPath(this.libraryPath),
+            getImportedPath(this.libraryPath),
+            getArchivedPath(this.libraryPath),
+        ];
+        for (const dir of dirs) {
+            await fs.mkdir(dir, { recursive: true });
+        }
+    }
+    /**
+     * Get all entries from local library.
+     */
+    async getLocalEntries() {
+        const localPath = getLocalPath(this.libraryPath);
+        return this.readEntriesFromPath(localPath, 'local');
+    }
+    /**
+     * Get all entries from imported libraries.
+     */
+    async getImportedEntries() {
+        const importedPath = getImportedPath(this.libraryPath);
+        return this.readEntriesFromPath(importedPath, 'imported');
+    }
+    /**
+     * Get all archived entries.
+     */
+    async getArchivedEntries() {
+        const archivedPath = getArchivedPath(this.libraryPath);
+        return this.readEntriesFromPath(archivedPath, 'archived');
+    }
+    /**
+     * Query entries by topic.
+     */
+    async queryByTopic(topic) {
+        const [local, imported] = await Promise.all([
+            this.getLocalEntries(),
+            this.getImportedEntries(),
+        ]);
+        const allEntries = [...local, ...imported];
+        const searchTerm = topic.toLowerCase();
+        return allEntries.filter(entry => entry.topics.some(t => t.toLowerCase().includes(searchTerm)) ||
+            entry.content.toLowerCase().includes(searchTerm));
+    }
+    /**
+     * Record a new entry to local library.
+     */
+    async record(topics, content) {
+        const localPath = getLocalPath(this.libraryPath);
+        await fs.mkdir(localPath, { recursive: true });
+        const id = uuidv4();
+        const created = new Date().toISOString();
+        const entry = {
+            id,
+            topics,
+            content,
+            created,
+            source: 'local',
+            origin: 'manual',
+        };
+        // Generate filename
+        const slug = topics[0]
+            .toLowerCase()
+            .replace(/[^a-z0-9]+/g, '-')
+            .replace(/^-|-$/g, '');
+        const timestamp = created.slice(0, 10);
+        let filename = `${slug}-${timestamp}.md`;
+        let filePath = path.join(localPath, filename);
+        // Handle collisions
+        let counter = 1;
+        while (await this.fileExists(filePath)) {
+            filename = `${slug}-${timestamp}-${counter}.md`;
+            filePath = path.join(localPath, filename);
+            counter++;
+        }
+        // Write file
+        const frontmatter = {
+            id,
+            topics,
+            created,
+            source: 'manual',
+        };
+        const fileContent = matter.stringify(content, frontmatter);
+        await fs.writeFile(filePath, fileContent, 'utf-8');
+        return {
+            entry,
+            path: path.relative(this.libraryPath, filePath),
+        };
+    }
+    /**
+     * Archive an entry (move to archived/).
+     */
+    async archive(entryId) {
+        const localPath = getLocalPath(this.libraryPath);
+        const archivedPath = getArchivedPath(this.libraryPath);
+        // Find the entry
+        const found = await this.findEntryById(localPath, entryId);
+        if (!found) {
+            return { success: false, message: `Entry not found: ${entryId}` };
+        }
+        await fs.mkdir(archivedPath, { recursive: true });
+        const filename = path.basename(found.filePath);
+        const newPath = path.join(archivedPath, filename);
+        await fs.rename(found.filePath, newPath);
+        return {
+            success: true,
+            message: `Archived to ${path.relative(this.libraryPath, newPath)}`,
+        };
+    }
+    // ============================================================================
+    // Private Helpers
+    // ============================================================================
+    async readEntriesFromPath(dirPath, source) {
+        const entries = [];
+        try {
+            const files = await glob(path.join(dirPath, '**/*.md'), { nodir: true });
+            for (const filePath of files) {
+                try {
+                    const content = await fs.readFile(filePath, 'utf-8');
+                    const { data, content: body } = matter(content);
+                    let topics;
+                    if (Array.isArray(data.topics)) {
+                        topics = data.topics;
+                    }
+                    else if (data.topic) {
+                        topics = [data.topic];
+                    }
+                    else {
+                        topics = ['general'];
+                    }
+                    entries.push({
+                        id: data.id || uuidv4(),
+                        topics,
+                        content: body.trim(),
+                        created: data.created || new Date().toISOString(),
+                        source,
+                        origin: data.source,
+                        imported_from: data.imported_from,
+                    });
+                }
+                catch {
+                    // Skip unreadable files
+                }
+            }
+        }
+        catch {
+            // Directory doesn't exist
+        }
+        return entries;
+    }
+    async findEntryById(dirPath, entryId) {
+        try {
+            const files = await glob(path.join(dirPath, '**/*.md'), { nodir: true });
+            for (const filePath of files) {
+                try {
+                    const content = await fs.readFile(filePath, 'utf-8');
+                    const { data, content: body } = matter(content);
+                    if (data.id === entryId) {
+                        let topics;
+                        if (Array.isArray(data.topics)) {
+                            topics = data.topics;
+                        }
+                        else if (data.topic) {
+                            topics = [data.topic];
+                        }
+                        else {
+                            topics = ['general'];
+                        }
+                        return {
+                            entry: {
+                                id: data.id,
+                                topics,
+                                content: body.trim(),
+                                created: data.created || new Date().toISOString(),
+                                source: 'local',
+                                origin: data.source,
+                            },
+                            filePath,
+                        };
+                    }
+                }
+                catch {
+                    // Skip unreadable files
+                }
+            }
+        }
+        catch {
+            // Directory doesn't exist
+        }
+        return null;
+    }
+    async fileExists(filePath) {
+        try {
+            await fs.access(filePath);
+            return true;
+        }
+        catch {
+            return false;
+        }
+    }
+}
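
Usage sketch (not from the package docs): a minimal, hedged example of driving the LibraryManager above. The relative import path is illustrative and assumes a script sitting next to manager.js; the method names and return shapes mirror the code in the hunk.

```ts
// Sketch only: import path is illustrative, not a documented entry point.
import { LibraryManager } from './manager.js';

const manager = new LibraryManager();
await manager.initialize(); // creates local/, imported/ and archived/ under the library root

// record() writes a frontmatter-bearing .md file under local/ and returns the entry plus its relative path
const { entry, path: relPath } = await manager.record(['api', 'rate-limits'], 'Always back off exponentially.');

// queryByTopic() is a case-insensitive substring match over topics and content of local + imported entries
const hits = await manager.queryByTopic('rate');

// archive() moves the entry's file from local/ to archived/
const result = await manager.archive(entry.id);
console.log(relPath, hits.length, result.message);
```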

package/dist/library 2/parsers/cursor.d.ts
@@ -0,0 +1,15 @@
+import type { ParseResult } from './types.js';
+/**
+ * Parse a Cursor Memory Bank folder (.cursor-memory/).
+ *
+ * Cursor Memory Bank typically contains:
+ * - activeContext.md - Current working context
+ * - progress.md - Progress log
+ * - projectBrief.md - Project overview
+ * - systemPatterns.md - System patterns
+ * - decisionLog.md - Decision history
+ * - techStack.md - Technology stack info
+ *
+ * Can also contain JSON files and subdirectories.
+ */
+export declare function parseCursorMemory(dirPath: string): Promise<ParseResult>;

package/dist/library 2/parsers/cursor.js
@@ -0,0 +1,168 @@
+import * as fs from 'fs/promises';
+import * as path from 'path';
+import { glob } from 'glob';
+import matter from 'gray-matter';
+// ============================================================================
+// Cursor Memory Bank Parser
+// ============================================================================
+/**
+ * Parse a Cursor Memory Bank folder (.cursor-memory/).
+ *
+ * Cursor Memory Bank typically contains:
+ * - activeContext.md - Current working context
+ * - progress.md - Progress log
+ * - projectBrief.md - Project overview
+ * - systemPatterns.md - System patterns
+ * - decisionLog.md - Decision history
+ * - techStack.md - Technology stack info
+ *
+ * Can also contain JSON files and subdirectories.
+ */
+export async function parseCursorMemory(dirPath) {
+    const entries = [];
+    const errors = [];
+    let skipped = 0;
+    try {
+        const stats = await fs.stat(dirPath);
+        if (!stats.isDirectory()) {
+            errors.push(`${dirPath} is not a directory`);
+            return { entries, skipped, errors };
+        }
+        // Parse markdown files
+        const mdFiles = await glob(path.join(dirPath, '**/*.md'), { nodir: true });
+        for (const filePath of mdFiles) {
+            try {
+                const content = await fs.readFile(filePath, 'utf-8');
+                const { data, content: body } = matter(content);
+                const trimmedBody = body.trim();
+                if (!trimmedBody) {
+                    skipped++;
+                    continue;
+                }
+                // Extract title from filename (convert camelCase/snake_case to Title Case)
+                const filename = path.basename(filePath, '.md');
+                const title = data.title || formatFilename(filename);
+                // Determine context based on filename
+                const context = data.context || inferContext(filename);
+                entries.push({
+                    title,
+                    content: trimmedBody,
+                    context,
+                    intent: data.intent,
+                    source: 'cursor',
+                    originalPath: filePath,
+                });
+            }
+            catch (fileError) {
+                errors.push(`${filePath}: ${fileError instanceof Error ? fileError.message : String(fileError)}`);
+                skipped++;
+            }
+        }
+        // Parse JSON files
+        const jsonFiles = await glob(path.join(dirPath, '**/*.json'), { nodir: true });
+        for (const filePath of jsonFiles) {
+            try {
+                const content = await fs.readFile(filePath, 'utf-8');
+                const data = JSON.parse(content);
+                // Handle different JSON structures
+                if (Array.isArray(data)) {
+                    // Array of memory items
+                    for (const item of data) {
+                        if (typeof item === 'object' && item !== null) {
+                            const entry = extractFromJSON(item, filePath);
+                            if (entry) {
+                                entries.push(entry);
+                            }
+                            else {
+                                skipped++;
+                            }
+                        }
+                    }
+                }
+                else if (typeof data === 'object' && data !== null) {
+                    // Single memory object
+                    const entry = extractFromJSON(data, filePath);
+                    if (entry) {
+                        entries.push(entry);
+                    }
+                    else {
+                        skipped++;
+                    }
+                }
+            }
+            catch (fileError) {
+                errors.push(`${filePath}: ${fileError instanceof Error ? fileError.message : String(fileError)}`);
+                skipped++;
+            }
+        }
+    }
+    catch (dirError) {
+        errors.push(`Failed to access path: ${dirError instanceof Error ? dirError.message : String(dirError)}`);
+    }
+    return { entries, skipped, errors };
+}
+// ============================================================================
+// Helper Functions
+// ============================================================================
+/**
+ * Convert filename to readable title.
+ * activeContext -> Active Context
+ * system_patterns -> System Patterns
+ */
+function formatFilename(filename) {
+    return filename
+        // Handle camelCase
+        .replace(/([a-z])([A-Z])/g, '$1 $2')
+        // Handle snake_case
+        .replace(/_/g, ' ')
+        // Handle kebab-case
+        .replace(/-/g, ' ')
+        // Capitalize first letter of each word
+        .replace(/\b\w/g, c => c.toUpperCase());
+}
+/**
+ * Infer context from Cursor Memory Bank filename patterns.
+ */
+function inferContext(filename) {
+    const lower = filename.toLowerCase();
+    if (lower.includes('context'))
+        return 'context';
+    if (lower.includes('progress'))
+        return 'progress';
+    if (lower.includes('brief') || lower.includes('overview'))
+        return 'project';
+    if (lower.includes('pattern'))
+        return 'patterns';
+    if (lower.includes('decision'))
+        return 'decisions';
+    if (lower.includes('stack') || lower.includes('tech'))
+        return 'technology';
+    return 'cursor-memory';
+}
+/**
+ * Extract a ParsedEntry from a JSON object.
+ */
+function extractFromJSON(obj, filePath) {
+    // Try various common keys for title
+    const title = obj.title ||
+        obj.name ||
+        obj.key ||
+        path.basename(filePath, '.json');
+    // Try various common keys for content
+    const content = obj.content ||
+        obj.description ||
+        obj.text ||
+        obj.value ||
+        obj.memory;
+    if (!content) {
+        return null;
+    }
+    return {
+        title,
+        content,
+        context: obj.context || obj.category || obj.type,
+        intent: obj.intent,
+        source: 'cursor',
+        originalPath: filePath,
+    };
+}
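
A hedged sketch of exercising parseCursorMemory() above against a throwaway folder; file names and contents are illustrative, and the expected titles and contexts follow from formatFilename() and inferContext() as written.

```ts
import * as fs from 'fs/promises';
import * as path from 'path';
// Sketch only: relative import assumes a script next to cursor.js.
import { parseCursorMemory } from './cursor.js';

const dir = path.join(process.cwd(), '.cursor-memory');
await fs.mkdir(dir, { recursive: true });
await fs.writeFile(path.join(dir, 'activeContext.md'), '# Context\n\nRefactoring the parser layer.');
await fs.writeFile(path.join(dir, 'decisionLog.md'), 'Chose JSONL for the export format.');

const { entries, skipped, errors } = await parseCursorMemory(dir);
// activeContext.md -> title 'Active Context' (formatFilename), context 'context' (inferContext)
// decisionLog.md   -> title 'Decision Log',   context 'decisions'
console.log(entries.length, skipped, errors);
```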

package/dist/library 2/parsers/index.d.ts
@@ -0,0 +1,6 @@
+export type { ParsedEntry, ParseResult } from './types.js';
+export { parseJSONL } from './jsonl.js';
+export { parseMarkdown } from './markdown.js';
+export { parseCursorMemory } from './cursor.js';
+export { parseJSON } from './json.js';
+export { parseSQLite } from './sqlite.js';
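
Since the barrel above re-exports every parser plus the shared types, a consumer could (hypothetically) pull them from one place; the import path below is illustrative and assumes a module sitting next to the parsers/ folder.

```ts
// Sketch only: not a documented entry point.
import { parseJSONL, parseCursorMemory, type ParseResult } from './parsers/index.js';

const result: ParseResult = await parseJSONL('memory.jsonl');
```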

package/dist/library 2/parsers/json.d.ts
@@ -0,0 +1,11 @@
+import type { ParseResult } from './types.js';
+/**
+ * Parse a JSON file containing an array of memory entries.
+ *
+ * Supports various common structures:
+ * - Array of objects with title/content
+ * - Array of objects with name/description
+ * - Object with entries array
+ * - Object with memories array
+ */
+export declare function parseJSON(filePath: string): Promise<ParseResult>;

package/dist/library 2/parsers/json.js
@@ -0,0 +1,95 @@
+import * as fs from 'fs/promises';
+/**
+ * Parse a JSON file containing an array of memory entries.
+ *
+ * Supports various common structures:
+ * - Array of objects with title/content
+ * - Array of objects with name/description
+ * - Object with entries array
+ * - Object with memories array
+ */
+export async function parseJSON(filePath) {
+    const entries = [];
+    const errors = [];
+    let skipped = 0;
+    try {
+        const content = await fs.readFile(filePath, 'utf-8');
+        const data = JSON.parse(content);
+        // Handle different JSON structures
+        let items = [];
+        if (Array.isArray(data)) {
+            // Direct array of entries
+            items = data;
+        }
+        else if (typeof data === 'object' && data !== null) {
+            // Object with entries/memories/items array
+            if (Array.isArray(data.entries)) {
+                items = data.entries;
+            }
+            else if (Array.isArray(data.memories)) {
+                items = data.memories;
+            }
+            else if (Array.isArray(data.items)) {
+                items = data.items;
+            }
+            else if (Array.isArray(data.data)) {
+                items = data.data;
+            }
+            else {
+                // Single object - treat as one entry
+                items = [data];
+            }
+        }
+        for (let i = 0; i < items.length; i++) {
+            const item = items[i];
+            if (typeof item !== 'object' || item === null) {
+                skipped++;
+                continue;
+            }
+            // Extract title
+            const title = item.title || item.name || item.key || `Entry ${i + 1}`;
+            // Extract content
+            let entryContent = item.content ||
+                item.text ||
+                item.description ||
+                item.value ||
+                item.memory ||
+                item.observation;
+            // Handle observations array
+            if (!entryContent && item.observations && Array.isArray(item.observations)) {
+                entryContent = item.observations.join('\n\n');
+            }
+            if (!entryContent) {
+                skipped++;
+                continue;
+            }
+            // Extract context
+            let context;
+            if (item.context) {
+                context = item.context;
+            }
+            else if (item.category) {
+                context = item.category;
+            }
+            else if (item.type) {
+                context = item.type;
+            }
+            else if (item.tags && Array.isArray(item.tags)) {
+                context = item.tags.join(', ');
+            }
+            entries.push({
+                title: String(title),
+                content: String(entryContent),
+                context,
+                intent: item.intent ? String(item.intent) : undefined,
+                reasoning: item.reasoning ? String(item.reasoning) : undefined,
+                example: item.example ? String(item.example) : undefined,
+                source: 'json',
+            });
+        }
+    }
+    catch (error) {
+        errors.push(`Failed to parse JSON: ${error instanceof Error ? error.message : String(error)}`);
+    }
+    return { entries, skipped, errors };
+}
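
A hedged sketch of feeding parseJSON() above one of the shapes its doc comment lists (an object with a memories array); the file name and contents are illustrative, and the expected results follow directly from the extraction logic in the hunk.

```ts
import * as fs from 'fs/promises';
// Sketch only: relative import assumes a script next to json.js.
import { parseJSON } from './json.js';

await fs.writeFile('memories.json', JSON.stringify({
    memories: [
        { title: 'API Rate Limits', content: 'Back off exponentially.', tags: ['api', 'performance'] },
        { name: 'DB pooling', description: 'Cap the pool size at 10.', category: 'database' },
        'not an object',         // skipped: not an object
        { title: 'No content' }, // skipped: no recognised content field
    ],
}));

const { entries, skipped } = await parseJSON('memories.json');
// entries[0].title === 'API Rate Limits', entries[0].context === 'api, performance' (joined tags)
// entries[1].title === 'DB pooling',      entries[1].context === 'database'
// skipped === 2
```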

package/dist/library 2/parsers/jsonl.d.ts
@@ -0,0 +1,14 @@
+import type { ParseResult } from './types.js';
+/**
+ * Parse a JSONL file. Supports multiple formats:
+ *
+ * mcp-knowledge-graph:
+ * {"type":"entity","name":"Topic","observations":["fact 1","fact 2"]}
+ *
+ * Generic memory formats:
+ * {"title":"Topic","content":"..."}
+ * {"content":"...","timestamp":"..."}
+ * {"text":"...","metadata":{}}
+ * {"memory":"...","created_at":"..."}
+ */
+export declare function parseJSONL(filePath: string): Promise<ParseResult>;

package/dist/library 2/parsers/jsonl.js
@@ -0,0 +1,85 @@
+import * as fs from 'fs/promises';
+/**
+ * Parse a JSONL file. Supports multiple formats:
+ *
+ * mcp-knowledge-graph:
+ * {"type":"entity","name":"Topic","observations":["fact 1","fact 2"]}
+ *
+ * Generic memory formats:
+ * {"title":"Topic","content":"..."}
+ * {"content":"...","timestamp":"..."}
+ * {"text":"...","metadata":{}}
+ * {"memory":"...","created_at":"..."}
+ */
+export async function parseJSONL(filePath) {
+    const entries = [];
+    const errors = [];
+    let skipped = 0;
+    try {
+        const content = await fs.readFile(filePath, 'utf-8');
+        const lines = content.trim().split('\n').filter(line => line.trim());
+        for (let i = 0; i < lines.length; i++) {
+            const line = lines[i];
+            try {
+                const item = JSON.parse(line);
+                // Skip internal markers and relations
+                if (item.type === '_aim' || item.type === 'relation') {
+                    skipped++;
+                    continue;
+                }
+                // Extract title (try multiple fields)
+                const title = item.name || item.title || item.key || `Entry ${i + 1}`;
+                // Extract content (try multiple fields)
+                let entryContent;
+                // Check observations array first (mcp-knowledge-graph)
+                if (item.observations && Array.isArray(item.observations) && item.observations.length > 0) {
+                    entryContent = item.observations.join('\n\n');
+                }
+                // Fall back to common content fields
+                if (!entryContent) {
+                    entryContent = item.content || item.text || item.description ||
+                        item.value || item.memory || item.observation || item.body;
+                }
+                // Skip if no content found
+                if (!entryContent) {
+                    skipped++;
+                    continue;
+                }
+                // Extract context
+                let context;
+                if (item.entityType) {
+                    context = item.entityType;
+                }
+                else if (item.context) {
+                    context = item.context;
+                }
+                else if (item.category) {
+                    context = item.category;
+                }
+                else if (item.type && item.type !== 'entity' && item.type !== 'memory') {
+                    context = item.type;
+                }
+                else if (item.tags && Array.isArray(item.tags)) {
+                    context = item.tags.join(', ');
+                }
+                entries.push({
+                    title: String(title),
+                    content: String(entryContent),
+                    context,
+                    intent: item.intent ? String(item.intent) : undefined,
+                    reasoning: item.reasoning ? String(item.reasoning) : undefined,
+                    example: item.example ? String(item.example) : undefined,
+                    source: 'jsonl',
+                });
+            }
+            catch (parseError) {
+                errors.push(`Line ${i + 1}: Invalid JSON - ${parseError instanceof Error ? parseError.message : String(parseError)}`);
+                skipped++;
+            }
+        }
+    }
+    catch (fileError) {
+        errors.push(`Failed to read file: ${fileError instanceof Error ? fileError.message : String(fileError)}`);
+    }
+    return { entries, skipped, errors };
+}
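
A hedged sketch of running parseJSONL() above over a small file that mixes the formats listed in its doc comment; line contents are illustrative and the expected results follow from the branch logic in the hunk.

```ts
import * as fs from 'fs/promises';
// Sketch only: relative import assumes a script next to jsonl.js.
import { parseJSONL } from './jsonl.js';

const lines = [
    '{"type":"entity","name":"Topic","entityType":"concept","observations":["fact 1","fact 2"]}',
    '{"type":"relation","from":"A","to":"B"}',          // skipped: relations are ignored
    '{"title":"Deploy checklist","content":"Run migrations first."}',
    'not json at all',                                  // counted in errors and skipped
].join('\n');
await fs.writeFile('memory.jsonl', lines);

const { entries, skipped, errors } = await parseJSONL('memory.jsonl');
// entries[0]: title 'Topic', content 'fact 1\n\nfact 2', context 'concept' (from entityType)
// entries[1]: title 'Deploy checklist', content 'Run migrations first.'
// skipped === 2, errors.length === 1
console.log(entries, skipped, errors);
```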

package/dist/library 2/parsers/markdown.d.ts
@@ -0,0 +1,15 @@
+import type { ParseResult } from './types.js';
+/**
+ * Parse a folder of markdown files (Basic Memory MCP / Obsidian / any .md).
+ *
+ * Input format:
+ * ---
+ * title: API Rate Limits
+ * tags: [api, performance]
+ * ---
+ *
+ * # API Rate Limits
+ *
+ * Always implement exponential backoff...
+ */
+export declare function parseMarkdown(dirPath: string): Promise<ParseResult>;
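
A hedged sketch based only on the declaration above: write one note in the documented frontmatter format and hand the folder to parseMarkdown(). The exact field mapping depends on markdown.js, which this diff view does not reproduce; the folder and file names are illustrative.

```ts
import * as fs from 'fs/promises';
import * as path from 'path';
// Sketch only: relative import assumes a script next to markdown.js.
import { parseMarkdown } from './markdown.js';

const dir = 'notes';
await fs.mkdir(dir, { recursive: true });
await fs.writeFile(path.join(dir, 'api-rate-limits.md'), [
    '---',
    'title: API Rate Limits',
    'tags: [api, performance]',
    '---',
    '',
    '# API Rate Limits',
    '',
    'Always implement exponential backoff...',
    '',
].join('\n'));

const { entries, skipped, errors } = await parseMarkdown(dir);
console.log(entries.length, skipped, errors);
```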