@getlore/cli 0.2.0
This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/LICENSE +13 -0
- package/README.md +80 -0
- package/dist/cli/colors.d.ts +48 -0
- package/dist/cli/colors.js +48 -0
- package/dist/cli/commands/ask.d.ts +7 -0
- package/dist/cli/commands/ask.js +97 -0
- package/dist/cli/commands/auth.d.ts +10 -0
- package/dist/cli/commands/auth.js +484 -0
- package/dist/cli/commands/daemon.d.ts +22 -0
- package/dist/cli/commands/daemon.js +244 -0
- package/dist/cli/commands/docs.d.ts +7 -0
- package/dist/cli/commands/docs.js +188 -0
- package/dist/cli/commands/extensions.d.ts +7 -0
- package/dist/cli/commands/extensions.js +204 -0
- package/dist/cli/commands/misc.d.ts +7 -0
- package/dist/cli/commands/misc.js +172 -0
- package/dist/cli/commands/pending.d.ts +7 -0
- package/dist/cli/commands/pending.js +63 -0
- package/dist/cli/commands/projects.d.ts +7 -0
- package/dist/cli/commands/projects.js +136 -0
- package/dist/cli/commands/search.d.ts +7 -0
- package/dist/cli/commands/search.js +102 -0
- package/dist/cli/commands/skills.d.ts +24 -0
- package/dist/cli/commands/skills.js +447 -0
- package/dist/cli/commands/sources.d.ts +7 -0
- package/dist/cli/commands/sources.js +121 -0
- package/dist/cli/commands/sync.d.ts +31 -0
- package/dist/cli/commands/sync.js +768 -0
- package/dist/cli/helpers.d.ts +30 -0
- package/dist/cli/helpers.js +119 -0
- package/dist/core/auth.d.ts +62 -0
- package/dist/core/auth.js +330 -0
- package/dist/core/config.d.ts +41 -0
- package/dist/core/config.js +96 -0
- package/dist/core/data-repo.d.ts +31 -0
- package/dist/core/data-repo.js +146 -0
- package/dist/core/embedder.d.ts +22 -0
- package/dist/core/embedder.js +104 -0
- package/dist/core/git.d.ts +37 -0
- package/dist/core/git.js +140 -0
- package/dist/core/index.d.ts +4 -0
- package/dist/core/index.js +5 -0
- package/dist/core/insight-extractor.d.ts +26 -0
- package/dist/core/insight-extractor.js +114 -0
- package/dist/core/local-search.d.ts +43 -0
- package/dist/core/local-search.js +221 -0
- package/dist/core/themes.d.ts +15 -0
- package/dist/core/themes.js +77 -0
- package/dist/core/types.d.ts +177 -0
- package/dist/core/types.js +9 -0
- package/dist/core/user-settings.d.ts +15 -0
- package/dist/core/user-settings.js +42 -0
- package/dist/core/vector-store-lance.d.ts +98 -0
- package/dist/core/vector-store-lance.js +384 -0
- package/dist/core/vector-store-supabase.d.ts +89 -0
- package/dist/core/vector-store-supabase.js +295 -0
- package/dist/core/vector-store.d.ts +131 -0
- package/dist/core/vector-store.js +503 -0
- package/dist/daemon-runner.d.ts +8 -0
- package/dist/daemon-runner.js +246 -0
- package/dist/extensions/config.d.ts +22 -0
- package/dist/extensions/config.js +102 -0
- package/dist/extensions/proposals.d.ts +30 -0
- package/dist/extensions/proposals.js +178 -0
- package/dist/extensions/registry.d.ts +35 -0
- package/dist/extensions/registry.js +309 -0
- package/dist/extensions/sandbox.d.ts +16 -0
- package/dist/extensions/sandbox.js +17 -0
- package/dist/extensions/types.d.ts +114 -0
- package/dist/extensions/types.js +4 -0
- package/dist/extensions/worker.d.ts +1 -0
- package/dist/extensions/worker.js +49 -0
- package/dist/index.d.ts +17 -0
- package/dist/index.js +105 -0
- package/dist/mcp/handlers/archive-project.d.ts +51 -0
- package/dist/mcp/handlers/archive-project.js +112 -0
- package/dist/mcp/handlers/get-quotes.d.ts +27 -0
- package/dist/mcp/handlers/get-quotes.js +61 -0
- package/dist/mcp/handlers/get-source.d.ts +9 -0
- package/dist/mcp/handlers/get-source.js +40 -0
- package/dist/mcp/handlers/ingest.d.ts +25 -0
- package/dist/mcp/handlers/ingest.js +305 -0
- package/dist/mcp/handlers/list-projects.d.ts +4 -0
- package/dist/mcp/handlers/list-projects.js +16 -0
- package/dist/mcp/handlers/list-sources.d.ts +11 -0
- package/dist/mcp/handlers/list-sources.js +20 -0
- package/dist/mcp/handlers/research-agent.d.ts +21 -0
- package/dist/mcp/handlers/research-agent.js +369 -0
- package/dist/mcp/handlers/research.d.ts +22 -0
- package/dist/mcp/handlers/research.js +225 -0
- package/dist/mcp/handlers/retain.d.ts +18 -0
- package/dist/mcp/handlers/retain.js +92 -0
- package/dist/mcp/handlers/search.d.ts +52 -0
- package/dist/mcp/handlers/search.js +145 -0
- package/dist/mcp/handlers/sync.d.ts +47 -0
- package/dist/mcp/handlers/sync.js +211 -0
- package/dist/mcp/server.d.ts +10 -0
- package/dist/mcp/server.js +268 -0
- package/dist/mcp/tools.d.ts +16 -0
- package/dist/mcp/tools.js +297 -0
- package/dist/sync/config.d.ts +26 -0
- package/dist/sync/config.js +140 -0
- package/dist/sync/discover.d.ts +51 -0
- package/dist/sync/discover.js +190 -0
- package/dist/sync/index.d.ts +11 -0
- package/dist/sync/index.js +11 -0
- package/dist/sync/process.d.ts +50 -0
- package/dist/sync/process.js +285 -0
- package/dist/sync/processors.d.ts +24 -0
- package/dist/sync/processors.js +351 -0
- package/dist/tui/browse-handlers-ask.d.ts +30 -0
- package/dist/tui/browse-handlers-ask.js +372 -0
- package/dist/tui/browse-handlers-autocomplete.d.ts +49 -0
- package/dist/tui/browse-handlers-autocomplete.js +270 -0
- package/dist/tui/browse-handlers-extensions.d.ts +18 -0
- package/dist/tui/browse-handlers-extensions.js +107 -0
- package/dist/tui/browse-handlers-pending.d.ts +22 -0
- package/dist/tui/browse-handlers-pending.js +100 -0
- package/dist/tui/browse-handlers-research.d.ts +32 -0
- package/dist/tui/browse-handlers-research.js +363 -0
- package/dist/tui/browse-handlers-tools.d.ts +42 -0
- package/dist/tui/browse-handlers-tools.js +289 -0
- package/dist/tui/browse-handlers.d.ts +239 -0
- package/dist/tui/browse-handlers.js +1944 -0
- package/dist/tui/browse-render-extensions.d.ts +14 -0
- package/dist/tui/browse-render-extensions.js +114 -0
- package/dist/tui/browse-render-tools.d.ts +18 -0
- package/dist/tui/browse-render-tools.js +259 -0
- package/dist/tui/browse-render.d.ts +51 -0
- package/dist/tui/browse-render.js +599 -0
- package/dist/tui/browse-types.d.ts +142 -0
- package/dist/tui/browse-types.js +70 -0
- package/dist/tui/browse-ui.d.ts +10 -0
- package/dist/tui/browse-ui.js +432 -0
- package/dist/tui/browse.d.ts +17 -0
- package/dist/tui/browse.js +625 -0
- package/dist/tui/markdown.d.ts +22 -0
- package/dist/tui/markdown.js +223 -0
- package/package.json +71 -0
- package/plugins/claude-code/.claude-plugin/plugin.json +10 -0
- package/plugins/claude-code/.mcp.json +6 -0
- package/plugins/claude-code/skills/lore/SKILL.md +63 -0
- package/plugins/codex/SKILL.md +36 -0
- package/plugins/codex/agents/openai.yaml +10 -0
- package/plugins/gemini/GEMINI.md +31 -0
- package/plugins/gemini/gemini-extension.json +11 -0
- package/skills/generic-agent.md +99 -0
- package/skills/openclaw.md +67 -0
package/dist/mcp/handlers/retain.js
@@ -0,0 +1,92 @@
+/**
+ * Retain Handler - Save insights, decisions, and notes
+ *
+ * This is the "push" mechanism for adding knowledge explicitly.
+ * Retained items are immediately added to the vector store for instant searchability.
+ * Auto-pushes to git remote if configured.
+ */
+import { writeFile, mkdir } from 'fs/promises';
+import path from 'path';
+import { randomUUID } from 'crypto';
+import { addSource } from '../../core/vector-store.js';
+import { generateEmbedding, createSearchableText } from '../../core/embedder.js';
+import { gitCommitAndPush } from '../../core/git.js';
+export async function handleRetain(dbPath, dataDir, args, options = {}) {
+    const { content, project, type, source_context, tags = [] } = args;
+    const { autoPush = true } = options;
+    const id = randomUUID();
+    const timestamp = new Date().toISOString();
+    // Create the retained knowledge entry
+    const entry = {
+        id,
+        content,
+        project,
+        type,
+        source_context: source_context || 'Explicitly retained via MCP',
+        tags,
+        created_at: timestamp,
+    };
+    // Save to disk
+    const retainedDir = path.join(dataDir, 'retained', project);
+    await mkdir(retainedDir, { recursive: true });
+    const filename = `${type}-${id.slice(0, 8)}.json`;
+    const filepath = path.join(retainedDir, filename);
+    await writeFile(filepath, JSON.stringify(entry, null, 2));
+    // Add to vector store immediately for instant searchability
+    try {
+        // Generate embedding for the content
+        const searchableText = createSearchableText({
+            type: type === 'decision' ? 'theme' : 'summary',
+            text: content,
+            project,
+        });
+        const vector = await generateEmbedding(searchableText);
+        // Create source record
+        const sourceRecord = {
+            id,
+            title: `${type.charAt(0).toUpperCase() + type.slice(1)}: ${content.substring(0, 50)}...`,
+            source_type: 'retained',
+            content_type: type === 'decision' ? 'decision' : 'note',
+            projects: JSON.stringify([project]),
+            tags: JSON.stringify(tags),
+            created_at: timestamp,
+            summary: content,
+            themes_json: JSON.stringify([]),
+            quotes_json: JSON.stringify([]),
+            has_full_content: true,
+            vector: [],
+        };
+        await addSource(dbPath, sourceRecord, vector);
+        // Auto-push to git if enabled
+        let pushed = false;
+        if (autoPush) {
+            const pushResult = await gitCommitAndPush(dataDir, `Retain ${type}: ${content.substring(0, 50)}...`);
+            pushed = pushResult.success && (pushResult.message?.includes('pushed') || false);
+        }
+        return {
+            success: true,
+            id,
+            message: `Retained ${type} for project "${project}"`,
+            indexed: true,
+            synced: pushed,
+        };
+    }
+    catch (error) {
+        // Still saved to disk, just not indexed yet
+        console.error('Failed to index retained item:', error);
+        // Still try to push even if indexing failed
+        let pushed = false;
+        if (autoPush) {
+            const pushResult = await gitCommitAndPush(dataDir, `Retain ${type}: ${content.substring(0, 50)}...`);
+            pushed = pushResult.success && (pushResult.message?.includes('pushed') || false);
+        }
+        return {
+            success: true,
+            id,
+            message: `Retained ${type} for project "${project}"`,
+            indexed: false,
+            synced: pushed,
+            note: 'Saved to disk but indexing failed. Run "lore sync" to index.',
+        };
+    }
+}
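
For orientation, a minimal sketch of how a caller might invoke this handler. The paths, argument values, and import specifier below are hypothetical examples, not part of the package:

// Hypothetical invocation of handleRetain; dbPath/dataDir values are examples only.
import { handleRetain } from './dist/mcp/handlers/retain.js'; // illustrative path

const result = await handleRetain(
    '/home/user/.lore/lore.db',   // dbPath: vector store location (example)
    '/home/user/.lore/data',      // dataDir: git-backed data directory (example)
    {
        content: 'We chose LanceDB for local vector storage.',
        project: 'lore',
        type: 'decision',
        tags: ['architecture'],
    },
    { autoPush: false },          // skip the git push in this sketch
);
console.log(result.message, '| indexed:', result.indexed);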

package/dist/mcp/handlers/search.d.ts
@@ -0,0 +1,52 @@
+/**
+ * Search Handler - Semantic and hybrid search across sources
+ *
+ * Supports multiple search modes:
+ * - semantic: Vector similarity only (conceptual queries)
+ * - keyword: Full-text search only (exact terms)
+ * - hybrid: RRF fusion of semantic + keyword (default)
+ * - regex: Local file grep (pattern matching)
+ *
+ * By default, excludes sources from archived projects.
+ * Use include_archived: true to search everything.
+ */
+import type { SourceType, ContentType, Quote, SearchMode } from '../../core/types.js';
+interface SearchArgs {
+    query: string;
+    project?: string;
+    source_type?: SourceType;
+    content_type?: ContentType;
+    limit?: number;
+    include_archived?: boolean;
+    mode?: SearchMode;
+}
+interface SearchResultSource {
+    id: string;
+    title: string;
+    source_type: SourceType;
+    content_type: ContentType;
+    projects: string[];
+    created_at: string;
+    summary: string;
+    relevance_score: number;
+    matching_quotes: Quote[];
+    themes: string[];
+    /** Rank in semantic search (only for hybrid/semantic modes) */
+    semantic_rank?: number;
+    /** Rank in keyword search (only for hybrid/keyword modes) */
+    keyword_rank?: number;
+    /** Matching lines (only for regex mode) */
+    matching_lines?: Array<{
+        line_number: number;
+        snippet: string;
+    }>;
+}
+interface SearchResult {
+    sources: SearchResultSource[];
+    total_found: number;
+    query: string;
+    mode: SearchMode;
+    archived_excluded?: number;
+}
+export declare function handleSearch(dbPath: string, dataDir: string, args: SearchArgs): Promise<SearchResult>;
+export {};
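
A hypothetical call matching these declarations; the paths, query, and import specifier are invented for illustration:

// Hypothetical handleSearch call; values are examples, not package defaults.
import { handleSearch } from './dist/mcp/handlers/search.js'; // illustrative path

const { sources, total_found } = await handleSearch(
    '/home/user/.lore/lore.db',
    '/home/user/.lore/data',
    { query: 'embedding model choice', project: 'lore', mode: 'hybrid', limit: 5 },
);
for (const s of sources) {
    console.log(`${s.relevance_score.toFixed(3)}  ${s.title}`);
}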

package/dist/mcp/handlers/search.js
@@ -0,0 +1,145 @@
+/**
+ * Search Handler - Semantic and hybrid search across sources
+ *
+ * Supports multiple search modes:
+ * - semantic: Vector similarity only (conceptual queries)
+ * - keyword: Full-text search only (exact terms)
+ * - hybrid: RRF fusion of semantic + keyword (default)
+ * - regex: Local file grep (pattern matching)
+ *
+ * By default, excludes sources from archived projects.
+ * Use include_archived: true to search everything.
+ */
+import { searchSources, getSourceById } from '../../core/vector-store.js';
+import { generateEmbedding } from '../../core/embedder.js';
+import { searchLocalFiles, getMatchSnippet } from '../../core/local-search.js';
+import { loadArchivedProjects } from './archive-project.js';
+export async function handleSearch(dbPath, dataDir, args) {
+    const { query, project, source_type, content_type, limit = 10, include_archived = false, mode = 'hybrid', } = args;
+    // Handle regex mode separately - uses local file search
+    if (mode === 'regex') {
+        return handleRegexSearch(dbPath, dataDir, {
+            query,
+            project,
+            limit,
+            include_archived,
+        });
+    }
+    // Generate embedding for query (needed for semantic/hybrid modes)
+    const queryVector = await generateEmbedding(query);
+    // Search sources (fetch more to account for archived filtering)
+    const fetchLimit = include_archived ? limit : limit * 2;
+    const results = await searchSources(dbPath, queryVector, {
+        limit: fetchLimit,
+        project,
+        source_type,
+        content_type,
+        mode,
+        queryText: query,
+    });
+    // Filter out archived projects unless explicitly requested
+    let filteredResults = results;
+    let archivedExcluded = 0;
+    if (!include_archived) {
+        const archivedProjects = await loadArchivedProjects(dataDir);
+        const archivedNames = new Set(archivedProjects.map((p) => p.project.toLowerCase()));
+        filteredResults = results.filter((result) => {
+            const isArchived = result.projects.some((p) => archivedNames.has(p.toLowerCase()));
+            if (isArchived)
+                archivedExcluded++;
+            return !isArchived;
+        });
+    }
+    // Format results with relevant quotes highlighted
+    const sources = filteredResults.slice(0, limit).map((result) => {
+        // Find quotes most relevant to the query (simple keyword match for now)
+        const queryWords = query.toLowerCase().split(/\s+/);
+        const matchingQuotes = result.quotes
+            .filter((q) => queryWords.some((word) => q.text.toLowerCase().includes(word)))
+            .slice(0, 3);
+        return {
+            id: result.id,
+            title: result.title,
+            source_type: result.source_type,
+            content_type: result.content_type,
+            projects: result.projects,
+            created_at: result.created_at,
+            summary: result.summary,
+            relevance_score: result.score,
+            matching_quotes: matchingQuotes,
+            themes: result.themes.map((t) => t.name),
+            semantic_rank: result.semantic_rank,
+            keyword_rank: result.keyword_rank,
+        };
+    });
+    return {
+        sources,
+        total_found: sources.length,
+        query,
+        mode,
+        archived_excluded: archivedExcluded > 0 ? archivedExcluded : undefined,
+    };
+}
+/**
+ * Handle regex search using local file grep
+ */
+async function handleRegexSearch(dbPath, dataDir, args) {
+    const { query, project, limit = 10, include_archived = false } = args;
+    // Search local files
+    const localResults = await searchLocalFiles(dataDir, query, {
+        maxTotalResults: limit * 2, // Fetch extra for filtering
+        maxMatchesPerFile: 5,
+        ignoreCase: false,
+    });
+    // Get source details from database to enrich results
+    const sources = [];
+    let archivedExcluded = 0;
+    const archivedProjects = include_archived
+        ? []
+        : await loadArchivedProjects(dataDir);
+    const archivedNames = new Set(archivedProjects.map((p) => p.project.toLowerCase()));
+    for (const localResult of localResults) {
+        if (sources.length >= limit)
+            break;
+        // Get source metadata from database
+        const sourceData = await getSourceById(dbPath, localResult.source_id);
+        if (!sourceData)
+            continue;
+        // Filter by project if specified
+        if (project && !sourceData.projects.includes(project))
+            continue;
+        // Filter out archived projects
+        if (!include_archived) {
+            const isArchived = sourceData.projects.some((p) => archivedNames.has(p.toLowerCase()));
+            if (isArchived) {
+                archivedExcluded++;
+                continue;
+            }
+        }
+        // Format matching lines
+        const matchingLines = localResult.matches.slice(0, 3).map((m) => ({
+            line_number: m.line_number,
+            snippet: getMatchSnippet(m.line_content, m.match_start, m.match_end, 80),
+        }));
+        sources.push({
+            id: sourceData.id,
+            title: sourceData.title,
+            source_type: sourceData.source_type,
+            content_type: sourceData.content_type,
+            projects: sourceData.projects,
+            created_at: sourceData.created_at,
+            summary: sourceData.summary,
+            relevance_score: localResult.matches.length / 10, // Simple score based on match count
+            matching_quotes: [],
+            themes: sourceData.themes.map((t) => t.name),
+            matching_lines: matchingLines,
+        });
+    }
+    return {
+        sources,
+        total_found: sources.length,
+        query,
+        mode: 'regex',
+        archived_excluded: archivedExcluded > 0 ? archivedExcluded : undefined,
+    };
+}
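
The hybrid mode's RRF fusion happens inside searchSources (in vector-store.js, whose body is not shown in this excerpt); the header comment refers to the standard reciprocal rank fusion formula, which merges two ranked lists by summing reciprocal ranks. A generic sketch of the technique, independent of this package's actual implementation:

// Generic reciprocal rank fusion (RRF) sketch; k = 60 is the conventional constant.
// Illustrates the technique named in the header comment; the package's real
// implementation lives in vector-store.js and may differ.
function rrfFuse(semanticIds: string[], keywordIds: string[], k = 60): string[] {
    const scores = new Map<string, number>();
    for (const [rank, id] of semanticIds.entries()) {
        scores.set(id, (scores.get(id) ?? 0) + 1 / (k + rank + 1));
    }
    for (const [rank, id] of keywordIds.entries()) {
        scores.set(id, (scores.get(id) ?? 0) + 1 / (k + rank + 1));
    }
    // Higher fused score = appears early in one or both rankings
    return [...scores.entries()]
        .sort((a, b) => b[1] - a[1])
        .map(([id]) => id);
}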

package/dist/mcp/handlers/sync.d.ts
@@ -0,0 +1,47 @@
+/**
+ * Sync Handler - Universal file sync with two-phase processing
+ *
+ * Phase 1: Discovery (NO LLM calls - essentially free)
+ * - Scan configured directories
+ * - Compute content hashes
+ * - Check Supabase for existing hashes
+ *
+ * Phase 2: Processing (only for NEW files)
+ * - Claude extracts metadata
+ * - Generate embeddings
+ * - Store in Supabase + local data dir
+ */
+interface SyncArgs {
+    git_pull?: boolean;
+    git_push?: boolean;
+    index_new?: boolean;
+    dry_run?: boolean;
+    use_legacy?: boolean;
+}
+interface SyncResult {
+    git_pulled: boolean;
+    git_pushed: boolean;
+    git_error?: string;
+    sources_found: number;
+    sources_indexed: number;
+    already_indexed: number;
+    discovery?: {
+        sources_scanned: number;
+        total_files: number;
+        new_files: number;
+        edited_files: number;
+        existing_files: number;
+        errors: number;
+    };
+    processing?: {
+        processed: number;
+        errors: number;
+        titles: string[];
+    };
+}
+export declare function handleSync(dbPath: string, dataDir: string, args: SyncArgs, options?: {
+    hookContext?: {
+        mode: 'mcp' | 'cli';
+    };
+}): Promise<SyncResult>;
+export {};
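
A hypothetical dry-run call against these declarations (paths and import specifier are examples only); per the implementation below, dry_run runs discovery but skips processing and the git push:

// Hypothetical dry-run sync; discovery only, no LLM processing, no push.
import { handleSync } from './dist/mcp/handlers/sync.js'; // illustrative path

const report = await handleSync(
    '/home/user/.lore/lore.db',
    '/home/user/.lore/data',
    { dry_run: true, git_push: false },
    { hookContext: { mode: 'cli' } },
);
console.log(report.discovery); // counts of new/edited/existing files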

package/dist/mcp/handlers/sync.js
@@ -0,0 +1,211 @@
+/**
+ * Sync Handler - Universal file sync with two-phase processing
+ *
+ * Phase 1: Discovery (NO LLM calls - essentially free)
+ * - Scan configured directories
+ * - Compute content hashes
+ * - Check Supabase for existing hashes
+ *
+ * Phase 2: Processing (only for NEW files)
+ * - Claude extracts metadata
+ * - Generate embeddings
+ * - Store in Supabase + local data dir
+ */
+import { readdir, readFile } from 'fs/promises';
+import { existsSync } from 'fs';
+import path from 'path';
+import { getAllSources, addSource, resetDatabaseConnection, } from '../../core/vector-store.js';
+import { generateEmbedding, createSearchableText } from '../../core/embedder.js';
+import { gitPull, gitCommitAndPush } from '../../core/git.js';
+import { loadSyncConfig, getEnabledSources } from '../../sync/config.js';
+import { discoverAllSources, summarizeDiscovery } from '../../sync/discover.js';
+import { processFiles } from '../../sync/process.js';
+// ============================================================================
+// Legacy Disk-Based Sync (for backward compatibility)
+// ============================================================================
+async function loadSourceFromDisk(sourcesDir, sourceId) {
+    const sourceDir = path.join(sourcesDir, sourceId);
+    try {
+        const metadata = JSON.parse(await readFile(path.join(sourceDir, 'metadata.json'), 'utf-8'));
+        const content = await readFile(path.join(sourceDir, 'content.md'), 'utf-8');
+        let insights = { summary: '', themes: [] };
+        try {
+            const insightsFile = JSON.parse(await readFile(path.join(sourceDir, 'insights.json'), 'utf-8'));
+            insights.summary = insightsFile.summary || '';
+            insights.themes = insightsFile.themes || [];
+        }
+        catch {
+            insights.summary = content.substring(0, 500) + (content.length > 500 ? '...' : '');
+        }
+        return {
+            source: {
+                ...metadata,
+                content,
+            },
+            insights,
+        };
+    }
+    catch {
+        return null;
+    }
+}
+async function indexSource(dbPath, source, insights) {
+    const summary = insights.summary || source.content.substring(0, 500);
+    const searchableText = createSearchableText({
+        type: 'summary',
+        text: summary,
+        project: source.projects[0],
+    });
+    const vector = await generateEmbedding(searchableText);
+    const sourceRecord = {
+        id: source.id,
+        title: source.title,
+        source_type: source.source_type,
+        content_type: source.content_type,
+        projects: JSON.stringify(source.projects),
+        tags: JSON.stringify(source.tags),
+        created_at: source.created_at,
+        summary,
+        themes_json: JSON.stringify(insights.themes || []),
+        quotes_json: JSON.stringify([]),
+        has_full_content: true,
+        vector: [],
+    };
+    await addSource(dbPath, sourceRecord, vector, {
+        content_hash: source.content_hash,
+        source_path: source.source_path,
+    });
+}
+async function legacyDiskSync(dbPath, dataDir) {
+    const sourcesDir = path.join(dataDir, 'sources');
+    const result = { sources_found: 0, sources_indexed: 0, already_indexed: 0 };
+    if (!existsSync(sourcesDir)) {
+        return result;
+    }
+    try {
+        const diskSources = await readdir(sourcesDir, { withFileTypes: true });
+        const diskIds = diskSources
+            .filter((d) => d.isDirectory() && !d.name.startsWith('.'))
+            .map((d) => d.name);
+        result.sources_found = diskIds.length;
+        const indexedSources = await getAllSources(dbPath, {});
+        const indexedIds = new Set(indexedSources.map((s) => s.id));
+        result.already_indexed = indexedIds.size;
+        const unsyncedIds = diskIds.filter((id) => !indexedIds.has(id));
+        for (const sourceId of unsyncedIds) {
+            const data = await loadSourceFromDisk(sourcesDir, sourceId);
+            if (data) {
+                await indexSource(dbPath, data.source, data.insights);
+                result.sources_indexed++;
+            }
+        }
+    }
+    catch {
+        // Sources directory doesn't exist or other error
+    }
+    return result;
+}
+// ============================================================================
+// Universal Sync (new system)
+// ============================================================================
+async function universalSync(dataDir, dryRun, hookContext) {
+    // Load sync configuration
+    const config = await loadSyncConfig();
+    const enabledSources = getEnabledSources(config);
+    if (enabledSources.length === 0) {
+        return {
+            discovery: {
+                sources_scanned: 0,
+                total_files: 0,
+                new_files: 0,
+                edited_files: 0,
+                existing_files: 0,
+                errors: 0,
+            },
+            processing: undefined,
+        };
+    }
+    // Phase 1: Discovery
+    const discoveryResults = await discoverAllSources(enabledSources);
+    const summary = summarizeDiscovery(discoveryResults);
+    const discovery = {
+        sources_scanned: summary.totalSources,
+        total_files: summary.totalFiles,
+        new_files: summary.newFiles,
+        edited_files: summary.editedFiles,
+        existing_files: summary.existingFiles,
+        errors: summary.errors,
+    };
+    // If dry run or no new/edited files, stop here
+    const totalToProcess = summary.newFiles + summary.editedFiles;
+    if (dryRun || totalToProcess === 0) {
+        return { discovery, processing: undefined };
+    }
+    // Phase 2: Process new and edited files
+    const allNewFiles = discoveryResults.flatMap(r => r.newFiles);
+    const allEditedFiles = discoveryResults.flatMap(r => r.editedFiles);
+    const allFilesToProcess = [...allNewFiles, ...allEditedFiles];
+    const processResult = await processFiles(allFilesToProcess, dataDir, {
+        gitPush: false, // We'll handle git at the end
+        hookContext,
+    });
+    const processing = {
+        processed: processResult.processed.length,
+        errors: processResult.errors.length,
+        titles: processResult.processed.map(p => p.metadata.title),
+    };
+    return { discovery, processing };
+}
+// ============================================================================
+// Main Handler
+// ============================================================================
+export async function handleSync(dbPath, dataDir, args, options = {}) {
+    const doPull = args.git_pull !== false;
+    const doPush = args.git_push !== false;
+    const indexNew = args.index_new !== false;
+    const dryRun = args.dry_run === true;
+    const useLegacy = args.use_legacy === true;
+    resetDatabaseConnection();
+    const result = {
+        git_pulled: false,
+        git_pushed: false,
+        sources_found: 0,
+        sources_indexed: 0,
+        already_indexed: 0,
+    };
+    // 1. Git pull
+    if (doPull) {
+        const pullResult = await gitPull(dataDir);
+        result.git_pulled = pullResult.success && (pullResult.message?.includes('Pulled') || false);
+        if (pullResult.error) {
+            result.git_error = pullResult.error;
+        }
+    }
+    // 2. Sync sources
+    if (indexNew) {
+        // Check if we have sync config
+        const config = await loadSyncConfig();
+        const hasUniversalSources = getEnabledSources(config).length > 0;
+        if (hasUniversalSources && !useLegacy) {
+            // Use new universal sync
+            const { discovery, processing } = await universalSync(dataDir, dryRun, options.hookContext);
+            result.discovery = discovery;
+            result.processing = processing;
+        }
+        // Always run legacy disk sync for backward compatibility
+        // (picks up sources added via old `lore ingest` command)
+        const legacyResult = await legacyDiskSync(dbPath, dataDir);
+        result.sources_found = legacyResult.sources_found;
+        result.sources_indexed = legacyResult.sources_indexed;
+        result.already_indexed = legacyResult.already_indexed;
+    }
+    // 3. Git push
+    if (doPush && !dryRun) {
+        const totalNew = (result.processing?.processed || 0) + result.sources_indexed;
+        if (totalNew > 0) {
+            const pushResult = await gitCommitAndPush(dataDir, `Sync: Added ${totalNew} source(s)`);
+            result.git_pushed = pushResult.success && (pushResult.message?.includes('pushed') || false);
+        }
+    }
+    return result;
+}
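
Phase 1's "compute content hashes" step is implemented in sync/discover.js, which is not part of this excerpt; the idea is to hash each file's bytes so unchanged files can be skipped without any LLM calls. A minimal sketch of that idea, assuming SHA-256 over file contents (the package's actual hash function and classification logic may differ):

// Minimal content-hash sketch for discovery-style dedupe; not the package's code.
import { createHash } from 'crypto';
import { readFile } from 'fs/promises';

async function contentHash(filePath: string): Promise<string> {
    const bytes = await readFile(filePath);
    return createHash('sha256').update(bytes).digest('hex');
}
// A file is "new" if its hash is absent from the index, "edited" if the path is
// known but the hash changed, and "existing" if both path and hash match.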

package/dist/mcp/server.d.ts
@@ -0,0 +1,10 @@
+#!/usr/bin/env node
+/**
+ * Lore - MCP Server
+ *
+ * Exposes knowledge repository tools via Model Context Protocol.
+ * Supports both simple query tools and agentic research capabilities.
+ *
+ * Auto-syncs: Periodically checks for new sources and syncs them.
+ */
+export {};