@getlore/cli 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +13 -0
- package/README.md +80 -0
- package/dist/cli/colors.d.ts +48 -0
- package/dist/cli/colors.js +48 -0
- package/dist/cli/commands/ask.d.ts +7 -0
- package/dist/cli/commands/ask.js +97 -0
- package/dist/cli/commands/auth.d.ts +10 -0
- package/dist/cli/commands/auth.js +484 -0
- package/dist/cli/commands/daemon.d.ts +22 -0
- package/dist/cli/commands/daemon.js +244 -0
- package/dist/cli/commands/docs.d.ts +7 -0
- package/dist/cli/commands/docs.js +188 -0
- package/dist/cli/commands/extensions.d.ts +7 -0
- package/dist/cli/commands/extensions.js +204 -0
- package/dist/cli/commands/misc.d.ts +7 -0
- package/dist/cli/commands/misc.js +172 -0
- package/dist/cli/commands/pending.d.ts +7 -0
- package/dist/cli/commands/pending.js +63 -0
- package/dist/cli/commands/projects.d.ts +7 -0
- package/dist/cli/commands/projects.js +136 -0
- package/dist/cli/commands/search.d.ts +7 -0
- package/dist/cli/commands/search.js +102 -0
- package/dist/cli/commands/skills.d.ts +24 -0
- package/dist/cli/commands/skills.js +447 -0
- package/dist/cli/commands/sources.d.ts +7 -0
- package/dist/cli/commands/sources.js +121 -0
- package/dist/cli/commands/sync.d.ts +31 -0
- package/dist/cli/commands/sync.js +768 -0
- package/dist/cli/helpers.d.ts +30 -0
- package/dist/cli/helpers.js +119 -0
- package/dist/core/auth.d.ts +62 -0
- package/dist/core/auth.js +330 -0
- package/dist/core/config.d.ts +41 -0
- package/dist/core/config.js +96 -0
- package/dist/core/data-repo.d.ts +31 -0
- package/dist/core/data-repo.js +146 -0
- package/dist/core/embedder.d.ts +22 -0
- package/dist/core/embedder.js +104 -0
- package/dist/core/git.d.ts +37 -0
- package/dist/core/git.js +140 -0
- package/dist/core/index.d.ts +4 -0
- package/dist/core/index.js +5 -0
- package/dist/core/insight-extractor.d.ts +26 -0
- package/dist/core/insight-extractor.js +114 -0
- package/dist/core/local-search.d.ts +43 -0
- package/dist/core/local-search.js +221 -0
- package/dist/core/themes.d.ts +15 -0
- package/dist/core/themes.js +77 -0
- package/dist/core/types.d.ts +177 -0
- package/dist/core/types.js +9 -0
- package/dist/core/user-settings.d.ts +15 -0
- package/dist/core/user-settings.js +42 -0
- package/dist/core/vector-store-lance.d.ts +98 -0
- package/dist/core/vector-store-lance.js +384 -0
- package/dist/core/vector-store-supabase.d.ts +89 -0
- package/dist/core/vector-store-supabase.js +295 -0
- package/dist/core/vector-store.d.ts +131 -0
- package/dist/core/vector-store.js +503 -0
- package/dist/daemon-runner.d.ts +8 -0
- package/dist/daemon-runner.js +246 -0
- package/dist/extensions/config.d.ts +22 -0
- package/dist/extensions/config.js +102 -0
- package/dist/extensions/proposals.d.ts +30 -0
- package/dist/extensions/proposals.js +178 -0
- package/dist/extensions/registry.d.ts +35 -0
- package/dist/extensions/registry.js +309 -0
- package/dist/extensions/sandbox.d.ts +16 -0
- package/dist/extensions/sandbox.js +17 -0
- package/dist/extensions/types.d.ts +114 -0
- package/dist/extensions/types.js +4 -0
- package/dist/extensions/worker.d.ts +1 -0
- package/dist/extensions/worker.js +49 -0
- package/dist/index.d.ts +17 -0
- package/dist/index.js +105 -0
- package/dist/mcp/handlers/archive-project.d.ts +51 -0
- package/dist/mcp/handlers/archive-project.js +112 -0
- package/dist/mcp/handlers/get-quotes.d.ts +27 -0
- package/dist/mcp/handlers/get-quotes.js +61 -0
- package/dist/mcp/handlers/get-source.d.ts +9 -0
- package/dist/mcp/handlers/get-source.js +40 -0
- package/dist/mcp/handlers/ingest.d.ts +25 -0
- package/dist/mcp/handlers/ingest.js +305 -0
- package/dist/mcp/handlers/list-projects.d.ts +4 -0
- package/dist/mcp/handlers/list-projects.js +16 -0
- package/dist/mcp/handlers/list-sources.d.ts +11 -0
- package/dist/mcp/handlers/list-sources.js +20 -0
- package/dist/mcp/handlers/research-agent.d.ts +21 -0
- package/dist/mcp/handlers/research-agent.js +369 -0
- package/dist/mcp/handlers/research.d.ts +22 -0
- package/dist/mcp/handlers/research.js +225 -0
- package/dist/mcp/handlers/retain.d.ts +18 -0
- package/dist/mcp/handlers/retain.js +92 -0
- package/dist/mcp/handlers/search.d.ts +52 -0
- package/dist/mcp/handlers/search.js +145 -0
- package/dist/mcp/handlers/sync.d.ts +47 -0
- package/dist/mcp/handlers/sync.js +211 -0
- package/dist/mcp/server.d.ts +10 -0
- package/dist/mcp/server.js +268 -0
- package/dist/mcp/tools.d.ts +16 -0
- package/dist/mcp/tools.js +297 -0
- package/dist/sync/config.d.ts +26 -0
- package/dist/sync/config.js +140 -0
- package/dist/sync/discover.d.ts +51 -0
- package/dist/sync/discover.js +190 -0
- package/dist/sync/index.d.ts +11 -0
- package/dist/sync/index.js +11 -0
- package/dist/sync/process.d.ts +50 -0
- package/dist/sync/process.js +285 -0
- package/dist/sync/processors.d.ts +24 -0
- package/dist/sync/processors.js +351 -0
- package/dist/tui/browse-handlers-ask.d.ts +30 -0
- package/dist/tui/browse-handlers-ask.js +372 -0
- package/dist/tui/browse-handlers-autocomplete.d.ts +49 -0
- package/dist/tui/browse-handlers-autocomplete.js +270 -0
- package/dist/tui/browse-handlers-extensions.d.ts +18 -0
- package/dist/tui/browse-handlers-extensions.js +107 -0
- package/dist/tui/browse-handlers-pending.d.ts +22 -0
- package/dist/tui/browse-handlers-pending.js +100 -0
- package/dist/tui/browse-handlers-research.d.ts +32 -0
- package/dist/tui/browse-handlers-research.js +363 -0
- package/dist/tui/browse-handlers-tools.d.ts +42 -0
- package/dist/tui/browse-handlers-tools.js +289 -0
- package/dist/tui/browse-handlers.d.ts +239 -0
- package/dist/tui/browse-handlers.js +1944 -0
- package/dist/tui/browse-render-extensions.d.ts +14 -0
- package/dist/tui/browse-render-extensions.js +114 -0
- package/dist/tui/browse-render-tools.d.ts +18 -0
- package/dist/tui/browse-render-tools.js +259 -0
- package/dist/tui/browse-render.d.ts +51 -0
- package/dist/tui/browse-render.js +599 -0
- package/dist/tui/browse-types.d.ts +142 -0
- package/dist/tui/browse-types.js +70 -0
- package/dist/tui/browse-ui.d.ts +10 -0
- package/dist/tui/browse-ui.js +432 -0
- package/dist/tui/browse.d.ts +17 -0
- package/dist/tui/browse.js +625 -0
- package/dist/tui/markdown.d.ts +22 -0
- package/dist/tui/markdown.js +223 -0
- package/package.json +71 -0
- package/plugins/claude-code/.claude-plugin/plugin.json +10 -0
- package/plugins/claude-code/.mcp.json +6 -0
- package/plugins/claude-code/skills/lore/SKILL.md +63 -0
- package/plugins/codex/SKILL.md +36 -0
- package/plugins/codex/agents/openai.yaml +10 -0
- package/plugins/gemini/GEMINI.md +31 -0
- package/plugins/gemini/gemini-extension.json +11 -0
- package/skills/generic-agent.md +99 -0
- package/skills/openclaw.md +67 -0
|
@@ -0,0 +1,112 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Archive Project Handler - Mark a project as archived
|
|
3
|
+
*
|
|
4
|
+
* Archived projects are excluded from search by default but preserved for history.
|
|
5
|
+
* This is a human-triggered curation action, not automatic.
|
|
6
|
+
* Auto-pushes to git remote if configured.
|
|
7
|
+
*/
|
|
8
|
+
import { readFile, writeFile, mkdir } from 'fs/promises';
|
|
9
|
+
import path from 'path';
|
|
10
|
+
import { getProjectStats } from '../../core/vector-store.js';
|
|
11
|
+
import { gitCommitAndPush } from '../../core/git.js';
|
|
12
|
+
// File (relative to the data dir) that records which projects are archived.
const ARCHIVED_PROJECTS_FILE = 'archived-projects.json';
/**
 * Load the archived-projects list from disk.
 *
 * Returns the parsed JSON array, or an empty array when the file is
 * missing, unreadable, or not valid JSON (a fresh install has no file).
 */
export async function loadArchivedProjects(dataDir) {
    try {
        const raw = await readFile(path.join(dataDir, ARCHIVED_PROJECTS_FILE), 'utf-8');
        return JSON.parse(raw);
    }
    catch {
        // No archive file yet (or corrupt) — treat as "nothing archived".
        return [];
    }
}
|
|
26
|
+
/**
|
|
27
|
+
* Save archived projects list
|
|
28
|
+
*/
|
|
29
|
+
/**
 * Persist the archived-projects list as pretty-printed JSON.
 * Creates the data directory first so the initial archive on a fresh
 * install does not fail.
 */
async function saveArchivedProjects(dataDir, projects) {
    await mkdir(dataDir, { recursive: true });
    const archiveFile = path.join(dataDir, ARCHIVED_PROJECTS_FILE);
    await writeFile(archiveFile, JSON.stringify(projects, null, 2));
}
|
|
34
|
+
/**
|
|
35
|
+
* Check if a project is archived
|
|
36
|
+
*/
|
|
37
|
+
/**
 * Check whether a project is archived (case-insensitive name match).
 */
export async function isProjectArchived(dataDir, project) {
    const wanted = project.toLowerCase();
    const entries = await loadArchivedProjects(dataDir);
    return entries.some((entry) => entry.project.toLowerCase() === wanted);
}
|
|
41
|
+
/**
|
|
42
|
+
* Get archived project info
|
|
43
|
+
*/
|
|
44
|
+
/**
 * Look up the archive entry for a project (case-insensitive name match).
 * Returns the stored record, or null when the project is not archived.
 */
export async function getArchivedProjectInfo(dataDir, project) {
    const wanted = project.toLowerCase();
    const entries = await loadArchivedProjects(dataDir);
    const entry = entries.find((e) => e.project.toLowerCase() === wanted);
    return entry ?? null;
}
|
|
48
|
+
/**
 * Archive a project: record it in archived-projects.json and (optionally)
 * commit & push the data repo.
 *
 * Fails without side effects when the project does not exist in the vector
 * store or is already archived. The stored name uses the exact casing from
 * the database, not the caller's casing.
 *
 * @param dbPath   path to the vector store database
 * @param dataDir  data repo directory holding archived-projects.json
 * @param args     { project, reason?, successor_project? }
 * @param options  { autoPush? } — defaults to true
 */
export async function handleArchiveProject(dbPath, dataDir, args, options = {}) {
    const { project, reason, successor_project } = args;
    const { autoPush = true } = options;
    // Shared shape for both failure cases.
    const failure = (error) => ({
        success: false,
        project,
        archived_at: new Date().toISOString(),
        error,
        sources_affected: 0,
    });
    // The project must exist in the vector store.
    const stats = await getProjectStats(dbPath);
    const match = stats.find((p) => p.project.toLowerCase() === project.toLowerCase());
    if (!match) {
        return failure(`Project "${project}" not found`);
    }
    // Refuse double-archiving.
    if (await isProjectArchived(dataDir, project)) {
        return failure(`Project "${project}" is already archived`);
    }
    // Append the new entry and persist.
    const entries = await loadArchivedProjects(dataDir);
    const entry = {
        project: match.project, // Use exact case from DB
        archived_at: new Date().toISOString(),
        reason,
        successor_project,
    };
    entries.push(entry);
    await saveArchivedProjects(dataDir, entries);
    // Auto-push to git if enabled.
    let synced = false;
    if (autoPush) {
        const suffix = reason ? ` (${reason})` : '';
        const pushResult = await gitCommitAndPush(dataDir, `Archive project: ${match.project}${suffix}`);
        synced = pushResult.success && (pushResult.message?.includes('pushed') || false);
    }
    return {
        success: true,
        project: match.project,
        archived_at: entry.archived_at,
        reason,
        successor_project,
        sources_affected: match.source_count,
        synced,
    };
}
|
|
100
|
+
/**
|
|
101
|
+
* Unarchive a project (restore to active)
|
|
102
|
+
*/
|
|
103
|
+
/**
 * Unarchive a project (restore to active). Removes its entry from the
 * archived-projects list; case-insensitive name match.
 */
export async function handleUnarchiveProject(dataDir, project) {
    const entries = await loadArchivedProjects(dataDir);
    const wanted = project.toLowerCase();
    const index = entries.findIndex((entry) => entry.project.toLowerCase() === wanted);
    if (index < 0) {
        return { success: false, error: `Project "${project}" is not archived` };
    }
    entries.splice(index, 1);
    await saveArchivedProjects(dataDir, entries);
    return { success: true };
}
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
/**
 * Get Quotes Handler - Find quotes with citations
 */
import type { ThemeName } from '../../core/types.js';
/** Arguments accepted by {@link handleGetQuotes}. At least one of `query` or `theme` is expected; otherwise the handler has nothing to search by. */
interface GetQuotesArgs {
    /** Free-text semantic search query. */
    query?: string;
    /** Restrict results to quotes tagged with this theme. */
    theme?: ThemeName;
    /** Project filter — NOTE(review): declared but not obviously applied by the JS implementation; confirm. */
    project?: string;
    /** Maximum number of quotes to return (implementation defaults to 20). */
    limit?: number;
}
/** A single quote hit, enriched with citation info about its source document. */
interface QuoteResult {
    /** The quote text itself. */
    text: string;
    /** Who said it, when known. */
    speaker?: string;
    /** Position in the source (format depends on the source type). */
    timestamp?: string;
    /** Theme the quote was filed under, if any. */
    theme?: string;
    /** Citation: the source document the quote came from. */
    source: {
        id: string;
        title: string;
        type: string;
    };
    /** Similarity score from the vector search, when available. */
    relevance_score?: number;
}
export declare function handleGetQuotes(dbPath: string, args: GetQuotesArgs): Promise<{
    quotes: QuoteResult[];
    total: number;
}>;
export {};
|
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Get Quotes Handler - Find quotes with citations
|
|
3
|
+
*/
|
|
4
|
+
import { searchChunks, getSourceById } from '../../core/vector-store.js';
|
|
5
|
+
import { generateEmbedding } from '../../core/embedder.js';
|
|
6
|
+
/**
 * Find quotes with citations.
 *
 * With a `query`, does a semantic search over quote chunks; with only a
 * `theme`, searches a generic "<theme> user feedback" embedding (fetching
 * 2x the limit, then truncating). With neither, returns an empty result.
 * Each hit is enriched with its source document's title/type, cached per
 * source id to avoid repeated lookups.
 */
export async function handleGetQuotes(dbPath, args) {
    const { query, theme, project, limit = 20 } = args;
    let matches = [];
    if (query) {
        // Semantic search for quotes
        const vector = await generateEmbedding(query);
        matches = await searchChunks(dbPath, vector, {
            limit,
            type: 'quote',
            theme_name: theme,
        });
    }
    else if (theme) {
        // Theme-only: search a generic embedding, over-fetch, then truncate.
        const vector = await generateEmbedding(`${theme} user feedback`);
        const candidates = await searchChunks(dbPath, vector, {
            limit: limit * 2,
            type: 'quote',
            theme_name: theme,
        });
        matches = candidates.slice(0, limit);
    }
    else {
        // Nothing to search by.
        return { quotes: [], total: 0 };
    }
    // Enrich each hit with source info, caching lookups per source id.
    const sourceCache = new Map();
    const quotes = [];
    for (const match of matches) {
        let info = sourceCache.get(match.source_id);
        if (!info) {
            const source = await getSourceById(dbPath, match.source_id);
            if (source) {
                info = { title: source.title, type: source.source_type };
                sourceCache.set(match.source_id, info);
            }
            else {
                // Missing source: cite it as unknown (not cached).
                info = { title: 'Unknown', type: 'unknown' };
            }
        }
        quotes.push({
            text: match.content,
            speaker: match.speaker || undefined,
            timestamp: match.timestamp || undefined,
            theme: match.theme_name || undefined,
            source: {
                id: match.source_id,
                title: info.title,
                type: info.type,
            },
            relevance_score: match.score,
        });
    }
    return { quotes, total: quotes.length };
}
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
/**
 * Get Source Handler - Retrieve full source document details
 */
/** Arguments accepted by {@link handleGetSource}. */
interface GetSourceArgs {
    /** Id of the source document to fetch. */
    source_id: string;
    /** When true, also read the document's full markdown content from disk. */
    include_content?: boolean;
}
export declare function handleGetSource(dbPath: string, dataDir: string, args: GetSourceArgs): Promise<unknown>;
export {};
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Get Source Handler - Retrieve full source document details
|
|
3
|
+
*/
|
|
4
|
+
import { getSourceById } from '../../core/vector-store.js';
|
|
5
|
+
import { readFile } from 'fs/promises';
|
|
6
|
+
import path from 'path';
|
|
7
|
+
/**
 * Retrieve full source document details.
 *
 * Looks the source up in the vector store; when `include_content` is set,
 * additionally reads `sources/<id>/content.md` from the data dir. If the
 * content file is missing, `full_content` is null and a `content_note`
 * explains why.
 */
export async function handleGetSource(dbPath, dataDir, args) {
    const { source_id, include_content = false } = args;
    const source = await getSourceById(dbPath, source_id);
    if (!source) {
        return { error: `Source not found: ${source_id}` };
    }
    const result = {
        id: source.id,
        title: source.title,
        source_type: source.source_type,
        content_type: source.content_type,
        projects: source.projects,
        tags: source.tags,
        created_at: source.created_at,
        summary: source.summary,
        themes: source.themes,
        quotes: source.quotes,
        source_url: source.source_url || undefined,
        source_name: source.source_name || undefined,
    };
    if (include_content) {
        const contentPath = path.join(dataDir, 'sources', source_id, 'content.md');
        try {
            result.full_content = await readFile(contentPath, 'utf-8');
        }
        catch {
            // Metadata is indexed but the markdown file is gone/unreadable.
            result.full_content = null;
            result.content_note = 'Full content not available on disk';
        }
    }
    return result;
}
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
/**
 * Ingest Handler - Primary agent-facing surface for pushing content into Lore
 *
 * Agents (Claude Code, OpenClaw, ChatGPT, etc.) use this to push content
 * from external systems (Slack, Notion, GitHub, meetings, etc.) into Lore.
 * Content is deduplicated by SHA256 hash, saved to disk, and immediately indexed.
 */
/** Arguments accepted by {@link handleIngest}. */
interface IngestArgs {
    /** Full document text (markdown or plain text). */
    content: string;
    /** Human-readable title for the source. */
    title: string;
    /** Project to file the source under. */
    project: string;
    /** Free-form type hint; the implementation normalizes it to a canonical kebab-case form. */
    source_type?: string;
    /** Original creation timestamp — NOTE(review): implementation uses it as an ISO timestamp; confirm expected format. */
    date?: string;
    /** People involved (meeting attendees, thread participants, ...). */
    participants?: string[];
    /** Arbitrary tags for filtering. */
    tags?: string[];
    /** Link back to the original document, when one exists. */
    source_url?: string;
    /** Name of the originating system (e.g. "Slack", "Notion"). */
    source_name?: string;
}
export declare function handleIngest(dbPath: string, dataDir: string, args: IngestArgs, options?: {
    /** Commit & push the data repo after ingesting (defaults to true in the implementation). */
    autoPush?: boolean;
    /** Context forwarded to extension hooks. */
    hookContext?: {
        mode: 'mcp' | 'cli';
    };
}): Promise<unknown>;
export {};
|
|
@@ -0,0 +1,305 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Ingest Handler - Primary agent-facing surface for pushing content into Lore
|
|
3
|
+
*
|
|
4
|
+
* Agents (Claude Code, OpenClaw, ChatGPT, etc.) use this to push content
|
|
5
|
+
* from external systems (Slack, Notion, GitHub, meetings, etc.) into Lore.
|
|
6
|
+
* Content is deduplicated by SHA256 hash, saved to disk, and immediately indexed.
|
|
7
|
+
*/
|
|
8
|
+
import { writeFile, mkdir } from 'fs/promises';
|
|
9
|
+
import path from 'path';
|
|
10
|
+
import { randomUUID, createHash } from 'crypto';
|
|
11
|
+
import { addSource, checkContentHashExists } from '../../core/vector-store.js';
|
|
12
|
+
import { generateEmbedding, createSearchableText } from '../../core/embedder.js';
|
|
13
|
+
import { extractInsights } from '../../core/insight-extractor.js';
|
|
14
|
+
import { gitCommitAndPush } from '../../core/git.js';
|
|
15
|
+
import { getExtensionRegistry } from '../../extensions/registry.js';
|
|
16
|
+
// Normalize source_type to canonical kebab-case form.
// Agents pass free-form strings — this ensures consistency for filtering.
const SOURCE_TYPE_ALIASES = {
    // Granola
    'granola': 'meeting',
    'granola-app': 'meeting',
    'granola meeting': 'meeting',
    // Meetings
    'meeting notes': 'meeting',
    'meeting-notes': 'meeting',
    'meetings': 'meeting',
    // Interviews
    'interviews': 'interview',
    'user interview': 'interview',
    'user-interview': 'interview',
    // Slack
    'slack thread': 'slack',
    'slack-thread': 'slack',
    'slack message': 'slack',
    'slack-message': 'slack',
    // Email
    'emails': 'email',
    'email thread': 'email',
    'email-thread': 'email',
    // GitHub
    'github issue': 'github-issue',
    'github-pr': 'github-pr',
    'github pull request': 'github-pr',
    'pull request': 'github-pr',
    'pull-request': 'github-pr',
    // Notion
    'notion page': 'notion',
    'notion-page': 'notion',
    // Conversations
    'conversations': 'conversation',
    'chat': 'conversation',
    // Notes
    'notes': 'notes',
    'note': 'notes',
    // Documents
    'doc': 'document',
    'docs': 'document',
    'documents': 'document',
    'markdown': 'document',
    'md': 'document',
    // Articles
    'blog post': 'article',
    'blog-post': 'article',
    'web article': 'article',
    'web-article': 'article',
    'online article': 'article',
    'blog': 'article',
    'post': 'article',
    // Media / files
    'pdf': 'pdf',
    'pdf document': 'pdf',
    'docx': 'document',
    'word doc': 'document',
    'word document': 'document',
    'google doc': 'document',
    'google-doc': 'document',
    'image': 'image',
    'screenshot': 'image',
    'photo': 'image',
    'diagram': 'image',
    'video': 'video',
    'recording': 'video',
    'loom': 'video',
    'audio': 'audio',
    'podcast': 'audio',
    'voice memo': 'audio',
    'voice-memo': 'audio',
    // Specs / RFCs
    'specification': 'spec',
    'rfc': 'rfc',
    'design doc': 'spec',
    'design-doc': 'spec',
    // Transcripts
    'transcript': 'transcript',
    // Legacy types
    'claude-code': 'conversation',
    'claude-desktop': 'conversation',
    'chatgpt': 'conversation',
};
/**
 * Normalize a free-form source_type string to its canonical form.
 *
 * Missing/empty input falls back to 'document'. Known aliases map to their
 * canonical type; anything else is lowercased, trimmed, and kebab-cased.
 */
function normalizeSourceType(raw) {
    if (!raw)
        return 'document';
    const key = raw.toLowerCase().trim();
    // Own-property check: a bare `SOURCE_TYPE_ALIASES[key]` lookup would also
    // match inherited Object.prototype members (e.g. key 'constructor' would
    // return a function instead of a type string).
    if (Object.prototype.hasOwnProperty.call(SOURCE_TYPE_ALIASES, key)) {
        return SOURCE_TYPE_ALIASES[key];
    }
    // `key` is already lowercased above; just kebab-case the whitespace.
    return key.replace(/\s+/g, '-');
}
|
|
106
|
+
// Map source_type to ContentType
// Anything not explicitly listed falls back to 'document'.
function mapContentType(sourceType) {
    const contentTypeBySource = new Map([
        ['meeting', 'meeting'],
        ['interview', 'interview'],
        ['conversation', 'conversation'],
        ['analysis', 'analysis'],
        ['notes', 'note'],
        ['note', 'note'],
        ['document', 'document'],
    ]);
    return contentTypeBySource.get(sourceType) ?? 'document';
}
|
|
125
|
+
/**
 * Ingest a document into Lore.
 *
 * Pipeline: dedup by SHA256 content hash → write sources/<id>/{metadata.json,
 * content.md} → LLM insight extraction (best-effort) → embed & index in the
 * vector store → optional git commit+push → fire the onSourceCreated
 * extension hook. Indexing failure is non-fatal: the document stays on disk
 * and the result carries indexed: false with a note to run "lore sync".
 *
 * @param dbPath  vector store database path
 * @param dataDir data repo directory (sources/ live here)
 * @param args    see IngestArgs in the .d.ts — content/title/project required
 * @param options { autoPush = true, hookContext } — hookContext.mode defaults to 'mcp'
 * @returns a result object; success is true even when indexing failed (see note field)
 */
export async function handleIngest(dbPath, dataDir, args, options = {}) {
    const { content, title, project, source_type: raw_source_type, date, participants = [], tags = [], source_url, source_name, } = args;
    const { autoPush = true, hookContext } = options;
    // Canonicalize the agent-supplied free-form type string.
    const source_type = normalizeSourceType(raw_source_type);
    // Content hash deduplication — skip everything if already ingested
    const contentHash = createHash('sha256').update(content).digest('hex');
    try {
        const exists = await checkContentHashExists(dbPath, contentHash);
        if (exists) {
            // Identical bytes were ingested before; report success without re-ingesting.
            return {
                success: true,
                deduplicated: true,
                message: 'Content already exists in the knowledge base (identical content hash).',
            };
        }
    }
    catch (error) {
        // If dedup check fails, continue with ingestion rather than blocking
        console.error('Dedup check failed, continuing with ingestion:', error);
    }
    const id = randomUUID();
    // Caller-supplied date wins; otherwise "now". NOTE(review): date is used
    // verbatim — assumed to be an ISO 8601 string; confirm with callers.
    const timestamp = date || new Date().toISOString();
    const contentType = mapContentType(source_type);
    // Create source directory structure (matches CLI ingest format)
    const sourceDir = path.join(dataDir, 'sources', id);
    await mkdir(sourceDir, { recursive: true });
    // Save metadata.json
    const metadata = {
        id,
        title,
        source_type: source_type,
        content_type: contentType,
        created_at: timestamp,
        imported_at: new Date().toISOString(),
        projects: [project],
        tags,
        participants,
        content_hash: contentHash,
    };
    // Optional provenance fields are only written when present.
    if (source_url) {
        metadata.source_url = source_url;
    }
    if (source_name) {
        metadata.source_name = source_name;
    }
    await writeFile(path.join(sourceDir, 'metadata.json'), JSON.stringify(metadata, null, 2));
    // Save content.md
    await writeFile(path.join(sourceDir, 'content.md'), content);
    // Extract insights using LLM
    // Fallback summary is a 200-char excerpt in case extraction is skipped or fails.
    let summary = content.slice(0, 200) + (content.length > 200 ? '...' : '');
    let themes = [];
    let quotes = [];
    try {
        // Very short content isn't worth an LLM call.
        if (content.trim().length > 100) {
            const insights = await extractInsights(content, title, id, { contentType });
            summary = insights.summary;
            themes = insights.themes.map((t) => ({ name: t.name, quotes: [] }));
            quotes = insights.quotes.map((q) => ({ text: q.text, speaker: q.speaker }));
            // Save insights.json
            await writeFile(path.join(sourceDir, 'insights.json'), JSON.stringify({ summary, themes, quotes }, null, 2));
        }
    }
    catch (error) {
        console.error('Failed to extract insights:', error);
        // Continue with basic summary
    }
    // Add to vector store immediately
    try {
        const searchableText = createSearchableText({
            type: 'summary',
            text: summary,
            project,
        });
        const vector = await generateEmbedding(searchableText);
        const sourceRecord = {
            id,
            title,
            source_type,
            content_type: contentType,
            projects: JSON.stringify([project]),
            tags: JSON.stringify(tags),
            created_at: timestamp,
            summary,
            themes_json: JSON.stringify(themes),
            // NOTE(review): extracted quotes are saved to insights.json above but
            // quotes_json is stored empty here — confirm quotes get indexed via a
            // separate sync/chunking path.
            quotes_json: JSON.stringify([]),
            has_full_content: true,
            vector: [],
        };
        await addSource(dbPath, sourceRecord, vector, {
            content_hash: contentHash,
            source_url,
            source_name,
        });
        // Auto-push to git if enabled
        let pushed = false;
        if (autoPush) {
            const pushResult = await gitCommitAndPush(dataDir, `Ingest: ${title}`);
            // "synced" means the commit actually reached the remote.
            pushed = pushResult.success && (pushResult.message?.includes('pushed') || false);
        }
        const result = {
            success: true,
            id,
            title,
            project,
            source_type,
            filepath: `sources/${id}`,
            summary,
            indexed: true,
            synced: pushed,
        };
        // Notify extensions before returning (failures inside are swallowed by the helper).
        await runSourceCreatedHook({
            id,
            title,
            source_type: source_type,
            content_type: contentType,
            created_at: timestamp,
            imported_at: new Date().toISOString(),
            projects: [project],
            tags,
            source_path: path.join(dataDir, 'sources', id),
            content_hash: contentHash,
        }, {
            mode: hookContext?.mode || 'mcp',
            dataDir,
            dbPath,
        });
        return result;
    }
    catch (error) {
        console.error('Failed to index ingested document:', error);
        // Still try to push even if indexing failed
        let pushed = false;
        if (autoPush) {
            const pushResult = await gitCommitAndPush(dataDir, `Ingest: ${title}`);
            pushed = pushResult.success && (pushResult.message?.includes('pushed') || false);
        }
        // Document is on disk, so this still reports success — only indexing is pending.
        const result = {
            success: true,
            id,
            title,
            project,
            source_type,
            filepath: `sources/${id}`,
            indexed: false,
            synced: pushed,
            note: 'Saved to disk but indexing failed. Run "lore sync" to index.',
        };
        // Hook fires on this path too: the source exists even though unindexed.
        await runSourceCreatedHook({
            id,
            title,
            source_type: source_type,
            content_type: contentType,
            created_at: timestamp,
            imported_at: new Date().toISOString(),
            projects: [project],
            tags,
            source_path: path.join(dataDir, 'sources', id),
            content_hash: contentHash,
        }, {
            mode: hookContext?.mode || 'mcp',
            dataDir,
            dbPath,
        });
        return result;
    }
}
|
|
291
|
+
/**
 * Fire the onSourceCreated extension hook with the given event.
 * Best-effort: any registry or hook failure is logged and swallowed so
 * ingestion never fails because of a broken extension.
 */
async function runSourceCreatedHook(event, context) {
    try {
        // Extensions log through stderr so stdout stays clean for MCP traffic.
        const registry = await getExtensionRegistry({
            logger: (message) => console.error(message),
        });
        const hookContext = {
            mode: context.mode,
            dataDir: context.dataDir,
            dbPath: context.dbPath,
        };
        await registry.runHook('onSourceCreated', event, hookContext);
    }
    catch (error) {
        console.error('[extensions] Failed to run onSourceCreated hook:', error);
    }
}
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* List Projects Handler - Show all projects with stats
|
|
3
|
+
*/
|
|
4
|
+
import { getProjectStats } from '../../core/vector-store.js';
|
|
5
|
+
/**
 * List all projects with per-project stats from the vector store.
 */
export async function handleListProjects(dbPath) {
    const stats = await getProjectStats(dbPath);
    const projects = stats.map(({ project, source_count, quote_count, latest_activity }) => ({
        name: project,
        source_count,
        quote_count,
        latest_activity,
    }));
    return { projects, total: projects.length };
}
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
/**
 * List Sources Handler - List all sources with optional filtering
 */
import type { SourceType } from '../../core/types.js';
/** Arguments accepted by {@link handleListSources}; all filters are optional. */
interface ListSourcesArgs {
    /** Only sources filed under this project. */
    project?: string;
    /** Only sources of this type. */
    source_type?: SourceType;
    /** Maximum number of sources to return (implementation defaults to 20). */
    limit?: number;
}
export declare function handleListSources(dbPath: string, args: ListSourcesArgs): Promise<unknown>;
export {};
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* List Sources Handler - List all sources with optional filtering
|
|
3
|
+
*/
|
|
4
|
+
import { getAllSources } from '../../core/vector-store.js';
|
|
5
|
+
/**
 * List sources with optional project/type filtering.
 * Echoes the applied filters back ('all' when a filter was omitted).
 */
export async function handleListSources(dbPath, args) {
    const { project, source_type, limit = 20 } = args;
    const sources = await getAllSources(dbPath, { project, source_type, limit });
    return {
        sources,
        total: sources.length,
        filters: {
            project: project || 'all',
            source_type: source_type || 'all',
        },
    };
}
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
/**
 * Lore - Agentic Research using Claude Agent SDK
 *
 * This is the "real" agent that:
 * 1. Takes a research task
 * 2. Uses Lore's own tools iteratively (search, get_source, list_sources)
 * 3. Follows leads, cross-references, refines queries
 * 4. Synthesizes findings into a comprehensive research package
 */
import type { ResearchPackage } from '../../core/types.js';
/** Arguments accepted by {@link runResearchAgent}. */
interface ResearchAgentArgs {
    /** The research question or task for the agent to pursue. */
    task: string;
    /** Optional project to scope the research to. */
    project?: string;
    /** Optional content-type filter (e.g. restrict to interviews). */
    content_type?: string;
    /** When true, include the underlying sources in the research package — TODO confirm exact effect in implementation. */
    include_sources?: boolean;
}
/**
 * Run the agentic research
 */
export declare function runResearchAgent(dbPath: string, dataDir: string, args: ResearchAgentArgs): Promise<ResearchPackage>;
export {};
|