uda-cli 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +85 -0
- package/bin/uda.js +4 -0
- package/package.json +47 -0
- package/src/adapters/agents-md.js +22 -0
- package/src/adapters/base.js +14 -0
- package/src/adapters/claude.js +138 -0
- package/src/adapters/cursor.js +46 -0
- package/src/adapters/raw.js +34 -0
- package/src/adapters/registry.js +18 -0
- package/src/cli.js +106 -0
- package/src/commands/config.js +55 -0
- package/src/commands/export.js +77 -0
- package/src/commands/init.js +62 -0
- package/src/commands/learn.js +82 -0
- package/src/commands/logs.js +70 -0
- package/src/commands/plugin.js +100 -0
- package/src/commands/scan.js +71 -0
- package/src/commands/search.js +69 -0
- package/src/commands/status.js +74 -0
- package/src/commands/sync.js +84 -0
- package/src/core/config.js +47 -0
- package/src/core/constants.js +34 -0
- package/src/core/init.js +86 -0
- package/src/core/knowledge-loader.js +93 -0
- package/src/core/validators.js +123 -0
- package/src/plugins/manager.js +160 -0
- package/src/rag/chunker.js +86 -0
- package/src/rag/embedder.js +29 -0
- package/src/rag/manager.js +56 -0
- package/src/rag/store.js +77 -0
- package/src/workflows/parser.js +49 -0
package/src/core/init.js
ADDED
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
// src/core/init.js
|
|
2
|
+
import { mkdir, writeFile, access } from 'fs/promises';
|
|
3
|
+
import { udaPaths } from './constants.js';
|
|
4
|
+
|
|
5
|
+
// Default configuration written to config.json on first init.
const DEFAULT_CONFIG = {
  version: '0.1.0',
  language: 'en',
  adapters: ['claude', 'cursor', 'windsurf', 'agents-md', 'raw'],
  plugins: [],
  rag: {
    embedding_model: 'Xenova/all-MiniLM-L6-v2',
    chunk_size: 512,
    chunk_overlap: 50,
  },
};

/**
 * Build the initial state markdown document.
 *
 * Computed at call time (previously a module-level template literal, which
 * froze the "Last Updated" date at import time rather than at the moment
 * `initProject` actually ran).
 *
 * @returns {string} Markdown content for the initial project-state file.
 */
function buildInitialState() {
  return `# Project State

## Last Updated: ${new Date().toISOString().split('T')[0]}

## Active Work
None yet. Run \`uda scan\` to index your project.

## Completed
- [x] UDA initialized

## Decisions
(Architectural decisions will be recorded here)
`;
}

/**
 * Initialize the UDA directory layout and seed files under `root`.
 *
 * Idempotent: directories are created with `recursive: true`, and every
 * seed file (config.json, state, .gitignore) is only written when it does
 * not already exist, so re-running init never clobbers user edits.
 *
 * @param {string} root - Project root directory.
 * @returns {Promise<object>} The resolved UDA paths object for `root`.
 */
export async function initProject(root) {
  const paths = udaPaths(root);

  // Create all directories in the UDA tree.
  const dirs = [
    paths.root,
    paths.knowledge.root,
    paths.knowledge.engine,
    paths.knowledge.project,
    paths.knowledge.community,
    paths.workflows,
    paths.agents,
    paths.state.root,
    paths.state.features,
    paths.state.history,
    paths.rag.root,
    paths.rag.lancedb,
    paths.rag.cache,
    paths.plugins,
    paths.generated,
  ];

  for (const dir of dirs) {
    await mkdir(dir, { recursive: true });
  }

  // Create config.json (skip if exists).
  if (!(await fileExists(paths.config))) {
    await writeFile(paths.config, JSON.stringify(DEFAULT_CONFIG, null, 2));
  }

  // Create initial state (skip if exists).
  if (!(await fileExists(paths.state.current))) {
    await writeFile(paths.state.current, buildInitialState());
  }

  // Create .gitignore excluding rag/ and .generated/ (skip if exists).
  // NOTE(review): built with '/' — assumes POSIX-style paths; confirm on Windows.
  const gitignorePath = `${paths.root}/.gitignore`;
  if (!(await fileExists(gitignorePath))) {
    await writeFile(gitignorePath, 'rag/\n.generated/\n');
  }

  return paths;
}

/**
 * Return true if `path` exists and is accessible.
 * Any access error (not just ENOENT) is treated as "missing".
 */
async function fileExists(path) {
  try {
    await access(path);
    return true;
  } catch {
    return false;
  }
}
|
|
@@ -0,0 +1,93 @@
|
|
|
1
|
+
// src/core/knowledge-loader.js
|
|
2
|
+
import { readFile, readdir } from 'fs/promises';
|
|
3
|
+
import { join } from 'path';
|
|
4
|
+
import { parse as parseYaml } from 'yaml';
|
|
5
|
+
|
|
6
|
+
/**
 * Load project knowledge (profile + decisions) from the knowledge directory.
 *
 * Missing files are treated as "no knowledge yet"; any other read error is
 * logged as a warning and otherwise ignored.
 *
 * @param {object} paths - UDA paths object (uses paths.knowledge.project).
 * @returns {Promise<{project: object, conventions: string[], decisions: string[]}>}
 */
export async function loadKnowledge(paths) {
  const result = { project: {}, conventions: [], decisions: [] };
  const projectDir = paths.knowledge.project;

  try {
    const profileText = await readFile(join(projectDir, 'profile.md'), 'utf8');
    const name = profileText.match(/Project:\s*(.+)/i);
    if (name) {
      result.project.name = name[1].trim();
    }
    const engine = profileText.match(/Engine:\s*(.+)/i);
    if (engine) {
      result.project.engine = engine[1].trim();
    }
  } catch (err) {
    if (err.code !== 'ENOENT') {
      console.error(`Warning: Failed to read project profile: ${err.message}`);
    }
  }

  try {
    const decisionsText = await readFile(join(projectDir, 'decisions.md'), 'utf8');
    const bullets = [];
    for (const line of decisionsText.split('\n')) {
      // Only top-level "- " bullets count as recorded decisions.
      if (line.startsWith('- ')) {
        bullets.push(line.slice(2));
      }
    }
    result.decisions = bullets;
  } catch (err) {
    if (err.code !== 'ENOENT') {
      console.error(`Warning: Failed to read decisions file: ${err.message}`);
    }
  }

  return result;
}
|
|
34
|
+
|
|
35
|
+
/**
 * Load workflow definitions (*.yaml / *.yml) from the workflows directory.
 *
 * A missing directory yields an empty list; any other readdir error is
 * rethrown. A file that fails to read or parse is skipped with a warning.
 * Only documents carrying a truthy `name` field are kept.
 *
 * @param {object} paths - UDA paths object (uses paths.workflows).
 * @returns {Promise<object[]>} Parsed workflow documents.
 */
export async function loadWorkflows(paths) {
  let entries;
  try {
    entries = await readdir(paths.workflows);
  } catch (err) {
    if (err.code === 'ENOENT') return [];
    throw err;
  }

  const workflows = [];
  for (const entry of entries) {
    const isYaml = entry.endsWith('.yaml') || entry.endsWith('.yml');
    if (!isYaml) continue;
    try {
      const raw = await readFile(join(paths.workflows, entry), 'utf8');
      const doc = parseYaml(raw);
      if (doc && doc.name) {
        workflows.push(doc);
      }
    } catch (err) {
      console.error(`Warning: Failed to parse workflow "${entry}": ${err.message}`);
    }
  }

  return workflows;
}
|
|
60
|
+
|
|
61
|
+
/**
 * Load agent definitions (*.md with optional YAML frontmatter) from the
 * agents directory.
 *
 * A missing directory yields an empty list; any other readdir error is
 * rethrown. A file that fails to read or parse is skipped with a warning.
 * Only agents whose frontmatter provides a truthy `name` are kept.
 *
 * @param {object} paths - UDA paths object (uses paths.agents).
 * @returns {Promise<object[]>} Parsed agent records ({ ...meta, prompt }).
 */
export async function loadAgents(paths) {
  let entries;
  try {
    entries = await readdir(paths.agents);
  } catch (err) {
    if (err.code === 'ENOENT') return [];
    throw err;
  }

  const agents = [];
  for (const entry of entries) {
    if (!entry.endsWith('.md')) continue;
    try {
      const raw = await readFile(join(paths.agents, entry), 'utf8');
      const agent = parseFrontmatter(raw);
      if (agent.name) {
        agents.push(agent);
      }
    } catch (err) {
      console.error(`Warning: Failed to parse agent "${entry}": ${err.message}`);
    }
  }

  return agents;
}
|
|
86
|
+
|
|
87
|
+
/**
 * Split a markdown document into YAML frontmatter metadata and prompt body.
 *
 * Documents without a leading `---` frontmatter fence come back as
 * { prompt: <whole text> } with no metadata.
 * NOTE(review): the regex assumes LF line endings — CRLF frontmatter will
 * not match; confirm inputs are normalized upstream.
 */
function parseFrontmatter(content) {
  const parts = content.match(/^---\n([\s\S]*?)\n---\n?([\s\S]*)$/);
  if (parts === null) {
    return { prompt: content };
  }
  const [, rawMeta, body] = parts;
  return { ...parseYaml(rawMeta), prompt: body.trim() };
}
|
|
@@ -0,0 +1,123 @@
|
|
|
1
|
+
// src/core/validators.js
|
|
2
|
+
|
|
3
|
+
// Whitelists shared by the validators below (re-exported at the bottom of
// this module).
const VALID_ENGINES = ['unity', 'godot', 'unreal'];
const VALID_LEARN_TYPES = ['bug-fix', 'feature', 'pattern', 'knowledge'];
const VALID_EXPORT_FORMATS = ['claude', 'cursor', 'agents-md', 'raw'];
const VALID_SEARCH_FORMATS = ['terminal', 'md', 'clipboard'];

/**
 * Validate result: { valid: true } or { valid: false, error: string }
 */

/** Check that `engine` names one of the supported game engines. */
export function validateEngine(engine) {
  const isNonEmptyString = typeof engine === 'string' && engine.trim() !== '';
  if (!isNonEmptyString) {
    return { valid: false, error: 'Engine name must be a non-empty string.' };
  }
  if (VALID_ENGINES.includes(engine)) {
    return { valid: true };
  }
  return { valid: false, error: `Unknown engine "${engine}". Valid engines: ${VALID_ENGINES.join(', ')}` };
}
|
|
21
|
+
|
|
22
|
+
/** Check that `key` is a dot-separated config path like "rag.enabled". */
export function validateConfigKey(key) {
  if (typeof key !== 'string' || key.trim() === '') {
    return { valid: false, error: 'Config key must be a non-empty string.' };
  }
  // Segments of [A-Za-z0-9_] joined by single dots; no leading/trailing dot.
  const dotPath = /^[a-zA-Z0-9_]+(\.[a-zA-Z0-9_]+)*$/;
  return dotPath.test(key)
    ? { valid: true }
    : { valid: false, error: `Invalid config key "${key}". Use dot notation (e.g. "rag.enabled").` };
}
|
|
31
|
+
|
|
32
|
+
/** Check that `format` names one of the supported export adapters. */
export function validateExportFormat(format) {
  if (typeof format !== 'string' || format.trim() === '') {
    return { valid: false, error: 'Export format must be a non-empty string.' };
  }
  return VALID_EXPORT_FORMATS.includes(format)
    ? { valid: true }
    : { valid: false, error: `Unknown format "${format}". Available: ${VALID_EXPORT_FORMATS.join(', ')}` };
}
|
|
41
|
+
|
|
42
|
+
/** Check that `repo` looks like a git URL or a "user/repo" shorthand. */
export function validatePluginRepo(repo) {
  if (typeof repo !== 'string' || repo.trim() === '') {
    return { valid: false, error: 'Plugin repository URL must be a non-empty string.' };
  }
  // Accept full https/ssh git URLs as well as the GitHub-style shorthand.
  const gitUrlPattern = /^(https?:\/\/.+\.git|git@.+:.+\.git|[a-zA-Z0-9_-]+\/[a-zA-Z0-9_.-]+)$/;
  if (gitUrlPattern.test(repo.trim())) {
    return { valid: true };
  }
  return { valid: false, error: `Invalid repository URL "${repo}". Expected a git URL (e.g. https://github.com/user/repo.git) or shorthand (user/repo).` };
}
|
|
52
|
+
|
|
53
|
+
/** Check that `name` contains only letters, digits, hyphens and underscores. */
export function validatePluginName(name) {
  if (typeof name !== 'string' || name.trim() === '') {
    return { valid: false, error: 'Plugin name must be a non-empty string.' };
  }
  return /^[a-zA-Z0-9_-]+$/.test(name)
    ? { valid: true }
    : { valid: false, error: `Invalid plugin name "${name}". Use only letters, numbers, hyphens and underscores.` };
}
|
|
62
|
+
|
|
63
|
+
/** Check that `type` is one of the supported learn entry types. */
export function validateLearnType(type) {
  if (typeof type !== 'string' || type.trim() === '') {
    return { valid: false, error: 'Learn type must be a non-empty string.' };
  }
  return VALID_LEARN_TYPES.includes(type)
    ? { valid: true }
    : { valid: false, error: `Unknown type "${type}". Valid types: ${VALID_LEARN_TYPES.join(', ')}` };
}
|
|
72
|
+
|
|
73
|
+
/** Check that the search query is a non-empty string. */
export function validateSearchQuery(query) {
  const ok = typeof query === 'string' && query.trim() !== '';
  return ok
    ? { valid: true }
    : { valid: false, error: 'Search query must be a non-empty string.' };
}
|
|
79
|
+
|
|
80
|
+
/**
 * Validate that `value` (string or number) is a positive integer.
 *
 * Stricter than the previous parseInt-based check: inputs with trailing
 * garbage ("12abc") or a fractional part ("3.7") are rejected instead of
 * being silently truncated to an integer.
 *
 * @param {string|number} value - Raw value, e.g. from a CLI flag.
 * @param {string} name - Human-readable name used in the error message.
 * @returns {{valid: true, value: number}|{valid: false, error: string}}
 */
export function validatePositiveInt(value, name) {
  let parsed = NaN;
  if (typeof value === 'number') {
    parsed = value;
  } else if (typeof value === 'string' && value.trim() !== '') {
    // Number() rejects trailing garbage, unlike parseInt().
    parsed = Number(value.trim());
  }
  if (!Number.isInteger(parsed) || parsed < 1) {
    return { valid: false, error: `${name} must be a positive integer. Got "${value}".` };
  }
  return { valid: true, value: parsed };
}
|
|
87
|
+
|
|
88
|
+
/** Check that `format` names one of the supported search output formats. */
export function validateSearchFormat(format) {
  if (typeof format !== 'string' || format.trim() === '') {
    return { valid: false, error: 'Search format must be a non-empty string.' };
  }
  return VALID_SEARCH_FORMATS.includes(format)
    ? { valid: true }
    : { valid: false, error: `Unknown search format "${format}". Available: ${VALID_SEARCH_FORMATS.join(', ')}` };
}
|
|
97
|
+
|
|
98
|
+
// Fields every plugin manifest.json must provide as non-empty strings.
const MANIFEST_REQUIRED = ['name', 'version', 'engine', 'uda_version'];

/**
 * Validate a parsed plugin manifest object.
 *
 * Requires every MANIFEST_REQUIRED field to be a non-empty string, and if
 * a `capabilities.logs` object is declared, it must carry a `source` path.
 *
 * @param {unknown} manifest - The JSON.parse result of manifest.json.
 * @returns {{valid: true}|{valid: false, error: string}}
 */
export function validateManifest(manifest) {
  // Arrays are typeof 'object' too — reject them explicitly so the error
  // message is accurate instead of complaining about a missing "name" field.
  if (!manifest || typeof manifest !== 'object' || Array.isArray(manifest)) {
    return { valid: false, error: 'Manifest must be a JSON object.' };
  }

  for (const field of MANIFEST_REQUIRED) {
    if (!manifest[field] || typeof manifest[field] !== 'string') {
      return { valid: false, error: `Manifest must include a "${field}" string field.` };
    }
  }

  if (manifest.capabilities && typeof manifest.capabilities === 'object') {
    const { logs } = manifest.capabilities;
    if (logs && typeof logs === 'object') {
      if (!logs.source || typeof logs.source !== 'string') {
        return { valid: false, error: 'Capability "logs" must include a "source" path string.' };
      }
    }
  }

  return { valid: true };
}
|
|
122
|
+
|
|
123
|
+
export { VALID_ENGINES, VALID_LEARN_TYPES, VALID_EXPORT_FORMATS, VALID_SEARCH_FORMATS };
|
|
@@ -0,0 +1,160 @@
|
|
|
1
|
+
import { simpleGit } from 'simple-git';
|
|
2
|
+
import { readFile, writeFile, rm, readdir, mkdir, cp } from 'fs/promises';
|
|
3
|
+
import { join, basename } from 'path';
|
|
4
|
+
import { udaPaths } from '../core/constants.js';
|
|
5
|
+
import { validateManifest } from '../core/validators.js';
|
|
6
|
+
|
|
7
|
+
/**
 * Installs, lists, removes and updates UDA plugins.
 *
 * A plugin is a git repository containing a manifest.json plus optional
 * knowledge/, workflows/ and agents/ folders that are copied into the
 * project's UDA tree. Installed-plugin metadata is persisted as one JSON
 * file per plugin under the plugins directory, keyed by the manifest's
 * `engine` field (falling back to its `name`).
 */
export class PluginManager {
  /**
   * @param {string} projectRoot - Project root path; used both for
   *   resolving UDA paths and as the base for editor-bridge installs.
   */
  constructor(projectRoot) {
    this.root = projectRoot;
    this.paths = udaPaths(projectRoot);
  }

  /**
   * Clone `repoUrl`, validate its manifest, copy its content into the
   * project, and record install metadata.
   *
   * @param {string} repoUrl - Git URL or shorthand accepted by simple-git.
   * @returns {Promise<object>} The validated plugin manifest.
   * @throws {Error} If the clone fails, manifest.json is missing or is
   *   invalid JSON, or the manifest fails validation.
   *
   * NOTE(review): files copied before a later failure are NOT rolled
   * back — only the temp clone directory is cleaned up (in the finally).
   */
  async add(repoUrl) {
    const tmpDir = join(this.paths.root, '.tmp-plugin');
    const git = simpleGit();

    try {
      // Clone repo (shallow: only the latest commit is needed)
      await git.clone(repoUrl, tmpDir, ['--depth', '1']);

      // Get commit hash of the cloned HEAD, recorded for provenance
      const log = await git.cwd(tmpDir).log(['-1']);
      const commitHash = log.latest?.hash || 'unknown';

      // Read manifest; a missing file becomes a friendlier error
      let manifestRaw;
      try {
        manifestRaw = await readFile(join(tmpDir, 'manifest.json'), 'utf8');
      } catch (err) {
        throw new Error(`Plugin repository is missing manifest.json`);
      }

      let manifest;
      try {
        manifest = JSON.parse(manifestRaw);
      } catch (err) {
        throw new Error(`Plugin manifest.json contains invalid JSON: ${err.message}`);
      }

      // Validate manifest shape before touching the project tree
      const validation = validateManifest(manifest);
      if (!validation.valid) {
        throw new Error(`Invalid plugin manifest: ${validation.error}`);
      }

      const pluginName = manifest.name;

      // Copy knowledge files into knowledge/engine/<engine-or-name>/
      // (cpDir silently skips folders the plugin does not ship)
      const knowledgeDir = join(tmpDir, 'knowledge');
      const targetKnowledge = join(this.paths.knowledge.engine, manifest.engine || pluginName);
      await mkdir(targetKnowledge, { recursive: true });
      await cpDir(knowledgeDir, targetKnowledge);

      // Copy workflows
      const workflowDir = join(tmpDir, 'workflows');
      await cpDir(workflowDir, this.paths.workflows);

      // Copy agents
      const agentDir = join(tmpDir, 'agents');
      await cpDir(agentDir, this.paths.agents);

      // Copy editor files (e.g., UdaLogBridge.cs → Assets/Editor/);
      // destination is resolved relative to the project root, not the UDA dir
      if (manifest.capabilities?.logs?.bridge && manifest.capabilities?.logs?.install_to) {
        const bridgeSrc = join(tmpDir, manifest.capabilities.logs.bridge);
        const bridgeDest = join(this.root, manifest.capabilities.logs.install_to);
        await mkdir(bridgeDest, { recursive: true });
        await cp(bridgeSrc, join(bridgeDest, basename(manifest.capabilities.logs.bridge)));
      }

      // Save plugin metadata (manifest + repo URL + install time + commit)
      const pluginMeta = {
        ...manifest,
        repo: repoUrl,
        installedAt: new Date().toISOString(),
        commitHash,
      };
      await writeFile(
        join(this.paths.plugins, `${manifest.engine || pluginName}.json`),
        JSON.stringify(pluginMeta, null, 2)
      );

      return manifest;
    } finally {
      // Always discard the temp clone, even when installation failed
      await rm(tmpDir, { recursive: true, force: true });
    }
  }

  /**
   * List metadata of all installed plugins.
   *
   * @returns {Promise<object[]>} One metadata object per readable
   *   plugins/<name>.json file; unreadable files are skipped with a
   *   warning, and a missing plugins directory yields [] silently.
   */
  async list() {
    try {
      const files = await readdir(this.paths.plugins);
      const plugins = [];
      for (const f of files) {
        if (f.endsWith('.json')) {
          try {
            const data = JSON.parse(await readFile(join(this.paths.plugins, f), 'utf8'));
            plugins.push(data);
          } catch (err) {
            console.error(`Warning: Failed to read plugin metadata "${f}": ${err.message}`);
          }
        }
      }
      return plugins;
    } catch (err) {
      if (err.code !== 'ENOENT') {
        console.error(`Warning: Failed to list plugins: ${err.message}`);
      }
      return [];
    }
  }

  /**
   * Uninstall a plugin: delete its engine-knowledge directory and its
   * metadata file.
   *
   * @param {string} name - Plugin key (the metadata filename stem).
   * @returns {Promise<object>} The removed plugin's metadata.
   * @throws If the metadata file does not exist or is invalid JSON.
   *
   * NOTE(review): workflows, agents and installed editor bridges copied
   * by add() are not removed here — confirm whether that is intentional.
   */
  async remove(name) {
    const metaPath = join(this.paths.plugins, `${name}.json`);
    const meta = JSON.parse(await readFile(metaPath, 'utf8'));

    // Remove engine knowledge
    const engineDir = join(this.paths.knowledge.engine, meta.engine || name);
    await rm(engineDir, { recursive: true, force: true });

    // Remove metadata
    await rm(metaPath);

    return meta;
  }

  /**
   * Reinstall a plugin from its recorded repo URL.
   *
   * @param {string} name - Plugin key.
   * @returns {Promise<{old: object, new: object}>} Previous metadata and
   *   the freshly installed manifest.
   *
   * NOTE(review): remove-then-add is not atomic — if the re-clone fails,
   * the plugin is left uninstalled.
   */
  async update(name) {
    const metaPath = join(this.paths.plugins, `${name}.json`);
    const meta = JSON.parse(await readFile(metaPath, 'utf8'));

    // Remove old files and metadata
    await this.remove(name);

    // Re-add from repo
    const newManifest = await this.add(meta.repo);

    return { old: meta, new: newManifest };
  }

  /**
   * Update every installed plugin sequentially.
   *
   * @returns {Promise<object[]>} One { name, old, new } entry per plugin.
   *
   * NOTE(review): a failure on one plugin aborts the remaining updates.
   */
  async updateAll() {
    const plugins = await this.list();
    const results = [];

    for (const plugin of plugins) {
      const name = plugin.engine || plugin.name;
      const result = await this.update(name);
      results.push({ name, ...result });
    }

    return results;
  }
}
|
|
151
|
+
|
|
152
|
+
/**
 * Best-effort recursive copy: a missing source directory is skipped
 * silently (plugins may omit optional folders); any other failure is
 * logged as a warning rather than thrown.
 */
async function cpDir(src, dest) {
  try {
    await cp(src, dest, { recursive: true });
    return;
  } catch (err) {
    if (err.code === 'ENOENT') {
      return;
    }
    console.error(`Warning: Failed to copy "${src}" to "${dest}": ${err.message}`);
  }
}
|
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
// src/rag/chunker.js
|
|
2
|
+
|
|
3
|
+
export function chunkMarkdown(text, options = {}) {
|
|
4
|
+
const {
|
|
5
|
+
source = 'unknown',
|
|
6
|
+
engine = null,
|
|
7
|
+
tags = [],
|
|
8
|
+
type = 'knowledge',
|
|
9
|
+
maxChunkSize = 512,
|
|
10
|
+
} = options;
|
|
11
|
+
|
|
12
|
+
const baseMetadata = { source, engine, tags, type, date: new Date().toISOString().split('T')[0] };
|
|
13
|
+
const chunks = [];
|
|
14
|
+
const lines = text.split('\n');
|
|
15
|
+
|
|
16
|
+
let currentChunk = { heading: '', lines: [], type: 'text' };
|
|
17
|
+
|
|
18
|
+
for (let i = 0; i < lines.length; i++) {
|
|
19
|
+
const line = lines[i];
|
|
20
|
+
|
|
21
|
+
// Code block detection
|
|
22
|
+
if (line.startsWith('```')) {
|
|
23
|
+
// Flush current text chunk
|
|
24
|
+
if (currentChunk.lines.length > 0) {
|
|
25
|
+
chunks.push(buildChunk(currentChunk, baseMetadata, maxChunkSize));
|
|
26
|
+
currentChunk = { heading: currentChunk.heading, lines: [], type: 'text' };
|
|
27
|
+
}
|
|
28
|
+
|
|
29
|
+
// Collect code block
|
|
30
|
+
const codeLines = [line];
|
|
31
|
+
i++;
|
|
32
|
+
while (i < lines.length && !lines[i].startsWith('```')) {
|
|
33
|
+
codeLines.push(lines[i]);
|
|
34
|
+
i++;
|
|
35
|
+
}
|
|
36
|
+
if (i < lines.length) codeLines.push(lines[i]); // closing ```
|
|
37
|
+
|
|
38
|
+
chunks.push({
|
|
39
|
+
content: codeLines.join('\n'),
|
|
40
|
+
type: 'code',
|
|
41
|
+
metadata: { ...baseMetadata },
|
|
42
|
+
});
|
|
43
|
+
continue;
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
// Heading detection
|
|
47
|
+
if (line.match(/^#{1,6}\s/)) {
|
|
48
|
+
// Flush previous chunk
|
|
49
|
+
if (currentChunk.lines.length > 0) {
|
|
50
|
+
chunks.push(buildChunk(currentChunk, baseMetadata, maxChunkSize));
|
|
51
|
+
}
|
|
52
|
+
currentChunk = { heading: line, lines: [line], type: 'text' };
|
|
53
|
+
continue;
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
currentChunk.lines.push(line);
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
// Flush last chunk
|
|
60
|
+
if (currentChunk.lines.length > 0) {
|
|
61
|
+
chunks.push(buildChunk(currentChunk, baseMetadata, maxChunkSize));
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
return chunks;
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
function buildChunk(chunk, baseMetadata, maxChunkSize) {
|
|
68
|
+
const content = chunk.lines.join('\n').trim();
|
|
69
|
+
|
|
70
|
+
// If within size limit, return as-is
|
|
71
|
+
if (content.length <= maxChunkSize) {
|
|
72
|
+
return {
|
|
73
|
+
content,
|
|
74
|
+
type: chunk.type,
|
|
75
|
+
metadata: { ...baseMetadata },
|
|
76
|
+
};
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
// Split oversized chunks by paragraph
|
|
80
|
+
// For simplicity, return single chunk trimmed (full split in future)
|
|
81
|
+
return {
|
|
82
|
+
content: content.slice(0, maxChunkSize),
|
|
83
|
+
type: chunk.type,
|
|
84
|
+
metadata: { ...baseMetadata },
|
|
85
|
+
};
|
|
86
|
+
}
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
import { pipeline } from '@xenova/transformers';
|
|
2
|
+
|
|
3
|
+
/**
 * Thin wrapper around a transformers.js feature-extraction pipeline.
 * The model is loaded lazily on first use and cached for the lifetime
 * of the instance.
 */
export class Embedder {
  constructor(modelName = 'Xenova/all-MiniLM-L6-v2') {
    this.modelName = modelName;
    this.extractor = null;
  }

  /** Load the pipeline once; subsequent calls are no-ops. */
  async init() {
    if (this.extractor === null) {
      this.extractor = await pipeline('feature-extraction', this.modelName);
    }
  }

  /** Embed one string into a plain number[] (mean-pooled, normalized). */
  async embed(text) {
    await this.init();
    const result = await this.extractor(text, { pooling: 'mean', normalize: true });
    return Array.from(result.data);
  }

  /** Embed several strings sequentially, preserving input order. */
  async embedBatch(texts) {
    await this.init();
    const vectors = [];
    for (const item of texts) {
      vectors.push(await this.embed(item));
    }
    return vectors;
  }
}
|
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
// src/rag/manager.js
|
|
2
|
+
import { readFile } from 'fs/promises';
|
|
3
|
+
import { basename } from 'path';
|
|
4
|
+
import { chunkMarkdown } from './chunker.js';
|
|
5
|
+
import { Embedder } from './embedder.js';
|
|
6
|
+
import { VectorStore } from './store.js';
|
|
7
|
+
|
|
8
|
+
/**
 * Coordinates the RAG pipeline: markdown chunking, embedding, and the
 * vector store. One instance owns one Embedder and one VectorStore.
 */
export class RagManager {
  /** @param {string} dbPath - Path handed to the underlying VectorStore. */
  constructor(dbPath) {
    this.embedder = new Embedder();
    this.store = new VectorStore(dbPath);
  }

  /** Warm up both the embedding model and the vector store. */
  async init() {
    await this.embedder.init();
    await this.store.init();
  }

  /**
   * Chunk and index a markdown file.
   * @returns {Promise<number>} Number of chunks indexed.
   */
  async learnFile(filePath, options = {}) {
    const markdown = await readFile(filePath, 'utf8');
    const source = options.source || basename(filePath);
    const pieces = chunkMarkdown(markdown, { source, ...options });
    await this._indexChunks(pieces);
    return pieces.length;
  }

  /**
   * Chunk and index raw text (source defaults to 'manual').
   * @returns {Promise<number>} Number of chunks indexed.
   */
  async learnText(text, options = {}) {
    const pieces = chunkMarkdown(text, { source: 'manual', ...options });
    await this._indexChunks(pieces);
    return pieces.length;
  }

  /** Semantic search: embed the query, then query the store. */
  async search(query, limit = 5) {
    const vector = await this.embedder.embed(query);
    return this.store.search(vector, limit);
  }

  /** Embed each chunk and persist the batch (no-op for an empty list). */
  async _indexChunks(chunks) {
    const docs = [];
    for (const piece of chunks) {
      const vector = await this.embedder.embed(piece.content);
      docs.push({
        // Id is source + timestamp + random suffix; uniqueness is
        // probabilistic, not guaranteed.
        id: `${piece.metadata.source}-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`,
        content: piece.content,
        vector,
        metadata: piece.metadata,
      });
    }
    if (docs.length !== 0) {
      await this.store.add(docs);
    }
  }
}