moflo 4.7.8 → 4.8.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/settings.local.json +4 -1
- package/.claude/workflow-state.json +3 -7
- package/README.md +3 -1
- package/bin/build-embeddings.mjs +59 -3
- package/bin/generate-code-map.mjs +3 -1
- package/bin/hooks.mjs +23 -20
- package/bin/index-guidance.mjs +3 -1
- package/bin/lib/moflo-resolve.mjs +14 -0
- package/bin/semantic-search.mjs +10 -5
- package/bin/session-start-launcher.mjs +116 -3
- package/package.json +6 -6
- package/src/@claude-flow/cli/dist/src/appliance/ruvllm-bridge.js +3 -7
- package/src/@claude-flow/cli/dist/src/commands/daemon.js +42 -95
- package/src/@claude-flow/cli/dist/src/commands/doctor.js +127 -6
- package/src/@claude-flow/cli/dist/src/commands/embeddings.js +4 -3
- package/src/@claude-flow/cli/dist/src/commands/hooks.js +3 -2
- package/src/@claude-flow/cli/dist/src/commands/mcp.js +38 -22
- package/src/@claude-flow/cli/dist/src/commands/memory.js +2 -1
- package/src/@claude-flow/cli/dist/src/commands/neural.js +10 -5
- package/src/@claude-flow/cli/dist/src/config/moflo-config.d.ts +5 -0
- package/src/@claude-flow/cli/dist/src/config/moflo-config.js +16 -0
- package/src/@claude-flow/cli/dist/src/index.js +12 -0
- package/src/@claude-flow/cli/dist/src/init/executor.js +74 -0
- package/src/@claude-flow/cli/dist/src/init/moflo-init.js +49 -0
- package/src/@claude-flow/cli/dist/src/mcp-tools/memory-tools.js +2 -2
- package/src/@claude-flow/cli/dist/src/mcp-tools/neural-tools.js +2 -1
- package/src/@claude-flow/cli/dist/src/memory/memory-bridge.js +5 -1
- package/src/@claude-flow/cli/dist/src/memory/memory-initializer.js +29 -24
- package/src/@claude-flow/cli/dist/src/ruvector/ast-analyzer.js +2 -1
- package/src/@claude-flow/cli/dist/src/ruvector/coverage-router.js +2 -1
- package/src/@claude-flow/cli/dist/src/ruvector/diff-classifier.js +2 -1
- package/src/@claude-flow/cli/dist/src/ruvector/enhanced-model-router.js +3 -3
- package/src/@claude-flow/cli/dist/src/ruvector/index.js +6 -13
- package/src/@claude-flow/cli/dist/src/ruvector/q-learning-router.js +4 -1
- package/src/@claude-flow/cli/dist/src/services/daemon-lock.d.ts +39 -0
- package/src/@claude-flow/cli/dist/src/services/daemon-lock.js +213 -0
- package/src/@claude-flow/cli/dist/src/services/learning-service.js +2 -1
- package/src/@claude-flow/cli/dist/src/services/moflo-require.d.ts +34 -0
- package/src/@claude-flow/cli/dist/src/services/moflo-require.js +67 -0
- package/src/@claude-flow/cli/dist/src/services/ruvector-training.js +8 -6
- package/src/@claude-flow/cli/package.json +6 -6
- package/.claude/helpers/README.md +0 -97
- package/.claude/helpers/adr-compliance.sh +0 -186
- package/.claude/helpers/aggressive-microcompact.mjs +0 -36
- package/.claude/helpers/auto-commit.sh +0 -178
- package/.claude/helpers/checkpoint-manager.sh +0 -251
- package/.claude/helpers/context-persistence-hook.mjs +0 -1979
- package/.claude/helpers/daemon-manager.sh +0 -252
- package/.claude/helpers/ddd-tracker.sh +0 -144
- package/.claude/helpers/github-safe.js +0 -106
- package/.claude/helpers/github-setup.sh +0 -28
- package/.claude/helpers/guidance-hook.sh +0 -13
- package/.claude/helpers/guidance-hooks.sh +0 -102
- package/.claude/helpers/health-monitor.sh +0 -108
- package/.claude/helpers/learning-hooks.sh +0 -329
- package/.claude/helpers/learning-optimizer.sh +0 -127
- package/.claude/helpers/learning-service.mjs +0 -1211
- package/.claude/helpers/memory.cjs +0 -84
- package/.claude/helpers/metrics-db.mjs +0 -492
- package/.claude/helpers/patch-aggressive-prune.mjs +0 -184
- package/.claude/helpers/pattern-consolidator.sh +0 -86
- package/.claude/helpers/perf-worker.sh +0 -160
- package/.claude/helpers/quick-start.sh +0 -19
- package/.claude/helpers/router.cjs +0 -62
- package/.claude/helpers/security-scanner.sh +0 -127
- package/.claude/helpers/session.cjs +0 -125
- package/.claude/helpers/setup-mcp.sh +0 -18
- package/.claude/helpers/standard-checkpoint-hooks.sh +0 -189
- package/.claude/helpers/swarm-comms.sh +0 -353
- package/.claude/helpers/swarm-hooks.sh +0 -761
- package/.claude/helpers/swarm-monitor.sh +0 -211
- package/.claude/helpers/sync-v3-metrics.sh +0 -245
- package/.claude/helpers/update-v3-progress.sh +0 -166
- package/.claude/helpers/v3-quick-status.sh +0 -58
- package/.claude/helpers/v3.sh +0 -111
- package/.claude/helpers/validate-v3-config.sh +0 -216
- package/.claude/helpers/worker-manager.sh +0 -170
|
@@ -0,0 +1,213 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Atomic daemon lock — prevents duplicate daemon processes.
|
|
3
|
+
*
|
|
4
|
+
* Uses fs.writeFileSync with { flag: 'wx' } (O_CREAT | O_EXCL) which is
|
|
5
|
+
* atomic on all platforms: the write fails immediately if the file exists,
|
|
6
|
+
* eliminating the TOCTOU race in the old PID-file approach.
|
|
7
|
+
*
|
|
8
|
+
* Also solves Windows PID recycling by storing a label in the lock payload
|
|
9
|
+
* and verifying the process command line before trusting a "live" PID.
|
|
10
|
+
*/
|
|
11
|
+
import * as fs from 'fs';
|
|
12
|
+
import { join } from 'path';
|
|
13
|
+
import { execSync } from 'child_process';
|
|
14
|
+
const LOCK_FILENAME = 'daemon.lock';
const LOCK_LABEL = 'moflo-daemon';
/** Resolve the lock file path for a project root. */
export function lockPath(projectRoot) {
    const stateDir = join(projectRoot, '.claude-flow');
    return join(stateDir, LOCK_FILENAME);
}
|
|
20
|
+
/**
 * Try to acquire the daemon lock atomically.
 *
 * @returns `{ acquired: true }` on success,
 * `{ acquired: false, holder: pid }` if another daemon owns the lock.
 */
export function acquireDaemonLock(projectRoot, pid = process.pid) {
    const lock = lockPath(projectRoot);
    const stateDir = join(projectRoot, '.claude-flow');
    // Ensure state directory exists
    if (!fs.existsSync(stateDir)) {
        fs.mkdirSync(stateDir, { recursive: true });
    }
    const payload = {
        pid,
        startedAt: Date.now(),
        label: LOCK_LABEL,
    };
    // Attempt 1: atomic exclusive create
    if (tryExclusiveWrite(lock, payload) === 'ok') {
        return { acquired: true };
    }
    // File already exists — check if the holder is still a live daemon
    const existing = readLockPayload(lock);
    if (!existing) {
        // Corrupt or unreadable — remove and retry once
        return reclaimLock(lock, payload);
    }
    // Same PID as us? We already hold it (re-entrant).
    if (existing.pid === pid) {
        return { acquired: true };
    }
    // Is the process alive AND actually a moflo daemon?
    if (isProcessAlive(existing.pid) && isDaemonProcess(existing.pid)) {
        return { acquired: false, holder: existing.pid };
    }
    // Stale lock (dead process or recycled PID) — remove and retry once
    return reclaimLock(lock, payload);
}
/**
 * Remove a stale/corrupt lock file and retry the exclusive create once.
 * If another process wins the race between unlink and create, report
 * `holder: -1` (unknown) rather than looping.
 */
function reclaimLock(lock, payload) {
    safeUnlink(lock);
    return tryExclusiveWrite(lock, payload) === 'ok'
        ? { acquired: true }
        : { acquired: false, holder: -1 };
}
|
|
66
|
+
/**
 * Release the daemon lock. Only removes if we own it (or force = true).
 */
export function releaseDaemonLock(projectRoot, pid = process.pid, force = false) {
    const lock = lockPath(projectRoot);
    if (!fs.existsSync(lock)) {
        return;
    }
    if (force) {
        safeUnlink(lock);
        return;
    }
    // Only the owning PID may release — a lock held by another process
    // (or a corrupt lock) is left untouched.
    const holder = readLockPayload(lock);
    if (holder?.pid === pid) {
        safeUnlink(lock);
    }
}
|
|
82
|
+
/**
 * Check if the daemon lock is currently held by a live daemon.
 * Returns the holder PID or null.
 */
export function getDaemonLockHolder(projectRoot) {
    const lock = lockPath(projectRoot);
    if (!fs.existsSync(lock)) {
        return null;
    }
    const payload = readLockPayload(lock);
    // Live, verified daemon → report its PID.
    if (payload && isProcessAlive(payload.pid) && isDaemonProcess(payload.pid)) {
        return payload.pid;
    }
    // Corrupt payload, dead process, or recycled PID — the lock is stale;
    // clean it up opportunistically and report no holder.
    safeUnlink(lock);
    return null;
}
|
|
103
|
+
// ---------------------------------------------------------------------------
// Internal helpers
// ---------------------------------------------------------------------------
/**
 * Write the lock payload with O_CREAT | O_EXCL semantics ({ flag: 'wx' }).
 * Returns 'ok' on success. Every failure — EEXIST, permissions, disk full —
 * yields 'exists': callers treat any failure as "could not acquire".
 */
function tryExclusiveWrite(path, payload) {
    try {
        fs.writeFileSync(path, JSON.stringify(payload), { flag: 'wx' });
        return 'ok';
    }
    catch {
        // EEXIST and all other errors alike: we did not get the lock.
        return 'exists';
    }
}
|
|
118
|
+
/**
 * Parse the lock file. Returns the payload object when it has the expected
 * shape ({ pid: number, startedAt: number, ... }); returns null for a
 * missing, unreadable, non-JSON, or malformed file.
 */
function readLockPayload(path) {
    try {
        const parsed = JSON.parse(fs.readFileSync(path, 'utf-8'));
        const wellFormed = typeof parsed.pid === 'number'
            && typeof parsed.startedAt === 'number';
        return wellFormed ? parsed : null;
    }
    catch {
        return null;
    }
}
|
|
131
|
+
/** Best-effort unlink: every error is ignored (file may already be gone). */
function safeUnlink(path) {
    try {
        fs.unlinkSync(path);
    }
    catch {
        // ignore — file may already be gone
    }
}
|
|
137
|
+
/**
 * Check whether a PID refers to a live process.
 *
 * `process.kill(pid, 0)` performs an existence/permission check without
 * sending a signal. It throws ESRCH when the process does not exist — but
 * it throws EPERM when the process EXISTS and we merely lack permission to
 * signal it (e.g. a daemon started by another user). The original code
 * treated EPERM as "dead", which could let a duplicate daemon start; EPERM
 * must count as alive.
 */
function isProcessAlive(pid) {
    try {
        process.kill(pid, 0);
        return true;
    }
    catch (err) {
        // EPERM: the process exists but is owned by someone else.
        return err.code === 'EPERM';
    }
}
|
|
146
|
+
/**
 * Cross-platform check: is this PID actually a moflo/claude-flow daemon?
 *
 * This prevents false positives from Windows PID recycling, where a dead
 * daemon's PID gets reused by an unrelated process (e.g. Chrome).
 *
 * - Windows: uses `tasklist /FI` to check the process image + command line
 * - Linux: reads /proc/<pid>/cmdline
 * - macOS: uses `ps -p <pid> -o command=`
 *
 * Falls back to `true` (trust process.kill) if the platform check fails,
 * to avoid accidentally allowing duplicates on exotic platforms.
 */
function isDaemonProcess(pid) {
    try {
        switch (process.platform) {
            case 'win32':
                return isDaemonProcessWindows(pid);
            case 'linux':
                return isDaemonProcessLinux(pid);
            default:
                // macOS and other Unix-likes
                return isDaemonProcessUnix(pid);
        }
    }
    catch {
        // If platform check fails, trust the kill(0) result to avoid
        // accidentally allowing duplicates
        return true;
    }
}
|
|
178
|
+
/**
 * Windows: confirm the PID belongs to a node process whose command line
 * mentions daemon/moflo/claude-flow (guards against PID recycling).
 * Any probe failure falls back to `true` (trust kill(0)).
 */
function isDaemonProcessWindows(pid) {
    try {
        // Step 1: cheap image-name check — tasklist emits CSV with the image
        // name; a non-node image can never be our daemon.
        const taskList = execSync(`tasklist /FI "PID eq ${pid}" /FO CSV /NH`, { encoding: 'utf-8', timeout: 3000, windowsHide: true });
        if (!taskList.includes('node')) {
            return false;
        }
        // Step 2: inspect the full command line via CIM.
        const commandLine = execSync(`powershell -NoProfile -Command "(Get-CimInstance Win32_Process -Filter \\"ProcessId=${pid}\\").CommandLine"`, { encoding: 'utf-8', timeout: 5000, windowsHide: true });
        return /daemon\s+start|moflo|claude-flow/i.test(commandLine);
    }
    catch {
        return true; // fallback: trust kill(0)
    }
}
|
|
192
|
+
/**
 * Linux: read /proc/<pid>/cmdline and look for daemon keywords.
 * A read failure falls back to `true` (trust kill(0)).
 */
function isDaemonProcessLinux(pid) {
    try {
        const cmdline = fs.readFileSync(`/proc/${pid}/cmdline`, 'utf-8');
        const looksLikeDaemon = /daemon.*start|moflo|claude-flow/i.test(cmdline);
        return looksLikeDaemon;
    }
    catch {
        return true; // fallback
    }
}
|
|
201
|
+
/**
 * macOS / other Unix: inspect the process command via `ps` and look for
 * daemon keywords. A probe failure falls back to `true` (trust kill(0)).
 */
function isDaemonProcessUnix(pid) {
    try {
        const command = execSync(`ps -p ${pid} -o command=`, {
            encoding: 'utf-8',
            timeout: 3000,
        });
        return /daemon.*start|moflo|claude-flow/i.test(command);
    }
    catch {
        return true; // fallback
    }
}
|
|
213
|
+
//# sourceMappingURL=daemon-lock.js.map
|
|
@@ -14,6 +14,7 @@
|
|
|
14
14
|
*/
|
|
15
15
|
import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs';
|
|
16
16
|
import { join } from 'path';
|
|
17
|
+
import { mofloImport } from './moflo-require.js';
|
|
17
18
|
// ============================================================================
|
|
18
19
|
// Configuration
|
|
19
20
|
// ============================================================================
|
|
@@ -210,7 +211,7 @@ class HNSWIndex {
|
|
|
210
211
|
}
|
|
211
212
|
async function loadSqlJs() {
|
|
212
213
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
213
|
-
const initSqlJs = (await
|
|
214
|
+
const initSqlJs = (await mofloImport('sql.js')).default;
|
|
214
215
|
return initSqlJs();
|
|
215
216
|
}
|
|
216
217
|
function dbAll(db, sql, params = []) {
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* moflo-require — Resolve moflo's own dependencies from its own node_modules.
|
|
3
|
+
*
|
|
4
|
+
* When moflo runs via `npx` in a consuming project, bare `await import('sql.js')`
|
|
5
|
+
* resolves from the *consuming project's* node_modules, not moflo's. Since moflo
|
|
6
|
+
* ships these dependencies itself, we always resolve from moflo's own context first
|
|
7
|
+
* and only fall back to a bare import as a last resort.
|
|
8
|
+
*
|
|
9
|
+
* Usage:
|
|
10
|
+
* import { mofloImport } from '../services/moflo-require.js';
|
|
11
|
+
* const sqlJs = await mofloImport('sql.js');
|
|
12
|
+
* const transformers = await mofloImport('@xenova/transformers');
|
|
13
|
+
*/
|
|
14
|
+
/**
|
|
15
|
+
* Dynamically import a package, resolving from moflo's own node_modules first.
|
|
16
|
+
* Falls back to bare import only if local resolution fails (e.g. monorepo hoisting).
|
|
17
|
+
*
|
|
18
|
+
* On Windows, `createRequire.resolve()` returns a native path (C:\...) which
|
|
19
|
+
* `import()` rejects — it requires a file:// URL. We convert via pathToFileURL.
|
|
20
|
+
*
|
|
21
|
+
* @param specifier Package specifier, e.g. 'sql.js' or '@xenova/transformers'
|
|
22
|
+
* @returns The imported module, or null if not available
|
|
23
|
+
*/
|
|
24
|
+
export declare function mofloImport(specifier: string): Promise<any>;
|
|
25
|
+
/**
|
|
26
|
+
* Like mofloImport but throws if the package is not found (for required deps).
|
|
27
|
+
*/
|
|
28
|
+
export declare function mofloImportRequired(specifier: string): Promise<any>;
|
|
29
|
+
/**
|
|
30
|
+
* Resolve a package path without importing (useful for WASM file paths etc).
|
|
31
|
+
* Returns the resolved path, or null if not found.
|
|
32
|
+
*/
|
|
33
|
+
export declare function mofloResolve(specifier: string): string | null;
|
|
34
|
+
//# sourceMappingURL=moflo-require.d.ts.map
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* moflo-require — Resolve moflo's own dependencies from its own node_modules.
|
|
3
|
+
*
|
|
4
|
+
* When moflo runs via `npx` in a consuming project, bare `await import('sql.js')`
|
|
5
|
+
* resolves from the *consuming project's* node_modules, not moflo's. Since moflo
|
|
6
|
+
* ships these dependencies itself, we always resolve from moflo's own context first
|
|
7
|
+
* and only fall back to a bare import as a last resort.
|
|
8
|
+
*
|
|
9
|
+
* Usage:
|
|
10
|
+
* import { mofloImport } from '../services/moflo-require.js';
|
|
11
|
+
* const sqlJs = await mofloImport('sql.js');
|
|
12
|
+
* const transformers = await mofloImport('@xenova/transformers');
|
|
13
|
+
*/
|
|
14
|
+
import { createRequire } from 'module';
|
|
15
|
+
import { fileURLToPath, pathToFileURL } from 'url';
|
|
16
|
+
// createRequire anchored to this file — resolves from moflo's own node_modules
const mofloRequire = createRequire(fileURLToPath(import.meta.url));
/**
 * Dynamically import a package, resolving from moflo's own node_modules first.
 * Falls back to bare import only if local resolution fails (e.g. monorepo hoisting).
 *
 * On Windows, `createRequire.resolve()` returns a native path (C:\...) which
 * `import()` rejects — it requires a file:// URL. We convert via pathToFileURL.
 *
 * @param specifier Package specifier, e.g. 'sql.js' or '@xenova/transformers'
 * @returns The imported module, or null if not available
 */
export async function mofloImport(specifier) {
    // Stage 1: resolve against moflo's own node_modules.
    let resolvedUrl = null;
    try {
        // Native path → file:// URL (required on Windows for ESM import()).
        resolvedUrl = pathToFileURL(mofloRequire.resolve(specifier)).href;
    }
    catch {
        resolvedUrl = null;
    }
    // Stage 2: import the locally-resolved module if we found one.
    if (resolvedUrl !== null) {
        try {
            return await import(resolvedUrl);
        }
        catch {
            // fall through to the bare import below
        }
    }
    // Stage 3: bare import as last resort; null when nothing works.
    try {
        return await import(specifier);
    }
    catch {
        return null;
    }
}
|
|
45
|
+
/**
 * Like mofloImport but throws if the package is not found (for required deps).
 */
export async function mofloImportRequired(specifier) {
    const resolvedModule = await mofloImport(specifier);
    if (resolvedModule !== null) {
        return resolvedModule;
    }
    throw new Error(`Required dependency '${specifier}' not found in moflo's node_modules`);
}
|
|
55
|
+
/**
 * Resolve a package path without importing (useful for WASM file paths etc).
 * Returns the resolved path, or null if not found.
 */
export function mofloResolve(specifier) {
    let resolvedPath = null;
    try {
        resolvedPath = mofloRequire.resolve(specifier);
    }
    catch {
        // not installed / not resolvable — report null
    }
    return resolvedPath;
}
|
|
67
|
+
//# sourceMappingURL=moflo-require.js.map
|
|
@@ -53,18 +53,20 @@ export async function initializeTraining(config = {}) {
|
|
|
53
53
|
try {
|
|
54
54
|
// Initialize MicroLoRA with direct WASM loading (Node.js compatible)
|
|
55
55
|
const fs = await import('fs');
|
|
56
|
-
const {
|
|
57
|
-
const require = createRequire(import.meta.url);
|
|
56
|
+
const { mofloResolve, mofloImportRequired } = await import('./moflo-require.js');
|
|
58
57
|
// Load WASM file directly instead of using fetch
|
|
59
|
-
const wasmPath =
|
|
58
|
+
const wasmPath = mofloResolve('@ruvector/learning-wasm/ruvector_learning_wasm_bg.wasm');
|
|
59
|
+
if (!wasmPath)
|
|
60
|
+
throw new Error('@ruvector/learning-wasm not found');
|
|
60
61
|
const wasmBuffer = fs.readFileSync(wasmPath);
|
|
61
|
-
const learningWasm = await
|
|
62
|
+
const learningWasm = await mofloImportRequired('@ruvector/learning-wasm');
|
|
62
63
|
learningWasm.initSync({ module: wasmBuffer });
|
|
63
64
|
microLoRA = new learningWasm.WasmMicroLoRA(dim, alpha, lr);
|
|
64
65
|
features.push(`MicroLoRA (${dim}-dim, <1μs adaptation)`);
|
|
65
66
|
// Initialize ScopedLoRA for per-operator learning
|
|
66
|
-
|
|
67
|
-
|
|
67
|
+
const scoped = new learningWasm.WasmScopedLoRA(dim, alpha, lr);
|
|
68
|
+
scoped.set_category_fallback(true);
|
|
69
|
+
scopedLoRA = scoped;
|
|
68
70
|
features.push('ScopedLoRA (17 operators)');
|
|
69
71
|
// Initialize trajectory buffer
|
|
70
72
|
trajectoryBuffer = new learningWasm.WasmTrajectoryBuffer(config.trajectoryCapacity || 10000, dim);
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@moflo/cli",
|
|
3
|
-
"version": "4.
|
|
3
|
+
"version": "4.8.1",
|
|
4
4
|
"type": "module",
|
|
5
5
|
"description": "MoFlo CLI — AI agent orchestration with specialized agents, swarm coordination, MCP server, self-learning hooks, and vector memory for Claude Code",
|
|
6
6
|
"main": "dist/src/index.js",
|
|
@@ -88,12 +88,12 @@
|
|
|
88
88
|
"semver": "^7.6.0"
|
|
89
89
|
},
|
|
90
90
|
"optionalDependencies": {
|
|
91
|
-
"@claude-flow/aidefence": "
|
|
92
|
-
"@claude-flow/embeddings": "
|
|
93
|
-
"@claude-flow/guidance": "
|
|
94
|
-
"@claude-flow/memory": "
|
|
91
|
+
"@claude-flow/aidefence": "file:../aidefence",
|
|
92
|
+
"@claude-flow/embeddings": "file:../embeddings",
|
|
93
|
+
"@claude-flow/guidance": "file:../guidance",
|
|
94
|
+
"@claude-flow/memory": "file:../memory",
|
|
95
95
|
"@claude-flow/plugin-gastown-bridge": "^0.1.3",
|
|
96
|
-
"agentic-flow": "^
|
|
96
|
+
"agentic-flow": "^2.0.7",
|
|
97
97
|
"@ruvector/attention": "^0.1.4",
|
|
98
98
|
"@ruvector/learning-wasm": "^0.1.29",
|
|
99
99
|
"@ruvector/router": "^0.1.27",
|
|
@@ -1,97 +0,0 @@
|
|
|
1
|
-
# RuFlo V3 Helpers
|
|
2
|
-
|
|
3
|
-
This directory contains helper scripts and utilities for V3 development.
|
|
4
|
-
|
|
5
|
-
## 🚀 Quick Start
|
|
6
|
-
|
|
7
|
-
```bash
|
|
8
|
-
# Initialize V3 development environment
|
|
9
|
-
.claude/helpers/v3.sh init
|
|
10
|
-
|
|
11
|
-
# Quick status check
|
|
12
|
-
.claude/helpers/v3.sh status
|
|
13
|
-
|
|
14
|
-
# Update progress metrics
|
|
15
|
-
.claude/helpers/v3.sh update domain 3
|
|
16
|
-
.claude/helpers/v3.sh update agent 8
|
|
17
|
-
.claude/helpers/v3.sh update security 2
|
|
18
|
-
```
|
|
19
|
-
|
|
20
|
-
## Available Helpers
|
|
21
|
-
|
|
22
|
-
### 🎛️ V3 Master Tool
|
|
23
|
-
- **`v3.sh`** - Main command-line interface for all V3 operations
|
|
24
|
-
```bash
|
|
25
|
-
.claude/helpers/v3.sh help # Show all commands
|
|
26
|
-
.claude/helpers/v3.sh status # Quick development status
|
|
27
|
-
.claude/helpers/v3.sh update domain 3 # Update specific metrics
|
|
28
|
-
.claude/helpers/v3.sh validate # Validate configuration
|
|
29
|
-
.claude/helpers/v3.sh full-status # Complete status overview
|
|
30
|
-
```
|
|
31
|
-
|
|
32
|
-
### 📊 V3 Progress Management
|
|
33
|
-
- **`update-v3-progress.sh`** - Update V3 development metrics
|
|
34
|
-
```bash
|
|
35
|
-
# Usage examples:
|
|
36
|
-
.claude/helpers/update-v3-progress.sh domain 3 # Mark 3 domains complete
|
|
37
|
-
.claude/helpers/update-v3-progress.sh agent 8 # 8 agents active
|
|
38
|
-
.claude/helpers/update-v3-progress.sh security 2 # 2 CVEs fixed
|
|
39
|
-
.claude/helpers/update-v3-progress.sh performance 2.5x # Performance boost
|
|
40
|
-
.claude/helpers/update-v3-progress.sh status # Show current status
|
|
41
|
-
```
|
|
42
|
-
|
|
43
|
-
### 🔍 Configuration Validation
|
|
44
|
-
- **`validate-v3-config.sh`** - Comprehensive environment validation
|
|
45
|
-
- Checks all required directories and files
|
|
46
|
-
- Validates JSON configuration files
|
|
47
|
-
- Verifies Node.js and development tools
|
|
48
|
-
- Confirms Git repository status
|
|
49
|
-
- Validates file permissions
|
|
50
|
-
|
|
51
|
-
### ⚡ Quick Status
|
|
52
|
-
- **`v3-quick-status.sh`** - Compact development progress overview
|
|
53
|
-
- Shows domain, agent, and DDD progress
|
|
54
|
-
- Displays security and performance metrics
|
|
55
|
-
- Color-coded status indicators
|
|
56
|
-
- Current Git branch information
|
|
57
|
-
|
|
58
|
-
## Helper Script Standards
|
|
59
|
-
|
|
60
|
-
### File Naming
|
|
61
|
-
- Use kebab-case: `update-v3-progress.sh`
|
|
62
|
-
- Include version prefix: `v3-*` for V3-specific helpers
|
|
63
|
-
- Use descriptive names that indicate purpose
|
|
64
|
-
|
|
65
|
-
### Script Requirements
|
|
66
|
-
- Must be executable (`chmod +x`)
|
|
67
|
-
- Include proper error handling (`set -e`)
|
|
68
|
-
- Provide usage help when called without arguments
|
|
69
|
-
- Use consistent exit codes (0 = success, non-zero = error)
|
|
70
|
-
|
|
71
|
-
### Configuration Integration
|
|
72
|
-
Helpers are configured in `.claude/settings.json`:
|
|
73
|
-
```json
|
|
74
|
-
{
|
|
75
|
-
"helpers": {
|
|
76
|
-
"directory": ".claude/helpers",
|
|
77
|
-
"enabled": true,
|
|
78
|
-
"v3ProgressUpdater": ".claude/helpers/update-v3-progress.sh"
|
|
79
|
-
}
|
|
80
|
-
}
|
|
81
|
-
```
|
|
82
|
-
|
|
83
|
-
## Development Guidelines
|
|
84
|
-
|
|
85
|
-
1. **Security First**: All helpers must validate inputs
|
|
86
|
-
2. **Idempotent**: Scripts should be safe to run multiple times
|
|
87
|
-
3. **Fast Execution**: Keep helper execution under 1 second when possible
|
|
88
|
-
4. **Clear Output**: Provide clear success/error messages
|
|
89
|
-
5. **JSON Safe**: When updating JSON files, use `jq` for safety
|
|
90
|
-
|
|
91
|
-
## Adding New Helpers
|
|
92
|
-
|
|
93
|
-
1. Create script in `.claude/helpers/`
|
|
94
|
-
2. Make executable: `chmod +x script-name.sh`
|
|
95
|
-
3. Add to settings.json helpers section
|
|
96
|
-
4. Test thoroughly before committing
|
|
97
|
-
5. Update this README with usage documentation
|
|
@@ -1,186 +0,0 @@
|
|
|
1
|
-
#!/bin/bash
|
|
2
|
-
# RuFlo V3 - ADR Compliance Checker Worker
|
|
3
|
-
# Checks compliance with Architecture Decision Records
|
|
4
|
-
|
|
5
|
-
set -euo pipefail
|
|
6
|
-
|
|
7
|
-
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
|
8
|
-
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
|
|
9
|
-
METRICS_DIR="$PROJECT_ROOT/.claude-flow/metrics"
|
|
10
|
-
ADR_FILE="$METRICS_DIR/adr-compliance.json"
|
|
11
|
-
LAST_RUN_FILE="$METRICS_DIR/.adr-last-run"
|
|
12
|
-
|
|
13
|
-
mkdir -p "$METRICS_DIR"
|
|
14
|
-
|
|
15
|
-
# V3 ADRs to check
|
|
16
|
-
declare -A ADRS=(
|
|
17
|
-
["ADR-001"]="agentic-flow as core foundation"
|
|
18
|
-
["ADR-002"]="Domain-Driven Design structure"
|
|
19
|
-
["ADR-003"]="Single coordination engine"
|
|
20
|
-
["ADR-004"]="Plugin-based architecture"
|
|
21
|
-
["ADR-005"]="MCP-first API design"
|
|
22
|
-
["ADR-006"]="Unified memory service"
|
|
23
|
-
["ADR-007"]="Event sourcing for state"
|
|
24
|
-
["ADR-008"]="Vitest over Jest"
|
|
25
|
-
["ADR-009"]="Hybrid memory backend"
|
|
26
|
-
["ADR-010"]="Remove Deno support"
|
|
27
|
-
)
|
|
28
|
-
|
|
29
|
-
should_run() {
|
|
30
|
-
if [ ! -f "$LAST_RUN_FILE" ]; then return 0; fi
|
|
31
|
-
local last_run=$(cat "$LAST_RUN_FILE" 2>/dev/null || echo "0")
|
|
32
|
-
local now=$(date +%s)
|
|
33
|
-
[ $((now - last_run)) -ge 900 ] # 15 minutes
|
|
34
|
-
}
|
|
35
|
-
|
|
36
|
-
check_adr_001() {
|
|
37
|
-
# ADR-001: agentic-flow as core foundation
|
|
38
|
-
local score=0
|
|
39
|
-
|
|
40
|
-
# Check package.json for agentic-flow dependency
|
|
41
|
-
grep -q "agentic-flow" "$PROJECT_ROOT/package.json" 2>/dev/null && score=$((score + 50))
|
|
42
|
-
|
|
43
|
-
# Check for imports from agentic-flow
|
|
44
|
-
local imports=$(grep -r "from.*agentic-flow\|require.*agentic-flow" "$PROJECT_ROOT/v3" "$PROJECT_ROOT/src" 2>/dev/null | grep -v node_modules | wc -l)
|
|
45
|
-
[ "$imports" -gt 5 ] && score=$((score + 50))
|
|
46
|
-
|
|
47
|
-
echo "$score"
|
|
48
|
-
}
|
|
49
|
-
|
|
50
|
-
check_adr_002() {
|
|
51
|
-
# ADR-002: Domain-Driven Design structure
|
|
52
|
-
local score=0
|
|
53
|
-
|
|
54
|
-
# Check for domain directories
|
|
55
|
-
[ -d "$PROJECT_ROOT/v3" ] || [ -d "$PROJECT_ROOT/src/domains" ] && score=$((score + 30))
|
|
56
|
-
|
|
57
|
-
# Check for bounded contexts
|
|
58
|
-
local contexts=$(find "$PROJECT_ROOT/v3" "$PROJECT_ROOT/src" -type d -name "domain" 2>/dev/null | wc -l)
|
|
59
|
-
[ "$contexts" -gt 0 ] && score=$((score + 35))
|
|
60
|
-
|
|
61
|
-
# Check for anti-corruption layers
|
|
62
|
-
local acl=$(grep -r "AntiCorruption\|Adapter\|Port" "$PROJECT_ROOT/v3" "$PROJECT_ROOT/src" 2>/dev/null | grep -v node_modules | wc -l)
|
|
63
|
-
[ "$acl" -gt 0 ] && score=$((score + 35))
|
|
64
|
-
|
|
65
|
-
echo "$score"
|
|
66
|
-
}
|
|
67
|
-
|
|
68
|
-
check_adr_003() {
|
|
69
|
-
# ADR-003: Single coordination engine
|
|
70
|
-
local score=0
|
|
71
|
-
|
|
72
|
-
# Check for unified SwarmCoordinator
|
|
73
|
-
grep -rq "SwarmCoordinator\|UnifiedCoordinator" "$PROJECT_ROOT/v3" "$PROJECT_ROOT/src" 2>/dev/null && score=$((score + 50))
|
|
74
|
-
|
|
75
|
-
# Check for no duplicate coordinators
|
|
76
|
-
local coordinators=$(grep -r "class.*Coordinator" "$PROJECT_ROOT/v3" "$PROJECT_ROOT/src" 2>/dev/null | grep -v node_modules | grep -v ".test." | wc -l)
|
|
77
|
-
[ "$coordinators" -le 3 ] && score=$((score + 50))
|
|
78
|
-
|
|
79
|
-
echo "$score"
|
|
80
|
-
}
|
|
81
|
-
|
|
82
|
-
check_adr_005() {
|
|
83
|
-
# ADR-005: MCP-first API design
|
|
84
|
-
local score=0
|
|
85
|
-
|
|
86
|
-
# Check for MCP server implementation
|
|
87
|
-
[ -d "$PROJECT_ROOT/v3/@claude-flow/mcp" ] && score=$((score + 40))
|
|
88
|
-
|
|
89
|
-
# Check for MCP tools
|
|
90
|
-
local tools=$(grep -r "tool.*name\|registerTool" "$PROJECT_ROOT/v3" 2>/dev/null | wc -l)
|
|
91
|
-
[ "$tools" -gt 5 ] && score=$((score + 30))
|
|
92
|
-
|
|
93
|
-
# Check for MCP schemas
|
|
94
|
-
grep -rq "schema\|jsonSchema" "$PROJECT_ROOT/v3/@claude-flow/mcp" 2>/dev/null && score=$((score + 30))
|
|
95
|
-
|
|
96
|
-
echo "$score"
|
|
97
|
-
}
|
|
98
|
-
|
|
99
|
-
check_adr_008() {
|
|
100
|
-
# ADR-008: Vitest over Jest
|
|
101
|
-
local score=0
|
|
102
|
-
|
|
103
|
-
# Check for vitest in package.json
|
|
104
|
-
grep -q "vitest" "$PROJECT_ROOT/package.json" 2>/dev/null && score=$((score + 50))
|
|
105
|
-
|
|
106
|
-
# Check for no jest references
|
|
107
|
-
local jest_refs=$(grep -r "from.*jest\|jest\." "$PROJECT_ROOT/v3" "$PROJECT_ROOT/src" 2>/dev/null | grep -v node_modules | grep -v "vitest" | wc -l)
|
|
108
|
-
[ "$jest_refs" -eq 0 ] && score=$((score + 50))
|
|
109
|
-
|
|
110
|
-
echo "$score"
|
|
111
|
-
}
|
|
112
|
-
|
|
113
|
-
check_compliance() {
|
|
114
|
-
echo "[$(date +%H:%M:%S)] Checking ADR compliance..."
|
|
115
|
-
|
|
116
|
-
local total_score=0
|
|
117
|
-
local compliant_count=0
|
|
118
|
-
local results=""
|
|
119
|
-
|
|
120
|
-
# Check each ADR
|
|
121
|
-
local adr_001=$(check_adr_001)
|
|
122
|
-
local adr_002=$(check_adr_002)
|
|
123
|
-
local adr_003=$(check_adr_003)
|
|
124
|
-
local adr_005=$(check_adr_005)
|
|
125
|
-
local adr_008=$(check_adr_008)
|
|
126
|
-
|
|
127
|
-
# Simple checks for others (assume partial compliance)
|
|
128
|
-
local adr_004=50 # Plugin architecture
|
|
129
|
-
local adr_006=50 # Unified memory
|
|
130
|
-
local adr_007=50 # Event sourcing
|
|
131
|
-
local adr_009=75 # Hybrid memory
|
|
132
|
-
local adr_010=100 # No Deno (easy to verify)
|
|
133
|
-
|
|
134
|
-
# Calculate totals
|
|
135
|
-
for score in $adr_001 $adr_002 $adr_003 $adr_004 $adr_005 $adr_006 $adr_007 $adr_008 $adr_009 $adr_010; do
|
|
136
|
-
total_score=$((total_score + score))
|
|
137
|
-
[ "$score" -ge 50 ] && compliant_count=$((compliant_count + 1))
|
|
138
|
-
done
|
|
139
|
-
|
|
140
|
-
local avg_score=$((total_score / 10))
|
|
141
|
-
|
|
142
|
-
# Write ADR compliance metrics
|
|
143
|
-
cat > "$ADR_FILE" << EOF
|
|
144
|
-
{
|
|
145
|
-
"timestamp": "$(date -Iseconds)",
|
|
146
|
-
"overallCompliance": $avg_score,
|
|
147
|
-
"compliantCount": $compliant_count,
|
|
148
|
-
"totalADRs": 10,
|
|
149
|
-
"adrs": {
|
|
150
|
-
"ADR-001": {"score": $adr_001, "title": "agentic-flow as core foundation"},
|
|
151
|
-
"ADR-002": {"score": $adr_002, "title": "Domain-Driven Design structure"},
|
|
152
|
-
"ADR-003": {"score": $adr_003, "title": "Single coordination engine"},
|
|
153
|
-
"ADR-004": {"score": $adr_004, "title": "Plugin-based architecture"},
|
|
154
|
-
"ADR-005": {"score": $adr_005, "title": "MCP-first API design"},
|
|
155
|
-
"ADR-006": {"score": $adr_006, "title": "Unified memory service"},
|
|
156
|
-
"ADR-007": {"score": $adr_007, "title": "Event sourcing for state"},
|
|
157
|
-
"ADR-008": {"score": $adr_008, "title": "Vitest over Jest"},
|
|
158
|
-
"ADR-009": {"score": $adr_009, "title": "Hybrid memory backend"},
|
|
159
|
-
"ADR-010": {"score": $adr_010, "title": "Remove Deno support"}
|
|
160
|
-
}
|
|
161
|
-
}
|
|
162
|
-
EOF
|
|
163
|
-
|
|
164
|
-
echo "[$(date +%H:%M:%S)] ✓ ADR Compliance: ${avg_score}% | Compliant: $compliant_count/10"
|
|
165
|
-
|
|
166
|
-
date +%s > "$LAST_RUN_FILE"
|
|
167
|
-
}
|
|
168
|
-
|
|
169
|
-
case "${1:-check}" in
|
|
170
|
-
"run") check_compliance ;;
|
|
171
|
-
"check") should_run && check_compliance || echo "[$(date +%H:%M:%S)] Skipping (throttled)" ;;
|
|
172
|
-
"force") rm -f "$LAST_RUN_FILE"; check_compliance ;;
|
|
173
|
-
"status")
|
|
174
|
-
if [ -f "$ADR_FILE" ]; then
|
|
175
|
-
jq -r '"Compliance: \(.overallCompliance)% | Compliant: \(.compliantCount)/\(.totalADRs)"' "$ADR_FILE"
|
|
176
|
-
else
|
|
177
|
-
echo "No ADR data available"
|
|
178
|
-
fi
|
|
179
|
-
;;
|
|
180
|
-
"details")
|
|
181
|
-
if [ -f "$ADR_FILE" ]; then
|
|
182
|
-
jq -r '.adrs | to_entries[] | "\(.key): \(.value.score)% - \(.value.title)"' "$ADR_FILE"
|
|
183
|
-
fi
|
|
184
|
-
;;
|
|
185
|
-
*) echo "Usage: $0 [run|check|force|status|details]" ;;
|
|
186
|
-
esac
|