@sandrinio/vbounce 1.0.0 → 1.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -1
- package/bin/vbounce.mjs +42 -7
- package/brains/AGENTS.md +5 -2
- package/brains/CHANGELOG.md +7 -0
- package/brains/CLAUDE.md +5 -2
- package/brains/GEMINI.md +5 -2
- package/brains/SETUP.md +12 -0
- package/brains/claude-agents/architect.md +34 -2
- package/brains/claude-agents/developer.md +31 -3
- package/brains/claude-agents/devops.md +14 -1
- package/brains/claude-agents/qa.md +55 -2
- package/docs/HOTFIX_EDGE_CASES.md +37 -0
- package/docs/agent-skill-profiles.docx +0 -0
- package/docs/vbounce-os-manual.docx +0 -0
- package/package.json +13 -4
- package/scripts/hotfix_manager.sh +157 -0
- package/scripts/pre_bounce_sync.sh +37 -0
- package/scripts/validate_report.mjs +122 -0
- package/scripts/vbounce_ask.mjs +98 -0
- package/scripts/vbounce_index.mjs +184 -0
- package/scripts/verify_framework.mjs +105 -0
- package/scripts/verify_framework.sh +13 -0
- package/skills/agent-team/SKILL.md +15 -2
- package/skills/doc-manager/SKILL.md +1 -2
- package/skills/file-organization/SKILL.md +146 -0
- package/skills/file-organization/TEST-RESULTS.md +193 -0
- package/skills/file-organization/evals/evals.json +41 -0
- package/skills/file-organization/references/gitignore-template.md +53 -0
- package/skills/file-organization/references/quick-checklist.md +48 -0
- package/templates/delivery_plan.md +11 -0
- package/templates/hotfix.md +3 -0
- package/templates/sprint_report.md +7 -1
- package/templates/story.md +1 -0
|
@@ -0,0 +1,157 @@
|
|
|
1
|
+
#!/bin/bash

# V-Bounce OS: Hotfix Manager
# Handles edge cases for L1 Trivial tasks to save tokens and ensure framework integrity.

set -euo pipefail

# Ensure we're in a git repository
REPO_ROOT=$(git rev-parse --show-toplevel 2>/dev/null) || {
    echo "❌ Error: Not inside a git repository."
    exit 1
}

COMMAND="${1:-}"

# Print usage and exit with the status given as $1 (default 1).
# FIX: an explicit `--help` request now exits 0; previously show_help always
# exited 1, so `./scripts/hotfix_manager.sh --help` reported failure.
function show_help {
    echo "V-Bounce OS — Hotfix Manager"
    echo ""
    echo "Usage: ./scripts/hotfix_manager.sh <command> [args]"
    echo ""
    echo "Commands:"
    echo "  audit                  Run a lightweight static analysis on recent commits to detect architectural drift."
    echo "  sync                   Rebase all active git worktrees against the current sprint branch."
    echo "  ledger <title> <desc>  Append a Hotfix entry to §8 Applied Hotfixes in the active DELIVERY_PLAN.md."
    echo ""
    echo "Examples:"
    echo "  ./scripts/hotfix_manager.sh audit"
    echo "  ./scripts/hotfix_manager.sh sync"
    echo "  ./scripts/hotfix_manager.sh ledger \"Fix Header\" \"Aligned the logo to the left\""
    exit "${1:-1}"
}

if [ -z "$COMMAND" ]; then
    show_help 1
fi

case "$COMMAND" in
    audit)
        echo "🔍 Running Token-Saving Hotfix Audit..."

        # Determine how many commits exist on the branch so we don't overshoot
        TOTAL_COMMITS=$(git rev-list --count HEAD 2>/dev/null || echo "0")
        LOOKBACK=5
        if [ "$TOTAL_COMMITS" -lt "$LOOKBACK" ]; then
            LOOKBACK="$TOTAL_COMMITS"
        fi

        if [ "$LOOKBACK" -eq 0 ]; then
            echo "✅ No commits to audit."
            exit 0
        fi

        # -G matches commits whose diff touches the given patterns (inline
        # styles, stray console.logs, TODO markers).
        SUSPICIOUS=$(git diff "HEAD~${LOOKBACK}" HEAD -G'style=|console\.log|// TODO' --name-only 2>/dev/null || true)

        if [ -n "$SUSPICIOUS" ]; then
            echo "⚠️ WARNING: Potential architectural drift detected in recent commits."
            echo "The following files contain inline styles, console.logs, or TODOs:"
            echo "$SUSPICIOUS"
            echo ""
            echo "Action Required: The Architect agent MUST perform a Deep Audit on these files."
            exit 1
        else
            echo "✅ No obvious architectural drift detected in recent commits."
            exit 0
        fi
        ;;

    sync)
        echo "🔄 Syncing active worktrees with the latest changes..."

        WORKTREE_DIR="${REPO_ROOT}/.worktrees"

        if [ ! -d "$WORKTREE_DIR" ]; then
            echo "✅ No active worktrees found at ${WORKTREE_DIR}. Nothing to sync."
            exit 0
        fi

        CURRENT_BRANCH=$(git branch --show-current)

        if [ -z "$CURRENT_BRANCH" ]; then
            echo "❌ Error: Detached HEAD state. Cannot determine sprint branch for sync."
            exit 1
        fi

        SYNC_COUNT=0
        FAIL_COUNT=0

        for dir in "${WORKTREE_DIR}"/*/; do
            # Guard: when the glob matches nothing it is passed through
            # literally, so check the directory really exists.
            if [ -d "$dir" ]; then
                WORKTREE_NAME=$(basename "$dir")
                echo "Syncing worktree: $WORKTREE_NAME..."

                # Subshell keeps the cd local to this iteration.
                if (cd "$dir" && git fetch origin && git rebase "origin/$CURRENT_BRANCH"); then
                    echo "  ✅ Successfully synced $WORKTREE_NAME."
                    SYNC_COUNT=$((SYNC_COUNT + 1))
                else
                    echo "  ❌ Failed to sync $WORKTREE_NAME. Manual intervention required."
                    FAIL_COUNT=$((FAIL_COUNT + 1))
                fi
            fi
        done

        echo ""
        echo "Sync complete: $SYNC_COUNT succeeded, $FAIL_COUNT failed."
        [ "$FAIL_COUNT" -gt 0 ] && exit 1 || exit 0
        ;;

    ledger)
        TITLE="${2:-}"
        DESC="${3:-}"

        if [ -z "$TITLE" ] || [ -z "$DESC" ]; then
            echo "❌ Error: Missing title or description for the ledger."
            echo "Usage: ./scripts/hotfix_manager.sh ledger \"Fix Header\" \"Aligned the logo to the left\""
            exit 1
        fi

        # Find the active delivery plan (search from repo root)
        DELIVERY_PLAN=$(find "${REPO_ROOT}/product_plans" -name "DELIVERY_PLAN.md" 2>/dev/null | head -n 1)

        if [ -z "$DELIVERY_PLAN" ]; then
            echo "❌ Error: No DELIVERY_PLAN.md found in product_plans/."
            exit 1
        fi

        echo "📝 Updating Hotfix Ledger in $DELIVERY_PLAN..."

        # Check if §8 Applied Hotfixes exists, if not, create it
        if ! grep -q "## 8. Applied Hotfixes" "$DELIVERY_PLAN"; then
            echo "" >> "$DELIVERY_PLAN"
            echo "---" >> "$DELIVERY_PLAN"
            echo "" >> "$DELIVERY_PLAN"
            echo "## 8. Applied Hotfixes" >> "$DELIVERY_PLAN"
            echo "" >> "$DELIVERY_PLAN"
            echo "> L1 Trivial fixes that bypassed the Epic/Story hierarchy. Auto-appended by \`hotfix_manager.sh ledger\`." >> "$DELIVERY_PLAN"
            echo "" >> "$DELIVERY_PLAN"
            echo "| Date | Title | Brief Description |" >> "$DELIVERY_PLAN"
            echo "|------|-------|-------------------|" >> "$DELIVERY_PLAN"
        fi

        # FIX: escape literal pipes so a title/description containing `|`
        # cannot break the Markdown table layout.
        SAFE_TITLE="${TITLE//|/\\|}"
        SAFE_DESC="${DESC//|/\\|}"

        # Append the new row
        DATE=$(date "+%Y-%m-%d")
        echo "| $DATE | $SAFE_TITLE | $SAFE_DESC |" >> "$DELIVERY_PLAN"

        echo "✅ Ledger updated: \"$TITLE\" added to §8 Applied Hotfixes."
        ;;

    --help|-h|help)
        # Explicit help request is a success, not an error.
        show_help 0
        ;;

    *)
        echo "❌ Unknown command: $COMMAND"
        echo ""
        show_help 1
        ;;
esac
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
#!/usr/bin/env bash

# pre_bounce_sync.sh
#
# Run this before kicking off the Implementation Loop (Bounce).
# This prevents the "Stale Context" edge case by forcing the LanceDB
# index to refresh with the latest rules from LESSONS.md and ROADMAP.md.

# FIX: fail fast on errors, unset variables and pipeline failures, matching
# the convention already used by scripts/hotfix_manager.sh.
set -euo pipefail

echo "==========================================="
echo "  V-Bounce OS: Pre-Bounce Sync Started"
echo "==========================================="

# Resolve the repository root relative to this script's own location.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &> /dev/null && pwd)"
ROOT_DIR="$(dirname "$SCRIPT_DIR")"

cd "$ROOT_DIR" || exit 1

# 1. Check for Node Modules
if [ ! -d "node_modules" ]; then
  echo "Error: node_modules not found. Run 'npm install'."
  exit 1
fi

# 2. Rebuild the semantic search index
echo "Syncing V-Bounce Knowledge Base (LanceDB)..."
# FIX: test the command directly instead of inspecting $? afterwards — with
# `set -e` a bare failing command would abort before the old `[ $? -ne 0 ]`
# check ever ran.
if ! node ./scripts/vbounce_index.mjs --all; then
  echo "Error: LanceDB index sync failed."
  exit 1
fi

echo "==========================================="
echo "  Pre-Bounce Sync Complete. RAG is fresh."
echo "  Ready for Team Lead delegation."
echo "==========================================="
exit 0
|
|
@@ -0,0 +1,122 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* validate_report.mjs
|
|
5
|
+
*
|
|
6
|
+
* Strict YAML Frontmatter validation for V-Bounce OS Agent Reports.
|
|
7
|
+
* Fails loudly if an agent hallucinates formatting or omits required fields,
|
|
8
|
+
* so the orchestrator can bounce the prompt back.
|
|
9
|
+
*/
|
|
10
|
+
|
|
11
|
+
import fs from 'fs';
|
|
12
|
+
import path from 'path';
|
|
13
|
+
import yaml from 'js-yaml';
|
|
14
|
+
|
|
15
|
+
// Report frontmatter schemas, keyed by agent type.
// `dev` is a flat list (every key always required); the other agents use a
// `base` list plus `conditional` keys required only for a given status/type.
const SCHEMAS = {
  dev: ['status', 'correction_tax', 'files_modified', 'lessons_flagged'],
  qa: {
    base: ['status', 'bounce_count', 'bugs_found', 'gold_plating_detected'],
    // A failing QA report must also enumerate the failed scenarios.
    conditional: { FAIL: ['failed_scenarios'] },
  },
  arch: {
    base: ['status'],
    conditional: {
      PASS: ['safe_zone_score', 'ai_isms_detected', 'regression_risk'],
      FAIL: ['bounce_count', 'critical_failures'],
    },
  },
  devops: {
    base: ['type', 'status'],
    // Conditional keys here are selected by report `type`, not `status`.
    conditional: {
      'story-merge': ['conflicts_detected'],
      'sprint-release': ['version'],
    },
  },
};
|
|
31
|
+
|
|
32
|
+
/**
 * Extract the raw YAML text between the leading `---` delimiters.
 *
 * @param {string} content - Full markdown report contents.
 * @returns {string} The YAML body (delimiters excluded).
 * @throws {Error} NO_FRONTMATTER when the file does not open with a
 *   `---` fenced frontmatter block.
 */
function extractFrontmatter(content) {
  // Anchored at the start of the file: opening ---, lazy capture, closing ---.
  const frontmatterPattern = /^---\s*[\r\n]+([\s\S]*?)[\r\n]+---\s*/;
  const match = frontmatterPattern.exec(content);
  if (match === null) {
    throw new Error('NO_FRONTMATTER: Report missing strict YAML --- delimiters at the top of the file.');
  }
  return match[1];
}
|
|
40
|
+
|
|
41
|
+
/**
 * Validate a Developer report against SCHEMAS.dev.
 * Throws a DEV_SCHEMA_ERROR when required keys are absent or when
 * `files_modified` is not an array; returns nothing on success.
 */
function validateDev(data) {
  const missing = SCHEMAS.dev.filter((key) => !(key in data));
  if (missing.length > 0) {
    throw new Error(`DEV_SCHEMA_ERROR: Missing required keys: ${missing.join(', ')}`);
  }
  if (!Array.isArray(data.files_modified)) {
    throw new Error(`DEV_SCHEMA_ERROR: 'files_modified' must be an array.`);
  }
}
|
|
46
|
+
|
|
47
|
+
/**
 * Validate a QA report against SCHEMAS.qa.
 * All base keys are required; a FAIL status additionally requires the
 * conditional FAIL keys (e.g. failed_scenarios).
 */
function validateQA(data) {
  const missingBase = SCHEMAS.qa.base.filter((key) => !(key in data));
  if (missingBase.length > 0) {
    throw new Error(`QA_SCHEMA_ERROR: Missing required keys: ${missingBase.join(', ')}`);
  }

  if (data.status !== 'FAIL') {
    return;
  }
  const missingFail = SCHEMAS.qa.conditional.FAIL.filter((key) => !(key in data));
  if (missingFail.length > 0) {
    throw new Error(`QA_SCHEMA_ERROR: 'FAIL' status requires keys: ${missingFail.join(', ')}`);
  }
}
|
|
56
|
+
|
|
57
|
+
/**
 * Validate an Architect report against SCHEMAS.arch.
 * Any status other than 'PASS' is treated as 'FAIL' when selecting the
 * conditional key set, matching the original branching behavior.
 */
function validateArch(data) {
  const missingBase = SCHEMAS.arch.base.filter((key) => !(key in data));
  if (missingBase.length > 0) {
    throw new Error(`ARCH_SCHEMA_ERROR: Missing required keys: ${missingBase.join(', ')}`);
  }

  const bucket = data.status === 'PASS' ? 'PASS' : 'FAIL';
  const missingConditional = SCHEMAS.arch.conditional[bucket].filter((key) => !(key in data));
  if (missingConditional.length > 0) {
    throw new Error(`ARCH_SCHEMA_ERROR: '${bucket}' status requires keys: ${missingConditional.join(', ')}`);
  }
}
|
|
65
|
+
|
|
66
|
+
/**
 * Validate a DevOps report against SCHEMAS.devops.
 * Base keys are always required; report `type` selects additional
 * conditional keys (unknown types get no extra requirements).
 */
function validateDevops(data) {
  const missingBase = SCHEMAS.devops.base.filter((key) => !(key in data));
  if (missingBase.length > 0) {
    throw new Error(`DEVOPS_SCHEMA_ERROR: Missing required keys: ${missingBase.join(', ')}`);
  }

  const typeStr = String(data.type);
  const required = SCHEMAS.devops.conditional[typeStr];
  if (!required) {
    return;
  }
  const missingConditional = required.filter((key) => !(key in data));
  if (missingConditional.length > 0) {
    throw new Error(`DEVOPS_SCHEMA_ERROR: '${typeStr}' type requires keys: ${missingConditional.join(', ')}`);
  }
}
|
|
76
|
+
|
|
77
|
+
/**
 * Entry point: validate the report file named on the command line.
 * Agent type is inferred from the filename suffix; unrecognized files are a
 * soft pass (exit 0). Validation failures print a VALIDATION_FAILED payload
 * to stdout (so automation can capture it) and exit 1.
 */
function main() {
  const filePath = process.argv[2];
  if (!filePath) {
    console.error("Usage: validate_report.mjs <path-to-markdown-file>");
    process.exit(1);
  }

  const filename = path.basename(filePath);

  // Infer agent type from filename convention
  let agentType = 'unknown';
  if (filename.endsWith('-dev.md')) agentType = 'dev';
  else if (filename.endsWith('-qa.md')) agentType = 'qa';
  else if (filename.endsWith('-arch.md')) agentType = 'arch';
  else if (filename.endsWith('-devops.md')) agentType = 'devops';

  if (agentType === 'unknown') {
    // BUG FIX: this message previously printed the literal text "$(unknown)"
    // ($() is not template-literal syntax) instead of the actual filename.
    console.error(`WARNING: Unrecognized report type for ${filename}. Ensure filename ends in -dev.md, -qa.md, -arch.md, or -devops.md.`);
    process.exit(0); // Soft pass, not an agent workflow report
  }

  try {
    const rawContent = fs.readFileSync(filePath, 'utf8');
    const yamlString = extractFrontmatter(rawContent);
    const data = yaml.load(yamlString);

    if (!data || typeof data !== 'object') {
      throw new Error("YAML_PARSE_ERROR: Frontmatter parsed to an empty or invalid object.");
    }

    if (agentType === 'dev') validateDev(data);
    if (agentType === 'qa') validateQA(data);
    if (agentType === 'arch') validateArch(data);
    if (agentType === 'devops') validateDevops(data);

    // BUG FIX: interpolate the filename (was the literal "$(unknown)").
    console.log(`VALID: ${filename} matches the ${agentType.toUpperCase()} schema.`);
    process.exit(0);

  } catch (error) {
    // We print specifically to stdout so automation scripts can capture the payload and bounce it back to the AI
    console.log(`VALIDATION_FAILED\n${error.message}`);
    process.exit(1);
  }
}

main();
|
|
@@ -0,0 +1,98 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
import fs from 'fs';
|
|
4
|
+
import path from 'path';
|
|
5
|
+
import { connect } from '@lancedb/lancedb';
|
|
6
|
+
import { pipeline } from '@xenova/transformers';
|
|
7
|
+
import { program } from 'commander';
|
|
8
|
+
|
|
9
|
+
// LanceDB lives inside the project's hidden .bounce working directory.
const LANCE_DIR = path.join(process.cwd(), '.bounce', '.lancedb');

program
  .name('vbounce_ask')
  .description('Query V-Bounce OS LanceDB for relevant context')
  .argument('<query>', 'The semantic query to search for')
  .option('-f, --filter <field=value>', 'Filter by metadata (e.g., type=lesson)')
  // FIX: parse the CLI value here with an explicit radix so `options.limit`
  // is always a number; previously the default was the number 3 but a
  // user-supplied value arrived as a raw string.
  .option('-l, --limit <number>', 'Number of results to return', (value) => Number.parseInt(value, 10), 3)
  .parse(process.argv);

const options = program.opts();
const query = program.args[0];
|
|
21
|
+
|
|
22
|
+
/**
 * Lazily-initialized wrapper around a local sentence-embedding pipeline
 * (Xenova/all-MiniLM-L6-v2, quantized). The model is loaded on first use
 * and cached on the instance.
 */
class LocalEmbeddingFunction {
  constructor() {
    this.modelName = 'Xenova/all-MiniLM-L6-v2';
    this.extractor = null;
  }

  /** Load the feature-extraction pipeline once; subsequent calls are no-ops. */
  async init() {
    if (this.extractor !== null) {
      return;
    }
    this.extractor = await pipeline('feature-extraction', this.modelName, {
      quantized: true,
    });
  }

  /**
   * Embed a single query string.
   * @param {string} text - Query text to embed.
   * @returns {Promise<number[]>} Mean-pooled, normalized embedding vector.
   */
  async embedQuery(text) {
    await this.init();
    const result = await this.extractor(text, { pooling: 'mean', normalize: true });
    return Array.from(result.data);
  }
}
|
|
42
|
+
|
|
43
|
+
/**
 * Run a semantic query against the local LanceDB context store and print the
 * matching chunks. Exits non-zero when no query is given or the DB/table is
 * missing.
 */
async function main() {
  if (!query) {
    console.error("Error: Must provide a semantic query string.");
    process.exit(1);
  }

  if (!fs.existsSync(LANCE_DIR)) {
    console.error(`Error: LanceDB not found at ${LANCE_DIR}. Have you run vbounce_index yet?`);
    process.exit(1);
  }

  const db = await connect(LANCE_DIR);
  let table;
  try {
    table = await db.openTable('vbounce_context');
  } catch (e) {
    console.error("Error: Table 'vbounce_context' not found. Please index documents first.");
    process.exit(1);
  }

  const embedder = new LocalEmbeddingFunction();
  const queryVector = await embedder.embedQuery(query);

  // FIX: explicit radix, and fall back to the documented default of 3 when
  // the parsed limit is NaN (previously NaN would flow into .limit()).
  const parsedLimit = Number.parseInt(options.limit, 10);
  let search = table.vectorSearch(queryVector).limit(Number.isNaN(parsedLimit) ? 3 : parsedLimit);

  if (options.filter) {
    const [field, value] = options.filter.split('=');
    // LanceDB JS uses SQL-like string criteria for filtering.
    // FIX: escape single quotes in the user-supplied value so a quote in the
    // filter cannot break out of (or inject into) the criteria string.
    const safeValue = String(value ?? '').replace(/'/g, "''");
    if (field === "type") {
      search = search.where(`\`type\` = '${safeValue}'`);
    } else if (field === "section") {
      search = search.where(`\`section\` = '${safeValue}'`);
    }
  }

  const results = await search.toArray();

  if (results.length === 0) {
    console.log("No relevant context found.");
    return;
  }

  console.log(`\n--- V-Bounce Semantic Retrieval ---`);
  console.log(`Query: "${query}"`);
  console.log(`Found ${results.length} relevant chunks.\n`);

  results.forEach((r, idx) => {
    console.log(`[Result ${idx + 1}] Source: ${r.file} (${r.type || 'unknown'} > ${r.section || 'General'})`);
    console.log(`Distance: ${r._distance ? r._distance.toFixed(4) : 'N/A'}`);
    console.log('-'.repeat(40));
    console.log(r.text.trim());
    console.log('\n');
  });
}

main().catch(console.error);
|
|
@@ -0,0 +1,184 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
import fs from 'fs';
|
|
4
|
+
import path from 'path';
|
|
5
|
+
import { connect } from '@lancedb/lancedb';
|
|
6
|
+
import { pipeline } from '@xenova/transformers';
|
|
7
|
+
import { marked } from 'marked';
|
|
8
|
+
import { program } from 'commander';
|
|
9
|
+
|
|
10
|
+
// LanceDB lives inside the project's hidden .bounce working directory.
const LANCE_DIR = path.join(process.cwd(), '.bounce', '.lancedb');

// CLI definition — either an explicit [path] argument or --all must be given
// (enforced in main, not here).
program
  .name('vbounce_index')
  .description('Index V-Bounce OS documents into local LanceDB')
  .argument('[path]', 'File or directory path to index (or use --all)')
  .option('--all', 'Index all standard V-Bounce directories')
  .parse(process.argv);

const options = program.opts();
const targetPath = program.args[0];
|
|
21
|
+
|
|
22
|
+
/**
 * Lazy wrapper around a local Xenova sentence-embedding pipeline
 * (all-MiniLM-L6-v2, quantized). The model is loaded once on first use and
 * cached on the instance.
 */
class LocalEmbeddingFunction {
  constructor() {
    this.modelName = 'Xenova/all-MiniLM-L6-v2';
    this.extractor = null;
  }

  /** Load the feature-extraction pipeline exactly once. */
  async init() {
    if (this.extractor !== null) {
      return;
    }
    console.log(`Loading embedding model (${this.modelName})...`);
    this.extractor = await pipeline('feature-extraction', this.modelName, {
      quantized: true,
    });
  }

  /**
   * Embed each text one at a time (preserving the original's sequential
   * processing) and return a plain number array per input.
   * @param {string[]} texts
   * @returns {Promise<number[][]>}
   */
  async computeSourceEmbeddings(texts) {
    await this.init();
    const vectors = [];
    for (const text of texts) {
      const output = await this.extractor(text, { pooling: 'mean', normalize: true });
      vectors.push(Array.from(output.data));
    }
    return vectors;
  }
}
|
|
48
|
+
|
|
49
|
+
/**
 * Split a Markdown document into heading-delimited chunks (simplified for MVP).
 * Each chunk carries one section's raw text, the heading it falls under
 * ('General' for any preamble before the first heading), plus the caller's
 * metadata fields spread into the record.
 */
function chunkMarkdown(content, metadata) {
  const chunks = [];
  let section = 'General';
  let pending = '';

  // Push the accumulated section text, skipping whitespace-only buffers.
  const flush = () => {
    if (pending.trim()) {
      chunks.push({ text: pending.trim(), section, ...metadata });
    }
  };

  for (const token of marked.lexer(content)) {
    if (token.type === 'heading') {
      flush();                      // close out the previous section
      section = token.text;         // new section named after this heading
      pending = `${token.raw}\n`;   // the heading line belongs to the new chunk
    } else {
      pending += `${token.raw}\n`;
    }
  }
  flush(); // trailing section after the last heading

  return chunks;
}
|
|
74
|
+
|
|
75
|
+
/**
 * Read one Markdown file, chunk it by heading, embed every chunk and return
 * LanceDB-ready records (empty array when the file yields no chunks).
 *
 * @param {string} filePath - Path to the .md file.
 * @param {LocalEmbeddingFunction} embedder - Shared embedding wrapper.
 * @returns {Promise<object[]>} records with id/file/type/section/text/vector.
 */
async function indexFile(filePath, embedder) {
  console.log(`Processing file: ${filePath}`);
  const content = fs.readFileSync(filePath, 'utf-8');
  const basename = path.basename(filePath);

  // Classify the document so queries can filter on `type`.
  let type = 'unknown';
  if (filePath.includes('LESSONS.md')) type = 'lesson';
  else if (filePath.includes('ROADMAP.md')) type = 'adr';
  else if (filePath.includes('.bounce/reports')) type = 'report';
  else if (filePath.includes('product_plans')) type = 'plan';
  else if (filePath.includes('product_documentation')) type = 'documentation';

  const metadata = { file: basename, type };
  const chunks = chunkMarkdown(content, metadata);

  if (chunks.length === 0) return [];

  console.log(`  Extracted ${chunks.length} chunks. Generating embeddings...`);

  // Prefix each chunk with its provenance so the embedding captures context.
  const textsToEmbed = chunks.map(c => `[${c.type} - ${c.file} - ${c.section}]\n${c.text}`);
  const vectors = await embedder.computeSourceEmbeddings(textsToEmbed);

  return chunks.map((chunk, i) => ({
    // FIX: key the id by the full path, not just the basename — two files
    // with the same name in different directories (e.g. several
    // DELIVERY_PLAN.md files under product_plans/) previously produced
    // colliding ids like "DELIVERY_PLAN.md-0".
    id: `${filePath}-${i}`,
    file: chunk.file,
    type: chunk.type,
    section: chunk.section,
    text: chunk.text,
    vector: vectors[i]
  }));
}
|
|
106
|
+
|
|
107
|
+
/**
 * Entry point: collect Markdown files (from an explicit path or the standard
 * V-Bounce directories via --all), embed their chunks, and write them into
 * the `vbounce_context` LanceDB table — created on the first run, appended
 * to afterwards.
 */
async function main() {
  if (!targetPath && !options.all) {
    console.error("Error: Must specify a path or use --all");
    process.exit(1);
  }

  const embedder = new LocalEmbeddingFunction();

  // Ensure the DB directory exists before connecting.
  if (!fs.existsSync(LANCE_DIR)) {
    fs.mkdirSync(LANCE_DIR, { recursive: true });
  }

  const db = await connect(LANCE_DIR);
  let table;

  try {
    table = await db.openTable('vbounce_context');
  } catch (e) {
    // Table doesn't exist, will create dynamically on first insert
    console.log("Creating new vbounce_context table...");
  }

  const filesToIndex = [];

  // Recursively gather every .md file under `dir`; missing dirs are skipped.
  function walkDir(dir) {
    if (!fs.existsSync(dir)) return;
    const files = fs.readdirSync(dir);
    for (const file of files) {
      const fullPath = path.join(dir, file);
      const stat = fs.statSync(fullPath);
      if (stat.isDirectory()) {
        walkDir(fullPath);
      } else if (fullPath.endsWith('.md')) {
        filesToIndex.push(fullPath);
      }
    }
  }

  if (options.all) {
    if (fs.existsSync('LESSONS.md')) filesToIndex.push('LESSONS.md');
    if (fs.existsSync('ROADMAP.md')) filesToIndex.push('ROADMAP.md');
    walkDir('product_plans');
    walkDir('product_documentation');
    walkDir('.bounce/reports');
  } else if (targetPath) {
    // FIX: guard against a nonexistent path — fs.statSync previously threw an
    // unhandled ENOENT instead of reporting a usable error.
    if (!fs.existsSync(targetPath)) {
      console.error(`Error: Path not found: ${targetPath}`);
      process.exit(1);
    }
    const stat = fs.statSync(targetPath);
    if (stat.isFile()) {
      filesToIndex.push(targetPath);
    } else if (stat.isDirectory()) {
      walkDir(targetPath);
    }
  }

  if (filesToIndex.length === 0) {
    console.log("No files found to index.");
    process.exit(0);
  }

  let allRecords = [];
  for (const file of filesToIndex) {
    const records = await indexFile(file, embedder);
    allRecords = allRecords.concat(records);
  }

  if (allRecords.length > 0) {
    if (table) {
      // NOTE(review): re-indexing appends rows without removing previous
      // records for the same files, so repeated runs accumulate duplicates —
      // consider a per-file delete before add. Behavior left unchanged here.
      console.log(`Adding ${allRecords.length} records to existing table...`);
      await table.add(allRecords);
    } else {
      console.log(`Creating table with ${allRecords.length} records...`);
      table = await db.createTable('vbounce_context', allRecords);
    }
    console.log(`Successfully indexed into LanceDB.`);
  }
}

main().catch(console.error);
|