@context-os/core 1.0.1 → 1.6.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/context.d.ts +10 -0
- package/dist/context.js +34 -0
- package/dist/index.d.ts +8 -9
- package/dist/index.js +9 -27
- package/dist/indexer.d.ts +47 -0
- package/dist/indexer.js +206 -0
- package/dist/services/database.d.ts +30 -0
- package/dist/services/database.js +147 -0
- package/dist/services/embedding.d.ts +24 -0
- package/dist/services/embedding.js +55 -0
- package/dist/services/intelligence.d.ts +22 -0
- package/dist/services/intelligence.js +125 -0
- package/dist/services/knowledge-graph.d.ts +27 -0
- package/dist/services/knowledge-graph.js +62 -0
- package/dist/services/sampling.d.ts +24 -0
- package/dist/services/sampling.js +62 -0
- package/dist/services/validation.d.ts +31 -0
- package/dist/services/validation.js +153 -0
- package/dist/services/watch.d.ts +16 -0
- package/dist/services/watch.js +74 -0
- package/dist/services/workspace.d.ts +18 -0
- package/dist/services/workspace.js +46 -0
- package/dist/tests/core-services.test.d.ts +1 -0
- package/dist/tests/core-services.test.js +49 -0
- package/dist/tests/federated-intelligence.test.d.ts +1 -0
- package/dist/tests/federated-intelligence.test.js +84 -0
- package/dist/tests/semantic-intelligence.test.d.ts +1 -0
- package/dist/tests/semantic-intelligence.test.js +31 -0
- package/dist/tests/sqlite-hybrid.test.d.ts +1 -0
- package/dist/tests/sqlite-hybrid.test.js +51 -0
- package/package.json +14 -3
|
@@ -0,0 +1,125 @@
|
|
|
1
|
+
import { exec } from "child_process";
import { promisify } from "util";
import MiniSearch from "minisearch";
import { globalIndexer } from "../indexer.js";
import { getWorkspaceRoot } from "../context.js";
import { DatabaseService } from "./database.js";
import { EmbeddingService } from "./embedding.js";
const execAsync = promisify(exec);
/**
 * Workspace search service with a three-tier fallback strategy:
 *   1. Elite hybrid search (sqlite-vec semantic + FTS5 keyword),
 *   2. Lite in-memory index (MiniSearch),
 *   3. Deep scan of the workspace via grep.
 */
export class IntelligenceService {
    miniSearch = null;        // cached MiniSearch instance (tier 2), built lazily
    dbService = null;         // lazily constructed DatabaseService (tier 1)
    embeddingService = null;  // lazily constructed EmbeddingService (tier 1)
    /**
     * Lazily builds and caches the MiniSearch index from the global
     * indexer's records.
     * @returns {Promise<MiniSearch>} the ready-to-query index
     */
    async getIndex() {
        if (this.miniSearch)
            return this.miniSearch;
        const indexData = await globalIndexer.reindex();
        this.miniSearch = new MiniSearch({
            fields: ['title', 'tags', 'excerpt', 'content', 'path'],
            storeFields: ['path', 'title', 'tags', 'excerpt'],
            searchOptions: {
                boost: { title: 2, tags: 1.5 },
                fuzzy: 0.2,
                prefix: true
            }
        });
        // MiniSearch requires a unique `id` per record; use the array index.
        this.miniSearch.addAll(indexData.records.map((r, i) => ({ ...r, id: i })));
        return this.miniSearch;
    }
    /**
     * Hybrid Search: Semantic (sqlite-vec) + Keyword (FTS5) -> Lite Index (MiniSearch) -> Grep Fallback
     * @param {string} query - search text; in the deep-scan tier it is used as an extended grep pattern
     * @param {{deep?: boolean}} [options] - `deep: true` skips tiers 1-2 and goes straight to grep
     * @returns {Promise<Array<{path: string, title: string, tags: any[], excerpt: string, score?: number, type: string}>>}
     */
    async search(query, options = {}) {
        // 1. Try Elite Hybrid Search First (SQLite-Vec + FTS5)
        if (!options.deep) {
            if (!this.dbService)
                this.dbService = new DatabaseService(getWorkspaceRoot());
            if (!this.embeddingService)
                this.embeddingService = new EmbeddingService(process.env.GEMINI_API_KEY);
            try {
                const queryEmbedding = await this.embeddingService.generate(query);
                const { semanticResults, keywordResults } = this.dbService.searchHybrid(queryEmbedding, query);
                const results = [];
                const seenPaths = new Set();
                // Add Semantic Results (vector distance converted to similarity score)
                semanticResults.forEach((res) => {
                    results.push({
                        path: res.path,
                        title: res.title,
                        tags: [],
                        excerpt: res.excerpt,
                        score: 1 - res.distance,
                        type: 'semantic'
                    });
                    seenPaths.add(res.path);
                });
                // Add Keyword Results (if not already seen)
                keywordResults.forEach((res) => {
                    if (!seenPaths.has(res.path)) {
                        results.push({
                            path: res.path,
                            title: res.title,
                            tags: [],
                            excerpt: res.excerpt,
                            score: res.fts_score,
                            type: 'index'
                        });
                    }
                });
                if (results.length > 0)
                    return results;
            }
            catch (err) {
                console.error("[IntelligenceService] Hybrid search failed, falling back to Lite:", err);
            }
            // 2. Fallback to Lite Index (MiniSearch)
            const ms = await this.getIndex();
            const msResults = ms.search(query);
            if (msResults.length > 0) {
                return msResults.map(res => ({
                    path: res.path,
                    title: res.title,
                    tags: res.tags,
                    excerpt: res.excerpt,
                    score: res.score,
                    type: 'index'
                }));
            }
        }
        // 3. Fallback to Deep Scan (Grep)
        const workspaceRoot = getWorkspaceRoot();
        // SECURITY FIX: the query used to be interpolated into a double-quoted
        // shell string, allowing command injection (e.g. `"; rm -rf ~; "`).
        // Single-quote the pattern (escaping embedded single quotes) and add
        // `--` so a query starting with `-` cannot inject grep options.
        const escapedQuery = query.replace(/'/g, `'\\''`);
        const command = `grep -rnIE -- '${escapedQuery}' . | head -n 20`;
        try {
            const { stdout } = await execAsync(command, {
                cwd: workspaceRoot,
                maxBuffer: 10 * 1024 * 1024 // 10 MB: recursive grep output can be large
            });
            if (!stdout)
                return [];
            return stdout.split('\n')
                .filter((line) => line.trim())
                .map((line) => {
                    // grep output is `path:lineNo:content`; re-join the tail in case
                    // the matched content itself contains colons.
                    const [filePath, ...rest] = line.split(':');
                    const content = rest.join(':').trim();
                    return {
                        path: filePath,
                        title: 'Deep Scan Result',
                        tags: [],
                        excerpt: content,
                        type: 'deep'
                    };
                });
        }
        catch (error) {
            // grep exits with status 1 when there are simply no matches.
            if (error.code === 1)
                return [];
            throw error;
        }
    }
    /**
     * Extracts unique @mentions and #tags from free text.
     * @param {string} text
     * @returns {Promise<string[]>} de-duplicated names without the @/# prefix
     */
    async extract(text) {
        const mentions = Array.from(text.matchAll(/@(\w+)/g)).map(m => m[1]);
        const tags = Array.from(text.matchAll(/#(\w+)/g)).map(m => m[1]);
        return Array.from(new Set([...mentions, ...tags]));
    }
}
export const intelligenceService = new IntelligenceService();
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
import { DatabaseService } from "./database.js";
/** A node in the workspace knowledge graph. */
export interface GraphNode {
    id: string;
    label: string;
    type: 'document' | 'entity' | 'tag';
    metadata: any;
}
/** A directed, weighted edge between two graph nodes. */
export interface GraphEdge {
    source: string;
    target: string;
    type: 'mention' | 'tag' | 'semantic';
    weight: number;
}
/** The complete graph: every node plus the edges connecting them. */
export interface WorkspaceGraph {
    nodes: GraphNode[];
    edges: GraphEdge[];
}
/** Builds the workspace knowledge graph from indexed documents. */
export declare class KnowledgeGraphService {
    private dbService;
    constructor(dbService?: DatabaseService);
    /**
     * Builds a unified graph of the workspace.
     * Combines explicit links (@mentions, #tags) and semantic bridges.
     */
    getGraph(): Promise<WorkspaceGraph>;
}
/** Module-level singleton backed by the default workspace database. */
export declare const knowledgeGraphService: KnowledgeGraphService;
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
import { DatabaseService } from "./database.js";
import { getWorkspaceRoot } from "../context.js";
/**
 * Builds the workspace knowledge graph (documents, tags, semantic links)
 * from documents stored in the workspace database.
 */
export class KnowledgeGraphService {
    dbService; // DatabaseService used to read documents and vectors
    constructor(dbService) {
        this.dbService = dbService || new DatabaseService(getWorkspaceRoot());
    }
    /**
     * Builds a unified graph of the workspace.
     * Combines explicit links (@mentions, #tags) and semantic bridges.
     * @returns {Promise<{nodes: Array, edges: Array}>}
     */
    async getGraph() {
        const nodes = [];
        const edges = [];
        const seenNodes = new Set();
        // 1. Load all documents from the database
        const docs = this.dbService.getAllDocuments();
        for (const doc of docs) {
            // Add Document Node
            if (!seenNodes.has(doc.path)) {
                nodes.push({
                    id: doc.path,
                    label: doc.title,
                    type: 'document',
                    metadata: { excerpt: doc.excerpt }
                });
                seenNodes.add(doc.path);
            }
            // 2. Extract Explicit Links (Tags/Mentions from metadata JSON)
            // FIX: parse defensively — a single malformed metadata payload
            // previously threw and aborted the entire graph build.
            let metadata;
            try {
                metadata = JSON.parse(doc.metadata || '[]');
                if (!Array.isArray(metadata))
                    metadata = [];
            }
            catch {
                metadata = [];
            }
            metadata.forEach((tag) => {
                const tagId = `tag:${tag}`;
                if (!seenNodes.has(tagId)) {
                    nodes.push({ id: tagId, label: tag, type: 'tag', metadata: {} });
                    seenNodes.add(tagId);
                }
                edges.push({ source: doc.path, target: tagId, type: 'tag', weight: 1.0 });
            });
            // 3. Find Semantic Bridges (similarity > 0.85)
            // We'll query for similar documents for each doc
            if (doc.id !== undefined) {
                const vector = this.dbService.getVectorForDocument(doc.id);
                if (vector) {
                    const similar = this.dbService.searchSemantic(vector, 10);
                    similar.forEach((match) => {
                        // Only bridge if similarity is high and it's not the same doc
                        if (match.path !== doc.path && (1 - match.distance) > 0.85) {
                            edges.push({
                                source: doc.path,
                                target: match.path,
                                type: 'semantic',
                                weight: 1 - match.distance
                            });
                        }
                    });
                }
            }
        }
        return { nodes, edges };
    }
}
export const knowledgeGraphService = new KnowledgeGraphService();
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
import { DatabaseService } from "./database.js";
/** High-level snapshot of workspace health and recent activity. */
export interface WorkspacePulse {
    timestamp: number;
    healthScore: number;
    topTags: string[];
    activeEntities: string[];
    recentChanges: string[];
}
/** Produces a cached, high-level "pulse" summary of the workspace. */
export declare class SamplingService {
    private dbService;
    private cache;
    private CACHE_TTL;
    constructor(dbService?: DatabaseService);
    /**
     * Generates a high-level summary of the entire workspace.
     * Cached for 5 minutes to prevent expensive re-scans.
     */
    getPulse(): Promise<WorkspacePulse>;
    /**
     * Public method to invalidate cache manually (used by WatchService)
     */
    flushCache(): void;
}
/** Module-level singleton backed by the default workspace database. */
export declare const samplingService: SamplingService;
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
import { DatabaseService } from "./database.js";
import { getWorkspaceRoot } from "../context.js";
/**
 * Produces a cached, high-level "pulse" summary of the workspace:
 * health score, top tags, and recently changed documents.
 */
export class SamplingService {
    dbService;                  // DatabaseService used to enumerate documents
    cache = null;               // { data: pulse, expiry: epochMs } or null
    CACHE_TTL = 5 * 60 * 1000;  // 5 minutes
    constructor(dbService) {
        this.dbService = dbService || new DatabaseService(getWorkspaceRoot());
    }
    /**
     * Generates a high-level summary of the entire workspace.
     * Cached for 5 minutes to prevent expensive re-scans.
     * @returns {Promise<{timestamp: number, healthScore: number, topTags: string[], activeEntities: string[], recentChanges: string[]}>}
     */
    async getPulse() {
        const now = Date.now();
        // Check if cache is still valid
        if (this.cache && this.cache.expiry > now && this.cache.data) {
            return this.cache.data;
        }
        const docs = this.dbService.getAllDocuments();
        // 1. Calculate Health Score (Basic: % of docs with title and tags)
        let completeDocs = 0;
        const tagCounts = {};
        const recent = [];
        docs.forEach((doc) => {
            // FIX: metadata is a JSON string from the DB — parse defensively so
            // one malformed row cannot crash the whole pulse computation.
            let tags;
            try {
                tags = JSON.parse(doc.metadata || '[]');
                if (!Array.isArray(tags))
                    tags = [];
            }
            catch {
                tags = [];
            }
            if (doc.title && tags.length > 0)
                completeDocs++;
            tags.forEach((t) => {
                tagCounts[t] = (tagCounts[t] || 0) + 1;
            });
            // Tracking recent changes (last 5)
            recent.push(doc.path);
        });
        const healthScore = docs.length > 0 ? (completeDocs / docs.length) * 100 : 0;
        // 2. Extract Top 5 Tags
        const topTags = Object.entries(tagCounts)
            .sort((a, b) => b[1] - a[1])
            .slice(0, 5)
            .map(e => e[0]);
        const pulse = {
            timestamp: now,
            healthScore: Math.round(healthScore),
            topTags,
            activeEntities: [], // Placeholder for entity mapping upgrade
            recentChanges: recent.slice(-5).reverse()
        };
        this.cache = {
            data: pulse,
            expiry: now + this.CACHE_TTL
        };
        return pulse;
    }
    /**
     * Public method to invalidate cache manually (used by WatchService)
     */
    flushCache() {
        this.cache = null;
    }
}
export const samplingService = new SamplingService();
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
/** A single schema violation or missing-file problem in a project. */
export interface ValidationIssue {
    project: string;
    file: string;
    message: string;
    details?: any;
}
/** Aggregate outcome of a whole-workspace validation run. */
export interface ValidationResult {
    valid: boolean;
    issues: ValidationIssue[];
    totalProjects: number;
}
/** Validates workspace markdown files against JSON schemas. */
export declare class ValidationService {
    private validFiles;
    /**
     * Recursively validates the entire workspace against JSON schemas.
     */
    validateWorkspace(): Promise<ValidationResult>;
    /**
     * Validates a single file against its schema based on mapping rules.
     */
    validateFile(filePath: string): Promise<{
        valid: boolean;
        issues: string[];
    }>;
    /**
     * Robustly extract metadata from a markdown file.
     * Prioritizes Frontmatter, falls back to Section mapping.
     */
    extractMetadata(content: string): any;
}
/** Module-level singleton. */
export declare const validationService: ValidationService;
|
|
@@ -0,0 +1,153 @@
|
|
|
1
|
+
import fs from "fs-extra";
import path from "path";
import AjvModule from "ajv";
import addFormatsModule from "ajv-formats";
import fm from "front-matter";
import { getWorkspaceRoot } from "../context.js";
// CJS/ESM interop: these packages may expose their API on `.default`.
const Ajv = AjvModule.default || AjvModule;
const addFormats = addFormatsModule.default || addFormatsModule;
const ajv = new Ajv({ allErrors: true }); // collect every violation, not just the first
addFormats(ajv); // enable "format" keywords (date, uri, ...)
/**
 * Validates workspace markdown files against the JSON schemas stored in
 * packages/core/schemas.
 */
export class ValidationService {
    // Known workspace files and the schema each must satisfy.
    // Entries with `required: true` produce an issue when missing.
    validFiles = [
        { name: "SOUL.md", schema: "soul.schema.json", required: true },
        { name: "CONTEXT.md", schema: "context.schema.json", required: true },
        { name: "memory.md", schema: "memory.schema.json" },
        { name: "decisions.md", schema: "decision.schema.json" }
    ];
    /**
     * Recursively validates the entire workspace against JSON schemas.
     * Scans every directory under projects/ plus workspace-starter.
     * @returns {Promise<{valid: boolean, issues: Array, totalProjects: number}>}
     */
    async validateWorkspace() {
        const workspaceRoot = getWorkspaceRoot();
        const schemasDir = path.join(workspaceRoot, "packages", "core", "schemas");
        const projectsDir = path.join(workspaceRoot, "projects");
        const starterDir = path.join(workspaceRoot, "workspace-starter");
        const issues = [];
        const projectPaths = [];
        // 1. Collect all project paths
        if (await fs.pathExists(projectsDir)) {
            const projects = await fs.readdir(projectsDir);
            for (const p of projects) {
                const fullPath = path.join(projectsDir, p);
                if ((await fs.stat(fullPath)).isDirectory()) {
                    projectPaths.push(fullPath);
                }
            }
        }
        if (await fs.pathExists(starterDir)) {
            projectPaths.push(starterDir);
        }
        // 2. Validate each project
        for (const projectPath of projectPaths) {
            const projectName = path.basename(projectPath);
            const filesInDir = await fs.readdir(projectPath);
            for (const config of this.validFiles) {
                // Match expected file names case-insensitively.
                const fileName = filesInDir.find(f => f.toLowerCase() === config.name.toLowerCase());
                if (!fileName) {
                    if (config.required) {
                        issues.push({ project: projectName, file: config.name, message: "Missing required file" });
                    }
                    continue;
                }
                const filePath = path.join(projectPath, fileName);
                const schemaPath = path.join(schemasDir, config.schema);
                try {
                    const schema = await fs.readJson(schemaPath);
                    const validate = ajv.compile(schema);
                    const content = await fs.readFile(filePath, "utf-8");
                    const data = this.extractMetadata(content);
                    if (!validate(data)) {
                        // One issue per Ajv error so callers see every violation.
                        validate.errors?.forEach((err) => {
                            issues.push({
                                project: projectName,
                                file: fileName,
                                message: `${err.instancePath || 'root'} ${err.message}`,
                                details: err
                            });
                        });
                    }
                }
                catch (error) {
                    // Missing/unreadable schema or file is reported as an issue,
                    // not a thrown error, so the scan continues.
                    issues.push({ project: projectName, file: fileName, message: `Validation failed: ${error.message}` });
                }
            }
        }
        return {
            valid: issues.length === 0,
            issues,
            totalProjects: projectPaths.length
        };
    }
    /**
     * Validates a single file against its schema based on mapping rules.
     * Files with no schema mapping (or a missing schema) are treated as valid.
     * @param {string} filePath - absolute or workspace-relative path to the file
     * @returns {Promise<{valid: boolean, issues: string[]}>}
     */
    async validateFile(filePath) {
        const fileName = path.basename(filePath);
        const workspaceRoot = getWorkspaceRoot();
        const schemasDir = path.join(workspaceRoot, "packages", "core", "schemas");
        // Find matching config
        const config = this.validFiles.find(v => path.basename(filePath).toLowerCase() === v.name.toLowerCase());
        if (!config)
            return { valid: true, issues: [] }; // No schema for this file type
        const schemaPath = path.join(schemasDir, config.schema);
        if (!await fs.pathExists(schemaPath))
            return { valid: true, issues: [] };
        const schema = await fs.readJson(schemaPath);
        const validate = ajv.compile(schema);
        const content = await fs.readFile(filePath, "utf-8");
        const data = this.extractMetadata(content);
        const issues = [];
        if (!validate(data)) {
            validate.errors?.forEach((err) => {
                issues.push(`${err.instancePath || 'root'} ${err.message}`);
            });
        }
        return { valid: issues.length === 0, issues };
    }
    /**
     * Robustly extract metadata from a markdown file.
     * Prioritizes Frontmatter, falls back to Section mapping.
     * Precedence: frontmatter attributes, then the first H1 as `title`
     * (only when not already set), then `## Section` bodies keyed by
     * section title (only for keys still empty/missing).
     * @param {string} content - raw markdown text
     * @returns {any} plain object of extracted fields
     */
    extractMetadata(content) {
        let data = {};
        const trimmed = content.trim();
        // 1. Try Frontmatter
        try {
            const parse = fm.default || fm;
            if (trimmed.startsWith("---")) {
                const parsed = parse(content);
                data = parsed.attributes || {};
            }
        }
        catch (e) {
            // Silently continue
        }
        // 2. Try H1 Header as Title if not set
        if (!data.title || data.title === "") {
            const h1Match = trimmed.match(/^#\s+(.*)$/m);
            if (h1Match) {
                data.title = h1Match[1].trim();
            }
        }
        // 3. Map Sections (## Header)
        const sections = content.split(/^## /m).slice(1);
        sections.forEach(s => {
            const lines = s.split("\n");
            const title = lines[0].trim();
            const body = lines.slice(1).join("\n").trim();
            if (!data[title] || (Array.isArray(data[title]) && data[title].length === 0)) {
                // List-style sections become string arrays (one entry per bullet);
                // everything else is kept as the raw section body.
                if (["Core Principles", "Behavioral Rules", "Goals", "Capabilities", "Constraints", "Tags", "Active Tasks", "Backlog"].includes(title)) {
                    data[title] = body.split("\n")
                        .map(l => l.replace(/^[-*]\s*/, "").trim())
                        .filter(l => l.length > 0);
                }
                else {
                    data[title] = body;
                }
            }
        });
        return data;
    }
}
export const validationService = new ValidationService();
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
/** Watches workspace buckets for markdown changes and keeps the index fresh. */
export declare class WatchService {
    private watchers;
    private debouncers;
    private DEBOUNCE_MS;
    /**
     * Starts watching the allowed buckets for changes.
     */
    start(): void;
    /**
     * Stops all active watchers.
     */
    stop(): void;
    private debounceChange;
    private handleEvent;
}
/** Module-level singleton. */
export declare const watchService: WatchService;
|
|
@@ -0,0 +1,74 @@
|
|
|
1
|
+
import fs from 'node:fs';
import path from 'node:path';
import { workspaceRoot, ALLOWED_BUCKETS } from '../context.js';
import { globalIndexer } from '../indexer.js';
import { samplingService } from './sampling.js';
/**
 * Watches the allowed workspace buckets for markdown changes and keeps
 * the global index (and the sampling cache) up to date.
 */
export class WatchService {
    watchers = [];          // active fs.FSWatcher instances
    debouncers = new Map(); // filePath -> pending debounce timeout handle
    DEBOUNCE_MS = 300;      // editors often emit several events per save
    /**
     * Starts watching the allowed buckets for changes.
     */
    start() {
        console.log('📡 Starting ContextOS Watch Service...');
        for (const bucket of ALLOWED_BUCKETS) {
            const bucketPath = path.join(workspaceRoot, bucket);
            if (!fs.existsSync(bucketPath))
                continue;
            try {
                // On macOS, recursive: true is supported and very efficient
                const watcher = fs.watch(bucketPath, { recursive: true }, (eventType, filename) => {
                    // Only markdown files are indexed; ignore everything else.
                    if (!filename || !filename.endsWith('.md'))
                        return;
                    const fullPath = path.join(bucketPath, filename);
                    this.debounceChange(fullPath);
                });
                this.watchers.push(watcher);
                console.log(` - Watching [${bucket}]`);
            }
            catch (error) {
                console.error(` - Failed to watch [${bucket}]:`, error);
            }
        }
    }
    /**
     * Stops all active watchers.
     */
    stop() {
        for (const watcher of this.watchers) {
            watcher.close();
        }
        this.watchers = [];
        // FIX: also cancel pending debounce timers so no handleEvent fires
        // after the service has been stopped.
        for (const timeout of this.debouncers.values()) {
            clearTimeout(timeout);
        }
        this.debouncers.clear();
        console.log('🛑 Watch Service stopped.');
    }
    /**
     * Coalesces rapid-fire events for the same file into a single
     * handleEvent call after DEBOUNCE_MS of quiet.
     */
    debounceChange(filePath) {
        if (this.debouncers.has(filePath)) {
            clearTimeout(this.debouncers.get(filePath));
        }
        const timeout = setTimeout(async () => {
            this.debouncers.delete(filePath);
            await this.handleEvent(filePath);
        }, this.DEBOUNCE_MS);
        this.debouncers.set(filePath, timeout);
    }
    /**
     * Re-indexes a changed file, or removes it from the index when the
     * file no longer exists, then invalidates the sampling cache.
     */
    async handleEvent(filePath) {
        const relativePath = path.relative(workspaceRoot, filePath);
        try {
            if (fs.existsSync(filePath)) {
                console.log(`📝 Change detected: ${relativePath}`);
                await globalIndexer.indexFile(filePath);
            }
            else {
                console.log(`🗑️ Deletion detected: ${relativePath}`);
                await globalIndexer.removeFile(relativePath);
            }
            // Invalidate Sampling cache to ensure pulse is fresh
            samplingService.flushCache();
        }
        catch (error) {
            console.error(`❌ Watch error for ${relativePath}:`, error);
        }
    }
}
export const watchService = new WatchService();
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
/** High-level workspace operations: syncing metadata and scaffolding projects. */
export declare class WorkspaceService {
    /**
     * Synchronizes workspace context and updates metadata.
     */
    sync(project?: string, options?: {
        force?: boolean;
    }): Promise<{
        success: boolean;
        message: string;
    }>;
    /**
     * Initializes a new workspace or project.
     */
    init(projectName: string, options?: {
        template?: string;
    }): Promise<void>;
}
/** Module-level singleton. */
export declare const workspaceService: WorkspaceService;
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
import fs from "fs-extra";
import path from "path";
import { getWorkspaceRoot } from "../context.js";
import { globalIndexer } from "../indexer.js";
/**
 * High-level workspace operations: syncing project metadata and
 * (eventually) initializing new projects.
 */
export class WorkspaceService {
    /**
     * Synchronizes workspace context and updates metadata.
     * When a project is named, stamps its memory.md with a "Last Sync"
     * note; in all successful cases the intelligence index is refreshed.
     */
    async sync(project, options = {}) {
        const root = getWorkspaceRoot();
        const today = new Date().toISOString().split("T")[0];
        if (project) {
            const memoryPath = path.join(root, "projects", project, "memory.md");
            if (!(await fs.pathExists(memoryPath))) {
                return { success: false, message: `Memory file not found for ${project}` };
            }
            const stamp = `\n> [!NOTE]\n> Last Sync: ${today} ${new Date().toLocaleTimeString()}\n`;
            let body = await fs.readFile(memoryPath, "utf-8");
            if (body.includes("Last Sync:")) {
                // A stamp already exists — rewrite it in place.
                body = body.replace(/> \[!NOTE\]\n> Last Sync: .*/, stamp.trim());
                await fs.writeFile(memoryPath, body);
            }
            else {
                // First sync for this file — append a fresh stamp.
                await fs.appendFile(memoryPath, stamp);
            }
        }
        // Auto-refresh the intelligence index as part of sync (v1.4: Incremental by default)
        await globalIndexer.reindex({ force: options.force });
        const message = project
            ? `Synced memory for ${project} and refreshed index (incremental).`
            : `Workspace indexed (${options.force ? 'full re-scan' : 'incremental'}).`;
        return { success: true, message };
    }
    /**
     * Initializes a new workspace or project.
     */
    async init(projectName, options = {}) {
        // Logic for init will go here in Phase 2
    }
}
export const workspaceService = new WorkspaceService();
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Empty export marks this declaration file as a module (the test file exports nothing).
export {};
|