@pleaseai/context-please-core 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +24 -0
- package/README.md +287 -0
- package/dist/.tsbuildinfo +1 -0
- package/dist/context.d.ts +276 -0
- package/dist/context.d.ts.map +1 -0
- package/dist/context.js +1072 -0
- package/dist/context.js.map +1 -0
- package/dist/embedding/base-embedding.d.ts +51 -0
- package/dist/embedding/base-embedding.d.ts.map +1 -0
- package/dist/embedding/base-embedding.js +36 -0
- package/dist/embedding/base-embedding.js.map +1 -0
- package/dist/embedding/gemini-embedding.d.ts +53 -0
- package/dist/embedding/gemini-embedding.d.ts.map +1 -0
- package/dist/embedding/gemini-embedding.js +152 -0
- package/dist/embedding/gemini-embedding.js.map +1 -0
- package/dist/embedding/index.d.ts +6 -0
- package/dist/embedding/index.d.ts.map +1 -0
- package/dist/embedding/index.js +24 -0
- package/dist/embedding/index.js.map +1 -0
- package/dist/embedding/ollama-embedding.d.ts +55 -0
- package/dist/embedding/ollama-embedding.d.ts.map +1 -0
- package/dist/embedding/ollama-embedding.js +192 -0
- package/dist/embedding/ollama-embedding.js.map +1 -0
- package/dist/embedding/openai-embedding.d.ts +36 -0
- package/dist/embedding/openai-embedding.d.ts.map +1 -0
- package/dist/embedding/openai-embedding.js +159 -0
- package/dist/embedding/openai-embedding.js.map +1 -0
- package/dist/embedding/voyageai-embedding.d.ts +44 -0
- package/dist/embedding/voyageai-embedding.d.ts.map +1 -0
- package/dist/embedding/voyageai-embedding.js +227 -0
- package/dist/embedding/voyageai-embedding.js.map +1 -0
- package/dist/index.d.ts +8 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +24 -0
- package/dist/index.js.map +1 -0
- package/dist/splitter/ast-splitter.d.ts +22 -0
- package/dist/splitter/ast-splitter.d.ts.map +1 -0
- package/dist/splitter/ast-splitter.js +234 -0
- package/dist/splitter/ast-splitter.js.map +1 -0
- package/dist/splitter/index.d.ts +41 -0
- package/dist/splitter/index.d.ts.map +1 -0
- package/dist/splitter/index.js +27 -0
- package/dist/splitter/index.js.map +1 -0
- package/dist/splitter/langchain-splitter.d.ts +13 -0
- package/dist/splitter/langchain-splitter.d.ts.map +1 -0
- package/dist/splitter/langchain-splitter.js +118 -0
- package/dist/splitter/langchain-splitter.js.map +1 -0
- package/dist/sync/merkle.d.ts +26 -0
- package/dist/sync/merkle.d.ts.map +1 -0
- package/dist/sync/merkle.js +112 -0
- package/dist/sync/merkle.js.map +1 -0
- package/dist/sync/synchronizer.d.ts +30 -0
- package/dist/sync/synchronizer.d.ts.map +1 -0
- package/dist/sync/synchronizer.js +339 -0
- package/dist/sync/synchronizer.js.map +1 -0
- package/dist/types.d.ts +14 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +3 -0
- package/dist/types.js.map +1 -0
- package/dist/utils/env-manager.d.ts +19 -0
- package/dist/utils/env-manager.d.ts.map +1 -0
- package/dist/utils/env-manager.js +125 -0
- package/dist/utils/env-manager.js.map +1 -0
- package/dist/utils/index.d.ts +2 -0
- package/dist/utils/index.d.ts.map +1 -0
- package/dist/utils/index.js +7 -0
- package/dist/utils/index.js.map +1 -0
- package/dist/vectordb/base/base-vector-database.d.ts +58 -0
- package/dist/vectordb/base/base-vector-database.d.ts.map +1 -0
- package/dist/vectordb/base/base-vector-database.js +32 -0
- package/dist/vectordb/base/base-vector-database.js.map +1 -0
- package/dist/vectordb/factory.d.ts +80 -0
- package/dist/vectordb/factory.d.ts.map +1 -0
- package/dist/vectordb/factory.js +89 -0
- package/dist/vectordb/factory.js.map +1 -0
- package/dist/vectordb/index.d.ts +12 -0
- package/dist/vectordb/index.d.ts.map +1 -0
- package/dist/vectordb/index.js +27 -0
- package/dist/vectordb/index.js.map +1 -0
- package/dist/vectordb/milvus-restful-vectordb.d.ts +75 -0
- package/dist/vectordb/milvus-restful-vectordb.d.ts.map +1 -0
- package/dist/vectordb/milvus-restful-vectordb.js +707 -0
- package/dist/vectordb/milvus-restful-vectordb.js.map +1 -0
- package/dist/vectordb/milvus-vectordb.d.ts +59 -0
- package/dist/vectordb/milvus-vectordb.d.ts.map +1 -0
- package/dist/vectordb/milvus-vectordb.js +641 -0
- package/dist/vectordb/milvus-vectordb.js.map +1 -0
- package/dist/vectordb/qdrant-vectordb.d.ts +124 -0
- package/dist/vectordb/qdrant-vectordb.d.ts.map +1 -0
- package/dist/vectordb/qdrant-vectordb.js +582 -0
- package/dist/vectordb/qdrant-vectordb.js.map +1 -0
- package/dist/vectordb/sparse/index.d.ts +4 -0
- package/dist/vectordb/sparse/index.d.ts.map +1 -0
- package/dist/vectordb/sparse/index.js +23 -0
- package/dist/vectordb/sparse/index.js.map +1 -0
- package/dist/vectordb/sparse/simple-bm25.d.ts +104 -0
- package/dist/vectordb/sparse/simple-bm25.d.ts.map +1 -0
- package/dist/vectordb/sparse/simple-bm25.js +189 -0
- package/dist/vectordb/sparse/simple-bm25.js.map +1 -0
- package/dist/vectordb/sparse/sparse-vector-generator.d.ts +54 -0
- package/dist/vectordb/sparse/sparse-vector-generator.d.ts.map +1 -0
- package/dist/vectordb/sparse/sparse-vector-generator.js +3 -0
- package/dist/vectordb/sparse/sparse-vector-generator.js.map +1 -0
- package/dist/vectordb/sparse/types.d.ts +38 -0
- package/dist/vectordb/sparse/types.d.ts.map +1 -0
- package/dist/vectordb/sparse/types.js +3 -0
- package/dist/vectordb/sparse/types.js.map +1 -0
- package/dist/vectordb/types.d.ts +120 -0
- package/dist/vectordb/types.d.ts.map +1 -0
- package/dist/vectordb/types.js +9 -0
- package/dist/vectordb/types.js.map +1 -0
- package/dist/vectordb/zilliz-utils.d.ts +135 -0
- package/dist/vectordb/zilliz-utils.d.ts.map +1 -0
- package/dist/vectordb/zilliz-utils.js +192 -0
- package/dist/vectordb/zilliz-utils.js.map +1 -0
- package/package.json +61 -0
@@ -0,0 +1,118 @@
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.LangChainCodeSplitter = void 0;
const text_splitter_1 = require("langchain/text_splitter");
/**
 * Splits source code into overlapping chunks using LangChain's
 * RecursiveCharacterTextSplitter. A language-aware splitter is used when
 * the language is supported (see mapLanguage); otherwise, or on any
 * splitter error, the generic character splitter is used as a fallback.
 */
class LangChainCodeSplitter {
    /**
     * @param {number} [chunkSize] - Max characters per chunk (default 1000).
     * @param {number} [chunkOverlap] - Overlap between chunks (default 200).
     * A falsy argument (0 or undefined) keeps the default.
     */
    constructor(chunkSize, chunkOverlap) {
        this.chunkSize = 1000;
        this.chunkOverlap = 200;
        if (chunkSize)
            this.chunkSize = chunkSize;
        if (chunkOverlap)
            this.chunkOverlap = chunkOverlap;
    }
    /**
     * Split `code` into chunks annotated with 1-based start/end lines.
     * @param {string} code - Source text to split.
     * @param {string} language - Language name (free-form; mapped internally).
     * @param {string} [filePath] - Origin path, copied into chunk metadata.
     * @returns {Promise<Array>} chunks of { content, metadata: { startLine, endLine, language, filePath } }.
     */
    async split(code, language, filePath) {
        try {
            // Create language-specific splitter
            const mappedLanguage = this.mapLanguage(language);
            if (mappedLanguage) {
                const splitter = text_splitter_1.RecursiveCharacterTextSplitter.fromLanguage(mappedLanguage, {
                    chunkSize: this.chunkSize,
                    chunkOverlap: this.chunkOverlap,
                });
                // Split code
                const documents = await splitter.createDocuments([code]);
                // Convert to CodeChunk format. LangChain records the source
                // line range under metadata.loc.lines; default to 1..1 when absent.
                return documents.map((doc) => {
                    const lines = doc.metadata?.loc?.lines || { from: 1, to: 1 };
                    return {
                        content: doc.pageContent,
                        metadata: {
                            startLine: lines.from,
                            endLine: lines.to,
                            language,
                            filePath,
                        },
                    };
                });
            }
            else {
                // If language is not supported, use generic splitter directly
                return this.fallbackSplit(code, language, filePath);
            }
        }
        catch (error) {
            console.error('[LangChainSplitter] ❌ Error splitting code:', error);
            // If specific language splitting fails, use generic splitter
            return this.fallbackSplit(code, language, filePath);
        }
    }
    /** @param {number} chunkSize - New max characters per chunk. */
    setChunkSize(chunkSize) {
        this.chunkSize = chunkSize;
    }
    /** @param {number} chunkOverlap - New overlap between chunks. */
    setChunkOverlap(chunkOverlap) {
        this.chunkOverlap = chunkOverlap;
    }
    /**
     * Map common language names to the identifiers accepted by LangChain's
     * fromLanguage(). Returns null when the language is unsupported, which
     * routes callers to the generic fallback splitter.
     */
    mapLanguage(language) {
        // Map common language names to LangChain supported formats
        const languageMap = {
            'javascript': 'js',
            'typescript': 'js',
            'python': 'python',
            'java': 'java',
            'cpp': 'cpp',
            'c++': 'cpp',
            'c': 'cpp',
            'go': 'go',
            'rust': 'rust',
            'php': 'php',
            'ruby': 'ruby',
            'swift': 'swift',
            'scala': 'scala',
            'html': 'html',
            'markdown': 'markdown',
            'md': 'markdown',
            'latex': 'latex',
            'tex': 'latex',
            'solidity': 'sol',
            'sol': 'sol',
        };
        return languageMap[language.toLowerCase()] || null;
    }
    /**
     * Generic (language-agnostic) splitter used as fallback. Line numbers
     * are estimated by locating each chunk in the original text since the
     * generic splitter does not attach loc metadata here.
     */
    async fallbackSplit(code, language, filePath) {
        const splitter = new text_splitter_1.RecursiveCharacterTextSplitter({
            chunkSize: this.chunkSize,
            chunkOverlap: this.chunkOverlap,
        });
        const documents = await splitter.createDocuments([code]);
        return documents.map((doc) => {
            const lines = this.estimateLines(doc.pageContent, code);
            return {
                content: doc.pageContent,
                metadata: {
                    startLine: lines.start,
                    endLine: lines.end,
                    language,
                    filePath,
                },
            };
        });
    }
    /**
     * Estimate 1-based start/end line numbers of `chunk` within
     * `originalCode` via its first occurrence. Falls back to lines
     * 1..chunkLineCount when the chunk text is not found verbatim.
     * NOTE: repeated chunk text maps to the FIRST occurrence.
     */
    estimateLines(chunk, originalCode) {
        const chunkLines = chunk.split('\n');
        // Find chunk position in original code
        const chunkStart = originalCode.indexOf(chunk);
        if (chunkStart === -1) {
            return { start: 1, end: chunkLines.length };
        }
        const beforeChunk = originalCode.substring(0, chunkStart);
        const startLine = beforeChunk.split('\n').length;
        const endLine = startLine + chunkLines.length - 1;
        return { start: startLine, end: endLine };
    }
}
exports.LangChainCodeSplitter = LangChainCodeSplitter;
//# sourceMappingURL=langchain-splitter.js.map
|
@@ -0,0 +1 @@
|
|
1
|
+
{"version":3,"file":"langchain-splitter.js","sourceRoot":"","sources":["../../src/splitter/langchain-splitter.ts"],"names":[],"mappings":";;;AAAA,2DAAyE;AAMzE,MAAa,qBAAqB;IAI9B,YAAY,SAAkB,EAAE,YAAqB;QAH7C,cAAS,GAAW,IAAI,CAAC;QACzB,iBAAY,GAAW,GAAG,CAAC;QAG/B,IAAI,SAAS;YAAE,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;QAC1C,IAAI,YAAY;YAAE,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;IACvD,CAAC;IAED,KAAK,CAAC,KAAK,CAAC,IAAY,EAAE,QAAgB,EAAE,QAAiB;QACzD,IAAI,CAAC;YACD,oCAAoC;YACpC,MAAM,cAAc,GAAG,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC;YAClD,IAAI,cAAc,EAAE,CAAC;gBACjB,MAAM,QAAQ,GAAG,8CAA8B,CAAC,YAAY,CACxD,cAAc,EACd;oBACI,SAAS,EAAE,IAAI,CAAC,SAAS;oBACzB,YAAY,EAAE,IAAI,CAAC,YAAY;iBAClC,CACJ,CAAC;gBAEF,aAAa;gBACb,MAAM,SAAS,GAAG,MAAM,QAAQ,CAAC,eAAe,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;gBAEzD,8BAA8B;gBAC9B,OAAO,SAAS,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,KAAK,EAAE,EAAE;oBAChC,MAAM,KAAK,GAAG,GAAG,CAAC,QAAQ,EAAE,GAAG,EAAE,KAAK,IAAI,EAAE,IAAI,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC;oBAC7D,OAAO;wBACH,OAAO,EAAE,GAAG,CAAC,WAAW;wBACxB,QAAQ,EAAE;4BACN,SAAS,EAAE,KAAK,CAAC,IAAI;4BACrB,OAAO,EAAE,KAAK,CAAC,EAAE;4BACjB,QAAQ;4BACR,QAAQ;yBACX;qBACJ,CAAC;gBACN,CAAC,CAAC,CAAC;YACP,CAAC;iBAAM,CAAC;gBACJ,8DAA8D;gBAC9D,OAAO,IAAI,CAAC,aAAa,CAAC,IAAI,EAAE,QAAQ,EAAE,QAAQ,CAAC,CAAC;YACxD,CAAC;QACL,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACb,OAAO,CAAC,KAAK,CAAC,6CAA6C,EAAE,KAAK,CAAC,CAAC;YACpE,6DAA6D;YAC7D,OAAO,IAAI,CAAC,aAAa,CAAC,IAAI,EAAE,QAAQ,EAAE,QAAQ,CAAC,CAAC;QACxD,CAAC;IACL,CAAC;IAED,YAAY,CAAC,SAAiB;QAC1B,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;IAC/B,CAAC;IAED,eAAe,CAAC,YAAoB;QAChC,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;IACrC,CAAC;IAEO,WAAW,CAAC,QAAgB;QAChC,2DAA2D;QAC3D,MAAM,WAAW,GAAsC;YACnD,YAAY,EAAE,IAAI;YAClB,YAAY,EAAE,IAAI;YAClB,QAAQ,EAAE,QAAQ;YAClB,MAAM,EAAE,MAAM;YACd,KAAK,EAAE,KAAK;YACZ,KAAK,EAAE,KAAK;YACZ,GAAG,EAAE,KAAK;YACV,IAAI,EAAE,IAAI;YACV,MAAM,EAAE,MAAM;YACd,KAAK,EAAE,KAAK;YACZ,MAAM,EAAE,MAAM;YACd,OAAO,EAAE,OAAO;YAChB,OAAO,EAAE,OAAO;YAChB,MAAM,EAAE,MAAM;YACd,UAAU,EAAE,UAAU;YACtB,IAAI,EAAE,UAAU;YAChB,OAAO,EAAE,OAAO;YAChB,KAAK,EAAE,OAAO;YACd,UAAU,EAAE,KAAK;YACj
B,KAAK,EAAE,KAAK;SACf,CAAC;QAEF,OAAO,WAAW,CAAC,QAAQ,CAAC,WAAW,EAAE,CAAC,IAAI,IAAI,CAAC;IACvD,CAAC;IAEO,KAAK,CAAC,aAAa,CAAC,IAAY,EAAE,QAAgB,EAAE,QAAiB;QACzE,+BAA+B;QAC/B,MAAM,QAAQ,GAAG,IAAI,8CAA8B,CAAC;YAChD,SAAS,EAAE,IAAI,CAAC,SAAS;YACzB,YAAY,EAAE,IAAI,CAAC,YAAY;SAClC,CAAC,CAAC;QAEH,MAAM,SAAS,GAAG,MAAM,QAAQ,CAAC,eAAe,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;QAEzD,OAAO,SAAS,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,KAAK,EAAE,EAAE;YAChC,MAAM,KAAK,GAAG,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,WAAW,EAAE,IAAI,CAAC,CAAC;YACxD,OAAO;gBACH,OAAO,EAAE,GAAG,CAAC,WAAW;gBACxB,QAAQ,EAAE;oBACN,SAAS,EAAE,KAAK,CAAC,KAAK;oBACtB,OAAO,EAAE,KAAK,CAAC,GAAG;oBAClB,QAAQ;oBACR,QAAQ;iBACX;aACJ,CAAC;QACN,CAAC,CAAC,CAAC;IACP,CAAC;IAEO,aAAa,CAAC,KAAa,EAAE,YAAoB;QACrD,gCAAgC;QAChC,MAAM,SAAS,GAAG,YAAY,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;QAC3C,MAAM,UAAU,GAAG,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;QAErC,uCAAuC;QACvC,MAAM,UAAU,GAAG,YAAY,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;QAC/C,IAAI,UAAU,KAAK,CAAC,CAAC,EAAE,CAAC;YACpB,OAAO,EAAE,KAAK,EAAE,CAAC,EAAE,GAAG,EAAE,UAAU,CAAC,MAAM,EAAE,CAAC;QAChD,CAAC;QAED,MAAM,WAAW,GAAG,YAAY,CAAC,SAAS,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC;QAC1D,MAAM,SAAS,GAAG,WAAW,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,MAAM,CAAC;QACjD,MAAM,OAAO,GAAG,SAAS,GAAG,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC;QAElD,OAAO,EAAE,KAAK,EAAE,SAAS,EAAE,GAAG,EAAE,OAAO,EAAE,CAAC;IAC9C,CAAC;CACJ;AA7HD,sDA6HC"}
|
@@ -0,0 +1,26 @@
|
|
1
|
+
/** A single content-addressed node in the Merkle DAG. */
export interface MerkleDAGNode {
    /** Node identifier — the SHA-256 hex digest of `data` (see merkle.js). */
    id: string;
    /** Content hash; same value as `id` in the current implementation. */
    hash: string;
    /** Raw string payload the hash was computed from. */
    data: string;
    /** Ids of parent nodes (empty for roots). */
    parents: string[];
    /** Ids of child nodes (empty for leaves). */
    children: string[];
}
/**
 * Content-addressed directed acyclic graph. Node ids are derived from
 * node data, so two nodes with identical data share one id.
 */
export declare class MerkleDAG {
    nodes: Map<string, MerkleDAGNode>;
    rootIds: string[];
    constructor();
    private hash;
    /** Add a node for `data`; links it under `parentId` when given. Returns the new node's id. */
    addNode(data: string, parentId?: string): string;
    getNode(nodeId: string): MerkleDAGNode | undefined;
    getAllNodes(): MerkleDAGNode[];
    getRootNodes(): MerkleDAGNode[];
    /** Nodes with no children. */
    getLeafNodes(): MerkleDAGNode[];
    /** Plain-object form suitable for JSON persistence; inverse of deserialize. */
    serialize(): any;
    static deserialize(data: any): MerkleDAG;
    /** Diff two DAGs by node id: ids only in dag2 are `added`, only in dag1 are `removed`, shared ids with differing data are `modified`. */
    static compare(dag1: MerkleDAG, dag2: MerkleDAG): {
        added: string[];
        removed: string[];
        modified: string[];
    };
}
//# sourceMappingURL=merkle.d.ts.map
|
@@ -0,0 +1 @@
|
|
1
|
+
{"version":3,"file":"merkle.d.ts","sourceRoot":"","sources":["../../src/sync/merkle.ts"],"names":[],"mappings":"AAEA,MAAM,WAAW,aAAa;IAC1B,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,QAAQ,EAAE,MAAM,EAAE,CAAC;CACtB;AAED,qBAAa,SAAS;IAClB,KAAK,EAAE,GAAG,CAAC,MAAM,EAAE,aAAa,CAAC,CAAC;IAClC,OAAO,EAAE,MAAM,EAAE,CAAC;;IAOlB,OAAO,CAAC,IAAI;IAIL,OAAO,CAAC,IAAI,EAAE,MAAM,EAAE,QAAQ,CAAC,EAAE,MAAM,GAAG,MAAM;IA2BhD,OAAO,CAAC,MAAM,EAAE,MAAM,GAAG,aAAa,GAAG,SAAS;IAIlD,WAAW,IAAI,aAAa,EAAE;IAI9B,YAAY,IAAI,aAAa,EAAE;IAI/B,YAAY,IAAI,aAAa,EAAE;IAI/B,SAAS,IAAI,GAAG;WAOT,WAAW,CAAC,IAAI,EAAE,GAAG,GAAG,SAAS;WAOjC,OAAO,CAAC,IAAI,EAAE,SAAS,EAAE,IAAI,EAAE,SAAS,GAAG;QAAE,KAAK,EAAE,MAAM,EAAE,CAAC;QAAC,OAAO,EAAE,MAAM,EAAE,CAAC;QAAC,QAAQ,EAAE,MAAM,EAAE,CAAA;KAAE;CAkBtH"}
|
@@ -0,0 +1,112 @@
|
|
1
|
+
"use strict";
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
3
|
+
if (k2 === undefined) k2 = k;
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
7
|
+
}
|
8
|
+
Object.defineProperty(o, k2, desc);
|
9
|
+
}) : (function(o, m, k, k2) {
|
10
|
+
if (k2 === undefined) k2 = k;
|
11
|
+
o[k2] = m[k];
|
12
|
+
}));
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
15
|
+
}) : function(o, v) {
|
16
|
+
o["default"] = v;
|
17
|
+
});
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
19
|
+
var ownKeys = function(o) {
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
21
|
+
var ar = [];
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
23
|
+
return ar;
|
24
|
+
};
|
25
|
+
return ownKeys(o);
|
26
|
+
};
|
27
|
+
return function (mod) {
|
28
|
+
if (mod && mod.__esModule) return mod;
|
29
|
+
var result = {};
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
31
|
+
__setModuleDefault(result, mod);
|
32
|
+
return result;
|
33
|
+
};
|
34
|
+
})();
|
35
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
36
|
+
exports.MerkleDAG = void 0;
|
37
|
+
const crypto = __importStar(require("crypto"));
|
38
|
+
/**
 * Content-addressed DAG: every node's id is the SHA-256 hex digest of its
 * payload string, so identical payloads collapse to a single id.
 */
class MerkleDAG {
    constructor() {
        this.nodes = new Map();
        this.rootIds = [];
    }
    /** SHA-256 hex digest of the given string. */
    hash(data) {
        const digest = crypto.createHash('sha256');
        digest.update(data);
        return digest.digest('hex');
    }
    /**
     * Insert a node for `data`. With a known `parentId`, the new node is
     * linked as its child; without one it is recorded as a root. An unknown
     * `parentId` leaves the node unlinked (stored, but neither root nor child).
     * Returns the node's id.
     */
    addNode(data, parentId) {
        const id = this.hash(data);
        const fresh = {
            id,
            hash: id,
            data,
            parents: [],
            children: []
        };
        if (parentId) {
            // Link into the existing parent when it is present.
            const parent = this.nodes.get(parentId);
            if (parent) {
                fresh.parents.push(parentId);
                parent.children.push(id);
                this.nodes.set(parentId, parent);
            }
        }
        else {
            // No parent requested: this is a root.
            this.rootIds.push(id);
        }
        this.nodes.set(id, fresh);
        return id;
    }
    /** Look up a node by id; undefined when absent. */
    getNode(nodeId) {
        return this.nodes.get(nodeId);
    }
    /** Every stored node, in insertion order. */
    getAllNodes() {
        return [...this.nodes.values()];
    }
    /** Root nodes, skipping any dangling root ids. */
    getRootNodes() {
        const roots = [];
        for (const id of this.rootIds) {
            const node = this.nodes.get(id);
            if (node) {
                roots.push(node);
            }
        }
        return roots;
    }
    /** Nodes that have no children. */
    getLeafNodes() {
        return this.getAllNodes().filter((node) => node.children.length === 0);
    }
    /** Plain-object snapshot for persistence; inverse of deserialize. */
    serialize() {
        return {
            nodes: [...this.nodes.entries()],
            rootIds: this.rootIds
        };
    }
    /** Rebuild a DAG from a serialize() snapshot. */
    static deserialize(data) {
        const dag = new MerkleDAG();
        dag.nodes = new Map(data.nodes);
        dag.rootIds = data.rootIds;
        return dag;
    }
    /**
     * Diff two DAGs by node id: ids only in dag2 are `added`, ids only in
     * dag1 are `removed`, shared ids whose data differs are `modified`.
     */
    static compare(dag1, dag2) {
        const left = new Map(dag1.getAllNodes().map((n) => [n.id, n]));
        const right = new Map(dag2.getAllNodes().map((n) => [n.id, n]));
        const added = [];
        for (const id of right.keys()) {
            if (!left.has(id)) {
                added.push(id);
            }
        }
        const removed = [];
        for (const id of left.keys()) {
            if (!right.has(id)) {
                removed.push(id);
            }
        }
        const modified = [];
        for (const [id, mine] of left) {
            const theirs = right.get(id);
            if (theirs && mine.data !== theirs.data) {
                modified.push(id);
            }
        }
        return { added, removed, modified };
    }
}
|
111
|
+
exports.MerkleDAG = MerkleDAG;
|
112
|
+
//# sourceMappingURL=merkle.js.map
|
@@ -0,0 +1 @@
|
|
1
|
+
{"version":3,"file":"merkle.js","sourceRoot":"","sources":["../../src/sync/merkle.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,+CAAiC;AAUjC,MAAa,SAAS;IAIlB;QACI,IAAI,CAAC,KAAK,GAAG,IAAI,GAAG,EAAE,CAAC;QACvB,IAAI,CAAC,OAAO,GAAG,EAAE,CAAC;IACtB,CAAC;IAEO,IAAI,CAAC,IAAY;QACrB,OAAO,MAAM,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;IAClE,CAAC;IAEM,OAAO,CAAC,IAAY,EAAE,QAAiB;QAC1C,MAAM,MAAM,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QAC/B,MAAM,IAAI,GAAkB;YACxB,EAAE,EAAE,MAAM;YACV,IAAI,EAAE,MAAM;YACZ,IAAI;YACJ,OAAO,EAAE,EAAE;YACX,QAAQ,EAAE,EAAE;SACf,CAAC;QAEF,+CAA+C;QAC/C,IAAI,QAAQ,EAAE,CAAC;YACX,MAAM,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC;YAC5C,IAAI,UAAU,EAAE,CAAC;gBACb,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;gBAC5B,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;gBACjC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,QAAQ,EAAE,UAAU,CAAC,CAAC;YACzC,CAAC;QACL,CAAC;aAAM,CAAC;YACJ,iCAAiC;YACjC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAC9B,CAAC;QAED,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC;QAC7B,OAAO,MAAM,CAAC;IAClB,CAAC;IAEM,OAAO,CAAC,MAAc;QACzB,OAAO,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;IAClC,CAAC;IAEM,WAAW;QACd,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,CAAC;IAC3C,CAAC;IAEM,YAAY;QACf,OAAO,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,EAAE,CAAE,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;IACvE,CAAC;IAEM,YAAY;QACf,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,QAAQ,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC;IACtF,CAAC;IAEM,SAAS;QACZ,OAAO;YACH,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE,CAAC;YACvC,OAAO,EAAE,IAAI,CAAC,OAAO;SACxB,CAAC;IACN,CAAC;IAEM,MAAM,CAAC,WAAW,CAAC,IAAS;QAC/B,MAAM,GAAG,GAAG,IAAI,SAAS,EAAE,CAAC;QAC5B,GAAG,CAAC,KAAK,GAAG,IAAI,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QAChC,GAAG,CAAC,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC;QAC3B,OAAO,GAAG,CAAC;IACf,CAAC;IAEM,MAAM,CAAC,OAAO,CAAC,IAAe,EAAE,IAAe;QACl
D,MAAM,MAAM,GAAG,IAAI,GAAG,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;QAC3E,MAAM,MAAM,GAAG,IAAI,GAAG,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;QAE3E,MAAM,KAAK,GAAG,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;QACpE,MAAM,OAAO,GAAG,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;QAEtE,iFAAiF;QACjF,MAAM,QAAQ,GAAa,EAAE,CAAC;QAC9B,KAAK,MAAM,CAAC,EAAE,EAAE,KAAK,CAAC,IAAI,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,EAAE,CAAC;YACrD,MAAM,KAAK,GAAG,MAAM,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;YAC7B,IAAI,KAAK,IAAI,KAAK,CAAC,IAAI,KAAK,KAAK,CAAC,IAAI,EAAE,CAAC;gBACrC,QAAQ,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;YACtB,CAAC;QACL,CAAC;QAED,OAAO,EAAE,KAAK,EAAE,OAAO,EAAE,QAAQ,EAAE,CAAC;IACxC,CAAC;CACJ;AAxFD,8BAwFC"}
|
@@ -0,0 +1,30 @@
|
|
1
|
+
/**
 * Detects file additions/removals/modifications under a root directory by
 * hashing files and comparing Merkle DAG snapshots between runs. Snapshots
 * are persisted per-codebase (see the companion synchronizer.js).
 */
export declare class FileSynchronizer {
    private fileHashes;
    private merkleDAG;
    private rootDir;
    private snapshotPath;
    private ignorePatterns;
    /** `ignorePatterns` are glob-style patterns (files, paths, or `dir/` forms). */
    constructor(rootDir: string, ignorePatterns?: string[]);
    private getSnapshotPath;
    private hashFile;
    private generateFileHashes;
    private shouldIgnore;
    private matchPattern;
    private simpleGlobMatch;
    private buildMerkleDAG;
    /** Load the persisted snapshot (if any) and build the in-memory DAG from it. */
    initialize(): Promise<void>;
    /** Rescan the tree and report paths changed since the last snapshot. */
    checkForChanges(): Promise<{
        added: string[];
        removed: string[];
        modified: string[];
    }>;
    private compareStates;
    /** Last recorded hash for a relative file path, if known. */
    getFileHash(filePath: string): string | undefined;
    private saveSnapshot;
    private loadSnapshot;
    /**
     * Delete snapshot file for a given codebase path
     */
    static deleteSnapshot(codebasePath: string): Promise<void>;
}
//# sourceMappingURL=synchronizer.d.ts.map
|
@@ -0,0 +1 @@
|
|
1
|
+
{"version":3,"file":"synchronizer.d.ts","sourceRoot":"","sources":["../../src/sync/synchronizer.ts"],"names":[],"mappings":"AAMA,qBAAa,gBAAgB;IACzB,OAAO,CAAC,UAAU,CAAsB;IACxC,OAAO,CAAC,SAAS,CAAY;IAC7B,OAAO,CAAC,OAAO,CAAS;IACxB,OAAO,CAAC,YAAY,CAAS;IAC7B,OAAO,CAAC,cAAc,CAAW;gBAErB,OAAO,EAAE,MAAM,EAAE,cAAc,GAAE,MAAM,EAAO;IAQ1D,OAAO,CAAC,eAAe;YAUT,QAAQ;YAUR,kBAAkB;IAwDhC,OAAO,CAAC,YAAY;IAwDpB,OAAO,CAAC,YAAY;IA6BpB,OAAO,CAAC,eAAe;IAYvB,OAAO,CAAC,cAAc;IAsBT,UAAU;IAOV,eAAe,IAAI,OAAO,CAAC;QAAE,KAAK,EAAE,MAAM,EAAE,CAAC;QAAC,OAAO,EAAE,MAAM,EAAE,CAAC;QAAC,QAAQ,EAAE,MAAM,EAAE,CAAA;KAAE,CAAC;IA0BnG,OAAO,CAAC,aAAa;IA0Bd,WAAW,CAAC,QAAQ,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS;YAI1C,YAAY;YAmBZ,YAAY;IA2B1B;;OAEG;WACU,cAAc,CAAC,YAAY,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;CAmBnE"}
|
@@ -0,0 +1,339 @@
|
|
1
|
+
"use strict";
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
3
|
+
if (k2 === undefined) k2 = k;
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
7
|
+
}
|
8
|
+
Object.defineProperty(o, k2, desc);
|
9
|
+
}) : (function(o, m, k, k2) {
|
10
|
+
if (k2 === undefined) k2 = k;
|
11
|
+
o[k2] = m[k];
|
12
|
+
}));
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
15
|
+
}) : function(o, v) {
|
16
|
+
o["default"] = v;
|
17
|
+
});
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
19
|
+
var ownKeys = function(o) {
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
21
|
+
var ar = [];
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
23
|
+
return ar;
|
24
|
+
};
|
25
|
+
return ownKeys(o);
|
26
|
+
};
|
27
|
+
return function (mod) {
|
28
|
+
if (mod && mod.__esModule) return mod;
|
29
|
+
var result = {};
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
31
|
+
__setModuleDefault(result, mod);
|
32
|
+
return result;
|
33
|
+
};
|
34
|
+
})();
|
35
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
36
|
+
exports.FileSynchronizer = void 0;
|
37
|
+
const fs = __importStar(require("fs/promises"));
|
38
|
+
const path = __importStar(require("path"));
|
39
|
+
const crypto = __importStar(require("crypto"));
|
40
|
+
const merkle_1 = require("./merkle");
|
41
|
+
const os = __importStar(require("os"));
|
42
|
+
class FileSynchronizer {
|
43
|
+
// Track per-file hashes for `rootDir` so later scans can detect added,
// removed, and modified files. `ignorePatterns` are glob-style patterns
// consumed by shouldIgnore().
constructor(rootDir, ignorePatterns = []) {
    this.rootDir = rootDir;
    // Snapshot path is derived from rootDir (stable across runs).
    this.snapshotPath = this.getSnapshotPath(rootDir);
    this.fileHashes = new Map();
    this.merkleDAG = new merkle_1.MerkleDAG();
    this.ignorePatterns = ignorePatterns;
}
|
50
|
+
getSnapshotPath(codebasePath) {
|
51
|
+
const homeDir = os.homedir();
|
52
|
+
const merkleDir = path.join(homeDir, '.context', 'merkle');
|
53
|
+
const normalizedPath = path.resolve(codebasePath);
|
54
|
+
const hash = crypto.createHash('md5').update(normalizedPath).digest('hex');
|
55
|
+
return path.join(merkleDir, `${hash}.json`);
|
56
|
+
}
|
57
|
+
async hashFile(filePath) {
|
58
|
+
// Double-check that this is actually a file, not a directory
|
59
|
+
const stat = await fs.stat(filePath);
|
60
|
+
if (stat.isDirectory()) {
|
61
|
+
throw new Error(`Attempted to hash a directory: ${filePath}`);
|
62
|
+
}
|
63
|
+
const content = await fs.readFile(filePath, 'utf-8');
|
64
|
+
return crypto.createHash('sha256').update(content).digest('hex');
|
65
|
+
}
|
66
|
+
// Recursively walk `dir` and return a Map of root-relative path -> content
// hash for every regular file that is not ignored. Unreadable directories
// or files are warned about and skipped rather than aborting the scan.
async generateFileHashes(dir) {
    const fileHashes = new Map();
    let entries;
    try {
        entries = await fs.readdir(dir, { withFileTypes: true });
    }
    catch (error) {
        console.warn(`[Synchronizer] Cannot read directory ${dir}: ${error.message}`);
        return fileHashes;
    }
    for (const entry of entries) {
        const fullPath = path.join(dir, entry.name);
        const relativePath = path.relative(this.rootDir, fullPath);
        // Check if this path should be ignored BEFORE any file system operations
        if (this.shouldIgnore(relativePath, entry.isDirectory())) {
            continue; // Skip completely - no access at all
        }
        // Double-check with fs.stat to be absolutely sure about file type
        // (dirent type can be misleading for symlinks; stat follows them).
        let stat;
        try {
            stat = await fs.stat(fullPath);
        }
        catch (error) {
            console.warn(`[Synchronizer] Cannot stat ${fullPath}: ${error.message}`);
            continue;
        }
        if (stat.isDirectory()) {
            // Verify it's really a directory and not ignored
            if (!this.shouldIgnore(relativePath, true)) {
                // Recurse and merge the subtree's hashes into this level's map.
                const subHashes = await this.generateFileHashes(fullPath);
                const entries = Array.from(subHashes.entries());
                for (let i = 0; i < entries.length; i++) {
                    const [p, h] = entries[i];
                    fileHashes.set(p, h);
                }
            }
        }
        else if (stat.isFile()) {
            // Verify it's really a file and not ignored
            if (!this.shouldIgnore(relativePath, false)) {
                try {
                    const hash = await this.hashFile(fullPath);
                    fileHashes.set(relativePath, hash);
                }
                catch (error) {
                    console.warn(`[Synchronizer] Cannot hash file ${fullPath}: ${error.message}`);
                    continue;
                }
            }
        }
        // Skip other types (symlinks, etc.)
    }
    return fileHashes;
}
|
120
|
+
// Decide whether a root-relative path should be excluded from the scan.
// Hidden entries (any dot-prefixed path component) are always ignored;
// otherwise the configured glob patterns are checked against the full
// path, each path prefix, and each individual path component.
shouldIgnore(relativePath, isDirectory = false) {
    // Always ignore hidden files and directories (starting with .)
    const pathParts = relativePath.split(path.sep);
    if (pathParts.some(part => part.startsWith('.'))) {
        return true;
    }
    if (this.ignorePatterns.length === 0) {
        return false;
    }
    // Normalize path separators and remove leading/trailing slashes
    // so patterns behave identically on Windows and POSIX.
    const normalizedPath = relativePath.replace(/\\/g, '/').replace(/^\/+|\/+$/g, '');
    if (!normalizedPath) {
        return false; // Don't ignore root
    }
    // Check direct pattern matches first
    for (const pattern of this.ignorePatterns) {
        if (this.matchPattern(normalizedPath, pattern, isDirectory)) {
            return true;
        }
    }
    // Check if any parent directory is ignored — a path inside an ignored
    // directory is itself ignored, even if no pattern names it directly.
    const normalizedPathParts = normalizedPath.split('/');
    for (let i = 0; i < normalizedPathParts.length; i++) {
        const partialPath = normalizedPathParts.slice(0, i + 1).join('/');
        for (const pattern of this.ignorePatterns) {
            // Check directory patterns
            if (pattern.endsWith('/')) {
                const dirPattern = pattern.slice(0, -1);
                if (this.simpleGlobMatch(partialPath, dirPattern) ||
                    this.simpleGlobMatch(normalizedPathParts[i], dirPattern)) {
                    return true;
                }
            }
            // Check exact path patterns
            else if (pattern.includes('/')) {
                if (this.simpleGlobMatch(partialPath, pattern)) {
                    return true;
                }
            }
            // Check filename patterns against any path component
            else {
                if (this.simpleGlobMatch(normalizedPathParts[i], pattern)) {
                    return true;
                }
            }
        }
    }
    return false;
}
|
169
|
+
// Match one normalized (forward-slash) path against one glob pattern.
// Three pattern classes, checked in order:
//   "name/"  -> directory-only: matches the whole path or any component;
//   "a/b*"   -> path pattern (contains '/'): matched against the full path;
//   "name"   -> filename pattern: matched against the basename only.
matchPattern(filePath, pattern, isDirectory = false) {
    // Clean both path and pattern
    const cleanPath = filePath.replace(/^\/+|\/+$/g, '');
    const cleanPattern = pattern.replace(/^\/+|\/+$/g, '');
    if (!cleanPath || !cleanPattern) {
        return false;
    }
    // Handle directory patterns (ending with /)
    if (pattern.endsWith('/')) {
        if (!isDirectory)
            return false; // Directory pattern only matches directories
        const dirPattern = cleanPattern.slice(0, -1);
        // Direct match or any path component matches
        return this.simpleGlobMatch(cleanPath, dirPattern) ||
            cleanPath.split('/').some(part => this.simpleGlobMatch(part, dirPattern));
    }
    // Handle path patterns (containing /)
    if (cleanPattern.includes('/')) {
        return this.simpleGlobMatch(cleanPath, cleanPattern);
    }
    // Handle filename patterns (no /) - match against basename
    const fileName = path.basename(cleanPath);
    return this.simpleGlobMatch(fileName, cleanPattern);
}
|
193
|
+
simpleGlobMatch(text, pattern) {
|
194
|
+
if (!text || !pattern)
|
195
|
+
return false;
|
196
|
+
// Convert glob pattern to regex
|
197
|
+
const regexPattern = pattern
|
198
|
+
.replace(/[.+^${}()|[\]\\]/g, '\\$&') // Escape regex special chars except *
|
199
|
+
.replace(/\*/g, '.*'); // Convert * to .*
|
200
|
+
const regex = new RegExp(`^${regexPattern}$`);
|
201
|
+
return regex.test(text);
|
202
|
+
}
|
203
|
+
buildMerkleDAG(fileHashes) {
|
204
|
+
const dag = new merkle_1.MerkleDAG();
|
205
|
+
const keys = Array.from(fileHashes.keys());
|
206
|
+
const sortedPaths = keys.slice().sort(); // Create a sorted copy
|
207
|
+
// Create a root node for the entire directory
|
208
|
+
let valuesString = "";
|
209
|
+
keys.forEach(key => {
|
210
|
+
valuesString += fileHashes.get(key);
|
211
|
+
});
|
212
|
+
const rootNodeData = "root:" + valuesString;
|
213
|
+
const rootNodeId = dag.addNode(rootNodeData);
|
214
|
+
// Add each file as a child of the root
|
215
|
+
for (const path of sortedPaths) {
|
216
|
+
const fileData = path + ":" + fileHashes.get(path);
|
217
|
+
dag.addNode(fileData, rootNodeId);
|
218
|
+
}
|
219
|
+
return dag;
|
220
|
+
}
|
221
|
+
/**
 * Initialize the synchronizer: load (or, on first run, generate — see
 * loadSnapshot's ENOENT path) the on-disk snapshot, then build the
 * in-memory Merkle DAG from the loaded file hashes.
 */
async initialize() {
    console.log(`Initializing file synchronizer for ${this.rootDir}`);
    // Populates this.fileHashes (and possibly this.merkleDAG) from disk.
    await this.loadSnapshot();
    // Rebuild the DAG from the hashes so it always reflects current state.
    this.merkleDAG = this.buildMerkleDAG(this.fileHashes);
    console.log(`[Synchronizer] File synchronizer initialized. Loaded ${this.fileHashes.size} file hashes.`);
}
|
227
|
+
async checkForChanges() {
|
228
|
+
console.log('[Synchronizer] Checking for file changes...');
|
229
|
+
const newFileHashes = await this.generateFileHashes(this.rootDir);
|
230
|
+
const newMerkleDAG = this.buildMerkleDAG(newFileHashes);
|
231
|
+
// Compare the DAGs
|
232
|
+
const changes = merkle_1.MerkleDAG.compare(this.merkleDAG, newMerkleDAG);
|
233
|
+
// If there are any changes in the DAG, we should also do a file-level comparison
|
234
|
+
if (changes.added.length > 0 || changes.removed.length > 0 || changes.modified.length > 0) {
|
235
|
+
console.log('[Synchronizer] Merkle DAG has changed. Comparing file states...');
|
236
|
+
const fileChanges = this.compareStates(this.fileHashes, newFileHashes);
|
237
|
+
this.fileHashes = newFileHashes;
|
238
|
+
this.merkleDAG = newMerkleDAG;
|
239
|
+
await this.saveSnapshot();
|
240
|
+
console.log(`[Synchronizer] Found changes: ${fileChanges.added.length} added, ${fileChanges.removed.length} removed, ${fileChanges.modified.length} modified.`);
|
241
|
+
return fileChanges;
|
242
|
+
}
|
243
|
+
console.log('[Synchronizer] No changes detected based on Merkle DAG comparison.');
|
244
|
+
return { added: [], removed: [], modified: [] };
|
245
|
+
}
|
246
|
+
compareStates(oldHashes, newHashes) {
|
247
|
+
const added = [];
|
248
|
+
const removed = [];
|
249
|
+
const modified = [];
|
250
|
+
const newEntries = Array.from(newHashes.entries());
|
251
|
+
for (let i = 0; i < newEntries.length; i++) {
|
252
|
+
const [file, hash] = newEntries[i];
|
253
|
+
if (!oldHashes.has(file)) {
|
254
|
+
added.push(file);
|
255
|
+
}
|
256
|
+
else if (oldHashes.get(file) !== hash) {
|
257
|
+
modified.push(file);
|
258
|
+
}
|
259
|
+
}
|
260
|
+
const oldKeys = Array.from(oldHashes.keys());
|
261
|
+
for (let i = 0; i < oldKeys.length; i++) {
|
262
|
+
const file = oldKeys[i];
|
263
|
+
if (!newHashes.has(file)) {
|
264
|
+
removed.push(file);
|
265
|
+
}
|
266
|
+
}
|
267
|
+
return { added, removed, modified };
|
268
|
+
}
|
269
|
+
getFileHash(filePath) {
|
270
|
+
return this.fileHashes.get(filePath);
|
271
|
+
}
|
272
|
+
async saveSnapshot() {
|
273
|
+
const merkleDir = path.dirname(this.snapshotPath);
|
274
|
+
await fs.mkdir(merkleDir, { recursive: true });
|
275
|
+
// Convert Map to array without using iterator
|
276
|
+
const fileHashesArray = [];
|
277
|
+
const keys = Array.from(this.fileHashes.keys());
|
278
|
+
keys.forEach(key => {
|
279
|
+
fileHashesArray.push([key, this.fileHashes.get(key)]);
|
280
|
+
});
|
281
|
+
const data = JSON.stringify({
|
282
|
+
fileHashes: fileHashesArray,
|
283
|
+
merkleDAG: this.merkleDAG.serialize()
|
284
|
+
});
|
285
|
+
await fs.writeFile(this.snapshotPath, data, 'utf-8');
|
286
|
+
console.log(`Saved snapshot to ${this.snapshotPath}`);
|
287
|
+
}
|
288
|
+
async loadSnapshot() {
|
289
|
+
try {
|
290
|
+
const data = await fs.readFile(this.snapshotPath, 'utf-8');
|
291
|
+
const obj = JSON.parse(data);
|
292
|
+
// Reconstruct Map without using constructor with iterator
|
293
|
+
this.fileHashes = new Map();
|
294
|
+
for (const [key, value] of obj.fileHashes) {
|
295
|
+
this.fileHashes.set(key, value);
|
296
|
+
}
|
297
|
+
if (obj.merkleDAG) {
|
298
|
+
this.merkleDAG = merkle_1.MerkleDAG.deserialize(obj.merkleDAG);
|
299
|
+
}
|
300
|
+
console.log(`Loaded snapshot from ${this.snapshotPath}`);
|
301
|
+
}
|
302
|
+
catch (error) {
|
303
|
+
if (error.code === 'ENOENT') {
|
304
|
+
console.log(`Snapshot file not found at ${this.snapshotPath}. Generating new one.`);
|
305
|
+
this.fileHashes = await this.generateFileHashes(this.rootDir);
|
306
|
+
this.merkleDAG = this.buildMerkleDAG(this.fileHashes);
|
307
|
+
await this.saveSnapshot();
|
308
|
+
}
|
309
|
+
else {
|
310
|
+
throw error;
|
311
|
+
}
|
312
|
+
}
|
313
|
+
}
|
314
|
+
/**
|
315
|
+
* Delete snapshot file for a given codebase path
|
316
|
+
*/
|
317
|
+
static async deleteSnapshot(codebasePath) {
|
318
|
+
const homeDir = os.homedir();
|
319
|
+
const merkleDir = path.join(homeDir, '.context', 'merkle');
|
320
|
+
const normalizedPath = path.resolve(codebasePath);
|
321
|
+
const hash = crypto.createHash('md5').update(normalizedPath).digest('hex');
|
322
|
+
const snapshotPath = path.join(merkleDir, `${hash}.json`);
|
323
|
+
try {
|
324
|
+
await fs.unlink(snapshotPath);
|
325
|
+
console.log(`Deleted snapshot file: ${snapshotPath}`);
|
326
|
+
}
|
327
|
+
catch (error) {
|
328
|
+
if (error.code === 'ENOENT') {
|
329
|
+
console.log(`Snapshot file not found (already deleted): ${snapshotPath}`);
|
330
|
+
}
|
331
|
+
else {
|
332
|
+
console.error(`[Synchronizer] Failed to delete snapshot file ${snapshotPath}:`, error.message);
|
333
|
+
throw error; // Re-throw non-ENOENT errors
|
334
|
+
}
|
335
|
+
}
|
336
|
+
}
|
337
|
+
}
|
338
|
+
exports.FileSynchronizer = FileSynchronizer;
|
339
|
+
//# sourceMappingURL=synchronizer.js.map
|