@mhalder/qdrant-mcp-server 1.3.1 → 1.5.0
- package/.codecov.yml +16 -0
- package/CHANGELOG.md +25 -0
- package/README.md +304 -9
- package/build/code/chunker/base.d.ts +19 -0
- package/build/code/chunker/base.d.ts.map +1 -0
- package/build/code/chunker/base.js +5 -0
- package/build/code/chunker/base.js.map +1 -0
- package/build/code/chunker/character-chunker.d.ts +22 -0
- package/build/code/chunker/character-chunker.d.ts.map +1 -0
- package/build/code/chunker/character-chunker.js +111 -0
- package/build/code/chunker/character-chunker.js.map +1 -0
- package/build/code/chunker/tree-sitter-chunker.d.ts +29 -0
- package/build/code/chunker/tree-sitter-chunker.d.ts.map +1 -0
- package/build/code/chunker/tree-sitter-chunker.js +213 -0
- package/build/code/chunker/tree-sitter-chunker.js.map +1 -0
- package/build/code/config.d.ts +11 -0
- package/build/code/config.d.ts.map +1 -0
- package/build/code/config.js +145 -0
- package/build/code/config.js.map +1 -0
- package/build/code/indexer.d.ts +42 -0
- package/build/code/indexer.d.ts.map +1 -0
- package/build/code/indexer.js +508 -0
- package/build/code/indexer.js.map +1 -0
- package/build/code/metadata.d.ts +32 -0
- package/build/code/metadata.d.ts.map +1 -0
- package/build/code/metadata.js +128 -0
- package/build/code/metadata.js.map +1 -0
- package/build/code/scanner.d.ts +35 -0
- package/build/code/scanner.d.ts.map +1 -0
- package/build/code/scanner.js +108 -0
- package/build/code/scanner.js.map +1 -0
- package/build/code/sync/merkle.d.ts +45 -0
- package/build/code/sync/merkle.d.ts.map +1 -0
- package/build/code/sync/merkle.js +116 -0
- package/build/code/sync/merkle.js.map +1 -0
- package/build/code/sync/snapshot.d.ts +41 -0
- package/build/code/sync/snapshot.d.ts.map +1 -0
- package/build/code/sync/snapshot.js +91 -0
- package/build/code/sync/snapshot.js.map +1 -0
- package/build/code/sync/synchronizer.d.ts +53 -0
- package/build/code/sync/synchronizer.d.ts.map +1 -0
- package/build/code/sync/synchronizer.js +132 -0
- package/build/code/sync/synchronizer.js.map +1 -0
- package/build/code/types.d.ts +98 -0
- package/build/code/types.d.ts.map +1 -0
- package/build/code/types.js +5 -0
- package/build/code/types.js.map +1 -0
- package/build/index.js +321 -6
- package/build/index.js.map +1 -1
- package/build/prompts/index.d.ts +7 -0
- package/build/prompts/index.d.ts.map +1 -0
- package/build/prompts/index.js +7 -0
- package/build/prompts/index.js.map +1 -0
- package/build/prompts/index.test.d.ts +2 -0
- package/build/prompts/index.test.d.ts.map +1 -0
- package/build/prompts/index.test.js +25 -0
- package/build/prompts/index.test.js.map +1 -0
- package/build/prompts/loader.d.ts +25 -0
- package/build/prompts/loader.d.ts.map +1 -0
- package/build/prompts/loader.js +81 -0
- package/build/prompts/loader.js.map +1 -0
- package/build/prompts/loader.test.d.ts +2 -0
- package/build/prompts/loader.test.d.ts.map +1 -0
- package/build/prompts/loader.test.js +417 -0
- package/build/prompts/loader.test.js.map +1 -0
- package/build/prompts/template.d.ts +20 -0
- package/build/prompts/template.d.ts.map +1 -0
- package/build/prompts/template.js +52 -0
- package/build/prompts/template.js.map +1 -0
- package/build/prompts/template.test.d.ts +2 -0
- package/build/prompts/template.test.d.ts.map +1 -0
- package/build/prompts/template.test.js +163 -0
- package/build/prompts/template.test.js.map +1 -0
- package/build/prompts/types.d.ts +34 -0
- package/build/prompts/types.d.ts.map +1 -0
- package/build/prompts/types.js +5 -0
- package/build/prompts/types.js.map +1 -0
- package/examples/code-search/README.md +271 -0
- package/package.json +13 -1
- package/prompts.example.json +96 -0
- package/src/code/chunker/base.ts +22 -0
- package/src/code/chunker/character-chunker.ts +131 -0
- package/src/code/chunker/tree-sitter-chunker.ts +250 -0
- package/src/code/config.ts +156 -0
- package/src/code/indexer.ts +613 -0
- package/src/code/metadata.ts +153 -0
- package/src/code/scanner.ts +124 -0
- package/src/code/sync/merkle.ts +136 -0
- package/src/code/sync/snapshot.ts +110 -0
- package/src/code/sync/synchronizer.ts +154 -0
- package/src/code/types.ts +117 -0
- package/src/index.ts +382 -5
- package/src/prompts/index.test.ts +29 -0
- package/src/prompts/index.ts +7 -0
- package/src/prompts/loader.test.ts +494 -0
- package/src/prompts/loader.ts +90 -0
- package/src/prompts/template.test.ts +212 -0
- package/src/prompts/template.ts +69 -0
- package/src/prompts/types.ts +37 -0
- package/tests/code/chunker/character-chunker.test.ts +141 -0
- package/tests/code/chunker/tree-sitter-chunker.test.ts +275 -0
- package/tests/code/fixtures/sample-py/calculator.py +32 -0
- package/tests/code/fixtures/sample-ts/async-operations.ts +120 -0
- package/tests/code/fixtures/sample-ts/auth.ts +31 -0
- package/tests/code/fixtures/sample-ts/config.ts +52 -0
- package/tests/code/fixtures/sample-ts/database.ts +50 -0
- package/tests/code/fixtures/sample-ts/index.ts +39 -0
- package/tests/code/fixtures/sample-ts/types-advanced.ts +132 -0
- package/tests/code/fixtures/sample-ts/utils.ts +105 -0
- package/tests/code/fixtures/sample-ts/validator.ts +169 -0
- package/tests/code/indexer.test.ts +828 -0
- package/tests/code/integration.test.ts +708 -0
- package/tests/code/metadata.test.ts +457 -0
- package/tests/code/scanner.test.ts +131 -0
- package/tests/code/sync/merkle.test.ts +406 -0
- package/tests/code/sync/snapshot.test.ts +360 -0
- package/tests/code/sync/synchronizer.test.ts +501 -0
- package/vitest.config.ts +1 -0
package/tests/code/sync/merkle.test.ts
@@ -0,0 +1,406 @@
+import { describe, expect, it } from "vitest";
+import { MerkleNode, MerkleTree } from "../../../src/code/sync/merkle.js";
+
+describe("MerkleNode", () => {
+  it("should create a node with a hash", () => {
+    const node = new MerkleNode("abc123");
+    expect(node.hash).toBe("abc123");
+    expect(node.left).toBeUndefined();
+    expect(node.right).toBeUndefined();
+  });
+
+  it("should create a node with children", () => {
+    const left = new MerkleNode("left");
+    const right = new MerkleNode("right");
+    const parent = new MerkleNode("parent", left, right);
+
+    expect(parent.hash).toBe("parent");
+    expect(parent.left).toBe(left);
+    expect(parent.right).toBe(right);
+  });
+});
+
+describe("MerkleTree", () => {
+  describe("build", () => {
+    it("should build tree from single file", () => {
+      const tree = new MerkleTree();
+      const fileHashes = new Map([["file1.ts", "hash1"]]);
+
+      tree.build(fileHashes);
+
+      expect(tree.root).toBeDefined();
+      expect(tree.root?.hash).toBeTruthy();
+      expect(tree.root?.hash).toMatch(/^[a-f0-9]{64}$/); // SHA256 hex
+    });
+
+    it("should build tree from multiple files", () => {
+      const tree = new MerkleTree();
+      const fileHashes = new Map([
+        ["file1.ts", "hash1"],
+        ["file2.ts", "hash2"],
+        ["file3.ts", "hash3"],
+      ]);
+
+      tree.build(fileHashes);
+
+      expect(tree.root).toBeDefined();
+      expect(tree.root?.hash).toBeTruthy();
+    });
+
+    it("should handle empty file map", () => {
+      const tree = new MerkleTree();
+      const fileHashes = new Map<string, string>();
+
+      tree.build(fileHashes);
+
+      expect(tree.root).toBeUndefined();
+    });
+
+    it("should sort files alphabetically", () => {
+      const tree1 = new MerkleTree();
+      const tree2 = new MerkleTree();
+
+      // Same files, different insertion order
+      const files1 = new Map([
+        ["a.ts", "hash1"],
+        ["b.ts", "hash2"],
+        ["c.ts", "hash3"],
+      ]);
+
+      const files2 = new Map([
+        ["c.ts", "hash3"],
+        ["a.ts", "hash1"],
+        ["b.ts", "hash2"],
+      ]);
+
+      tree1.build(files1);
+      tree2.build(files2);
+
+      expect(tree1.root?.hash).toBe(tree2.root?.hash);
+    });
+
+    it("should create different hashes for different file sets", () => {
+      const tree1 = new MerkleTree();
+      const tree2 = new MerkleTree();
+
+      tree1.build(
+        new Map([
+          ["file1.ts", "hash1"],
+          ["file2.ts", "hash2"],
+        ])
+      );
+
+      tree2.build(
+        new Map([
+          ["file1.ts", "hash1"],
+          ["file3.ts", "hash3"],
+        ])
+      );
+
+      expect(tree1.root?.hash).not.toBe(tree2.root?.hash);
+    });
+
+    it("should handle large number of files", () => {
+      const tree = new MerkleTree();
+      const fileHashes = new Map<string, string>();
+
+      for (let i = 0; i < 100; i++) {
+        fileHashes.set(`file${i}.ts`, `hash${i}`);
+      }
+
+      tree.build(fileHashes);
+
+      expect(tree.root).toBeDefined();
+      expect(tree.root?.hash).toBeTruthy();
+    });
+  });
+
+  describe("getRootHash", () => {
+    it("should return root hash when tree is built", () => {
+      const tree = new MerkleTree();
+      tree.build(new Map([["file.ts", "hash123"]]));
+
+      expect(tree.getRootHash()).toBeTruthy();
+      expect(tree.getRootHash()).toMatch(/^[a-f0-9]{64}$/);
+    });
+
+    it("should return undefined when tree is empty", () => {
+      const tree = new MerkleTree();
+      expect(tree.getRootHash()).toBeUndefined();
+    });
+  });
+
+  describe("serialize and deserialize", () => {
+    it("should serialize and deserialize single node tree", () => {
+      const tree = new MerkleTree();
+      tree.build(new Map([["file.ts", "hash123"]]));
+
+      const serialized = tree.serialize();
+      const newTree = MerkleTree.deserialize(serialized);
+
+      expect(newTree.getRootHash()).toBe(tree.getRootHash());
+    });
+
+    it("should serialize and deserialize multi-node tree", () => {
+      const tree = new MerkleTree();
+      tree.build(
+        new Map([
+          ["file1.ts", "hash1"],
+          ["file2.ts", "hash2"],
+          ["file3.ts", "hash3"],
+        ])
+      );
+
+      const serialized = tree.serialize();
+      const newTree = MerkleTree.deserialize(serialized);
+
+      expect(newTree.getRootHash()).toBe(tree.getRootHash());
+    });
+
+    it("should handle empty tree serialization", () => {
+      const tree = new MerkleTree();
+      const serialized = tree.serialize();
+
+      expect(serialized).toBeDefined();
+
+      const newTree = MerkleTree.deserialize(serialized);
+      expect(newTree.getRootHash()).toBeUndefined();
+    });
+
+    it("should handle complex tree structure", () => {
+      const tree = new MerkleTree();
+      const fileHashes = new Map<string, string>();
+
+      for (let i = 0; i < 10; i++) {
+        fileHashes.set(`file${i}.ts`, `hash${i}`);
+      }
+
+      tree.build(fileHashes);
+
+      const serialized = tree.serialize();
+      const newTree = MerkleTree.deserialize(serialized);
+
+      expect(newTree.getRootHash()).toBe(tree.getRootHash());
+    });
+
+    it("should preserve tree structure through serialize/deserialize", () => {
+      const tree = new MerkleTree();
+      tree.build(
+        new Map([
+          ["a.ts", "hash_a"],
+          ["b.ts", "hash_b"],
+        ])
+      );
+
+      const serialized = tree.serialize();
+      const deserialized = MerkleTree.deserialize(serialized);
+
+      // Rebuild original and compare
+      tree.build(
+        new Map([
+          ["a.ts", "hash_a"],
+          ["b.ts", "hash_b"],
+        ])
+      );
+
+      expect(deserialized.getRootHash()).toBe(tree.getRootHash());
+    });
+  });
+
+  describe("compare", () => {
+    it("should detect no changes", () => {
+      const oldHashes = new Map([
+        ["file1.ts", "hash1"],
+        ["file2.ts", "hash2"],
+      ]);
+
+      const newHashes = new Map([
+        ["file1.ts", "hash1"],
+        ["file2.ts", "hash2"],
+      ]);
+
+      const changes = MerkleTree.compare(oldHashes, newHashes);
+
+      expect(changes.added).toEqual([]);
+      expect(changes.modified).toEqual([]);
+      expect(changes.deleted).toEqual([]);
+    });
+
+    it("should detect added files", () => {
+      const oldHashes = new Map([["file1.ts", "hash1"]]);
+
+      const newHashes = new Map([
+        ["file1.ts", "hash1"],
+        ["file2.ts", "hash2"],
+        ["file3.ts", "hash3"],
+      ]);
+
+      const changes = MerkleTree.compare(oldHashes, newHashes);
+
+      expect(changes.added).toContain("file2.ts");
+      expect(changes.added).toContain("file3.ts");
+      expect(changes.modified).toEqual([]);
+      expect(changes.deleted).toEqual([]);
+    });
+
+    it("should detect deleted files", () => {
+      const oldHashes = new Map([
+        ["file1.ts", "hash1"],
+        ["file2.ts", "hash2"],
+        ["file3.ts", "hash3"],
+      ]);
+
+      const newHashes = new Map([["file1.ts", "hash1"]]);
+
+      const changes = MerkleTree.compare(oldHashes, newHashes);
+
+      expect(changes.added).toEqual([]);
+      expect(changes.modified).toEqual([]);
+      expect(changes.deleted).toContain("file2.ts");
+      expect(changes.deleted).toContain("file3.ts");
+    });
+
+    it("should detect modified files", () => {
+      const oldHashes = new Map([
+        ["file1.ts", "hash1"],
+        ["file2.ts", "hash2"],
+      ]);
+
+      const newHashes = new Map([
+        ["file1.ts", "hash1_modified"],
+        ["file2.ts", "hash2_modified"],
+      ]);
+
+      const changes = MerkleTree.compare(oldHashes, newHashes);
+
+      expect(changes.added).toEqual([]);
+      expect(changes.modified).toContain("file1.ts");
+      expect(changes.modified).toContain("file2.ts");
+      expect(changes.deleted).toEqual([]);
+    });
+
+    it("should detect mixed changes", () => {
+      const oldHashes = new Map([
+        ["file1.ts", "hash1"],
+        ["file2.ts", "hash2"],
+        ["file3.ts", "hash3"],
+      ]);
+
+      const newHashes = new Map([
+        ["file1.ts", "hash1_modified"], // modified
+        ["file2.ts", "hash2"], // unchanged
+        ["file4.ts", "hash4"], // added
+        // file3.ts deleted
+      ]);
+
+      const changes = MerkleTree.compare(oldHashes, newHashes);
+
+      expect(changes.added).toEqual(["file4.ts"]);
+      expect(changes.modified).toEqual(["file1.ts"]);
+      expect(changes.deleted).toEqual(["file3.ts"]);
+    });
+
+    it("should handle empty old hashes", () => {
+      const oldHashes = new Map<string, string>();
+      const newHashes = new Map([
+        ["file1.ts", "hash1"],
+        ["file2.ts", "hash2"],
+      ]);
+
+      const changes = MerkleTree.compare(oldHashes, newHashes);
+
+      expect(changes.added).toContain("file1.ts");
+      expect(changes.added).toContain("file2.ts");
+      expect(changes.modified).toEqual([]);
+      expect(changes.deleted).toEqual([]);
+    });
+
+    it("should handle empty new hashes", () => {
+      const oldHashes = new Map([
+        ["file1.ts", "hash1"],
+        ["file2.ts", "hash2"],
+      ]);
+      const newHashes = new Map<string, string>();
+
+      const changes = MerkleTree.compare(oldHashes, newHashes);
+
+      expect(changes.added).toEqual([]);
+      expect(changes.modified).toEqual([]);
+      expect(changes.deleted).toContain("file1.ts");
+      expect(changes.deleted).toContain("file2.ts");
+    });
+
+    it("should handle both empty maps", () => {
+      const oldHashes = new Map<string, string>();
+      const newHashes = new Map<string, string>();
+
+      const changes = MerkleTree.compare(oldHashes, newHashes);
+
+      expect(changes.added).toEqual([]);
+      expect(changes.modified).toEqual([]);
+      expect(changes.deleted).toEqual([]);
+    });
+  });
+
+  describe("edge cases", () => {
+    it("should handle single file tree", () => {
+      const tree = new MerkleTree();
+      tree.build(new Map([["single.ts", "hash"]]));
+
+      expect(tree.root?.hash).toBeTruthy();
+      expect(tree.root?.hash).toMatch(/^[a-f0-9]{64}$/);
+      expect(tree.root?.left).toBeUndefined();
+      expect(tree.root?.right).toBeUndefined();
+    });
+
+    it("should handle two file tree", () => {
+      const tree = new MerkleTree();
+      tree.build(
+        new Map([
+          ["file1.ts", "hash1"],
+          ["file2.ts", "hash2"],
+        ])
+      );
+
+      expect(tree.root).toBeDefined();
+      expect(tree.root?.left).toBeDefined();
+      expect(tree.root?.right).toBeDefined();
+    });
+
+    it("should handle odd number of files", () => {
+      const tree = new MerkleTree();
+      tree.build(
+        new Map([
+          ["file1.ts", "hash1"],
+          ["file2.ts", "hash2"],
+          ["file3.ts", "hash3"],
+        ])
+      );
+
+      expect(tree.root).toBeDefined();
+    });
+
+    it("should handle very long file paths", () => {
+      const tree = new MerkleTree();
+      const longPath = `${"/very/long/path/".repeat(50)}file.ts`;
+
+      tree.build(new Map([[longPath, "hash"]]));
+
+      expect(tree.root?.hash).toBeTruthy();
+      expect(tree.root?.hash).toMatch(/^[a-f0-9]{64}$/);
+    });
+
+    it("should handle special characters in filenames", () => {
+      const tree = new MerkleTree();
+      tree.build(
+        new Map([
+          ["file with spaces.ts", "hash1"],
+          ["file-with-dashes.ts", "hash2"],
+          ["file_with_underscores.ts", "hash3"],
+        ])
+      );
+
+      expect(tree.root).toBeDefined();
+    });
+  });
+});
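
For reference, here is a minimal sketch of the MerkleNode/MerkleTree API that these tests exercise. It is inferred solely from the assertions above; the actual implementation ships in package/src/code/sync/merkle.ts (compiled under build/code/sync/) and may differ in hashing and serialization details.

import { createHash } from "node:crypto";

// Sketch only: shape inferred from tests/code/sync/merkle.test.ts.
export class MerkleNode {
  constructor(
    public hash: string,
    public left?: MerkleNode,
    public right?: MerkleNode,
  ) {}
}

export class MerkleTree {
  root?: MerkleNode;

  // Build a tree over (path, contentHash) pairs; paths are sorted so the
  // root hash does not depend on Map insertion order.
  build(fileHashes: Map<string, string>): void {
    const leaves = [...fileHashes.entries()]
      .sort(([a], [b]) => a.localeCompare(b))
      .map(([path, hash]) => new MerkleNode(sha256(`${path}:${hash}`)));

    if (leaves.length === 0) {
      this.root = undefined;
      return;
    }

    let level = leaves;
    while (level.length > 1) {
      const next: MerkleNode[] = [];
      for (let i = 0; i < level.length; i += 2) {
        const left = level[i];
        const right: MerkleNode | undefined = level[i + 1]; // odd counts carry the last node up
        next.push(new MerkleNode(sha256(left.hash + (right?.hash ?? "")), left, right));
      }
      level = next;
    }
    this.root = level[0];
  }

  getRootHash(): string | undefined {
    return this.root?.hash;
  }

  serialize(): string {
    return JSON.stringify(this.root ?? null);
  }

  static deserialize(data: string): MerkleTree {
    const tree = new MerkleTree();
    const parsed = JSON.parse(data);
    tree.root = parsed ? reviveNode(parsed) : undefined;
    return tree;
  }

  // Diff two path -> hash maps into added / modified / deleted path lists.
  static compare(
    oldHashes: Map<string, string>,
    newHashes: Map<string, string>,
  ): { added: string[]; modified: string[]; deleted: string[] } {
    const added: string[] = [];
    const modified: string[] = [];
    const deleted: string[] = [];
    for (const [path, hash] of newHashes) {
      if (!oldHashes.has(path)) added.push(path);
      else if (oldHashes.get(path) !== hash) modified.push(path);
    }
    for (const path of oldHashes.keys()) {
      if (!newHashes.has(path)) deleted.push(path);
    }
    return { added, modified, deleted };
  }
}

function sha256(input: string): string {
  return createHash("sha256").update(input).digest("hex");
}

function reviveNode(raw: { hash: string; left?: unknown; right?: unknown }): MerkleNode {
  return new MerkleNode(
    raw.hash,
    raw.left ? reviveNode(raw.left as { hash: string }) : undefined,
    raw.right ? reviveNode(raw.right as { hash: string }) : undefined,
  );
}

Sorting paths before hashing is what makes the root hash deterministic regardless of insertion order, which is exactly what the "should sort files alphabetically" test asserts.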