mcard-js 2.1.49 → 2.1.51
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/CardCollection-EMSBVZP3.js +10 -0
- package/dist/CardCollection-KQWR4PCV.js +10 -0
- package/dist/CardCollection-ORGE2XBG.js +10 -0
- package/dist/EngineRegistry-ABZXHZWO.js +17 -0
- package/dist/EngineRegistry-EIOT4MUZ.js +17 -0
- package/dist/EngineRegistry-IQ6EVO72.js +17 -0
- package/dist/EngineRegistry-PHRFXEOE.js +17 -0
- package/dist/IndexedDBEngine-EWA3SLAO.js +12 -0
- package/dist/IndexedDBEngine-FXAD42F3.js +12 -0
- package/dist/IndexedDBEngine-RD4447IS.js +12 -0
- package/dist/LLMRuntime-ARUWOX52.js +17 -0
- package/dist/LLMRuntime-C3XCO7WF.js +17 -0
- package/dist/LLMRuntime-CQ7X43QR.js +17 -0
- package/dist/LLMRuntime-PD45COKE.js +17 -0
- package/dist/LLMRuntime-QOUMLT33.js +17 -0
- package/dist/LLMRuntime-SZNLTHD7.js +17 -0
- package/dist/LLMRuntime-TVJGK2BG.js +17 -0
- package/dist/LambdaRuntime-25GMEJCU.js +19 -0
- package/dist/LambdaRuntime-7KQUMHPI.js +19 -0
- package/dist/LambdaRuntime-DRT7ODPC.js +19 -0
- package/dist/LambdaRuntime-HSREEYQG.js +19 -0
- package/dist/LambdaRuntime-IH7NVG6Z.js +19 -0
- package/dist/LambdaRuntime-MPG27FM2.js +19 -0
- package/dist/LambdaRuntime-ODSWIMNM.js +19 -0
- package/dist/LambdaRuntime-PHGRZYAW.js +19 -0
- package/dist/LambdaRuntime-QOEYR37L.js +19 -0
- package/dist/LambdaRuntime-RT33TFN2.js +19 -0
- package/dist/LambdaRuntime-W6TQBP5O.js +19 -0
- package/dist/Loader-35WSUC53.js +14 -0
- package/dist/Loader-STS3G4OQ.js +16 -0
- package/dist/Loader-W22AEM6F.js +12 -0
- package/dist/Loader-YBPWP43S.js +12 -0
- package/dist/Loader-ZYSS7B4D.js +12 -0
- package/dist/NetworkRuntime-KR2QITXV.js +987 -0
- package/dist/NetworkRuntime-S6V2CMZV.js +1575 -0
- package/dist/OllamaProvider-2ANW6EB2.js +9 -0
- package/dist/OllamaProvider-5QFJKYAC.js +9 -0
- package/dist/OllamaProvider-6QXJGR7V.js +9 -0
- package/dist/OllamaProvider-ABEEFX7M.js +9 -0
- package/dist/OllamaProvider-Z2CGY5LY.js +9 -0
- package/dist/VCard-225X42W7.js +25 -0
- package/dist/chunk-2APJYBH4.js +368 -0
- package/dist/chunk-4DFTWDRB.js +497 -0
- package/dist/chunk-4PBRTFSY.js +112 -0
- package/dist/chunk-4T3H25AP.js +299 -0
- package/dist/chunk-5DFXPIRL.js +42 -0
- package/dist/chunk-5HRZV4R3.js +217 -0
- package/dist/chunk-6ZRJXVJ3.js +529 -0
- package/dist/chunk-7N7JYGN2.js +364 -0
- package/dist/chunk-7QTJUGYQ.js +74 -0
- package/dist/chunk-7TXIPJI2.js +2360 -0
- package/dist/chunk-BFJUD527.js +2369 -0
- package/dist/chunk-CHXIVTQV.js +364 -0
- package/dist/chunk-DM2ABCA4.js +497 -0
- package/dist/chunk-DTPHGTBQ.js +275 -0
- package/dist/chunk-EDAJ5FO6.js +405 -0
- package/dist/chunk-ETJWXHKZ.js +246 -0
- package/dist/chunk-FLYGNPUC.js +2369 -0
- package/dist/chunk-FSDRDWOP.js +34 -0
- package/dist/chunk-GIKMCG4D.js +497 -0
- package/dist/chunk-IJKS3LGK.js +428 -0
- package/dist/chunk-JUQ2VQZA.js +428 -0
- package/dist/chunk-JVW4J7BY.js +2369 -0
- package/dist/chunk-JWTRVEC3.js +2369 -0
- package/dist/chunk-KJM4C65U.js +299 -0
- package/dist/chunk-KMC566CN.js +591 -0
- package/dist/chunk-KMNP6DBL.js +455 -0
- package/dist/chunk-LVU7O5IY.js +597 -0
- package/dist/chunk-M4C6RWLA.js +373 -0
- package/dist/chunk-NAAAKSEO.js +541 -0
- package/dist/chunk-NKIXLPHL.js +373 -0
- package/dist/chunk-NOEDMK7I.js +428 -0
- package/dist/chunk-NOPYSBOQ.js +2360 -0
- package/dist/chunk-P4G42QCY.js +2369 -0
- package/dist/chunk-PKLONZCF.js +253 -0
- package/dist/chunk-PNGECWPN.js +597 -0
- package/dist/chunk-PYP6T64W.js +217 -0
- package/dist/chunk-QFT3COE2.js +217 -0
- package/dist/chunk-QFZFXMNX.js +275 -0
- package/dist/chunk-QZGRQRJP.js +2369 -0
- package/dist/chunk-R3XRBAM7.js +253 -0
- package/dist/chunk-RYP66UMH.js +74 -0
- package/dist/chunk-RZIZYRLF.js +112 -0
- package/dist/chunk-T43V44RS.js +2369 -0
- package/dist/chunk-UCNVX5BZ.js +74 -0
- package/dist/chunk-UDF7HS4V.js +368 -0
- package/dist/chunk-VJPXJVEH.js +299 -0
- package/dist/chunk-VW3KBDK5.js +74 -0
- package/dist/chunk-X72XIYSN.js +364 -0
- package/dist/chunk-XETU7TV4.js +112 -0
- package/dist/chunk-Y4BT6LHA.js +368 -0
- package/dist/chunk-YQGB6BIA.js +2369 -0
- package/dist/chunk-ZEQPO3XV.js +217 -0
- package/dist/chunk-ZKRKWXEQ.js +2369 -0
- package/dist/chunk-ZMK2HTZ5.js +275 -0
- package/dist/constants-CLB7B6MN.js +101 -0
- package/dist/constants-O343SMHL.js +103 -0
- package/dist/constants-YPGDEX5X.js +103 -0
- package/dist/index.browser.cjs +11 -5
- package/dist/index.browser.js +12 -12
- package/dist/index.cjs +2358 -1896
- package/dist/index.d.cts +934 -776
- package/dist/index.d.ts +934 -776
- package/dist/index.js +1353 -1271
- package/dist/storage/SqliteNodeEngine.cjs +12 -6
- package/dist/storage/SqliteNodeEngine.js +4 -4
- package/dist/storage/SqliteWasmEngine.cjs +11 -5
- package/dist/storage/SqliteWasmEngine.js +4 -4
- package/package.json +5 -3
|
@@ -0,0 +1,364 @@
|
|
|
1
|
+
import {
|
|
2
|
+
ContentTypeInterpreter,
|
|
3
|
+
MCard
|
|
4
|
+
} from "./chunk-GGQCF7ZK.js";
|
|
5
|
+
import {
|
|
6
|
+
MAX_FILE_SIZE,
|
|
7
|
+
READ_TIMEOUT_MS
|
|
8
|
+
} from "./chunk-PKLONZCF.js";
|
|
9
|
+
import {
|
|
10
|
+
__export
|
|
11
|
+
} from "./chunk-PNKVD2UK.js";
|
|
12
|
+
|
|
13
|
+
// src/Loader.ts
// Namespace object for the Loader module. `__export` is the bundler
// (esbuild/tsup) helper that defines lazy getters for each name, so the
// functions are resolved at access time rather than at module load.
var Loader_exports = {};
__export(Loader_exports, {
  loadFileToCollection: () => loadFileToCollection,
  processAndStoreFile: () => processAndStoreFile
});
|
|
19
|
+
import * as fs2 from "fs/promises";
|
|
20
|
+
import * as path2 from "path";
|
|
21
|
+
|
|
22
|
+
// src/FileIO.ts
// Namespace object exposing the low-level file helpers as one export.
// `__export` installs lazy getters (bundler helper), so each property
// dereferences the current binding when read.
var FileIO_exports = {};
__export(FileIO_exports, {
  isProblematicFile: () => isProblematicFile,
  listFiles: () => listFiles,
  processFileContent: () => processFileContent,
  readFileSafely: () => readFileSafely,
  streamReadNormalizedText: () => streamReadNormalizedText
});
|
|
31
|
+
import * as crypto from "crypto";
|
|
32
|
+
import * as fs from "fs/promises";
|
|
33
|
+
import * as path from "path";
|
|
34
|
+
/**
 * Stream-read up to `byteCap` bytes of a file, producing normalized text:
 * CR characters are dropped (CRLF -> LF) and lines longer than `wrapWidth`
 * are hard-wrapped. A SHA-256 of the ORIGINAL bytes read (pre-normalization)
 * is computed alongside so callers can identify the source content.
 *
 * @param {string} filePath  File to read.
 * @param {{byteCap: number, wrapWidth: number}} options
 *   byteCap   - maximum number of raw bytes to consume from the file.
 *   wrapWidth - hard-wrap column; <= 0 disables wrapping.
 * @returns {Promise<{text: string, originalSize: number, originalSha256Prefix: string}>}
 *   text                - normalized text produced from the bytes read.
 *   originalSize        - number of raw bytes actually read (<= byteCap).
 *   originalSha256Prefix- first 16 hex chars of the SHA-256 of those bytes.
 */
async function streamReadNormalizedText(filePath, options) {
  const { byteCap, wrapWidth } = options;
  const sha = crypto.createHash("sha256");
  let totalSize = 0;
  let producedText = "";
  let currentLen = 0;
  // Shared normalization step for both the streaming chunks and the final
  // decoder flush (previously duplicated verbatim): drop "\r", track the
  // current line length, and hard-wrap when wrapWidth is exceeded.
  const appendNormalized = (decoded) => {
    for (const ch of decoded) {
      if (ch === "\r") continue;
      producedText += ch;
      if (ch === "\n") {
        currentLen = 0;
      } else {
        currentLen++;
        if (wrapWidth > 0 && currentLen >= wrapWidth) {
          producedText += "\n";
          currentLen = 0;
        }
      }
    }
  };
  const handle = await fs.open(filePath, "r");
  try {
    const buffer = new Uint8Array(8192);
    let remaining = byteCap;
    // fatal:false so undecodable bytes become U+FFFD instead of throwing.
    const decoder = new TextDecoder("utf-8", { fatal: false });
    let position = 0;
    while (remaining > 0) {
      const { bytesRead } = await handle.read(buffer, 0, Math.min(buffer.length, remaining), position);
      if (bytesRead === 0) break; // EOF
      position += bytesRead;
      const chunk = buffer.subarray(0, bytesRead);
      // Hash the raw bytes before any normalization.
      sha.update(chunk);
      totalSize += bytesRead;
      remaining -= bytesRead;
      // stream:true keeps multi-byte sequences split across chunks intact.
      appendNormalized(decoder.decode(chunk, { stream: true }));
    }
    // Flush any buffered partial multi-byte sequence.
    appendNormalized(decoder.decode());
  } finally {
    await handle.close();
  }
  return {
    text: producedText,
    originalSize: totalSize,
    originalSha256Prefix: sha.digest("hex").substring(0, 16)
  };
}
|
|
92
|
+
// Local aliases (renamed by the bundler) for the shared limits imported
// from chunk-PKLONZCF.js; used by readFileSafely/isProblematicFile below.
var MAX_FILE_SIZE2 = MAX_FILE_SIZE;
var READ_TIMEOUT_MS2 = READ_TIMEOUT_MS;
|
|
94
|
+
/**
 * Heuristic gate deciding whether a file should be skipped (or handled via
 * the capped "safe text" path) instead of being ingested normally.
 *
 * Flags as problematic: dotfiles, files over MAX_FILE_SIZE2, known
 * long-line extensions over 1 MiB, and files whose first 32 KiB look like
 * unstructured binary or contain pathological lines. Empty files are never
 * flagged. Any error (e.g. stat failure) is treated as "problematic".
 *
 * @param {string} filePath  Path to inspect.
 * @returns {Promise<boolean>} true when the file should not be read as-is.
 */
async function isProblematicFile(filePath) {
  try {
    const info = await fs.stat(filePath);
    if (info.size === 0) return false;
    if (path.basename(filePath).startsWith(".")) return true;
    if (info.size > MAX_FILE_SIZE2) return true;
    const extension = path.extname(filePath);
    const knownLongLine = ContentTypeInterpreter.isKnownLongLineExtension(extension);
    // Known long-line formats get a tighter 1 MiB budget.
    if (knownLongLine && info.size > 1024 * 1024) return true;
    const fd = await fs.open(filePath, "r");
    let flagged = false;
    try {
      // Sniff only the first 32 KiB for the content heuristics.
      const head = new Uint8Array(32 * 1024);
      const { bytesRead } = await fd.read(head, 0, head.length, 0);
      const sample = head.subarray(0, bytesRead);
      flagged = ContentTypeInterpreter.isUnstructuredBinary(sample) ||
        ContentTypeInterpreter.hasPathologicalLines(sample, knownLongLine);
    } finally {
      await fd.close();
    }
    return flagged;
  } catch {
    // Unreadable/unstat-able files are conservatively flagged.
    return true;
  }
}
|
|
118
|
+
/**
 * Read a whole file into a Uint8Array with a size guard, an honored
 * byte cap, and a read timeout.
 *
 * Fixes over the previous version:
 *  - `options.maxBytes` was accepted but ignored; callers (the capped
 *    binary fallback in processAndStoreFile via processFileContent) rely
 *    on it to bound memory use. It now caps the number of bytes read.
 *  - The AbortController timeout was created but its signal never
 *    consulted, so the READ_TIMEOUT_MS2 timeout was dead code; the read
 *    loop now checks the signal between reads.
 *  - A single handle.read() may return fewer bytes than requested; reads
 *    now loop until the buffer is full or EOF.
 *
 * @param {string} filePath  File to read.
 * @param {{maxBytes?: number, allowPathological?: boolean}} [options]
 *   maxBytes - optional cap on bytes read (defaults to the full file).
 * @returns {Promise<Uint8Array>} the bytes read (possibly truncated to maxBytes).
 * @throws {Error} when the file exceeds MAX_FILE_SIZE2 or the read times out.
 */
async function readFileSafely(filePath, options = {}) {
  const stats = await fs.stat(filePath);
  if (stats.size > MAX_FILE_SIZE2) throw new Error(`File too large: ${stats.size}`);
  // Honor the caller-supplied cap; never allocate more than the file size.
  const byteLimit = Math.min(stats.size, options.maxBytes ?? stats.size);
  const controller = new AbortController();
  const timeout = setTimeout(() => controller.abort(), READ_TIMEOUT_MS2);
  try {
    const handle = await fs.open(filePath, "r");
    try {
      const buffer = new Uint8Array(byteLimit);
      let offset = 0;
      // Loop: a single read() may legally return fewer bytes than asked.
      while (offset < byteLimit) {
        if (controller.signal.aborted) {
          throw new Error(`Read timeout for ${filePath}`);
        }
        const { bytesRead } = await handle.read(buffer, offset, byteLimit - offset, offset);
        if (bytesRead === 0) break; // EOF (file shrank underneath us)
        offset += bytesRead;
      }
      return offset === buffer.length ? buffer : buffer.subarray(0, offset);
    } finally {
      await handle.close();
    }
  } catch (e) {
    const error = e;
    if (error.name === "AbortError") throw new Error(`Read timeout for ${filePath}`);
    throw e;
  } finally {
    clearTimeout(timeout);
  }
}
|
|
140
|
+
/**
 * List ingestible files in a directory, skipping dot-entries and files
 * flagged by isProblematicFile. Descends into subdirectories only when
 * `recursive` is true. Directory read errors are logged and yield the
 * files collected so far.
 *
 * @param {string} dirPath  Directory to scan.
 * @param {boolean} [recursive=false]  Descend into subdirectories.
 * @returns {Promise<string[]>} absolute paths of acceptable files.
 */
async function listFiles(dirPath, recursive = false) {
  let found = [];
  try {
    const dirents = await fs.readdir(dirPath, { withFileTypes: true });
    for (const dirent of dirents) {
      // Hidden entries (files AND directories) are always skipped.
      if (dirent.name.startsWith(".")) continue;
      const entryPath = path.join(dirPath, dirent.name);
      if (dirent.isDirectory()) {
        if (recursive) {
          found = found.concat(await listFiles(entryPath, true));
        }
        continue;
      }
      if (!dirent.isFile()) continue; // symlinks/sockets etc. are ignored
      const problematic = await isProblematicFile(entryPath);
      if (!problematic) {
        found.push(entryPath);
      }
    }
  } catch (e) {
    // Best-effort listing: log and return whatever was collected.
    console.warn(`Error listing directory ${dirPath}:`, e);
  }
  return found;
}
|
|
162
|
+
/**
 * Read a file and classify it, returning its content plus detection
 * metadata. Text files are decoded to a string (strict UTF-8 first, then
 * lossy on failure); binary files keep their raw bytes.
 *
 * @param {string} filePath  File to process.
 * @param {{allowPathological?: boolean, maxBytes?: number, forceBinary?: boolean}} [options]
 * @returns {Promise<{content: (string|Uint8Array), filename: string, mimeType: string, extension: string, isBinary: boolean, size: number}>}
 */
async function processFileContent(filePath, options = {}) {
  const { allowPathological, maxBytes, forceBinary } = options;
  const raw = await readFileSafely(filePath, { allowPathological, maxBytes });
  // Detection heuristics only need the first 1 MiB.
  const probe = raw.subarray(0, 1024 * 1024);
  const detection = ContentTypeInterpreter.detectContentType(probe, path.extname(filePath));
  let binary = ContentTypeInterpreter.isBinaryContent(probe, detection.mimeType);
  if (forceBinary) binary = true;
  let content;
  if (binary) {
    content = raw;
  } else {
    try {
      // Strict decode first so invalid UTF-8 is noticed...
      content = new TextDecoder("utf-8", { fatal: true }).decode(raw);
    } catch {
      // ...then fall back to lossy decode (U+FFFD replacements).
      content = new TextDecoder("utf-8", { fatal: false }).decode(raw);
    }
  }
  return {
    content,
    filename: path.basename(filePath),
    mimeType: detection.mimeType,
    extension: detection.extension,
    isBinary: binary,
    size: raw.length
  };
}
|
|
185
|
+
|
|
186
|
+
// src/Loader.ts
// Byte cap used when streaming/falling back on "problematic" files (2 MiB).
var DEFAULT_MAX_PROBLEM_BYTES = 2 * 1024 * 1024;
// Hard-wrap width for extensions known to legitimately have long lines.
var WRAP_WIDTH_KNOWN = 1e3;
// Hard-wrap width applied to all other normalized text.
var WRAP_WIDTH_DEFAULT = 80;
|
|
190
|
+
// Minimal console-backed logger; every line is prefixed with "[Loader]".
// `debug` output is emitted only when the DEBUG environment variable is
// set (checked at call time, not at module load).
var Logger = (() => {
  const tag = "[Loader]";
  return {
    info(...rest) {
      console.log(tag, ...rest);
    },
    warn(...rest) {
      console.warn(tag, ...rest);
    },
    error(...rest) {
      console.error(tag, ...rest);
    },
    debug(...rest) {
      if (!process.env.DEBUG) return;
      console.log(tag, ...rest);
    }
  };
})();
|
|
198
|
+
/**
 * Process one file and store it in the given collection as an MCard.
 *
 * Flow:
 *  - Problematic files (see isProblematicFile) are skipped unless
 *    `allowProblematic`; when allowed they are re-read as normalized,
 *    hard-wrapped text capped at `maxBytesOnProblem`, falling back to a
 *    capped forced-binary read if that fails.
 *  - Normal files go through processFileContent.
 *  - Empty files are never stored; a result with hash "" is returned.
 *  - The card is registered under the file's basename; on a handle clash
 *    it retries with the rootPath-relative path, then falls back to a
 *    hash-only add.
 *
 * @param {string} filePath  File to ingest.
 * @param {object} collection  Target card collection — assumed to expose
 *   addWithHandle(card, handle) and add(card); TODO confirm against
 *   CardCollection's interface.
 * @param {{allowProblematic?: boolean, maxBytesOnProblem?: number, metadataOnly?: boolean, rootPath?: string}} [options]
 * @returns {Promise<object|null>} a result descriptor, or null when the
 *   file was skipped or an error occurred (errors are logged, not thrown).
 */
async function processAndStoreFile(filePath, collection, options = {}) {
  const {
    allowProblematic = false,
    maxBytesOnProblem = DEFAULT_MAX_PROBLEM_BYTES,
    metadataOnly = false,
    rootPath
  } = options;
  try {
    let fileInfo;
    if (await isProblematicFile(filePath)) {
      if (!allowProblematic) {
        Logger.warn(`Skipping problematic file: ${filePath}`);
        return null;
      }
      const extension = path2.extname(filePath).toLowerCase();
      const isKnownType = ContentTypeInterpreter.isKnownLongLineExtension(extension);
      // Known long-line formats get a wider wrap so structure survives.
      const wrapWidth = isKnownType ? WRAP_WIDTH_KNOWN : WRAP_WIDTH_DEFAULT;
      Logger.warn(`Problematic file detected, processing as safe text: ${filePath}`);
      try {
        // Capped, normalized text read; keeps a fingerprint of the
        // original bytes (originalSize / originalSha256Prefix).
        const streamed = await streamReadNormalizedText(filePath, {
          byteCap: maxBytesOnProblem,
          wrapWidth
        });
        fileInfo = {
          content: streamed.text,
          filename: path2.basename(filePath),
          mimeType: "text/plain",
          extension,
          isBinary: false,
          size: streamed.text.length,
          originalSize: streamed.originalSize,
          originalSha256Prefix: streamed.originalSha256Prefix,
          normalized: true,
          wrapWidth
        };
      } catch (e) {
        // Last resort: store a byte-capped binary snapshot instead.
        Logger.warn(`Safe text processing failed, falling back to capped binary: ${filePath}`, e);
        fileInfo = await processFileContent(filePath, {
          forceBinary: true,
          allowPathological: true,
          maxBytes: maxBytesOnProblem
        });
      }
    } else {
      Logger.info(`Processing file: ${filePath}`);
      fileInfo = await processFileContent(filePath);
    }
    if (!fileInfo) return null;
    const content = fileInfo.content;
    // Empty content (string or bytes) cannot be stored as an MCard;
    // report it with an empty hash instead of failing.
    if (!content || typeof content === "string" && content.length === 0 || content instanceof Uint8Array && content.length === 0) {
      Logger.debug(`Skipping empty file: ${filePath} (empty files cannot be stored as MCards)`);
      return {
        hash: "",
        contentType: fileInfo.mimeType,
        isBinary: fileInfo.isBinary,
        filename: fileInfo.filename,
        size: 0,
        filePath
      };
    }
    let mcard = null;
    // NOTE(review): isProblematicFile is re-run here (it already ran
    // above); the result should match, but this is a second stat/read.
    const isProblematic = await isProblematicFile(filePath);
    if (metadataOnly && isProblematic) {
      // metadataOnly: record the file without creating/storing a card.
      mcard = null;
    } else {
      mcard = await MCard.create(fileInfo.content);
      const handle = path2.basename(filePath);
      try {
        await collection.addWithHandle(mcard, handle);
      } catch (e) {
        // Basename handle clashed (e.g. many README.md files). Try the
        // rootPath-relative path as a more specific handle.
        let registered = false;
        if (rootPath) {
          const relPath = path2.relative(rootPath, filePath);
          if (relPath !== handle) {
            try {
              await collection.addWithHandle(mcard, relPath);
              registered = true;
            } catch (e2) {
              Logger.debug(
                `Handle name '${handle}' already in use (common for files like README.md, LICENSE). MCard stored successfully with hash ${mcard.hash.slice(0, 8)}... (accessible by hash, not by handle)`
              );
            }
          }
        }
        if (!registered) {
          // Final fallback: store by hash only, no handle.
          try {
            await collection.add(mcard);
          } catch (e3) {
            Logger.warn(`Hash fallback also failed for ${handle}:`, e3);
          }
        }
      }
    }
    const result = {
      hash: mcard ? mcard.hash : "METADATA_ONLY",
      contentType: fileInfo.mimeType,
      isBinary: fileInfo.isBinary,
      filename: fileInfo.filename,
      size: fileInfo.size,
      filePath
    };
    // Propagate original-file fingerprints when the safe-text path ran.
    if (fileInfo.originalSize !== void 0) result.originalSize = fileInfo.originalSize;
    if (fileInfo.originalSha256Prefix) result.originalSha256Prefix = fileInfo.originalSha256Prefix;
    if (metadataOnly && isProblematic) result.metadataOnly = true;
    return result;
  } catch (e) {
    // Ingestion is best-effort per file: log and signal failure with null.
    Logger.error(`Error processing ${filePath}:`, e);
    return null;
  }
}
|
|
308
|
+
/**
 * Ingest a file or directory tree into a collection.
 *
 * Resolves `targetPath`; a file becomes a single-item list (with its
 * parent directory as root), a directory is scanned via listFiles. Each
 * file is handed to processAndStoreFile sequentially. Also computes simple
 * tree metrics (file count, distinct directories, maximum depth).
 *
 * @param {string} targetPath  File or directory to ingest.
 * @param {object} collection  Target card collection.
 * @param {{recursive?: boolean, includeProblematic?: boolean, maxBytesOnProblem?: number, metadataOnly?: boolean}} [options]
 * @returns {Promise<{metrics: {filesCount: number, directoriesCount: number, directoryLevels: number}, results: object[]}>}
 * @throws {Error} when targetPath is neither a file nor a directory.
 */
async function loadFileToCollection(targetPath, collection, options = {}) {
  const {
    recursive = false,
    includeProblematic = false,
    maxBytesOnProblem = DEFAULT_MAX_PROBLEM_BYTES,
    metadataOnly = false
  } = options;
  const resolved = path2.resolve(targetPath);
  const stat = await fs2.stat(resolved);
  let targetFiles;
  let base;
  if (stat.isFile()) {
    targetFiles = [resolved];
    base = path2.dirname(resolved);
  } else if (stat.isDirectory()) {
    targetFiles = await listFiles(resolved, recursive);
    base = resolved;
  } else {
    throw new Error(`Path ${targetPath} is not a file or directory`);
  }
  // Tree metrics: distinct parent directories and deepest nesting level
  // relative to the root.
  const seenDirs = new Set();
  let deepest = 0;
  for (const f of targetFiles) {
    const parent = path2.dirname(f);
    if (!parent.startsWith(base)) continue;
    seenDirs.add(parent);
    const depth = path2.relative(base, f).split(path2.sep).length - 1;
    if (depth > deepest) {
      deepest = depth;
    }
  }
  const metrics = {
    filesCount: targetFiles.length,
    directoriesCount: seenDirs.size,
    directoryLevels: deepest
  };
  Logger.info(`About to process ${targetFiles.length} files`);
  const results = [];
  // Sequential on purpose: keeps collection writes ordered and bounded.
  for (const f of targetFiles) {
    const outcome = await processAndStoreFile(f, collection, {
      allowProblematic: includeProblematic,
      maxBytesOnProblem,
      metadataOnly,
      rootPath: base
    });
    if (outcome) {
      results.push(outcome);
    }
  }
  return { metrics, results };
}
|
|
358
|
+
|
|
359
|
+
export {
|
|
360
|
+
FileIO_exports,
|
|
361
|
+
processAndStoreFile,
|
|
362
|
+
loadFileToCollection,
|
|
363
|
+
Loader_exports
|
|
364
|
+
};
|
|
@@ -0,0 +1,112 @@
|
|
|
1
|
+
import {
|
|
2
|
+
AbstractSqlEngine,
|
|
3
|
+
CORE_SCHEMAS
|
|
4
|
+
} from "./chunk-GIKMCG4D.js";
|
|
5
|
+
import {
|
|
6
|
+
MCard
|
|
7
|
+
} from "./chunk-GGQCF7ZK.js";
|
|
8
|
+
import {
|
|
9
|
+
DEFAULT_SQLJS_WASM_URL
|
|
10
|
+
} from "./chunk-ETJWXHKZ.js";
|
|
11
|
+
|
|
12
|
+
// src/storage/engines/SqliteWasmEngine.ts
|
|
13
|
+
// SQL engine backed by sql.js (SQLite compiled to WASM, fully in-memory).
// Implements the queryRows/execSql/rowToCard primitives expected by
// AbstractSqlEngine; persistence is the caller's job via export().
var SqliteWasmEngine = class extends AbstractSqlEngine {
  // sql.js Database instance; null until init() completes.
  db = null;
  // The initialized sql.js module (constructor namespace).
  SQL = null;
  /**
   * Initialize the database and create the core tables.
   * @param wasmUrl URL to sql-wasm.wasm file (optional, defaults to CDN).
   *   NOTE(review): when provided, locateFile returns wasmUrl for EVERY
   *   requested file name — presumably sql.js only requests the wasm
   *   binary; confirm.
   * @param existingData Optional existing database image as Uint8Array.
   */
  async init(wasmUrl, existingData) {
    const initSqlJs = (await import("sql.js")).default;
    const SQL = await initSqlJs({
      locateFile: (file) => wasmUrl || `${DEFAULT_SQLJS_WASM_URL}${file}`
    });
    this.SQL = SQL;
    // Open from a snapshot when given, otherwise start empty.
    this.db = existingData ? new SQL.Database(existingData) : new SQL.Database();
    // Schemas use CREATE TABLE IF NOT EXISTS semantics — TODO confirm
    // against CORE_SCHEMAS; otherwise re-running init on existingData
    // would fail.
    this.db.run(CORE_SCHEMAS.card);
    this.db.run(CORE_SCHEMAS.handleRegistry);
    this.db.run(CORE_SCHEMAS.handleHistory);
    this.db.run(CORE_SCHEMAS.handleIndex);
  }
  // Guard: every operation requires init() to have run first.
  ensureDb() {
    if (!this.db) throw new Error("Database not initialized. Call init() first.");
    return this.db;
  }
  // ======================================================================
  // AbstractSqlEngine primitives
  // ======================================================================
  // Run a SELECT and collect all rows as plain objects (column -> value).
  // async only to satisfy the base-class contract; sql.js is synchronous.
  async queryRows(sql, ...params) {
    const db = this.ensureDb();
    const stmt = db.prepare(sql);
    if (params.length > 0) stmt.bind(params);
    const results = [];
    while (stmt.step()) {
      const row = stmt.getAsObject();
      results.push(row);
    }
    stmt.free();
    return results;
  }
  // Execute a statement with optional positional parameters.
  // NOTE(review): always returns 0 rather than the affected-row count
  // (sql.js exposes db.getRowsModified()); callers must not rely on the
  // return value — pruneHandleHistory below is overridden for this reason.
  async execSql(sql, ...params) {
    const db = this.ensureDb();
    if (params.length === 0) {
      db.run(sql);
    } else {
      db.run(sql, params);
    }
    return 0;
  }
  // ======================================================================
  // Row → MCard conversion (sql.js returns Uint8Array for BLOBs)
  // ======================================================================
  // Coerce the content column to bytes (BLOB passes through, TEXT is
  // UTF-8 encoded, anything else becomes empty) and rebuild the card.
  rowToCard(row) {
    const rawContent = row.content;
    const content = rawContent instanceof Uint8Array ? rawContent : typeof rawContent === "string" ? new TextEncoder().encode(rawContent) : new Uint8Array(0);
    return MCard.fromData(content, String(row.hash), String(row.g_time));
  }
  // ======================================================================
  // sql.js-specific helpers
  // ======================================================================
  /**
   * Export the whole database as a Uint8Array snapshot (for persistence).
   */
  export() {
    return this.ensureDb().export();
  }
  /**
   * Get the raw sql.js Database for use with the VectorStore adapter.
   */
  getRawDb() {
    return this.ensureDb();
  }
  // =========== pruneHandleHistory override (needs count before delete) ===========
  // Delete handle-history rows and return how many were removed. The count
  // is queried BEFORE the delete because execSql above cannot report
  // affected rows. With neither deleteAll nor olderThan set, nothing is
  // deleted and 0 is returned.
  async pruneHandleHistory(handle, options = {}) {
    const { validateHandle } = await import("./Handle-3N4QOA3U.js");
    const db = this.ensureDb();
    const normalized = validateHandle(handle);
    if (options.deleteAll) {
      const stmt = db.prepare("SELECT COUNT(*) FROM handle_history WHERE handle = ?");
      stmt.bind([normalized]);
      stmt.step();
      const count = stmt.get()[0];
      stmt.free();
      db.run("DELETE FROM handle_history WHERE handle = ?", [normalized]);
      return count;
    } else if (options.olderThan) {
      const stmt = db.prepare("SELECT COUNT(*) FROM handle_history WHERE handle = ? AND changed_at < ?");
      stmt.bind([normalized, options.olderThan]);
      stmt.step();
      const count = stmt.get()[0];
      stmt.free();
      db.run("DELETE FROM handle_history WHERE handle = ? AND changed_at < ?", [normalized, options.olderThan]);
      return count;
    }
    return 0;
  }
};
|
|
109
|
+
|
|
110
|
+
export {
|
|
111
|
+
SqliteWasmEngine
|
|
112
|
+
};
|