mcard-js 2.1.49 → 2.1.51

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (109)
  1. package/dist/CardCollection-EMSBVZP3.js +10 -0
  2. package/dist/CardCollection-KQWR4PCV.js +10 -0
  3. package/dist/CardCollection-ORGE2XBG.js +10 -0
  4. package/dist/EngineRegistry-ABZXHZWO.js +17 -0
  5. package/dist/EngineRegistry-EIOT4MUZ.js +17 -0
  6. package/dist/EngineRegistry-IQ6EVO72.js +17 -0
  7. package/dist/EngineRegistry-PHRFXEOE.js +17 -0
  8. package/dist/IndexedDBEngine-EWA3SLAO.js +12 -0
  9. package/dist/IndexedDBEngine-FXAD42F3.js +12 -0
  10. package/dist/IndexedDBEngine-RD4447IS.js +12 -0
  11. package/dist/LLMRuntime-ARUWOX52.js +17 -0
  12. package/dist/LLMRuntime-C3XCO7WF.js +17 -0
  13. package/dist/LLMRuntime-CQ7X43QR.js +17 -0
  14. package/dist/LLMRuntime-PD45COKE.js +17 -0
  15. package/dist/LLMRuntime-QOUMLT33.js +17 -0
  16. package/dist/LLMRuntime-SZNLTHD7.js +17 -0
  17. package/dist/LLMRuntime-TVJGK2BG.js +17 -0
  18. package/dist/LambdaRuntime-25GMEJCU.js +19 -0
  19. package/dist/LambdaRuntime-7KQUMHPI.js +19 -0
  20. package/dist/LambdaRuntime-DRT7ODPC.js +19 -0
  21. package/dist/LambdaRuntime-HSREEYQG.js +19 -0
  22. package/dist/LambdaRuntime-IH7NVG6Z.js +19 -0
  23. package/dist/LambdaRuntime-MPG27FM2.js +19 -0
  24. package/dist/LambdaRuntime-ODSWIMNM.js +19 -0
  25. package/dist/LambdaRuntime-PHGRZYAW.js +19 -0
  26. package/dist/LambdaRuntime-QOEYR37L.js +19 -0
  27. package/dist/LambdaRuntime-RT33TFN2.js +19 -0
  28. package/dist/LambdaRuntime-W6TQBP5O.js +19 -0
  29. package/dist/Loader-35WSUC53.js +14 -0
  30. package/dist/Loader-STS3G4OQ.js +16 -0
  31. package/dist/Loader-W22AEM6F.js +12 -0
  32. package/dist/Loader-YBPWP43S.js +12 -0
  33. package/dist/Loader-ZYSS7B4D.js +12 -0
  34. package/dist/NetworkRuntime-KR2QITXV.js +987 -0
  35. package/dist/NetworkRuntime-S6V2CMZV.js +1575 -0
  36. package/dist/OllamaProvider-2ANW6EB2.js +9 -0
  37. package/dist/OllamaProvider-5QFJKYAC.js +9 -0
  38. package/dist/OllamaProvider-6QXJGR7V.js +9 -0
  39. package/dist/OllamaProvider-ABEEFX7M.js +9 -0
  40. package/dist/OllamaProvider-Z2CGY5LY.js +9 -0
  41. package/dist/VCard-225X42W7.js +25 -0
  42. package/dist/chunk-2APJYBH4.js +368 -0
  43. package/dist/chunk-4DFTWDRB.js +497 -0
  44. package/dist/chunk-4PBRTFSY.js +112 -0
  45. package/dist/chunk-4T3H25AP.js +299 -0
  46. package/dist/chunk-5DFXPIRL.js +42 -0
  47. package/dist/chunk-5HRZV4R3.js +217 -0
  48. package/dist/chunk-6ZRJXVJ3.js +529 -0
  49. package/dist/chunk-7N7JYGN2.js +364 -0
  50. package/dist/chunk-7QTJUGYQ.js +74 -0
  51. package/dist/chunk-7TXIPJI2.js +2360 -0
  52. package/dist/chunk-BFJUD527.js +2369 -0
  53. package/dist/chunk-CHXIVTQV.js +364 -0
  54. package/dist/chunk-DM2ABCA4.js +497 -0
  55. package/dist/chunk-DTPHGTBQ.js +275 -0
  56. package/dist/chunk-EDAJ5FO6.js +405 -0
  57. package/dist/chunk-ETJWXHKZ.js +246 -0
  58. package/dist/chunk-FLYGNPUC.js +2369 -0
  59. package/dist/chunk-FSDRDWOP.js +34 -0
  60. package/dist/chunk-GIKMCG4D.js +497 -0
  61. package/dist/chunk-IJKS3LGK.js +428 -0
  62. package/dist/chunk-JUQ2VQZA.js +428 -0
  63. package/dist/chunk-JVW4J7BY.js +2369 -0
  64. package/dist/chunk-JWTRVEC3.js +2369 -0
  65. package/dist/chunk-KJM4C65U.js +299 -0
  66. package/dist/chunk-KMC566CN.js +591 -0
  67. package/dist/chunk-KMNP6DBL.js +455 -0
  68. package/dist/chunk-LVU7O5IY.js +597 -0
  69. package/dist/chunk-M4C6RWLA.js +373 -0
  70. package/dist/chunk-NAAAKSEO.js +541 -0
  71. package/dist/chunk-NKIXLPHL.js +373 -0
  72. package/dist/chunk-NOEDMK7I.js +428 -0
  73. package/dist/chunk-NOPYSBOQ.js +2360 -0
  74. package/dist/chunk-P4G42QCY.js +2369 -0
  75. package/dist/chunk-PKLONZCF.js +253 -0
  76. package/dist/chunk-PNGECWPN.js +597 -0
  77. package/dist/chunk-PYP6T64W.js +217 -0
  78. package/dist/chunk-QFT3COE2.js +217 -0
  79. package/dist/chunk-QFZFXMNX.js +275 -0
  80. package/dist/chunk-QZGRQRJP.js +2369 -0
  81. package/dist/chunk-R3XRBAM7.js +253 -0
  82. package/dist/chunk-RYP66UMH.js +74 -0
  83. package/dist/chunk-RZIZYRLF.js +112 -0
  84. package/dist/chunk-T43V44RS.js +2369 -0
  85. package/dist/chunk-UCNVX5BZ.js +74 -0
  86. package/dist/chunk-UDF7HS4V.js +368 -0
  87. package/dist/chunk-VJPXJVEH.js +299 -0
  88. package/dist/chunk-VW3KBDK5.js +74 -0
  89. package/dist/chunk-X72XIYSN.js +364 -0
  90. package/dist/chunk-XETU7TV4.js +112 -0
  91. package/dist/chunk-Y4BT6LHA.js +368 -0
  92. package/dist/chunk-YQGB6BIA.js +2369 -0
  93. package/dist/chunk-ZEQPO3XV.js +217 -0
  94. package/dist/chunk-ZKRKWXEQ.js +2369 -0
  95. package/dist/chunk-ZMK2HTZ5.js +275 -0
  96. package/dist/constants-CLB7B6MN.js +101 -0
  97. package/dist/constants-O343SMHL.js +103 -0
  98. package/dist/constants-YPGDEX5X.js +103 -0
  99. package/dist/index.browser.cjs +11 -5
  100. package/dist/index.browser.js +12 -12
  101. package/dist/index.cjs +2358 -1896
  102. package/dist/index.d.cts +934 -776
  103. package/dist/index.d.ts +934 -776
  104. package/dist/index.js +1353 -1271
  105. package/dist/storage/SqliteNodeEngine.cjs +12 -6
  106. package/dist/storage/SqliteNodeEngine.js +4 -4
  107. package/dist/storage/SqliteWasmEngine.cjs +11 -5
  108. package/dist/storage/SqliteWasmEngine.js +4 -4
  109. package/package.json +5 -3
@@ -0,0 +1,364 @@
1
+ import {
2
+ ContentTypeInterpreter,
3
+ MCard
4
+ } from "./chunk-GGQCF7ZK.js";
5
+ import {
6
+ MAX_FILE_SIZE,
7
+ READ_TIMEOUT_MS
8
+ } from "./chunk-R3XRBAM7.js";
9
+ import {
10
+ __export
11
+ } from "./chunk-PNKVD2UK.js";
12
+
13
+ // src/Loader.ts
14
+ var Loader_exports = {};
15
+ __export(Loader_exports, {
16
+ loadFileToCollection: () => loadFileToCollection,
17
+ processAndStoreFile: () => processAndStoreFile
18
+ });
19
+ import * as fs2 from "fs/promises";
20
+ import * as path2 from "path";
21
+
22
+ // src/FileIO.ts
23
+ var FileIO_exports = {};
24
+ __export(FileIO_exports, {
25
+ isProblematicFile: () => isProblematicFile,
26
+ listFiles: () => listFiles,
27
+ processFileContent: () => processFileContent,
28
+ readFileSafely: () => readFileSafely,
29
+ streamReadNormalizedText: () => streamReadNormalizedText
30
+ });
31
+ import * as crypto from "crypto";
32
+ import * as fs from "fs/promises";
33
+ import * as path from "path";
34
/**
 * Stream-read a file as UTF-8 text while normalizing it for safe storage:
 * carriage returns are dropped, lines longer than `wrapWidth` are
 * hard-wrapped, and at most `byteCap` raw bytes are consumed. A SHA-256
 * of the raw bytes actually read is computed alongside.
 *
 * @param {string} filePath - File to read.
 * @param {{ byteCap: number, wrapWidth: number }} options - Byte limit and
 *   wrap column (wrapWidth <= 0 disables wrapping).
 * @returns {Promise<{ text: string, originalSize: number,
 *   originalSha256Prefix: string }>} normalized text, count of raw bytes
 *   consumed, and the first 16 hex chars of their SHA-256.
 */
async function streamReadNormalizedText(filePath, options) {
  const { byteCap, wrapWidth } = options;
  const sha = crypto.createHash("sha256");
  let totalSize = 0;
  let producedText = "";
  let currentLen = 0;
  // Appends decoded characters to producedText, dropping "\r" and
  // hard-wrapping lines at wrapWidth. Shared by the streaming loop and
  // the final decoder flush (this logic was previously duplicated inline).
  const appendNormalized = (s) => {
    for (const ch of s) {
      if (ch === "\r") continue;
      producedText += ch;
      if (ch === "\n") {
        currentLen = 0;
      } else {
        currentLen++;
        if (wrapWidth > 0 && currentLen >= wrapWidth) {
          producedText += "\n";
          currentLen = 0;
        }
      }
    }
  };
  const handle = await fs.open(filePath, "r");
  try {
    const buffer = new Uint8Array(8192);
    let remaining = byteCap;
    // stream:true lets multi-byte UTF-8 sequences span chunk boundaries.
    const decoder = new TextDecoder("utf-8", { fatal: false });
    let position = 0;
    while (remaining > 0) {
      const { bytesRead } = await handle.read(buffer, 0, Math.min(buffer.length, remaining), position);
      if (bytesRead === 0) break; // EOF
      position += bytesRead;
      const chunk = buffer.subarray(0, bytesRead);
      sha.update(chunk);
      totalSize += bytesRead;
      remaining -= bytesRead;
      appendNormalized(decoder.decode(chunk, { stream: true }));
    }
    // Flush any partial code point buffered by the streaming decoder.
    appendNormalized(decoder.decode());
  } finally {
    await handle.close();
  }
  return {
    text: producedText,
    originalSize: totalSize,
    originalSha256Prefix: sha.digest("hex").substring(0, 16)
  };
}
92
// Local aliases for the size/timeout limits imported from the shared
// constants chunk (the bundler appends "2" to avoid name collisions).
var MAX_FILE_SIZE2 = MAX_FILE_SIZE;
var READ_TIMEOUT_MS2 = READ_TIMEOUT_MS;
94
/**
 * Heuristically decide whether a file should be skipped (or specially
 * handled) during ingestion: hidden files, oversized files, unstructured
 * binary data, and files with pathological line structure all qualify.
 * Any stat/read failure makes the file count as problematic (fail closed).
 *
 * @param {string} filePath - File to inspect.
 * @returns {Promise<boolean>} true when the file is considered problematic.
 */
async function isProblematicFile(filePath) {
  try {
    const stats = await fs.stat(filePath);
    if (stats.size === 0) return false; // empty files are harmless
    if (path.basename(filePath).startsWith(".")) return true; // hidden file
    if (stats.size > MAX_FILE_SIZE2) return true; // over the hard limit
    const ext = path.extname(filePath);
    const isKnownType = ContentTypeInterpreter.isKnownLongLineExtension(ext);
    // Known long-line formats get a tighter 1 MiB size budget.
    if (isKnownType && stats.size > 1024 * 1024) return true;
    // Sample up to the first 32 KiB to classify the content.
    const handle = await fs.open(filePath, "r");
    let sample;
    try {
      const head = new Uint8Array(32 * 1024);
      const { bytesRead } = await handle.read(head, 0, head.length, 0);
      sample = head.subarray(0, bytesRead);
    } finally {
      await handle.close();
    }
    if (ContentTypeInterpreter.isUnstructuredBinary(sample)) return true;
    if (ContentTypeInterpreter.hasPathologicalLines(sample, isKnownType)) return true;
    return false;
  } catch {
    // Unreadable or vanished files are treated as problematic.
    return true;
  }
}
118
/**
 * Read a file fully into memory, enforcing a byte cap and a read timeout.
 *
 * Fixes over the previous version: `options.maxBytes` was accepted from
 * callers (the problematic-file fallback passes it) but silently ignored;
 * the AbortController signal was never consulted, so the timeout could
 * never fire; and a single `handle.read` call was assumed to fill the
 * whole buffer, which Node does not guarantee.
 *
 * @param {string} filePath - File to read.
 * @param {{ maxBytes?: number, allowPathological?: boolean }} [options]
 *   maxBytes caps how many bytes are read from the start of the file.
 * @returns {Promise<Uint8Array>} the bytes read (truncated to maxBytes).
 * @throws {Error} when the (capped) size exceeds MAX_FILE_SIZE2 or the
 *   read exceeds READ_TIMEOUT_MS2.
 */
async function readFileSafely(filePath, options = {}) {
  const stats = await fs.stat(filePath);
  // Honor the caller-supplied cap; without one, the whole file must fit.
  const cap = options.maxBytes !== void 0 ? Math.min(options.maxBytes, stats.size) : stats.size;
  if (cap > MAX_FILE_SIZE2) throw new Error(`File too large: ${stats.size}`);
  const controller = new AbortController();
  const timeout = setTimeout(() => controller.abort(), READ_TIMEOUT_MS2);
  try {
    const handle = await fs.open(filePath, "r");
    try {
      const buffer = new Uint8Array(cap);
      let offset = 0;
      // Loop: one read() may return fewer bytes than requested.
      while (offset < cap) {
        if (controller.signal.aborted) {
          throw new Error(`Read timeout for ${filePath}`);
        }
        const { bytesRead } = await handle.read(buffer, offset, cap - offset, offset);
        if (bytesRead === 0) break; // EOF (file shrank since stat)
        offset += bytesRead;
      }
      return offset === cap ? buffer : buffer.subarray(0, offset);
    } finally {
      await handle.close();
    }
  } catch (e) {
    const error = e;
    if (error.name === "AbortError") throw new Error(`Read timeout for ${filePath}`);
    throw e;
  } finally {
    clearTimeout(timeout);
  }
}
140
/**
 * Collect the paths of all regular, non-hidden, non-problematic files
 * under a directory. Hidden entries (dot-prefixed) are skipped entirely;
 * subdirectories are descended into only when `recursive` is true.
 * Listing errors are logged and yield whatever was gathered so far.
 *
 * @param {string} dirPath - Directory to scan.
 * @param {boolean} [recursive=false] - Descend into subdirectories.
 * @returns {Promise<string[]>} absolute-ish paths joined from dirPath.
 */
async function listFiles(dirPath, recursive = false) {
  const found = [];
  try {
    const entries = await fs.readdir(dirPath, { withFileTypes: true });
    for (const entry of entries) {
      if (entry.name.startsWith(".")) continue; // hidden entry
      const fullPath = path.join(dirPath, entry.name);
      if (entry.isDirectory()) {
        if (recursive) {
          found.push(...await listFiles(fullPath, true));
        }
      } else if (entry.isFile() && !(await isProblematicFile(fullPath))) {
        found.push(fullPath);
      }
    }
  } catch (e) {
    console.warn(`Error listing directory ${dirPath}:`, e);
  }
  return found;
}
162
/**
 * Read a file and package its content together with detected type
 * metadata. Text content is decoded to a string (strict UTF-8 first,
 * then a lossy fallback); binary content keeps its raw bytes.
 *
 * @param {string} filePath - File to process.
 * @param {{ forceBinary?: boolean, allowPathological?: boolean,
 *           maxBytes?: number }} [options]
 * @returns {Promise<object>} content plus filename/mimeType/extension/
 *   isBinary/size metadata.
 */
async function processFileContent(filePath, options = {}) {
  const rawContent = await readFileSafely(filePath, {
    allowPathological: options.allowPathological,
    maxBytes: options.maxBytes
  });
  // Type detection only needs a bounded prefix (first 1 MiB) of the data.
  const sample = rawContent.subarray(0, 1024 * 1024);
  const detection = ContentTypeInterpreter.detectContentType(sample, path.extname(filePath));
  let isBinary = ContentTypeInterpreter.isBinaryContent(sample, detection.mimeType);
  if (options.forceBinary) isBinary = true;
  let content = rawContent;
  if (!isBinary) {
    try {
      // Strict decode first so invalid UTF-8 is noticed...
      content = new TextDecoder("utf-8", { fatal: true }).decode(rawContent);
    } catch {
      // ...then fall back to lossy decoding (U+FFFD replacement chars).
      content = new TextDecoder("utf-8", { fatal: false }).decode(rawContent);
    }
  }
  return {
    content,
    filename: path.basename(filePath),
    mimeType: detection.mimeType,
    extension: detection.extension,
    isBinary,
    size: rawContent.length
  };
}
185
+
186
+ // src/Loader.ts
187
// Byte cap applied when ingesting a "problematic" file (2 MiB).
var DEFAULT_MAX_PROBLEM_BYTES = 2 * 1024 * 1024;
// Hard-wrap width for extensions known to produce very long lines.
var WRAP_WIDTH_KNOWN = 1e3;
// Hard-wrap width for all other normalized text.
var WRAP_WIDTH_DEFAULT = 80;
190
/**
 * Minimal "[Loader]"-prefixed console logger. `debug` output is emitted
 * only when the DEBUG environment variable is set.
 */
var Logger = {
  info(...args) {
    console.log("[Loader]", ...args);
  },
  warn(...args) {
    console.warn("[Loader]", ...args);
  },
  error(...args) {
    console.error("[Loader]", ...args);
  },
  debug(...args) {
    if (process.env.DEBUG) console.log("[Loader]", ...args);
  }
};
198
/**
 * Process a single file and store its content in the collection as an
 * MCard. Problematic files are skipped unless `allowProblematic` is set,
 * in which case they are ingested as normalized wrapped text capped at
 * `maxBytesOnProblem` (with a capped binary read as last resort).
 *
 * Fix: `isProblematicFile()` — a stat plus a 32 KiB sample read — was
 * previously invoked twice per file (once at entry, once again before the
 * metadataOnly check); it is now evaluated exactly once.
 *
 * @param {string} filePath - File to ingest.
 * @param {object} collection - Destination collection (addWithHandle/add).
 * @param {{ allowProblematic?: boolean, maxBytesOnProblem?: number,
 *           metadataOnly?: boolean, rootPath?: string }} [options]
 * @returns {Promise<object|null>} a result record, or null when the file
 *   was skipped or processing failed.
 */
async function processAndStoreFile(filePath, collection, options = {}) {
  const {
    allowProblematic = false,
    maxBytesOnProblem = DEFAULT_MAX_PROBLEM_BYTES,
    metadataOnly = false,
    rootPath
  } = options;
  try {
    // Single evaluation of the expensive problematic-file heuristic.
    const isProblematic = await isProblematicFile(filePath);
    let fileInfo;
    if (isProblematic) {
      if (!allowProblematic) {
        Logger.warn(`Skipping problematic file: ${filePath}`);
        return null;
      }
      const extension = path2.extname(filePath).toLowerCase();
      const isKnownType = ContentTypeInterpreter.isKnownLongLineExtension(extension);
      const wrapWidth = isKnownType ? WRAP_WIDTH_KNOWN : WRAP_WIDTH_DEFAULT;
      Logger.warn(`Problematic file detected, processing as safe text: ${filePath}`);
      try {
        const streamed = await streamReadNormalizedText(filePath, {
          byteCap: maxBytesOnProblem,
          wrapWidth
        });
        fileInfo = {
          content: streamed.text,
          filename: path2.basename(filePath),
          mimeType: "text/plain",
          extension,
          isBinary: false,
          size: streamed.text.length,
          originalSize: streamed.originalSize,
          originalSha256Prefix: streamed.originalSha256Prefix,
          normalized: true,
          wrapWidth
        };
      } catch (e) {
        Logger.warn(`Safe text processing failed, falling back to capped binary: ${filePath}`, e);
        fileInfo = await processFileContent(filePath, {
          forceBinary: true,
          allowPathological: true,
          maxBytes: maxBytesOnProblem
        });
      }
    } else {
      Logger.info(`Processing file: ${filePath}`);
      fileInfo = await processFileContent(filePath);
    }
    if (!fileInfo) return null;
    const content = fileInfo.content;
    // Empty content cannot be stored as an MCard; return a zero-size stub.
    // (An empty string is already falsy, so only the empty Uint8Array
    // needs an explicit check.)
    if (!content || content instanceof Uint8Array && content.length === 0) {
      Logger.debug(`Skipping empty file: ${filePath} (empty files cannot be stored as MCards)`);
      return {
        hash: "",
        contentType: fileInfo.mimeType,
        isBinary: fileInfo.isBinary,
        filename: fileInfo.filename,
        size: 0,
        filePath
      };
    }
    let mcard = null;
    if (metadataOnly && isProblematic) {
      // Metadata-only mode for problematic files: record, but don't store.
      mcard = null;
    } else {
      mcard = await MCard.create(fileInfo.content);
      const handle = path2.basename(filePath);
      try {
        await collection.addWithHandle(mcard, handle);
      } catch (e) {
        // Handle collisions are common (README.md, LICENSE, ...): retry
        // with the root-relative path, then fall back to hash-only add.
        let registered = false;
        if (rootPath) {
          const relPath = path2.relative(rootPath, filePath);
          if (relPath !== handle) {
            try {
              await collection.addWithHandle(mcard, relPath);
              registered = true;
            } catch (e2) {
              Logger.debug(
                `Handle name '${handle}' already in use (common for files like README.md, LICENSE). MCard stored successfully with hash ${mcard.hash.slice(0, 8)}... (accessible by hash, not by handle)`
              );
            }
          }
        }
        if (!registered) {
          try {
            await collection.add(mcard);
          } catch (e3) {
            Logger.warn(`Hash fallback also failed for ${handle}:`, e3);
          }
        }
      }
    }
    const result = {
      hash: mcard ? mcard.hash : "METADATA_ONLY",
      contentType: fileInfo.mimeType,
      isBinary: fileInfo.isBinary,
      filename: fileInfo.filename,
      size: fileInfo.size,
      filePath
    };
    if (fileInfo.originalSize !== void 0) result.originalSize = fileInfo.originalSize;
    if (fileInfo.originalSha256Prefix) result.originalSha256Prefix = fileInfo.originalSha256Prefix;
    if (metadataOnly && isProblematic) result.metadataOnly = true;
    return result;
  } catch (e) {
    Logger.error(`Error processing ${filePath}:`, e);
    return null;
  }
}
308
/**
 * Load a file, or a directory tree, into the collection.
 *
 * Fix: directory-containment was tested with `dir.startsWith(rootPath)`,
 * a naive string-prefix check that also matches sibling paths such as
 * "<root>-backup/..."; it is replaced with a path.relative-based check.
 *
 * @param {string} targetPath - File or directory to ingest.
 * @param {object} collection - Destination card collection.
 * @param {{ recursive?: boolean, includeProblematic?: boolean,
 *           maxBytesOnProblem?: number, metadataOnly?: boolean }} [options]
 * @returns {Promise<{metrics: object, results: object[]}>} ingest metrics
 *   (file/dir counts, max depth) plus one result per processed file.
 * @throws {Error} when targetPath is neither a file nor a directory.
 */
async function loadFileToCollection(targetPath, collection, options = {}) {
  const {
    recursive = false,
    includeProblematic = false,
    maxBytesOnProblem = DEFAULT_MAX_PROBLEM_BYTES,
    metadataOnly = false
  } = options;
  const resolvedPath = path2.resolve(targetPath);
  const stats = await fs2.stat(resolvedPath);
  const results = [];
  let files = [];
  let rootPath = resolvedPath;
  if (stats.isFile()) {
    files = [resolvedPath];
    rootPath = path2.dirname(resolvedPath);
  } else if (stats.isDirectory()) {
    files = await listFiles(resolvedPath, recursive);
    rootPath = resolvedPath;
  } else {
    throw new Error(`Path ${targetPath} is not a file or directory`);
  }
  // Directory/depth metrics over files actually contained in rootPath.
  const uniqueDirs = /* @__PURE__ */ new Set();
  let maxDepth = 0;
  for (const file of files) {
    const rel = path2.relative(rootPath, file);
    // Separator-aware containment check (see fix note above).
    const outside = rel === ".." || rel.startsWith(".." + path2.sep) || path2.isAbsolute(rel);
    if (outside) continue;
    uniqueDirs.add(path2.dirname(file));
    const depth = rel.split(path2.sep).length - 1;
    if (depth > maxDepth) maxDepth = depth;
  }
  const metrics = {
    filesCount: files.length,
    directoriesCount: uniqueDirs.size,
    directoryLevels: maxDepth
  };
  Logger.info(`About to process ${files.length} files`);
  for (const file of files) {
    const result = await processAndStoreFile(file, collection, {
      allowProblematic: includeProblematic,
      maxBytesOnProblem,
      metadataOnly,
      rootPath
    });
    if (result) results.push(result);
  }
  return { metrics, results };
}
358
+
359
+ export {
360
+ FileIO_exports,
361
+ processAndStoreFile,
362
+ loadFileToCollection,
363
+ Loader_exports
364
+ };
@@ -0,0 +1,74 @@
1
+ // src/storage/EngineRegistry.ts
2
/**
 * Identifiers for the supported storage engines. A plain string map
 * (rather than the compiled TS-enum IIFE) with identical keys/values so
 * Object.values(EngineType) and direct property access behave the same.
 */
var EngineType = {
  INDEXED_DB: "indexeddb",
  SQLITE_NODE: "sqlite-node",
  SQLITE_WASM: "sqlite-wasm"
};
8
/**
 * Static metadata for each storage engine, keyed by EngineType value:
 * display name, target environment ("browser" or "node"), the backing
 * npm package (null when the engine is platform-native), and a short
 * description.
 */
var ENGINE_INFO = {
  indexeddb: {
    type: "indexeddb",
    displayName: "IndexedDB",
    environment: "browser",
    package: null,
    description: "Browser-native IndexedDB key-value store"
  },
  "sqlite-node": {
    type: "sqlite-node",
    displayName: "SQLite (Node.js)",
    environment: "node",
    package: "better-sqlite3",
    description: "SQLite via native better-sqlite3 bindings"
  },
  "sqlite-wasm": {
    type: "sqlite-wasm",
    displayName: "SQLite (WASM)",
    environment: "browser",
    package: "sql.js",
    description: "SQLite via sql.js WebAssembly"
  }
};
31
/**
 * Lazily instantiate and initialize a storage engine of the given type.
 * Each backend's module is loaded via dynamic import so only the selected
 * engine's dependencies are pulled in.
 *
 * @param {string} type - One of the EngineType values.
 * @param {{ dbPath?: string, wasmUrl?: string, existingData?: * }} [options]
 * @returns {Promise<object>} a ready-to-use engine instance.
 * @throws {Error} for an unknown engine type.
 */
async function createEngine(type, options = {}) {
  if (type === "indexeddb" /* INDEXED_DB */) {
    const { IndexedDBEngine } = await import("./IndexedDBEngine-EWA3SLAO.js");
    const engine = new IndexedDBEngine();
    await engine.init();
    return engine;
  }
  if (type === "sqlite-node" /* SQLITE_NODE */) {
    const { SqliteNodeEngine } = await import("./storage/SqliteNodeEngine.js");
    // In-memory database unless a path is supplied.
    return SqliteNodeEngine.create(options.dbPath || ":memory:");
  }
  if (type === "sqlite-wasm" /* SQLITE_WASM */) {
    const { SqliteWasmEngine } = await import("./storage/SqliteWasmEngine.js");
    const engine = new SqliteWasmEngine();
    await engine.init(options.wasmUrl, options.existingData);
    return engine;
  }
  const validTypes = Object.values(EngineType).join(", ");
  throw new Error(
    `Unknown engine type: "${type}". Valid types: ${validTypes}`
  );
}
57
/**
 * List every registered engine type identifier.
 * @returns {string[]} EngineType values in declaration order.
 */
function getAvailableEngines() {
  const types = Object.values(EngineType);
  return types;
}
60
/**
 * Look up the metadata record for an engine type.
 * @param {string} type - EngineType value.
 * @returns {object|undefined} the ENGINE_INFO entry, or undefined when unknown.
 */
function getEngineInfo(type) {
  const info = ENGINE_INFO[type];
  return info;
}
63
/**
 * Select the engine metadata records targeting a given environment.
 * @param {string} env - Environment tag as stored in ENGINE_INFO
 *   ("browser" or "node").
 * @returns {object[]} matching ENGINE_INFO entries, in declaration order.
 */
function getEnginesByEnvironment(env) {
  const matches = [];
  for (const info of Object.values(ENGINE_INFO)) {
    if (info.environment === env) matches.push(info);
  }
  return matches;
}
66
+
67
+ export {
68
+ EngineType,
69
+ ENGINE_INFO,
70
+ createEngine,
71
+ getAvailableEngines,
72
+ getEngineInfo,
73
+ getEnginesByEnvironment
74
+ };