@peam-ai/search 0.1.1 → 0.1.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.mts CHANGED
@@ -63,4 +63,54 @@ declare class SearchEngine {
  import(handler: (key: string) => Promise<string>, keys: string[]): Promise<void>;
  }
 
- export { type PageToIndex, SearchEngine, type SearchEngineConfig, type SearchIndexData, type StructuredPageDocumentData, TextSearch, buildSearchIndex };
+ /**
+ * Interface for exporting and importing search indexes
+ */
+ interface SearchIndexExporter {
+ /**
+ * Import a search index from storage
+ * @returns The search index data or null if not available
+ */
+ import(): Promise<SearchIndexData | null>;
+ /**
+ * Export a search index to storage
+ * @param data The search index data to save
+ */
+ export(data: SearchIndexData): Promise<void>;
+ }
+
+ interface FileBasedSearchIndexExporterOptions {
+ /**
+ * The directory where the index file is located
+ */
+ baseDir: string;
+ /**
+ * The path to the index file relative to baseDir
+ */
+ indexPath: string;
+ }
+ /**
+ * File-based implementation of SearchIndexExporter
+ * Reads and writes search index data to/from a JSON file
+ */
+ declare class FileBasedSearchIndexExporter implements SearchIndexExporter {
+ private baseDir;
+ private indexPath;
+ private cachedData;
+ constructor(options: FileBasedSearchIndexExporterOptions);
+ private getFullPath;
+ private loadData;
+ import(): Promise<SearchIndexData | null>;
+ export(data: SearchIndexData): Promise<void>;
+ }
+
+ type SearchExporterConfig = {
+ type: 'fileBased';
+ config: FileBasedSearchIndexExporterOptions;
+ };
+ /**
+ * Creates a SearchIndexExporter instance from a SearchExporterConfig
+ */
+ declare function createExporterFromConfig(exporterConfig: SearchExporterConfig): SearchIndexExporter;
+
+ export { FileBasedSearchIndexExporter, type FileBasedSearchIndexExporterOptions, type PageToIndex, SearchEngine, type SearchEngineConfig, type SearchExporterConfig, type SearchIndexData, type SearchIndexExporter, type StructuredPageDocumentData, TextSearch, buildSearchIndex, createExporterFromConfig };
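
A minimal usage sketch of the new API, based only on the declarations above (the ./public directory, the search-index.json filename, and the function names below are illustrative assumptions, not part of the package):

import { buildSearchIndex, createExporterFromConfig, type PageToIndex, type SearchIndexData } from '@peam-ai/search';

// Build SearchIndexData from structured pages and persist it as JSON on disk.
export async function saveIndexToDisk(pages: PageToIndex[]): Promise<SearchIndexData> {
  const exporter = createExporterFromConfig({
    type: 'fileBased',
    config: { baseDir: './public', indexPath: 'search-index.json' },
  });
  const data = await buildSearchIndex(pages);
  await exporter.export(data); // writes baseDir/indexPath
  return data;
}

// Reading it back returns null when the file is missing, empty, or structurally invalid.
export async function loadIndexFromDisk(): Promise<SearchIndexData | null> {
  const exporter = createExporterFromConfig({
    type: 'fileBased',
    config: { baseDir: './public', indexPath: 'search-index.json' },
  });
  return exporter.import();
}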
package/dist/index.d.ts CHANGED
@@ -63,4 +63,54 @@ declare class SearchEngine {
  import(handler: (key: string) => Promise<string>, keys: string[]): Promise<void>;
  }
 
- export { type PageToIndex, SearchEngine, type SearchEngineConfig, type SearchIndexData, type StructuredPageDocumentData, TextSearch, buildSearchIndex };
+ /**
+ * Interface for exporting and importing search indexes
+ */
+ interface SearchIndexExporter {
+ /**
+ * Import a search index from storage
+ * @returns The search index data or null if not available
+ */
+ import(): Promise<SearchIndexData | null>;
+ /**
+ * Export a search index to storage
+ * @param data The search index data to save
+ */
+ export(data: SearchIndexData): Promise<void>;
+ }
+
+ interface FileBasedSearchIndexExporterOptions {
+ /**
+ * The directory where the index file is located
+ */
+ baseDir: string;
+ /**
+ * The path to the index file relative to baseDir
+ */
+ indexPath: string;
+ }
+ /**
+ * File-based implementation of SearchIndexExporter
+ * Reads and writes search index data to/from a JSON file
+ */
+ declare class FileBasedSearchIndexExporter implements SearchIndexExporter {
+ private baseDir;
+ private indexPath;
+ private cachedData;
+ constructor(options: FileBasedSearchIndexExporterOptions);
+ private getFullPath;
+ private loadData;
+ import(): Promise<SearchIndexData | null>;
+ export(data: SearchIndexData): Promise<void>;
+ }
+
+ type SearchExporterConfig = {
+ type: 'fileBased';
+ config: FileBasedSearchIndexExporterOptions;
+ };
+ /**
+ * Creates a SearchIndexExporter instance from a SearchExporterConfig
+ */
+ declare function createExporterFromConfig(exporterConfig: SearchExporterConfig): SearchIndexExporter;
+
+ export { FileBasedSearchIndexExporter, type FileBasedSearchIndexExporterOptions, type PageToIndex, SearchEngine, type SearchEngineConfig, type SearchExporterConfig, type SearchIndexData, type SearchIndexExporter, type StructuredPageDocumentData, TextSearch, buildSearchIndex, createExporterFromConfig };
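
The same declarations ship in the CommonJS typings. SearchIndexData also pairs naturally with the existing SearchEngine.import handler; a sketch of rehydrating an engine from a previously exported file (the function name and error handling are illustrative):

import { SearchEngine, createExporterFromConfig, type SearchIndexData } from '@peam-ai/search';

export async function restoreEngine(baseDir: string, indexPath: string): Promise<SearchEngine | null> {
  const exporter = createExporterFromConfig({ type: 'fileBased', config: { baseDir, indexPath } });

  const saved: SearchIndexData | null = await exporter.import();
  if (!saved) {
    return null; // file missing, empty, or invalid
  }

  const engine = new SearchEngine();
  // SearchEngine.import requests each key's payload; serve it from the loaded data map.
  await engine.import(async (key) => saved.data[key], saved.keys);
  return engine;
}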
package/dist/index.js CHANGED
@@ -1,8 +1,24 @@
  "use strict";
+ var __create = Object.create;
  var __defProp = Object.defineProperty;
  var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
  var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __getOwnPropSymbols = Object.getOwnPropertySymbols;
+ var __getProtoOf = Object.getPrototypeOf;
  var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __propIsEnum = Object.prototype.propertyIsEnumerable;
+ var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
+ var __spreadValues = (a, b) => {
+ for (var prop in b || (b = {}))
+ if (__hasOwnProp.call(b, prop))
+ __defNormalProp(a, prop, b[prop]);
+ if (__getOwnPropSymbols)
+ for (var prop of __getOwnPropSymbols(b)) {
+ if (__propIsEnum.call(b, prop))
+ __defNormalProp(a, prop, b[prop]);
+ }
+ return a;
+ };
  var __export = (target, all) => {
  for (var name in all)
  __defProp(target, name, { get: all[name], enumerable: true });
@@ -15,6 +31,14 @@ var __copyProps = (to, from, except, desc) => {
  }
  return to;
  };
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+ // If the importer is in node compatibility mode or this is not an ESM
+ // file that has been converted to a CommonJS file using a Babel-
+ // compatible transform (i.e. "__esModule" has not been set), then set
+ // "default" to the CommonJS "module.exports" for node compatibility.
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+ mod
+ ));
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
  var __async = (__this, __arguments, generator) => {
  return new Promise((resolve, reject) => {
@@ -40,9 +64,11 @@ var __async = (__this, __arguments, generator) => {
  // src/index.ts
  var index_exports = {};
  __export(index_exports, {
+ FileBasedSearchIndexExporter: () => FileBasedSearchIndexExporter,
  SearchEngine: () => SearchEngine,
  TextSearch: () => TextSearch,
- buildSearchIndex: () => buildSearchIndex
+ buildSearchIndex: () => buildSearchIndex,
+ createExporterFromConfig: () => createExporterFromConfig
  });
  module.exports = __toCommonJS(index_exports);
 
@@ -133,8 +159,8 @@ var TextSearch = class {
  count() {
  return this.documentIds.size;
  }
- getDocument(path) {
- return this.index.get(path);
+ getDocument(path2) {
+ return this.index.get(path2);
  }
  getAllDocuments(limit) {
  const documents = [];
@@ -215,18 +241,18 @@ var SearchEngine = class {
  this.initialized = true;
  });
  }
- addPage(path, content) {
+ addPage(path2, content) {
  return __async(this, null, function* () {
  if (!this.initialized) {
  throw new Error("Search engine not initialized. Call initialize() first.");
  }
  const document = {
- id: path,
- path,
+ id: path2,
+ path: path2,
  content
  };
  yield this.textSearch.addDocument(document);
- log2.debug("Page added to search engine:", path);
+ log2.debug("Page added to search engine:", path2);
  });
  }
  search(_0) {
@@ -241,8 +267,8 @@ var SearchEngine = class {
  count() {
  return this.textSearch.count();
  }
- getDocument(path) {
- return this.textSearch.getDocument(path);
+ getDocument(path2) {
+ return this.textSearch.getDocument(path2);
  }
  getAllDocuments(limit) {
  return this.textSearch.getAllDocuments(limit);
@@ -282,10 +308,82 @@ function buildSearchIndex(pages) {
  };
  });
  }
+
+ // src/exporters/FileBasedSearchIndexExporter.ts
+ var import_logger3 = require("@peam-ai/logger");
+ var fs = __toESM(require("fs/promises"));
+ var path = __toESM(require("path"));
+ var log3 = import_logger3.loggers.search;
+ var FileBasedSearchIndexExporter = class {
+ constructor(options) {
+ this.cachedData = null;
+ this.baseDir = options.baseDir;
+ this.indexPath = options.indexPath;
+ }
+ getFullPath() {
+ return path.join(this.baseDir, this.indexPath);
+ }
+ loadData() {
+ return __async(this, null, function* () {
+ if (this.cachedData) {
+ return this.cachedData;
+ }
+ const fullPath = this.getFullPath();
+ try {
+ const fileContent = yield fs.readFile(fullPath, "utf-8");
+ const data = JSON.parse(fileContent);
+ if (!data || !data.keys || !Array.isArray(data.keys) || !data.data) {
+ log3.warn("Invalid search index structure in file:", fullPath);
+ return null;
+ }
+ if (data.keys.length === 0) {
+ log3.debug("Search index is empty:", fullPath);
+ return null;
+ }
+ this.cachedData = data;
+ log3.debug("Search index loaded from file:", fullPath, "with", data.keys.length, "keys");
+ return data;
+ } catch (error) {
+ log3.error("Failed to load search index from file:", fullPath, error);
+ return null;
+ }
+ });
+ }
+ import() {
+ return __async(this, null, function* () {
+ const data = yield this.loadData();
+ return data;
+ });
+ }
+ export(data) {
+ return __async(this, null, function* () {
+ const fullPath = this.getFullPath();
+ try {
+ const dir = path.dirname(fullPath);
+ yield fs.mkdir(dir, { recursive: true });
+ yield fs.writeFile(fullPath, JSON.stringify(data, null, 2), "utf-8");
+ log3.debug("Search index saved to file:", fullPath, "with", data.keys.length, "keys");
+ } catch (error) {
+ log3.error("Failed to save search index to file:", fullPath, error);
+ throw error;
+ }
+ });
+ }
+ };
+
+ // src/exporters/config.ts
+ function createExporterFromConfig(exporterConfig) {
+ if (exporterConfig.type === "fileBased") {
+ return new FileBasedSearchIndexExporter(__spreadValues({}, exporterConfig.config));
+ }
+ throw new Error(`Unknown exporter type: ${exporterConfig.type}`);
+ }
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
+ FileBasedSearchIndexExporter,
  SearchEngine,
  TextSearch,
- buildSearchIndex
+ buildSearchIndex,
+ createExporterFromConfig
  });
  //# sourceMappingURL=index.js.map
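
One behavioral detail visible in the bundled implementation above: loadData caches the parsed file, so repeated import() calls on the same FileBasedSearchIndexExporter reuse the first successfully parsed result rather than re-reading the file, and export() does not invalidate that cache. A small illustration (the .peam/index.json location is an assumption):

import { FileBasedSearchIndexExporter } from '@peam-ai/search';

async function demoImportCache(): Promise<boolean> {
  const exporter = new FileBasedSearchIndexExporter({ baseDir: '.peam', indexPath: 'index.json' });
  const first = await exporter.import();  // reads and parses .peam/index.json
  const second = await exporter.import(); // served from the in-memory cache
  return first === second;                // true whenever the first read succeeded
}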
package/dist/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/index.ts","../src/searchEngine.ts","../src/textSearch.ts","../src/indexBuilder.ts"],"sourcesContent":["export { buildSearchIndex, type PageToIndex, type SearchIndexData } from './indexBuilder';\nexport { SearchEngine } from './searchEngine';\nexport { TextSearch } from './textSearch';\n\nexport type { StructuredPageDocumentData } from './types';\n\nexport type { SearchEngineConfig } from './searchEngine';\n","import { loggers } from '@peam-ai/logger';\nimport type { StructuredPage } from '@peam-ai/parser';\nimport { TextSearch, type TextSearchOptions } from './textSearch';\nimport type { StructuredPageDocumentData } from './types';\n\nconst log = loggers.search;\n\n// eslint-disable-next-line @typescript-eslint/no-empty-object-type\nexport interface SearchEngineConfig {\n // Reserved for future configuration options\n}\n\nexport class SearchEngine {\n private textSearch: TextSearch;\n private initialized: boolean;\n\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n constructor(_config?: SearchEngineConfig) {\n this.textSearch = new TextSearch();\n this.initialized = false;\n }\n\n async initialize(): Promise<void> {\n if (this.initialized) {\n return;\n }\n\n log.debug('Initializing search engine');\n await this.textSearch.initialize();\n this.initialized = true;\n }\n\n async addPage(path: string, content: StructuredPage): Promise<void> {\n if (!this.initialized) {\n throw new Error('Search engine not initialized. Call initialize() first.');\n }\n\n const document: StructuredPageDocumentData = {\n id: path,\n path,\n content,\n };\n\n await this.textSearch.addDocument(document);\n log.debug('Page added to search engine:', path);\n }\n\n async search(query: string, options: TextSearchOptions = {}): Promise<StructuredPageDocumentData[]> {\n if (!this.initialized) {\n throw new Error('Search engine not initialized. 
Call initialize() first.');\n }\n\n log.debug('Performing text search:', query);\n return this.textSearch.search(query, options);\n }\n\n count(): number {\n return this.textSearch.count();\n }\n\n getDocument(path: string) {\n return this.textSearch.getDocument(path);\n }\n\n getAllDocuments(limit?: number): StructuredPageDocumentData[] {\n return this.textSearch.getAllDocuments(limit);\n }\n\n clear(): void {\n this.textSearch.clear();\n }\n\n async export(handler: (key: string, data: string) => Promise<void>): Promise<{ keys: string[] }> {\n return this.textSearch.export(handler);\n }\n\n async import(handler: (key: string) => Promise<string>, keys: string[]): Promise<void> {\n await this.textSearch.import(handler, keys);\n this.initialized = true;\n log.debug('Search engine initialized from imported data');\n }\n}\n","import { loggers } from '@peam-ai/logger';\nimport { Charset, Document } from 'flexsearch';\nimport type { StructuredPageDocumentData } from './types';\n\nexport interface TextSearchOptions {\n limit?: number;\n offset?: number;\n suggest?: boolean;\n}\n\nconst PEAM_DOCUMENT_IDS_KEY = 'peam.documentIds';\nconst MAX_DOCUMENTS_RETRIEVE = 25;\nconst log = loggers.search;\n\nexport class TextSearch {\n private index: Document<StructuredPageDocumentData>;\n private initialized: boolean;\n private documentIds: Set<string>;\n\n constructor() {\n this.initialized = false;\n this.index = this.getIndex();\n this.documentIds = new Set();\n }\n\n private getIndex() {\n return new Document<StructuredPageDocumentData>({\n worker: false,\n document: {\n id: 'path',\n index: ['content:title', 'content:description', 'content:textContent', 'content:author', 'content:keywords'],\n store: true,\n },\n tokenize: 'forward',\n resolution: 9,\n context: {\n resolution: 3,\n depth: 2,\n bidirectional: true,\n },\n cache: 100,\n encoder: Charset.LatinExtra,\n });\n }\n\n async initialize(): Promise<void> {\n if (this.initialized) {\n log.debug('Text search already initialized');\n return;\n }\n\n this.initialized = true;\n }\n\n async addDocument(document: StructuredPageDocumentData): Promise<void> {\n if (!this.initialized) {\n throw new Error('TextSearch not initialized. Call initialize() first.');\n }\n\n log.debug('Adding document to text search:', document.path);\n\n this.index.add(document);\n this.documentIds.add(document.path);\n }\n\n async search(query: string, options: TextSearchOptions = {}): Promise<StructuredPageDocumentData[]> {\n if (!this.initialized) {\n throw new Error('TextSearch not initialized. Call initialize() first.');\n }\n\n const limit = options.limit || MAX_DOCUMENTS_RETRIEVE;\n const offset = options.offset || 0;\n\n log.debug('Searching for:', query);\n\n const results = await this.index.search(query, {\n limit: limit + offset,\n suggest: options.suggest,\n enrich: true,\n });\n\n const pathSet = new Set<string>();\n const documents: StructuredPageDocumentData[] = [];\n\n for (const fieldResults of results) {\n if (Array.isArray(fieldResults.result)) {\n for (const result of fieldResults.result) {\n const id = typeof result === 'object' && 'id' in result ? result.id : result;\n const doc = typeof result === 'object' && 'doc' in result ? 
result.doc : null;\n\n if (!pathSet.has(id as string) && doc) {\n pathSet.add(id as string);\n documents.push(doc);\n }\n }\n }\n }\n\n const pagedResults = documents.slice(offset, offset + limit);\n\n return pagedResults;\n }\n\n count(): number {\n return this.documentIds.size;\n }\n\n getDocument(path: string) {\n return this.index.get(path);\n }\n\n getAllDocuments(limit?: number): StructuredPageDocumentData[] {\n const documents: StructuredPageDocumentData[] = [];\n let count = 0;\n limit = limit || MAX_DOCUMENTS_RETRIEVE;\n\n for (const id of this.documentIds) {\n if (count >= limit) {\n break;\n }\n\n const doc = this.index.get(id);\n if (doc) {\n documents.push(doc);\n count++;\n }\n }\n\n log.debug('Retrieved documents from store (limit):', documents.length, limit);\n return documents;\n }\n\n clear(): void {\n this.index.clear();\n this.index = this.getIndex();\n this.documentIds.clear();\n }\n\n async export(handler: (key: string, data: string) => Promise<void>): Promise<{ keys: string[] }> {\n const keys: string[] = [];\n\n await handler(PEAM_DOCUMENT_IDS_KEY, JSON.stringify(Array.from(this.documentIds)));\n keys.push(PEAM_DOCUMENT_IDS_KEY);\n\n await this.index.export(async (key: string, data: string) => {\n keys.push(key);\n await handler(key, data);\n });\n\n log.debug('Exported keys:', keys.length);\n\n return { keys };\n }\n\n async import(handler: (key: string) => Promise<string>, keys: string[]): Promise<void> {\n const documentIdsData = await handler(PEAM_DOCUMENT_IDS_KEY);\n if (documentIdsData) {\n const parsed = typeof documentIdsData === 'string' ? JSON.parse(documentIdsData) : documentIdsData;\n this.documentIds = new Set(parsed);\n }\n\n for (const key of keys) {\n if (key === PEAM_DOCUMENT_IDS_KEY) {\n continue;\n }\n\n try {\n const data = await handler(key);\n if (data) {\n this.index.import(key, data);\n }\n } catch (error) {\n log.error('Error importing key:', key, error);\n }\n }\n\n this.initialized = true;\n log.debug('Import completed with keys:', keys.length);\n }\n}\n","import type { StructuredPage } from '@peam-ai/parser';\nimport { SearchEngine } from './searchEngine';\n\nexport interface PageToIndex {\n path: string;\n structuredPage: StructuredPage;\n}\n\nexport interface SearchIndexData {\n keys: string[];\n data: Record<string, string>;\n}\n\n/**\n * Build a search index from structured pages\n */\nexport async function buildSearchIndex(pages: PageToIndex[]): Promise<SearchIndexData> {\n const searchEngine = new SearchEngine();\n await searchEngine.initialize();\n\n for (const page of pages) {\n await searchEngine.addPage(page.path, page.structuredPage);\n }\n\n const exportedData: Record<string, string> = {};\n const result = await searchEngine.export(async (key, data) => {\n exportedData[key] = data;\n });\n\n return {\n keys: result.keys,\n data: exportedData,\n 
};\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,iBAAwB;;;ACAxB,oBAAwB;AACxB,wBAAkC;AASlC,IAAM,wBAAwB;AAC9B,IAAM,yBAAyB;AAC/B,IAAM,MAAM,sBAAQ;AAEb,IAAM,aAAN,MAAiB;AAAA,EAKtB,cAAc;AACZ,SAAK,cAAc;AACnB,SAAK,QAAQ,KAAK,SAAS;AAC3B,SAAK,cAAc,oBAAI,IAAI;AAAA,EAC7B;AAAA,EAEQ,WAAW;AACjB,WAAO,IAAI,2BAAqC;AAAA,MAC9C,QAAQ;AAAA,MACR,UAAU;AAAA,QACR,IAAI;AAAA,QACJ,OAAO,CAAC,iBAAiB,uBAAuB,uBAAuB,kBAAkB,kBAAkB;AAAA,QAC3G,OAAO;AAAA,MACT;AAAA,MACA,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,SAAS;AAAA,QACP,YAAY;AAAA,QACZ,OAAO;AAAA,QACP,eAAe;AAAA,MACjB;AAAA,MACA,OAAO;AAAA,MACP,SAAS,0BAAQ;AAAA,IACnB,CAAC;AAAA,EACH;AAAA,EAEM,aAA4B;AAAA;AAChC,UAAI,KAAK,aAAa;AACpB,YAAI,MAAM,iCAAiC;AAC3C;AAAA,MACF;AAEA,WAAK,cAAc;AAAA,IACrB;AAAA;AAAA,EAEM,YAAY,UAAqD;AAAA;AACrE,UAAI,CAAC,KAAK,aAAa;AACrB,cAAM,IAAI,MAAM,sDAAsD;AAAA,MACxE;AAEA,UAAI,MAAM,mCAAmC,SAAS,IAAI;AAE1D,WAAK,MAAM,IAAI,QAAQ;AACvB,WAAK,YAAY,IAAI,SAAS,IAAI;AAAA,IACpC;AAAA;AAAA,EAEM,OAAO,IAAuF;AAAA,+CAAvF,OAAe,UAA6B,CAAC,GAA0C;AAClG,UAAI,CAAC,KAAK,aAAa;AACrB,cAAM,IAAI,MAAM,sDAAsD;AAAA,MACxE;AAEA,YAAM,QAAQ,QAAQ,SAAS;AAC/B,YAAM,SAAS,QAAQ,UAAU;AAEjC,UAAI,MAAM,kBAAkB,KAAK;AAEjC,YAAM,UAAU,MAAM,KAAK,MAAM,OAAO,OAAO;AAAA,QAC7C,OAAO,QAAQ;AAAA,QACf,SAAS,QAAQ;AAAA,QACjB,QAAQ;AAAA,MACV,CAAC;AAED,YAAM,UAAU,oBAAI,IAAY;AAChC,YAAM,YAA0C,CAAC;AAEjD,iBAAW,gBAAgB,SAAS;AAClC,YAAI,MAAM,QAAQ,aAAa,MAAM,GAAG;AACtC,qBAAW,UAAU,aAAa,QAAQ;AACxC,kBAAM,KAAK,OAAO,WAAW,YAAY,QAAQ,SAAS,OAAO,KAAK;AACtE,kBAAM,MAAM,OAAO,WAAW,YAAY,SAAS,SAAS,OAAO,MAAM;AAEzE,gBAAI,CAAC,QAAQ,IAAI,EAAY,KAAK,KAAK;AACrC,sBAAQ,IAAI,EAAY;AACxB,wBAAU,KAAK,GAAG;AAAA,YACpB;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAEA,YAAM,eAAe,UAAU,MAAM,QAAQ,SAAS,KAAK;AAE3D,aAAO;AAAA,IACT;AAAA;AAAA,EAEA,QAAgB;AACd,WAAO,KAAK,YAAY;AAAA,EAC1B;AAAA,EAEA,YAAY,MAAc;AACxB,WAAO,KAAK,MAAM,IAAI,IAAI;AAAA,EAC5B;AAAA,EAEA,gBAAgB,OAA8C;AAC5D,UAAM,YAA0C,CAAC;AACjD,QAAI,QAAQ;AACZ,YAAQ,SAAS;AAEjB,eAAW,MAAM,KAAK,aAAa;AACjC,UAAI,SAAS,OAAO;AAClB;AAAA,MACF;AAEA,YAAM,MAAM,KAAK,MAAM,IAAI,EAAE;AAC7B,UAAI,KAAK;AACP,kBAAU,KAAK,GAAG;AAClB;AAAA,MACF;AAAA,IACF;AAEA,QAAI,MAAM,2CAA2C,UAAU,QAAQ,KAAK;AAC5E,WAAO;AAAA,EACT;AAAA,EAEA,QAAc;AACZ,SAAK,MAAM,MAAM;AACjB,SAAK,QAAQ,KAAK,SAAS;AAC3B,SAAK,YAAY,MAAM;AAAA,EACzB;AAAA,EAEM,OAAO,SAAoF;AAAA;AAC/F,YAAM,OAAiB,CAAC;AAExB,YAAM,QAAQ,uBAAuB,KAAK,UAAU,MAAM,KAAK,KAAK,WAAW,CAAC,CAAC;AACjF,WAAK,KAAK,qBAAqB;AAE/B,YAAM,KAAK,MAAM,OAAO,CAAO,KAAa,SAAiB;AAC3D,aAAK,KAAK,GAAG;AACb,cAAM,QAAQ,KAAK,IAAI;AAAA,MACzB,EAAC;AAED,UAAI,MAAM,kBAAkB,KAAK,MAAM;AAEvC,aAAO,EAAE,KAAK;AAAA,IAChB;AAAA;AAAA,EAEM,OAAO,SAA2C,MAA+B;AAAA;AACrF,YAAM,kBAAkB,MAAM,QAAQ,qBAAqB;AAC3D,UAAI,iBAAiB;AACnB,cAAM,SAAS,OAAO,oBAAoB,WAAW,KAAK,MAAM,eAAe,IAAI;AACnF,aAAK,cAAc,IAAI,IAAI,MAAM;AAAA,MACnC;AAEA,iBAAW,OAAO,MAAM;AACtB,YAAI,QAAQ,uBAAuB;AACjC;AAAA,QACF;AAEA,YAAI;AACF,gBAAM,OAAO,MAAM,QAAQ,GAAG;AAC9B,cAAI,MAAM;AACR,iBAAK,MAAM,OAAO,KAAK,IAAI;AAAA,UAC7B;AAAA,QACF,SAAS,OAAO;AACd,cAAI,MAAM,wBAAwB,KAAK,KAAK;AAAA,QAC9C;AAAA,MACF;AAEA,WAAK,cAAc;AACnB,UAAI,MAAM,+BAA+B,KAAK,MAAM;AAAA,IACtD;AAAA;AACF;;;AD9KA,IAAMC,OAAM,uBAAQ;AAOb,IAAM,eAAN,MAAmB;AAAA;AAAA,EAKxB,YAAY,SAA8B;AACxC,SAAK,aAAa,IAAI,WAAW;AACjC,SAAK,cAAc;AAAA,EACrB;AAAA,EAEM,aAA4B;AAAA;AAChC,UAAI,KAAK,aAAa;AACpB;AAAA,MACF;AAEA,MAAAA,KAAI,MAAM,4BAA4B;AACtC,YAAM,KAAK,WAAW,WAAW;AACjC,WAAK,cAAc;AAAA,IACrB;AAAA;AAAA,EAEM,QAAQ,MAAc,SAAwC;AAAA;AAClE,UAAI,CAAC,KAAK,aAAa;AACrB,cAAM,IAAI,MAAM,yDAAyD;AAAA,MAC3E;AAEA,YAAM,WAAuC;AAAA,QAC3C,IAAI;AAAA,QACJ;AAAA,QACA;AAAA,MACF;AAEA,YAAM,KAAK,WAAW,YAAY,QAAQ;AAC1C,MAAAA,KAAI,MAAM,gCAAgC,IAAI;AAAA,IAChD;AAAA;AAAA,EAEM,OAAO,IAAuF;AAAA,+CAAvF,OAAe,UAA6B,CAAC,GAA0C;AAClG,UA
AI,CAAC,KAAK,aAAa;AACrB,cAAM,IAAI,MAAM,yDAAyD;AAAA,MAC3E;AAEA,MAAAA,KAAI,MAAM,2BAA2B,KAAK;AAC1C,aAAO,KAAK,WAAW,OAAO,OAAO,OAAO;AAAA,IAC9C;AAAA;AAAA,EAEA,QAAgB;AACd,WAAO,KAAK,WAAW,MAAM;AAAA,EAC/B;AAAA,EAEA,YAAY,MAAc;AACxB,WAAO,KAAK,WAAW,YAAY,IAAI;AAAA,EACzC;AAAA,EAEA,gBAAgB,OAA8C;AAC5D,WAAO,KAAK,WAAW,gBAAgB,KAAK;AAAA,EAC9C;AAAA,EAEA,QAAc;AACZ,SAAK,WAAW,MAAM;AAAA,EACxB;AAAA,EAEM,OAAO,SAAoF;AAAA;AAC/F,aAAO,KAAK,WAAW,OAAO,OAAO;AAAA,IACvC;AAAA;AAAA,EAEM,OAAO,SAA2C,MAA+B;AAAA;AACrF,YAAM,KAAK,WAAW,OAAO,SAAS,IAAI;AAC1C,WAAK,cAAc;AACnB,MAAAA,KAAI,MAAM,8CAA8C;AAAA,IAC1D;AAAA;AACF;;;AEjEA,SAAsB,iBAAiB,OAAgD;AAAA;AACrF,UAAM,eAAe,IAAI,aAAa;AACtC,UAAM,aAAa,WAAW;AAE9B,eAAW,QAAQ,OAAO;AACxB,YAAM,aAAa,QAAQ,KAAK,MAAM,KAAK,cAAc;AAAA,IAC3D;AAEA,UAAM,eAAuC,CAAC;AAC9C,UAAM,SAAS,MAAM,aAAa,OAAO,CAAO,KAAK,SAAS;AAC5D,mBAAa,GAAG,IAAI;AAAA,IACtB,EAAC;AAED,WAAO;AAAA,MACL,MAAM,OAAO;AAAA,MACb,MAAM;AAAA,IACR;AAAA,EACF;AAAA;","names":["import_logger","log"]}
+ {"version":3,"sources":["../src/index.ts","../src/searchEngine.ts","../src/textSearch.ts","../src/indexBuilder.ts","../src/exporters/FileBasedSearchIndexExporter.ts","../src/exporters/config.ts"],"sourcesContent":["export { buildSearchIndex, type PageToIndex, type SearchIndexData } from './indexBuilder';\nexport { SearchEngine } from './searchEngine';\nexport { TextSearch } from './textSearch';\n\nexport type { StructuredPageDocumentData } from './types';\n\nexport type { SearchEngineConfig } from './searchEngine';\n\nexport {\n FileBasedSearchIndexExporter,\n createExporterFromConfig,\n type FileBasedSearchIndexExporterOptions,\n type SearchExporterConfig,\n type SearchIndexExporter,\n} from './exporters';\n","import { loggers } from '@peam-ai/logger';\nimport type { StructuredPage } from '@peam-ai/parser';\nimport { TextSearch, type TextSearchOptions } from './textSearch';\nimport type { StructuredPageDocumentData } from './types';\n\nconst log = loggers.search;\n\n// eslint-disable-next-line @typescript-eslint/no-empty-object-type\nexport interface SearchEngineConfig {\n // Reserved for future configuration options\n}\n\nexport class SearchEngine {\n private textSearch: TextSearch;\n private initialized: boolean;\n\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n constructor(_config?: SearchEngineConfig) {\n this.textSearch = new TextSearch();\n this.initialized = false;\n }\n\n async initialize(): Promise<void> {\n if (this.initialized) {\n return;\n }\n\n log.debug('Initializing search engine');\n await this.textSearch.initialize();\n this.initialized = true;\n }\n\n async addPage(path: string, content: StructuredPage): Promise<void> {\n if (!this.initialized) {\n throw new Error('Search engine not initialized. Call initialize() first.');\n }\n\n const document: StructuredPageDocumentData = {\n id: path,\n path,\n content,\n };\n\n await this.textSearch.addDocument(document);\n log.debug('Page added to search engine:', path);\n }\n\n async search(query: string, options: TextSearchOptions = {}): Promise<StructuredPageDocumentData[]> {\n if (!this.initialized) {\n throw new Error('Search engine not initialized. 
Call initialize() first.');\n }\n\n log.debug('Performing text search:', query);\n return this.textSearch.search(query, options);\n }\n\n count(): number {\n return this.textSearch.count();\n }\n\n getDocument(path: string) {\n return this.textSearch.getDocument(path);\n }\n\n getAllDocuments(limit?: number): StructuredPageDocumentData[] {\n return this.textSearch.getAllDocuments(limit);\n }\n\n clear(): void {\n this.textSearch.clear();\n }\n\n async export(handler: (key: string, data: string) => Promise<void>): Promise<{ keys: string[] }> {\n return this.textSearch.export(handler);\n }\n\n async import(handler: (key: string) => Promise<string>, keys: string[]): Promise<void> {\n await this.textSearch.import(handler, keys);\n this.initialized = true;\n log.debug('Search engine initialized from imported data');\n }\n}\n","import { loggers } from '@peam-ai/logger';\nimport { Charset, Document } from 'flexsearch';\nimport type { StructuredPageDocumentData } from './types';\n\nexport interface TextSearchOptions {\n limit?: number;\n offset?: number;\n suggest?: boolean;\n}\n\nconst PEAM_DOCUMENT_IDS_KEY = 'peam.documentIds';\nconst MAX_DOCUMENTS_RETRIEVE = 25;\nconst log = loggers.search;\n\nexport class TextSearch {\n private index: Document<StructuredPageDocumentData>;\n private initialized: boolean;\n private documentIds: Set<string>;\n\n constructor() {\n this.initialized = false;\n this.index = this.getIndex();\n this.documentIds = new Set();\n }\n\n private getIndex() {\n return new Document<StructuredPageDocumentData>({\n worker: false,\n document: {\n id: 'path',\n index: ['content:title', 'content:description', 'content:textContent', 'content:author', 'content:keywords'],\n store: true,\n },\n tokenize: 'forward',\n resolution: 9,\n context: {\n resolution: 3,\n depth: 2,\n bidirectional: true,\n },\n cache: 100,\n encoder: Charset.LatinExtra,\n });\n }\n\n async initialize(): Promise<void> {\n if (this.initialized) {\n log.debug('Text search already initialized');\n return;\n }\n\n this.initialized = true;\n }\n\n async addDocument(document: StructuredPageDocumentData): Promise<void> {\n if (!this.initialized) {\n throw new Error('TextSearch not initialized. Call initialize() first.');\n }\n\n log.debug('Adding document to text search:', document.path);\n\n this.index.add(document);\n this.documentIds.add(document.path);\n }\n\n async search(query: string, options: TextSearchOptions = {}): Promise<StructuredPageDocumentData[]> {\n if (!this.initialized) {\n throw new Error('TextSearch not initialized. Call initialize() first.');\n }\n\n const limit = options.limit || MAX_DOCUMENTS_RETRIEVE;\n const offset = options.offset || 0;\n\n log.debug('Searching for:', query);\n\n const results = await this.index.search(query, {\n limit: limit + offset,\n suggest: options.suggest,\n enrich: true,\n });\n\n const pathSet = new Set<string>();\n const documents: StructuredPageDocumentData[] = [];\n\n for (const fieldResults of results) {\n if (Array.isArray(fieldResults.result)) {\n for (const result of fieldResults.result) {\n const id = typeof result === 'object' && 'id' in result ? result.id : result;\n const doc = typeof result === 'object' && 'doc' in result ? 
result.doc : null;\n\n if (!pathSet.has(id as string) && doc) {\n pathSet.add(id as string);\n documents.push(doc);\n }\n }\n }\n }\n\n const pagedResults = documents.slice(offset, offset + limit);\n\n return pagedResults;\n }\n\n count(): number {\n return this.documentIds.size;\n }\n\n getDocument(path: string) {\n return this.index.get(path);\n }\n\n getAllDocuments(limit?: number): StructuredPageDocumentData[] {\n const documents: StructuredPageDocumentData[] = [];\n let count = 0;\n limit = limit || MAX_DOCUMENTS_RETRIEVE;\n\n for (const id of this.documentIds) {\n if (count >= limit) {\n break;\n }\n\n const doc = this.index.get(id);\n if (doc) {\n documents.push(doc);\n count++;\n }\n }\n\n log.debug('Retrieved documents from store (limit):', documents.length, limit);\n return documents;\n }\n\n clear(): void {\n this.index.clear();\n this.index = this.getIndex();\n this.documentIds.clear();\n }\n\n async export(handler: (key: string, data: string) => Promise<void>): Promise<{ keys: string[] }> {\n const keys: string[] = [];\n\n await handler(PEAM_DOCUMENT_IDS_KEY, JSON.stringify(Array.from(this.documentIds)));\n keys.push(PEAM_DOCUMENT_IDS_KEY);\n\n await this.index.export(async (key: string, data: string) => {\n keys.push(key);\n await handler(key, data);\n });\n\n log.debug('Exported keys:', keys.length);\n\n return { keys };\n }\n\n async import(handler: (key: string) => Promise<string>, keys: string[]): Promise<void> {\n const documentIdsData = await handler(PEAM_DOCUMENT_IDS_KEY);\n if (documentIdsData) {\n const parsed = typeof documentIdsData === 'string' ? JSON.parse(documentIdsData) : documentIdsData;\n this.documentIds = new Set(parsed);\n }\n\n for (const key of keys) {\n if (key === PEAM_DOCUMENT_IDS_KEY) {\n continue;\n }\n\n try {\n const data = await handler(key);\n if (data) {\n this.index.import(key, data);\n }\n } catch (error) {\n log.error('Error importing key:', key, error);\n }\n }\n\n this.initialized = true;\n log.debug('Import completed with keys:', keys.length);\n }\n}\n","import type { StructuredPage } from '@peam-ai/parser';\nimport { SearchEngine } from './searchEngine';\n\nexport interface PageToIndex {\n path: string;\n structuredPage: StructuredPage;\n}\n\nexport interface SearchIndexData {\n keys: string[];\n data: Record<string, string>;\n}\n\n/**\n * Build a search index from structured pages\n */\nexport async function buildSearchIndex(pages: PageToIndex[]): Promise<SearchIndexData> {\n const searchEngine = new SearchEngine();\n await searchEngine.initialize();\n\n for (const page of pages) {\n await searchEngine.addPage(page.path, page.structuredPage);\n }\n\n const exportedData: Record<string, string> = {};\n const result = await searchEngine.export(async (key, data) => {\n exportedData[key] = data;\n });\n\n return {\n keys: result.keys,\n data: exportedData,\n };\n}\n","import { loggers } from '@peam-ai/logger';\nimport * as fs from 'fs/promises';\nimport * as path from 'path';\nimport type { SearchIndexData } from '../indexBuilder';\nimport type { SearchIndexExporter } from './SearchIndexExporter';\n\nconst log = loggers.search;\n\nexport interface FileBasedSearchIndexExporterOptions {\n /**\n * The directory where the index file is located\n */\n baseDir: string;\n\n /**\n * The path to the index file relative to baseDir\n */\n indexPath: string;\n}\n\n/**\n * File-based implementation of SearchIndexExporter\n * Reads and writes search index data to/from a JSON file\n */\nexport class FileBasedSearchIndexExporter implements 
SearchIndexExporter {\n private baseDir: string;\n private indexPath: string;\n private cachedData: SearchIndexData | null = null;\n\n constructor(options: FileBasedSearchIndexExporterOptions) {\n this.baseDir = options.baseDir;\n this.indexPath = options.indexPath;\n }\n\n private getFullPath(): string {\n return path.join(this.baseDir, this.indexPath);\n }\n\n private async loadData(): Promise<SearchIndexData | null> {\n if (this.cachedData) {\n return this.cachedData;\n }\n\n const fullPath = this.getFullPath();\n\n try {\n const fileContent = await fs.readFile(fullPath, 'utf-8');\n const data = JSON.parse(fileContent) as SearchIndexData;\n\n if (!data || !data.keys || !Array.isArray(data.keys) || !data.data) {\n log.warn('Invalid search index structure in file:', fullPath);\n return null;\n }\n\n if (data.keys.length === 0) {\n log.debug('Search index is empty:', fullPath);\n return null;\n }\n\n this.cachedData = data;\n log.debug('Search index loaded from file:', fullPath, 'with', data.keys.length, 'keys');\n return data;\n } catch (error) {\n log.error('Failed to load search index from file:', fullPath, error);\n return null;\n }\n }\n\n async import(): Promise<SearchIndexData | null> {\n const data = await this.loadData();\n return data;\n }\n\n async export(data: SearchIndexData): Promise<void> {\n const fullPath = this.getFullPath();\n\n try {\n const dir = path.dirname(fullPath);\n await fs.mkdir(dir, { recursive: true });\n\n await fs.writeFile(fullPath, JSON.stringify(data, null, 2), 'utf-8');\n\n log.debug('Search index saved to file:', fullPath, 'with', data.keys.length, 'keys');\n } catch (error) {\n log.error('Failed to save search index to file:', fullPath, error);\n throw error;\n }\n }\n}\n","import { FileBasedSearchIndexExporter, FileBasedSearchIndexExporterOptions } from './FileBasedSearchIndexExporter';\nimport { SearchIndexExporter } from './SearchIndexExporter';\n\nexport type SearchExporterConfig = {\n type: 'fileBased';\n config: FileBasedSearchIndexExporterOptions;\n};\n\n/**\n * Creates a SearchIndexExporter instance from a SearchExporterConfig\n */\nexport function createExporterFromConfig(exporterConfig: SearchExporterConfig): SearchIndexExporter {\n if (exporterConfig.type === 'fileBased') {\n return new FileBasedSearchIndexExporter({\n ...exporterConfig.config,\n });\n }\n\n throw new Error(`Unknown exporter type: 
${exporterConfig.type}`);\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,iBAAwB;;;ACAxB,oBAAwB;AACxB,wBAAkC;AASlC,IAAM,wBAAwB;AAC9B,IAAM,yBAAyB;AAC/B,IAAM,MAAM,sBAAQ;AAEb,IAAM,aAAN,MAAiB;AAAA,EAKtB,cAAc;AACZ,SAAK,cAAc;AACnB,SAAK,QAAQ,KAAK,SAAS;AAC3B,SAAK,cAAc,oBAAI,IAAI;AAAA,EAC7B;AAAA,EAEQ,WAAW;AACjB,WAAO,IAAI,2BAAqC;AAAA,MAC9C,QAAQ;AAAA,MACR,UAAU;AAAA,QACR,IAAI;AAAA,QACJ,OAAO,CAAC,iBAAiB,uBAAuB,uBAAuB,kBAAkB,kBAAkB;AAAA,QAC3G,OAAO;AAAA,MACT;AAAA,MACA,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,SAAS;AAAA,QACP,YAAY;AAAA,QACZ,OAAO;AAAA,QACP,eAAe;AAAA,MACjB;AAAA,MACA,OAAO;AAAA,MACP,SAAS,0BAAQ;AAAA,IACnB,CAAC;AAAA,EACH;AAAA,EAEM,aAA4B;AAAA;AAChC,UAAI,KAAK,aAAa;AACpB,YAAI,MAAM,iCAAiC;AAC3C;AAAA,MACF;AAEA,WAAK,cAAc;AAAA,IACrB;AAAA;AAAA,EAEM,YAAY,UAAqD;AAAA;AACrE,UAAI,CAAC,KAAK,aAAa;AACrB,cAAM,IAAI,MAAM,sDAAsD;AAAA,MACxE;AAEA,UAAI,MAAM,mCAAmC,SAAS,IAAI;AAE1D,WAAK,MAAM,IAAI,QAAQ;AACvB,WAAK,YAAY,IAAI,SAAS,IAAI;AAAA,IACpC;AAAA;AAAA,EAEM,OAAO,IAAuF;AAAA,+CAAvF,OAAe,UAA6B,CAAC,GAA0C;AAClG,UAAI,CAAC,KAAK,aAAa;AACrB,cAAM,IAAI,MAAM,sDAAsD;AAAA,MACxE;AAEA,YAAM,QAAQ,QAAQ,SAAS;AAC/B,YAAM,SAAS,QAAQ,UAAU;AAEjC,UAAI,MAAM,kBAAkB,KAAK;AAEjC,YAAM,UAAU,MAAM,KAAK,MAAM,OAAO,OAAO;AAAA,QAC7C,OAAO,QAAQ;AAAA,QACf,SAAS,QAAQ;AAAA,QACjB,QAAQ;AAAA,MACV,CAAC;AAED,YAAM,UAAU,oBAAI,IAAY;AAChC,YAAM,YAA0C,CAAC;AAEjD,iBAAW,gBAAgB,SAAS;AAClC,YAAI,MAAM,QAAQ,aAAa,MAAM,GAAG;AACtC,qBAAW,UAAU,aAAa,QAAQ;AACxC,kBAAM,KAAK,OAAO,WAAW,YAAY,QAAQ,SAAS,OAAO,KAAK;AACtE,kBAAM,MAAM,OAAO,WAAW,YAAY,SAAS,SAAS,OAAO,MAAM;AAEzE,gBAAI,CAAC,QAAQ,IAAI,EAAY,KAAK,KAAK;AACrC,sBAAQ,IAAI,EAAY;AACxB,wBAAU,KAAK,GAAG;AAAA,YACpB;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAEA,YAAM,eAAe,UAAU,MAAM,QAAQ,SAAS,KAAK;AAE3D,aAAO;AAAA,IACT;AAAA;AAAA,EAEA,QAAgB;AACd,WAAO,KAAK,YAAY;AAAA,EAC1B;AAAA,EAEA,YAAYC,OAAc;AACxB,WAAO,KAAK,MAAM,IAAIA,KAAI;AAAA,EAC5B;AAAA,EAEA,gBAAgB,OAA8C;AAC5D,UAAM,YAA0C,CAAC;AACjD,QAAI,QAAQ;AACZ,YAAQ,SAAS;AAEjB,eAAW,MAAM,KAAK,aAAa;AACjC,UAAI,SAAS,OAAO;AAClB;AAAA,MACF;AAEA,YAAM,MAAM,KAAK,MAAM,IAAI,EAAE;AAC7B,UAAI,KAAK;AACP,kBAAU,KAAK,GAAG;AAClB;AAAA,MACF;AAAA,IACF;AAEA,QAAI,MAAM,2CAA2C,UAAU,QAAQ,KAAK;AAC5E,WAAO;AAAA,EACT;AAAA,EAEA,QAAc;AACZ,SAAK,MAAM,MAAM;AACjB,SAAK,QAAQ,KAAK,SAAS;AAC3B,SAAK,YAAY,MAAM;AAAA,EACzB;AAAA,EAEM,OAAO,SAAoF;AAAA;AAC/F,YAAM,OAAiB,CAAC;AAExB,YAAM,QAAQ,uBAAuB,KAAK,UAAU,MAAM,KAAK,KAAK,WAAW,CAAC,CAAC;AACjF,WAAK,KAAK,qBAAqB;AAE/B,YAAM,KAAK,MAAM,OAAO,CAAO,KAAa,SAAiB;AAC3D,aAAK,KAAK,GAAG;AACb,cAAM,QAAQ,KAAK,IAAI;AAAA,MACzB,EAAC;AAED,UAAI,MAAM,kBAAkB,KAAK,MAAM;AAEvC,aAAO,EAAE,KAAK;AAAA,IAChB;AAAA;AAAA,EAEM,OAAO,SAA2C,MAA+B;AAAA;AACrF,YAAM,kBAAkB,MAAM,QAAQ,qBAAqB;AAC3D,UAAI,iBAAiB;AACnB,cAAM,SAAS,OAAO,oBAAoB,WAAW,KAAK,MAAM,eAAe,IAAI;AACnF,aAAK,cAAc,IAAI,IAAI,MAAM;AAAA,MACnC;AAEA,iBAAW,OAAO,MAAM;AACtB,YAAI,QAAQ,uBAAuB;AACjC;AAAA,QACF;AAEA,YAAI;AACF,gBAAM,OAAO,MAAM,QAAQ,GAAG;AAC9B,cAAI,MAAM;AACR,iBAAK,MAAM,OAAO,KAAK,IAAI;AAAA,UAC7B;AAAA,QACF,SAAS,OAAO;AACd,cAAI,MAAM,wBAAwB,KAAK,KAAK;AAAA,QAC9C;AAAA,MACF;AAEA,WAAK,cAAc;AACnB,UAAI,MAAM,+BAA+B,KAAK,MAAM;AAAA,IACtD;AAAA;AACF;;;AD9KA,IAAMC,OAAM,uBAAQ;AAOb,IAAM,eAAN,MAAmB;AAAA;AAAA,EAKxB,YAAY,SAA8B;AACxC,SAAK,aAAa,IAAI,WAAW;AACjC,SAAK,cAAc;AAAA,EACrB;AAAA,EAEM,aAA4B;AAAA;AAChC,UAAI,KAAK,aAAa;AACpB;AAAA,MACF;AAEA,MAAAA,KAAI,MAAM,4BAA4B;AACtC,YAAM,KAAK,WAAW,WAAW;AACjC,WAAK,cAAc;AAAA,IACrB;AAAA;AAAA,EAEM,QAAQC,OAAc,SAAwC;AAAA;AAClE,UAAI,CAAC,KAAK,aAAa;AACrB,cAAM,IAAI,MAAM,yDAAyD;AAAA,MAC3E;AAEA,YAAM,WAAuC;AAAA,QAC3C,IAAIA;AAAA,QACJ,MAAAA;AAAA,QACA;AAAA,MACF;AAEA,YAAM,KAAK,WAAW,YAAY,QAAQ;AAC1C,MAAAD,KAAI,MAAM,gCAAgCC,KAAI;AAAA,IAChD;
AAAA;AAAA,EAEM,OAAO,IAAuF;AAAA,+CAAvF,OAAe,UAA6B,CAAC,GAA0C;AAClG,UAAI,CAAC,KAAK,aAAa;AACrB,cAAM,IAAI,MAAM,yDAAyD;AAAA,MAC3E;AAEA,MAAAD,KAAI,MAAM,2BAA2B,KAAK;AAC1C,aAAO,KAAK,WAAW,OAAO,OAAO,OAAO;AAAA,IAC9C;AAAA;AAAA,EAEA,QAAgB;AACd,WAAO,KAAK,WAAW,MAAM;AAAA,EAC/B;AAAA,EAEA,YAAYC,OAAc;AACxB,WAAO,KAAK,WAAW,YAAYA,KAAI;AAAA,EACzC;AAAA,EAEA,gBAAgB,OAA8C;AAC5D,WAAO,KAAK,WAAW,gBAAgB,KAAK;AAAA,EAC9C;AAAA,EAEA,QAAc;AACZ,SAAK,WAAW,MAAM;AAAA,EACxB;AAAA,EAEM,OAAO,SAAoF;AAAA;AAC/F,aAAO,KAAK,WAAW,OAAO,OAAO;AAAA,IACvC;AAAA;AAAA,EAEM,OAAO,SAA2C,MAA+B;AAAA;AACrF,YAAM,KAAK,WAAW,OAAO,SAAS,IAAI;AAC1C,WAAK,cAAc;AACnB,MAAAD,KAAI,MAAM,8CAA8C;AAAA,IAC1D;AAAA;AACF;;;AEjEA,SAAsB,iBAAiB,OAAgD;AAAA;AACrF,UAAM,eAAe,IAAI,aAAa;AACtC,UAAM,aAAa,WAAW;AAE9B,eAAW,QAAQ,OAAO;AACxB,YAAM,aAAa,QAAQ,KAAK,MAAM,KAAK,cAAc;AAAA,IAC3D;AAEA,UAAM,eAAuC,CAAC;AAC9C,UAAM,SAAS,MAAM,aAAa,OAAO,CAAO,KAAK,SAAS;AAC5D,mBAAa,GAAG,IAAI;AAAA,IACtB,EAAC;AAED,WAAO;AAAA,MACL,MAAM,OAAO;AAAA,MACb,MAAM;AAAA,IACR;AAAA,EACF;AAAA;;;ACjCA,IAAAE,iBAAwB;AACxB,SAAoB;AACpB,WAAsB;AAItB,IAAMC,OAAM,uBAAQ;AAkBb,IAAM,+BAAN,MAAkE;AAAA,EAKvE,YAAY,SAA8C;AAF1D,SAAQ,aAAqC;AAG3C,SAAK,UAAU,QAAQ;AACvB,SAAK,YAAY,QAAQ;AAAA,EAC3B;AAAA,EAEQ,cAAsB;AAC5B,WAAY,UAAK,KAAK,SAAS,KAAK,SAAS;AAAA,EAC/C;AAAA,EAEc,WAA4C;AAAA;AACxD,UAAI,KAAK,YAAY;AACnB,eAAO,KAAK;AAAA,MACd;AAEA,YAAM,WAAW,KAAK,YAAY;AAElC,UAAI;AACF,cAAM,cAAc,MAAS,YAAS,UAAU,OAAO;AACvD,cAAM,OAAO,KAAK,MAAM,WAAW;AAEnC,YAAI,CAAC,QAAQ,CAAC,KAAK,QAAQ,CAAC,MAAM,QAAQ,KAAK,IAAI,KAAK,CAAC,KAAK,MAAM;AAClE,UAAAA,KAAI,KAAK,2CAA2C,QAAQ;AAC5D,iBAAO;AAAA,QACT;AAEA,YAAI,KAAK,KAAK,WAAW,GAAG;AAC1B,UAAAA,KAAI,MAAM,0BAA0B,QAAQ;AAC5C,iBAAO;AAAA,QACT;AAEA,aAAK,aAAa;AAClB,QAAAA,KAAI,MAAM,kCAAkC,UAAU,QAAQ,KAAK,KAAK,QAAQ,MAAM;AACtF,eAAO;AAAA,MACT,SAAS,OAAO;AACd,QAAAA,KAAI,MAAM,0CAA0C,UAAU,KAAK;AACnE,eAAO;AAAA,MACT;AAAA,IACF;AAAA;AAAA,EAEM,SAA0C;AAAA;AAC9C,YAAM,OAAO,MAAM,KAAK,SAAS;AACjC,aAAO;AAAA,IACT;AAAA;AAAA,EAEM,OAAO,MAAsC;AAAA;AACjD,YAAM,WAAW,KAAK,YAAY;AAElC,UAAI;AACF,cAAM,MAAW,aAAQ,QAAQ;AACjC,cAAS,SAAM,KAAK,EAAE,WAAW,KAAK,CAAC;AAEvC,cAAS,aAAU,UAAU,KAAK,UAAU,MAAM,MAAM,CAAC,GAAG,OAAO;AAEnE,QAAAA,KAAI,MAAM,+BAA+B,UAAU,QAAQ,KAAK,KAAK,QAAQ,MAAM;AAAA,MACrF,SAAS,OAAO;AACd,QAAAA,KAAI,MAAM,wCAAwC,UAAU,KAAK;AACjE,cAAM;AAAA,MACR;AAAA,IACF;AAAA;AACF;;;AC7EO,SAAS,yBAAyB,gBAA2D;AAClG,MAAI,eAAe,SAAS,aAAa;AACvC,WAAO,IAAI,6BAA6B,mBACnC,eAAe,OACnB;AAAA,EACH;AAEA,QAAM,IAAI,MAAM,0BAA0B,eAAe,IAAI,EAAE;AACjE;","names":["import_logger","path","log","path","import_logger","log"]}
package/dist/index.mjs CHANGED
@@ -1,3 +1,19 @@
+ var __defProp = Object.defineProperty;
+ var __getOwnPropSymbols = Object.getOwnPropertySymbols;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __propIsEnum = Object.prototype.propertyIsEnumerable;
+ var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
+ var __spreadValues = (a, b) => {
+ for (var prop in b || (b = {}))
+ if (__hasOwnProp.call(b, prop))
+ __defNormalProp(a, prop, b[prop]);
+ if (__getOwnPropSymbols)
+ for (var prop of __getOwnPropSymbols(b)) {
+ if (__propIsEnum.call(b, prop))
+ __defNormalProp(a, prop, b[prop]);
+ }
+ return a;
+ };
  var __async = (__this, __arguments, generator) => {
  return new Promise((resolve, reject) => {
  var fulfilled = (value) => {
@@ -106,8 +122,8 @@ var TextSearch = class {
  count() {
  return this.documentIds.size;
  }
- getDocument(path) {
- return this.index.get(path);
+ getDocument(path2) {
+ return this.index.get(path2);
  }
  getAllDocuments(limit) {
  const documents = [];
@@ -188,18 +204,18 @@ var SearchEngine = class {
  this.initialized = true;
  });
  }
- addPage(path, content) {
+ addPage(path2, content) {
  return __async(this, null, function* () {
  if (!this.initialized) {
  throw new Error("Search engine not initialized. Call initialize() first.");
  }
  const document = {
- id: path,
- path,
+ id: path2,
+ path: path2,
  content
  };
  yield this.textSearch.addDocument(document);
- log2.debug("Page added to search engine:", path);
+ log2.debug("Page added to search engine:", path2);
  });
  }
  search(_0) {
@@ -214,8 +230,8 @@ var SearchEngine = class {
  count() {
  return this.textSearch.count();
  }
- getDocument(path) {
- return this.textSearch.getDocument(path);
+ getDocument(path2) {
+ return this.textSearch.getDocument(path2);
  }
  getAllDocuments(limit) {
  return this.textSearch.getAllDocuments(limit);
@@ -255,9 +271,81 @@ function buildSearchIndex(pages) {
  };
  });
  }
+
+ // src/exporters/FileBasedSearchIndexExporter.ts
+ import { loggers as loggers3 } from "@peam-ai/logger";
+ import * as fs from "fs/promises";
+ import * as path from "path";
+ var log3 = loggers3.search;
+ var FileBasedSearchIndexExporter = class {
+ constructor(options) {
+ this.cachedData = null;
+ this.baseDir = options.baseDir;
+ this.indexPath = options.indexPath;
+ }
+ getFullPath() {
+ return path.join(this.baseDir, this.indexPath);
+ }
+ loadData() {
+ return __async(this, null, function* () {
+ if (this.cachedData) {
+ return this.cachedData;
+ }
+ const fullPath = this.getFullPath();
+ try {
+ const fileContent = yield fs.readFile(fullPath, "utf-8");
+ const data = JSON.parse(fileContent);
+ if (!data || !data.keys || !Array.isArray(data.keys) || !data.data) {
+ log3.warn("Invalid search index structure in file:", fullPath);
+ return null;
+ }
+ if (data.keys.length === 0) {
+ log3.debug("Search index is empty:", fullPath);
+ return null;
+ }
+ this.cachedData = data;
+ log3.debug("Search index loaded from file:", fullPath, "with", data.keys.length, "keys");
+ return data;
+ } catch (error) {
+ log3.error("Failed to load search index from file:", fullPath, error);
+ return null;
+ }
+ });
+ }
+ import() {
+ return __async(this, null, function* () {
+ const data = yield this.loadData();
+ return data;
+ });
+ }
+ export(data) {
+ return __async(this, null, function* () {
+ const fullPath = this.getFullPath();
+ try {
+ const dir = path.dirname(fullPath);
+ yield fs.mkdir(dir, { recursive: true });
+ yield fs.writeFile(fullPath, JSON.stringify(data, null, 2), "utf-8");
+ log3.debug("Search index saved to file:", fullPath, "with", data.keys.length, "keys");
+ } catch (error) {
+ log3.error("Failed to save search index to file:", fullPath, error);
+ throw error;
+ }
+ });
+ }
+ };
+
+ // src/exporters/config.ts
+ function createExporterFromConfig(exporterConfig) {
+ if (exporterConfig.type === "fileBased") {
+ return new FileBasedSearchIndexExporter(__spreadValues({}, exporterConfig.config));
+ }
+ throw new Error(`Unknown exporter type: ${exporterConfig.type}`);
+ }
  export {
+ FileBasedSearchIndexExporter,
  SearchEngine,
  TextSearch,
- buildSearchIndex
+ buildSearchIndex,
+ createExporterFromConfig
  };
  //# sourceMappingURL=index.mjs.map
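
The ESM bundle mirrors the CommonJS one. The interop helpers added at the top of both bundles (__create/__toESM in the CJS build, __spreadValues in both) appear to be bundler-generated shims for the exporter's namespace imports of fs/promises and path and for the { ...config } spread in createExporterFromConfig; because of that spread plus the constructor copying its fields, the exporter keeps its own copy of the options. Illustrative sketch:

import { createExporterFromConfig } from '@peam-ai/search';

const options = { baseDir: 'dist', indexPath: 'search.json' };
const exporter = createExporterFromConfig({ type: 'fileBased', config: options });

// Mutating the original options object afterwards does not affect the exporter,
// which captured baseDir and indexPath at construction time.
options.indexPath = 'renamed.json';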
package/dist/index.mjs.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/searchEngine.ts","../src/textSearch.ts","../src/indexBuilder.ts"],"sourcesContent":["import { loggers } from '@peam-ai/logger';\nimport type { StructuredPage } from '@peam-ai/parser';\nimport { TextSearch, type TextSearchOptions } from './textSearch';\nimport type { StructuredPageDocumentData } from './types';\n\nconst log = loggers.search;\n\n// eslint-disable-next-line @typescript-eslint/no-empty-object-type\nexport interface SearchEngineConfig {\n // Reserved for future configuration options\n}\n\nexport class SearchEngine {\n private textSearch: TextSearch;\n private initialized: boolean;\n\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n constructor(_config?: SearchEngineConfig) {\n this.textSearch = new TextSearch();\n this.initialized = false;\n }\n\n async initialize(): Promise<void> {\n if (this.initialized) {\n return;\n }\n\n log.debug('Initializing search engine');\n await this.textSearch.initialize();\n this.initialized = true;\n }\n\n async addPage(path: string, content: StructuredPage): Promise<void> {\n if (!this.initialized) {\n throw new Error('Search engine not initialized. Call initialize() first.');\n }\n\n const document: StructuredPageDocumentData = {\n id: path,\n path,\n content,\n };\n\n await this.textSearch.addDocument(document);\n log.debug('Page added to search engine:', path);\n }\n\n async search(query: string, options: TextSearchOptions = {}): Promise<StructuredPageDocumentData[]> {\n if (!this.initialized) {\n throw new Error('Search engine not initialized. Call initialize() first.');\n }\n\n log.debug('Performing text search:', query);\n return this.textSearch.search(query, options);\n }\n\n count(): number {\n return this.textSearch.count();\n }\n\n getDocument(path: string) {\n return this.textSearch.getDocument(path);\n }\n\n getAllDocuments(limit?: number): StructuredPageDocumentData[] {\n return this.textSearch.getAllDocuments(limit);\n }\n\n clear(): void {\n this.textSearch.clear();\n }\n\n async export(handler: (key: string, data: string) => Promise<void>): Promise<{ keys: string[] }> {\n return this.textSearch.export(handler);\n }\n\n async import(handler: (key: string) => Promise<string>, keys: string[]): Promise<void> {\n await this.textSearch.import(handler, keys);\n this.initialized = true;\n log.debug('Search engine initialized from imported data');\n }\n}\n","import { loggers } from '@peam-ai/logger';\nimport { Charset, Document } from 'flexsearch';\nimport type { StructuredPageDocumentData } from './types';\n\nexport interface TextSearchOptions {\n limit?: number;\n offset?: number;\n suggest?: boolean;\n}\n\nconst PEAM_DOCUMENT_IDS_KEY = 'peam.documentIds';\nconst MAX_DOCUMENTS_RETRIEVE = 25;\nconst log = loggers.search;\n\nexport class TextSearch {\n private index: Document<StructuredPageDocumentData>;\n private initialized: boolean;\n private documentIds: Set<string>;\n\n constructor() {\n this.initialized = false;\n this.index = this.getIndex();\n this.documentIds = new Set();\n }\n\n private getIndex() {\n return new Document<StructuredPageDocumentData>({\n worker: false,\n document: {\n id: 'path',\n index: ['content:title', 'content:description', 'content:textContent', 'content:author', 'content:keywords'],\n store: true,\n },\n tokenize: 'forward',\n resolution: 9,\n context: {\n resolution: 3,\n depth: 2,\n bidirectional: true,\n },\n cache: 100,\n encoder: Charset.LatinExtra,\n });\n }\n\n async initialize(): Promise<void> {\n if (this.initialized) {\n log.debug('Text search 
already initialized');\n return;\n }\n\n this.initialized = true;\n }\n\n async addDocument(document: StructuredPageDocumentData): Promise<void> {\n if (!this.initialized) {\n throw new Error('TextSearch not initialized. Call initialize() first.');\n }\n\n log.debug('Adding document to text search:', document.path);\n\n this.index.add(document);\n this.documentIds.add(document.path);\n }\n\n async search(query: string, options: TextSearchOptions = {}): Promise<StructuredPageDocumentData[]> {\n if (!this.initialized) {\n throw new Error('TextSearch not initialized. Call initialize() first.');\n }\n\n const limit = options.limit || MAX_DOCUMENTS_RETRIEVE;\n const offset = options.offset || 0;\n\n log.debug('Searching for:', query);\n\n const results = await this.index.search(query, {\n limit: limit + offset,\n suggest: options.suggest,\n enrich: true,\n });\n\n const pathSet = new Set<string>();\n const documents: StructuredPageDocumentData[] = [];\n\n for (const fieldResults of results) {\n if (Array.isArray(fieldResults.result)) {\n for (const result of fieldResults.result) {\n const id = typeof result === 'object' && 'id' in result ? result.id : result;\n const doc = typeof result === 'object' && 'doc' in result ? result.doc : null;\n\n if (!pathSet.has(id as string) && doc) {\n pathSet.add(id as string);\n documents.push(doc);\n }\n }\n }\n }\n\n const pagedResults = documents.slice(offset, offset + limit);\n\n return pagedResults;\n }\n\n count(): number {\n return this.documentIds.size;\n }\n\n getDocument(path: string) {\n return this.index.get(path);\n }\n\n getAllDocuments(limit?: number): StructuredPageDocumentData[] {\n const documents: StructuredPageDocumentData[] = [];\n let count = 0;\n limit = limit || MAX_DOCUMENTS_RETRIEVE;\n\n for (const id of this.documentIds) {\n if (count >= limit) {\n break;\n }\n\n const doc = this.index.get(id);\n if (doc) {\n documents.push(doc);\n count++;\n }\n }\n\n log.debug('Retrieved documents from store (limit):', documents.length, limit);\n return documents;\n }\n\n clear(): void {\n this.index.clear();\n this.index = this.getIndex();\n this.documentIds.clear();\n }\n\n async export(handler: (key: string, data: string) => Promise<void>): Promise<{ keys: string[] }> {\n const keys: string[] = [];\n\n await handler(PEAM_DOCUMENT_IDS_KEY, JSON.stringify(Array.from(this.documentIds)));\n keys.push(PEAM_DOCUMENT_IDS_KEY);\n\n await this.index.export(async (key: string, data: string) => {\n keys.push(key);\n await handler(key, data);\n });\n\n log.debug('Exported keys:', keys.length);\n\n return { keys };\n }\n\n async import(handler: (key: string) => Promise<string>, keys: string[]): Promise<void> {\n const documentIdsData = await handler(PEAM_DOCUMENT_IDS_KEY);\n if (documentIdsData) {\n const parsed = typeof documentIdsData === 'string' ? 
JSON.parse(documentIdsData) : documentIdsData;\n this.documentIds = new Set(parsed);\n }\n\n for (const key of keys) {\n if (key === PEAM_DOCUMENT_IDS_KEY) {\n continue;\n }\n\n try {\n const data = await handler(key);\n if (data) {\n this.index.import(key, data);\n }\n } catch (error) {\n log.error('Error importing key:', key, error);\n }\n }\n\n this.initialized = true;\n log.debug('Import completed with keys:', keys.length);\n }\n}\n","import type { StructuredPage } from '@peam-ai/parser';\nimport { SearchEngine } from './searchEngine';\n\nexport interface PageToIndex {\n path: string;\n structuredPage: StructuredPage;\n}\n\nexport interface SearchIndexData {\n keys: string[];\n data: Record<string, string>;\n}\n\n/**\n * Build a search index from structured pages\n */\nexport async function buildSearchIndex(pages: PageToIndex[]): Promise<SearchIndexData> {\n const searchEngine = new SearchEngine();\n await searchEngine.initialize();\n\n for (const page of pages) {\n await searchEngine.addPage(page.path, page.structuredPage);\n }\n\n const exportedData: Record<string, string> = {};\n const result = await searchEngine.export(async (key, data) => {\n exportedData[key] = data;\n });\n\n return {\n keys: result.keys,\n data: exportedData,\n };\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,SAAS,WAAAA,gBAAe;;;ACAxB,SAAS,eAAe;AACxB,SAAS,SAAS,gBAAgB;AASlC,IAAM,wBAAwB;AAC9B,IAAM,yBAAyB;AAC/B,IAAM,MAAM,QAAQ;AAEb,IAAM,aAAN,MAAiB;AAAA,EAKtB,cAAc;AACZ,SAAK,cAAc;AACnB,SAAK,QAAQ,KAAK,SAAS;AAC3B,SAAK,cAAc,oBAAI,IAAI;AAAA,EAC7B;AAAA,EAEQ,WAAW;AACjB,WAAO,IAAI,SAAqC;AAAA,MAC9C,QAAQ;AAAA,MACR,UAAU;AAAA,QACR,IAAI;AAAA,QACJ,OAAO,CAAC,iBAAiB,uBAAuB,uBAAuB,kBAAkB,kBAAkB;AAAA,QAC3G,OAAO;AAAA,MACT;AAAA,MACA,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,SAAS;AAAA,QACP,YAAY;AAAA,QACZ,OAAO;AAAA,QACP,eAAe;AAAA,MACjB;AAAA,MACA,OAAO;AAAA,MACP,SAAS,QAAQ;AAAA,IACnB,CAAC;AAAA,EACH;AAAA,EAEM,aAA4B;AAAA;AAChC,UAAI,KAAK,aAAa;AACpB,YAAI,MAAM,iCAAiC;AAC3C;AAAA,MACF;AAEA,WAAK,cAAc;AAAA,IACrB;AAAA;AAAA,EAEM,YAAY,UAAqD;AAAA;AACrE,UAAI,CAAC,KAAK,aAAa;AACrB,cAAM,IAAI,MAAM,sDAAsD;AAAA,MACxE;AAEA,UAAI,MAAM,mCAAmC,SAAS,IAAI;AAE1D,WAAK,MAAM,IAAI,QAAQ;AACvB,WAAK,YAAY,IAAI,SAAS,IAAI;AAAA,IACpC;AAAA;AAAA,EAEM,OAAO,IAAuF;AAAA,+CAAvF,OAAe,UAA6B,CAAC,GAA0C;AAClG,UAAI,CAAC,KAAK,aAAa;AACrB,cAAM,IAAI,MAAM,sDAAsD;AAAA,MACxE;AAEA,YAAM,QAAQ,QAAQ,SAAS;AAC/B,YAAM,SAAS,QAAQ,UAAU;AAEjC,UAAI,MAAM,kBAAkB,KAAK;AAEjC,YAAM,UAAU,MAAM,KAAK,MAAM,OAAO,OAAO;AAAA,QAC7C,OAAO,QAAQ;AAAA,QACf,SAAS,QAAQ;AAAA,QACjB,QAAQ;AAAA,MACV,CAAC;AAED,YAAM,UAAU,oBAAI,IAAY;AAChC,YAAM,YAA0C,CAAC;AAEjD,iBAAW,gBAAgB,SAAS;AAClC,YAAI,MAAM,QAAQ,aAAa,MAAM,GAAG;AACtC,qBAAW,UAAU,aAAa,QAAQ;AACxC,kBAAM,KAAK,OAAO,WAAW,YAAY,QAAQ,SAAS,OAAO,KAAK;AACtE,kBAAM,MAAM,OAAO,WAAW,YAAY,SAAS,SAAS,OAAO,MAAM;AAEzE,gBAAI,CAAC,QAAQ,IAAI,EAAY,KAAK,KAAK;AACrC,sBAAQ,IAAI,EAAY;AACxB,wBAAU,KAAK,GAAG;AAAA,YACpB;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAEA,YAAM,eAAe,UAAU,MAAM,QAAQ,SAAS,KAAK;AAE3D,aAAO;AAAA,IACT;AAAA;AAAA,EAEA,QAAgB;AACd,WAAO,KAAK,YAAY;AAAA,EAC1B;AAAA,EAEA,YAAY,MAAc;AACxB,WAAO,KAAK,MAAM,IAAI,IAAI;AAAA,EAC5B;AAAA,EAEA,gBAAgB,OAA8C;AAC5D,UAAM,YAA0C,CAAC;AACjD,QAAI,QAAQ;AACZ,YAAQ,SAAS;AAEjB,eAAW,MAAM,KAAK,aAAa;AACjC,UAAI,SAAS,OAAO;AAClB;AAAA,MACF;AAEA,YAAM,MAAM,KAAK,MAAM,IAAI,EAAE;AAC7B,UAAI,KAAK;AACP,kBAAU,KAAK,GAAG;AAClB;AAAA,MACF;AAAA,IACF;AAEA,QAAI,MAAM,2CAA2C,UAAU,QAAQ,KAAK;AAC5E,WAAO;AAAA,EACT;AAAA,EAEA,QAAc;AACZ,SAAK,MAAM,MAAM;AACjB,SAAK,QAAQ,KAAK,SAAS;AAC3B,SAAK,YAAY,MAAM;AAAA,EACzB;AAAA,EAEM,OAAO,SAAoF;AAAA;AAC/F,YAAM,OAAiB,CAAC;AAExB,YAAM,QAAQ,uBAAuB,KAAK,UAAU,MAAM,KAAK,KAAK,WAAW,CAAC,CAAC;AACjF,WAAK,KAAK,qBAAqB;AAE/B,
YAAM,KAAK,MAAM,OAAO,CAAO,KAAa,SAAiB;AAC3D,aAAK,KAAK,GAAG;AACb,cAAM,QAAQ,KAAK,IAAI;AAAA,MACzB,EAAC;AAED,UAAI,MAAM,kBAAkB,KAAK,MAAM;AAEvC,aAAO,EAAE,KAAK;AAAA,IAChB;AAAA;AAAA,EAEM,OAAO,SAA2C,MAA+B;AAAA;AACrF,YAAM,kBAAkB,MAAM,QAAQ,qBAAqB;AAC3D,UAAI,iBAAiB;AACnB,cAAM,SAAS,OAAO,oBAAoB,WAAW,KAAK,MAAM,eAAe,IAAI;AACnF,aAAK,cAAc,IAAI,IAAI,MAAM;AAAA,MACnC;AAEA,iBAAW,OAAO,MAAM;AACtB,YAAI,QAAQ,uBAAuB;AACjC;AAAA,QACF;AAEA,YAAI;AACF,gBAAM,OAAO,MAAM,QAAQ,GAAG;AAC9B,cAAI,MAAM;AACR,iBAAK,MAAM,OAAO,KAAK,IAAI;AAAA,UAC7B;AAAA,QACF,SAAS,OAAO;AACd,cAAI,MAAM,wBAAwB,KAAK,KAAK;AAAA,QAC9C;AAAA,MACF;AAEA,WAAK,cAAc;AACnB,UAAI,MAAM,+BAA+B,KAAK,MAAM;AAAA,IACtD;AAAA;AACF;;;AD9KA,IAAMC,OAAMC,SAAQ;AAOb,IAAM,eAAN,MAAmB;AAAA;AAAA,EAKxB,YAAY,SAA8B;AACxC,SAAK,aAAa,IAAI,WAAW;AACjC,SAAK,cAAc;AAAA,EACrB;AAAA,EAEM,aAA4B;AAAA;AAChC,UAAI,KAAK,aAAa;AACpB;AAAA,MACF;AAEA,MAAAD,KAAI,MAAM,4BAA4B;AACtC,YAAM,KAAK,WAAW,WAAW;AACjC,WAAK,cAAc;AAAA,IACrB;AAAA;AAAA,EAEM,QAAQ,MAAc,SAAwC;AAAA;AAClE,UAAI,CAAC,KAAK,aAAa;AACrB,cAAM,IAAI,MAAM,yDAAyD;AAAA,MAC3E;AAEA,YAAM,WAAuC;AAAA,QAC3C,IAAI;AAAA,QACJ;AAAA,QACA;AAAA,MACF;AAEA,YAAM,KAAK,WAAW,YAAY,QAAQ;AAC1C,MAAAA,KAAI,MAAM,gCAAgC,IAAI;AAAA,IAChD;AAAA;AAAA,EAEM,OAAO,IAAuF;AAAA,+CAAvF,OAAe,UAA6B,CAAC,GAA0C;AAClG,UAAI,CAAC,KAAK,aAAa;AACrB,cAAM,IAAI,MAAM,yDAAyD;AAAA,MAC3E;AAEA,MAAAA,KAAI,MAAM,2BAA2B,KAAK;AAC1C,aAAO,KAAK,WAAW,OAAO,OAAO,OAAO;AAAA,IAC9C;AAAA;AAAA,EAEA,QAAgB;AACd,WAAO,KAAK,WAAW,MAAM;AAAA,EAC/B;AAAA,EAEA,YAAY,MAAc;AACxB,WAAO,KAAK,WAAW,YAAY,IAAI;AAAA,EACzC;AAAA,EAEA,gBAAgB,OAA8C;AAC5D,WAAO,KAAK,WAAW,gBAAgB,KAAK;AAAA,EAC9C;AAAA,EAEA,QAAc;AACZ,SAAK,WAAW,MAAM;AAAA,EACxB;AAAA,EAEM,OAAO,SAAoF;AAAA;AAC/F,aAAO,KAAK,WAAW,OAAO,OAAO;AAAA,IACvC;AAAA;AAAA,EAEM,OAAO,SAA2C,MAA+B;AAAA;AACrF,YAAM,KAAK,WAAW,OAAO,SAAS,IAAI;AAC1C,WAAK,cAAc;AACnB,MAAAA,KAAI,MAAM,8CAA8C;AAAA,IAC1D;AAAA;AACF;;;AEjEA,SAAsB,iBAAiB,OAAgD;AAAA;AACrF,UAAM,eAAe,IAAI,aAAa;AACtC,UAAM,aAAa,WAAW;AAE9B,eAAW,QAAQ,OAAO;AACxB,YAAM,aAAa,QAAQ,KAAK,MAAM,KAAK,cAAc;AAAA,IAC3D;AAEA,UAAM,eAAuC,CAAC;AAC9C,UAAM,SAAS,MAAM,aAAa,OAAO,CAAO,KAAK,SAAS;AAC5D,mBAAa,GAAG,IAAI;AAAA,IACtB,EAAC;AAED,WAAO;AAAA,MACL,MAAM,OAAO;AAAA,MACb,MAAM;AAAA,IACR;AAAA,EACF;AAAA;","names":["loggers","log","loggers"]}
1
+ {"version":3,"sources":["../src/searchEngine.ts","../src/textSearch.ts","../src/indexBuilder.ts","../src/exporters/FileBasedSearchIndexExporter.ts","../src/exporters/config.ts"],"sourcesContent":["import { loggers } from '@peam-ai/logger';\nimport type { StructuredPage } from '@peam-ai/parser';\nimport { TextSearch, type TextSearchOptions } from './textSearch';\nimport type { StructuredPageDocumentData } from './types';\n\nconst log = loggers.search;\n\n// eslint-disable-next-line @typescript-eslint/no-empty-object-type\nexport interface SearchEngineConfig {\n // Reserved for future configuration options\n}\n\nexport class SearchEngine {\n private textSearch: TextSearch;\n private initialized: boolean;\n\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n constructor(_config?: SearchEngineConfig) {\n this.textSearch = new TextSearch();\n this.initialized = false;\n }\n\n async initialize(): Promise<void> {\n if (this.initialized) {\n return;\n }\n\n log.debug('Initializing search engine');\n await this.textSearch.initialize();\n this.initialized = true;\n }\n\n async addPage(path: string, content: StructuredPage): Promise<void> {\n if (!this.initialized) {\n throw new Error('Search engine not initialized. Call initialize() first.');\n }\n\n const document: StructuredPageDocumentData = {\n id: path,\n path,\n content,\n };\n\n await this.textSearch.addDocument(document);\n log.debug('Page added to search engine:', path);\n }\n\n async search(query: string, options: TextSearchOptions = {}): Promise<StructuredPageDocumentData[]> {\n if (!this.initialized) {\n throw new Error('Search engine not initialized. Call initialize() first.');\n }\n\n log.debug('Performing text search:', query);\n return this.textSearch.search(query, options);\n }\n\n count(): number {\n return this.textSearch.count();\n }\n\n getDocument(path: string) {\n return this.textSearch.getDocument(path);\n }\n\n getAllDocuments(limit?: number): StructuredPageDocumentData[] {\n return this.textSearch.getAllDocuments(limit);\n }\n\n clear(): void {\n this.textSearch.clear();\n }\n\n async export(handler: (key: string, data: string) => Promise<void>): Promise<{ keys: string[] }> {\n return this.textSearch.export(handler);\n }\n\n async import(handler: (key: string) => Promise<string>, keys: string[]): Promise<void> {\n await this.textSearch.import(handler, keys);\n this.initialized = true;\n log.debug('Search engine initialized from imported data');\n }\n}\n","import { loggers } from '@peam-ai/logger';\nimport { Charset, Document } from 'flexsearch';\nimport type { StructuredPageDocumentData } from './types';\n\nexport interface TextSearchOptions {\n limit?: number;\n offset?: number;\n suggest?: boolean;\n}\n\nconst PEAM_DOCUMENT_IDS_KEY = 'peam.documentIds';\nconst MAX_DOCUMENTS_RETRIEVE = 25;\nconst log = loggers.search;\n\nexport class TextSearch {\n private index: Document<StructuredPageDocumentData>;\n private initialized: boolean;\n private documentIds: Set<string>;\n\n constructor() {\n this.initialized = false;\n this.index = this.getIndex();\n this.documentIds = new Set();\n }\n\n private getIndex() {\n return new Document<StructuredPageDocumentData>({\n worker: false,\n document: {\n id: 'path',\n index: ['content:title', 'content:description', 'content:textContent', 'content:author', 'content:keywords'],\n store: true,\n },\n tokenize: 'forward',\n resolution: 9,\n context: {\n resolution: 3,\n depth: 2,\n bidirectional: true,\n },\n cache: 100,\n encoder: Charset.LatinExtra,\n });\n }\n\n async 
initialize(): Promise<void> {\n if (this.initialized) {\n log.debug('Text search already initialized');\n return;\n }\n\n this.initialized = true;\n }\n\n async addDocument(document: StructuredPageDocumentData): Promise<void> {\n if (!this.initialized) {\n throw new Error('TextSearch not initialized. Call initialize() first.');\n }\n\n log.debug('Adding document to text search:', document.path);\n\n this.index.add(document);\n this.documentIds.add(document.path);\n }\n\n async search(query: string, options: TextSearchOptions = {}): Promise<StructuredPageDocumentData[]> {\n if (!this.initialized) {\n throw new Error('TextSearch not initialized. Call initialize() first.');\n }\n\n const limit = options.limit || MAX_DOCUMENTS_RETRIEVE;\n const offset = options.offset || 0;\n\n log.debug('Searching for:', query);\n\n const results = await this.index.search(query, {\n limit: limit + offset,\n suggest: options.suggest,\n enrich: true,\n });\n\n const pathSet = new Set<string>();\n const documents: StructuredPageDocumentData[] = [];\n\n for (const fieldResults of results) {\n if (Array.isArray(fieldResults.result)) {\n for (const result of fieldResults.result) {\n const id = typeof result === 'object' && 'id' in result ? result.id : result;\n const doc = typeof result === 'object' && 'doc' in result ? result.doc : null;\n\n if (!pathSet.has(id as string) && doc) {\n pathSet.add(id as string);\n documents.push(doc);\n }\n }\n }\n }\n\n const pagedResults = documents.slice(offset, offset + limit);\n\n return pagedResults;\n }\n\n count(): number {\n return this.documentIds.size;\n }\n\n getDocument(path: string) {\n return this.index.get(path);\n }\n\n getAllDocuments(limit?: number): StructuredPageDocumentData[] {\n const documents: StructuredPageDocumentData[] = [];\n let count = 0;\n limit = limit || MAX_DOCUMENTS_RETRIEVE;\n\n for (const id of this.documentIds) {\n if (count >= limit) {\n break;\n }\n\n const doc = this.index.get(id);\n if (doc) {\n documents.push(doc);\n count++;\n }\n }\n\n log.debug('Retrieved documents from store (limit):', documents.length, limit);\n return documents;\n }\n\n clear(): void {\n this.index.clear();\n this.index = this.getIndex();\n this.documentIds.clear();\n }\n\n async export(handler: (key: string, data: string) => Promise<void>): Promise<{ keys: string[] }> {\n const keys: string[] = [];\n\n await handler(PEAM_DOCUMENT_IDS_KEY, JSON.stringify(Array.from(this.documentIds)));\n keys.push(PEAM_DOCUMENT_IDS_KEY);\n\n await this.index.export(async (key: string, data: string) => {\n keys.push(key);\n await handler(key, data);\n });\n\n log.debug('Exported keys:', keys.length);\n\n return { keys };\n }\n\n async import(handler: (key: string) => Promise<string>, keys: string[]): Promise<void> {\n const documentIdsData = await handler(PEAM_DOCUMENT_IDS_KEY);\n if (documentIdsData) {\n const parsed = typeof documentIdsData === 'string' ? 
JSON.parse(documentIdsData) : documentIdsData;\n this.documentIds = new Set(parsed);\n }\n\n for (const key of keys) {\n if (key === PEAM_DOCUMENT_IDS_KEY) {\n continue;\n }\n\n try {\n const data = await handler(key);\n if (data) {\n this.index.import(key, data);\n }\n } catch (error) {\n log.error('Error importing key:', key, error);\n }\n }\n\n this.initialized = true;\n log.debug('Import completed with keys:', keys.length);\n }\n}\n","import type { StructuredPage } from '@peam-ai/parser';\nimport { SearchEngine } from './searchEngine';\n\nexport interface PageToIndex {\n path: string;\n structuredPage: StructuredPage;\n}\n\nexport interface SearchIndexData {\n keys: string[];\n data: Record<string, string>;\n}\n\n/**\n * Build a search index from structured pages\n */\nexport async function buildSearchIndex(pages: PageToIndex[]): Promise<SearchIndexData> {\n const searchEngine = new SearchEngine();\n await searchEngine.initialize();\n\n for (const page of pages) {\n await searchEngine.addPage(page.path, page.structuredPage);\n }\n\n const exportedData: Record<string, string> = {};\n const result = await searchEngine.export(async (key, data) => {\n exportedData[key] = data;\n });\n\n return {\n keys: result.keys,\n data: exportedData,\n };\n}\n","import { loggers } from '@peam-ai/logger';\nimport * as fs from 'fs/promises';\nimport * as path from 'path';\nimport type { SearchIndexData } from '../indexBuilder';\nimport type { SearchIndexExporter } from './SearchIndexExporter';\n\nconst log = loggers.search;\n\nexport interface FileBasedSearchIndexExporterOptions {\n /**\n * The directory where the index file is located\n */\n baseDir: string;\n\n /**\n * The path to the index file relative to baseDir\n */\n indexPath: string;\n}\n\n/**\n * File-based implementation of SearchIndexExporter\n * Reads and writes search index data to/from a JSON file\n */\nexport class FileBasedSearchIndexExporter implements SearchIndexExporter {\n private baseDir: string;\n private indexPath: string;\n private cachedData: SearchIndexData | null = null;\n\n constructor(options: FileBasedSearchIndexExporterOptions) {\n this.baseDir = options.baseDir;\n this.indexPath = options.indexPath;\n }\n\n private getFullPath(): string {\n return path.join(this.baseDir, this.indexPath);\n }\n\n private async loadData(): Promise<SearchIndexData | null> {\n if (this.cachedData) {\n return this.cachedData;\n }\n\n const fullPath = this.getFullPath();\n\n try {\n const fileContent = await fs.readFile(fullPath, 'utf-8');\n const data = JSON.parse(fileContent) as SearchIndexData;\n\n if (!data || !data.keys || !Array.isArray(data.keys) || !data.data) {\n log.warn('Invalid search index structure in file:', fullPath);\n return null;\n }\n\n if (data.keys.length === 0) {\n log.debug('Search index is empty:', fullPath);\n return null;\n }\n\n this.cachedData = data;\n log.debug('Search index loaded from file:', fullPath, 'with', data.keys.length, 'keys');\n return data;\n } catch (error) {\n log.error('Failed to load search index from file:', fullPath, error);\n return null;\n }\n }\n\n async import(): Promise<SearchIndexData | null> {\n const data = await this.loadData();\n return data;\n }\n\n async export(data: SearchIndexData): Promise<void> {\n const fullPath = this.getFullPath();\n\n try {\n const dir = path.dirname(fullPath);\n await fs.mkdir(dir, { recursive: true });\n\n await fs.writeFile(fullPath, JSON.stringify(data, null, 2), 'utf-8');\n\n log.debug('Search index saved to file:', fullPath, 'with', data.keys.length, 
'keys');\n } catch (error) {\n log.error('Failed to save search index to file:', fullPath, error);\n throw error;\n }\n }\n}\n","import { FileBasedSearchIndexExporter, FileBasedSearchIndexExporterOptions } from './FileBasedSearchIndexExporter';\nimport { SearchIndexExporter } from './SearchIndexExporter';\n\nexport type SearchExporterConfig = {\n type: 'fileBased';\n config: FileBasedSearchIndexExporterOptions;\n};\n\n/**\n * Creates a SearchIndexExporter instance from a SearchExporterConfig\n */\nexport function createExporterFromConfig(exporterConfig: SearchExporterConfig): SearchIndexExporter {\n if (exporterConfig.type === 'fileBased') {\n return new FileBasedSearchIndexExporter({\n ...exporterConfig.config,\n });\n }\n\n throw new Error(`Unknown exporter type: ${exporterConfig.type}`);\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,SAAS,WAAAA,gBAAe;;;ACAxB,SAAS,eAAe;AACxB,SAAS,SAAS,gBAAgB;AASlC,IAAM,wBAAwB;AAC9B,IAAM,yBAAyB;AAC/B,IAAM,MAAM,QAAQ;AAEb,IAAM,aAAN,MAAiB;AAAA,EAKtB,cAAc;AACZ,SAAK,cAAc;AACnB,SAAK,QAAQ,KAAK,SAAS;AAC3B,SAAK,cAAc,oBAAI,IAAI;AAAA,EAC7B;AAAA,EAEQ,WAAW;AACjB,WAAO,IAAI,SAAqC;AAAA,MAC9C,QAAQ;AAAA,MACR,UAAU;AAAA,QACR,IAAI;AAAA,QACJ,OAAO,CAAC,iBAAiB,uBAAuB,uBAAuB,kBAAkB,kBAAkB;AAAA,QAC3G,OAAO;AAAA,MACT;AAAA,MACA,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,SAAS;AAAA,QACP,YAAY;AAAA,QACZ,OAAO;AAAA,QACP,eAAe;AAAA,MACjB;AAAA,MACA,OAAO;AAAA,MACP,SAAS,QAAQ;AAAA,IACnB,CAAC;AAAA,EACH;AAAA,EAEM,aAA4B;AAAA;AAChC,UAAI,KAAK,aAAa;AACpB,YAAI,MAAM,iCAAiC;AAC3C;AAAA,MACF;AAEA,WAAK,cAAc;AAAA,IACrB;AAAA;AAAA,EAEM,YAAY,UAAqD;AAAA;AACrE,UAAI,CAAC,KAAK,aAAa;AACrB,cAAM,IAAI,MAAM,sDAAsD;AAAA,MACxE;AAEA,UAAI,MAAM,mCAAmC,SAAS,IAAI;AAE1D,WAAK,MAAM,IAAI,QAAQ;AACvB,WAAK,YAAY,IAAI,SAAS,IAAI;AAAA,IACpC;AAAA;AAAA,EAEM,OAAO,IAAuF;AAAA,+CAAvF,OAAe,UAA6B,CAAC,GAA0C;AAClG,UAAI,CAAC,KAAK,aAAa;AACrB,cAAM,IAAI,MAAM,sDAAsD;AAAA,MACxE;AAEA,YAAM,QAAQ,QAAQ,SAAS;AAC/B,YAAM,SAAS,QAAQ,UAAU;AAEjC,UAAI,MAAM,kBAAkB,KAAK;AAEjC,YAAM,UAAU,MAAM,KAAK,MAAM,OAAO,OAAO;AAAA,QAC7C,OAAO,QAAQ;AAAA,QACf,SAAS,QAAQ;AAAA,QACjB,QAAQ;AAAA,MACV,CAAC;AAED,YAAM,UAAU,oBAAI,IAAY;AAChC,YAAM,YAA0C,CAAC;AAEjD,iBAAW,gBAAgB,SAAS;AAClC,YAAI,MAAM,QAAQ,aAAa,MAAM,GAAG;AACtC,qBAAW,UAAU,aAAa,QAAQ;AACxC,kBAAM,KAAK,OAAO,WAAW,YAAY,QAAQ,SAAS,OAAO,KAAK;AACtE,kBAAM,MAAM,OAAO,WAAW,YAAY,SAAS,SAAS,OAAO,MAAM;AAEzE,gBAAI,CAAC,QAAQ,IAAI,EAAY,KAAK,KAAK;AACrC,sBAAQ,IAAI,EAAY;AACxB,wBAAU,KAAK,GAAG;AAAA,YACpB;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAEA,YAAM,eAAe,UAAU,MAAM,QAAQ,SAAS,KAAK;AAE3D,aAAO;AAAA,IACT;AAAA;AAAA,EAEA,QAAgB;AACd,WAAO,KAAK,YAAY;AAAA,EAC1B;AAAA,EAEA,YAAYC,OAAc;AACxB,WAAO,KAAK,MAAM,IAAIA,KAAI;AAAA,EAC5B;AAAA,EAEA,gBAAgB,OAA8C;AAC5D,UAAM,YAA0C,CAAC;AACjD,QAAI,QAAQ;AACZ,YAAQ,SAAS;AAEjB,eAAW,MAAM,KAAK,aAAa;AACjC,UAAI,SAAS,OAAO;AAClB;AAAA,MACF;AAEA,YAAM,MAAM,KAAK,MAAM,IAAI,EAAE;AAC7B,UAAI,KAAK;AACP,kBAAU,KAAK,GAAG;AAClB;AAAA,MACF;AAAA,IACF;AAEA,QAAI,MAAM,2CAA2C,UAAU,QAAQ,KAAK;AAC5E,WAAO;AAAA,EACT;AAAA,EAEA,QAAc;AACZ,SAAK,MAAM,MAAM;AACjB,SAAK,QAAQ,KAAK,SAAS;AAC3B,SAAK,YAAY,MAAM;AAAA,EACzB;AAAA,EAEM,OAAO,SAAoF;AAAA;AAC/F,YAAM,OAAiB,CAAC;AAExB,YAAM,QAAQ,uBAAuB,KAAK,UAAU,MAAM,KAAK,KAAK,WAAW,CAAC,CAAC;AACjF,WAAK,KAAK,qBAAqB;AAE/B,YAAM,KAAK,MAAM,OAAO,CAAO,KAAa,SAAiB;AAC3D,aAAK,KAAK,GAAG;AACb,cAAM,QAAQ,KAAK,IAAI;AAAA,MACzB,EAAC;AAED,UAAI,MAAM,kBAAkB,KAAK,MAAM;AAEvC,aAAO,EAAE,KAAK;AAAA,IAChB;AAAA;AAAA,EAEM,OAAO,SAA2C,MAA+B;AAAA;AACrF,YAAM,kBAAkB,MAAM,QAAQ,qBAAqB;AAC3D,UAAI,iBAAiB;AACnB,cAAM,SAAS,OAAO,oBAAoB,WAAW,KAAK,MAAM,eAAe,IAAI;AACnF,aAAK,cAAc,IAAI,IAAI,MAAM;AAAA,MACnC;AAEA,iBAAW,OAAO,MAAM;AACtB,YAAI,QAAQ,uBAAuB;AACjC;AAAA,QACF;AAEA,YAAI;AACF,gBAAM,OAAO,MAAM,QAAQ,
GAAG;AAC9B,cAAI,MAAM;AACR,iBAAK,MAAM,OAAO,KAAK,IAAI;AAAA,UAC7B;AAAA,QACF,SAAS,OAAO;AACd,cAAI,MAAM,wBAAwB,KAAK,KAAK;AAAA,QAC9C;AAAA,MACF;AAEA,WAAK,cAAc;AACnB,UAAI,MAAM,+BAA+B,KAAK,MAAM;AAAA,IACtD;AAAA;AACF;;;AD9KA,IAAMC,OAAMC,SAAQ;AAOb,IAAM,eAAN,MAAmB;AAAA;AAAA,EAKxB,YAAY,SAA8B;AACxC,SAAK,aAAa,IAAI,WAAW;AACjC,SAAK,cAAc;AAAA,EACrB;AAAA,EAEM,aAA4B;AAAA;AAChC,UAAI,KAAK,aAAa;AACpB;AAAA,MACF;AAEA,MAAAD,KAAI,MAAM,4BAA4B;AACtC,YAAM,KAAK,WAAW,WAAW;AACjC,WAAK,cAAc;AAAA,IACrB;AAAA;AAAA,EAEM,QAAQE,OAAc,SAAwC;AAAA;AAClE,UAAI,CAAC,KAAK,aAAa;AACrB,cAAM,IAAI,MAAM,yDAAyD;AAAA,MAC3E;AAEA,YAAM,WAAuC;AAAA,QAC3C,IAAIA;AAAA,QACJ,MAAAA;AAAA,QACA;AAAA,MACF;AAEA,YAAM,KAAK,WAAW,YAAY,QAAQ;AAC1C,MAAAF,KAAI,MAAM,gCAAgCE,KAAI;AAAA,IAChD;AAAA;AAAA,EAEM,OAAO,IAAuF;AAAA,+CAAvF,OAAe,UAA6B,CAAC,GAA0C;AAClG,UAAI,CAAC,KAAK,aAAa;AACrB,cAAM,IAAI,MAAM,yDAAyD;AAAA,MAC3E;AAEA,MAAAF,KAAI,MAAM,2BAA2B,KAAK;AAC1C,aAAO,KAAK,WAAW,OAAO,OAAO,OAAO;AAAA,IAC9C;AAAA;AAAA,EAEA,QAAgB;AACd,WAAO,KAAK,WAAW,MAAM;AAAA,EAC/B;AAAA,EAEA,YAAYE,OAAc;AACxB,WAAO,KAAK,WAAW,YAAYA,KAAI;AAAA,EACzC;AAAA,EAEA,gBAAgB,OAA8C;AAC5D,WAAO,KAAK,WAAW,gBAAgB,KAAK;AAAA,EAC9C;AAAA,EAEA,QAAc;AACZ,SAAK,WAAW,MAAM;AAAA,EACxB;AAAA,EAEM,OAAO,SAAoF;AAAA;AAC/F,aAAO,KAAK,WAAW,OAAO,OAAO;AAAA,IACvC;AAAA;AAAA,EAEM,OAAO,SAA2C,MAA+B;AAAA;AACrF,YAAM,KAAK,WAAW,OAAO,SAAS,IAAI;AAC1C,WAAK,cAAc;AACnB,MAAAF,KAAI,MAAM,8CAA8C;AAAA,IAC1D;AAAA;AACF;;;AEjEA,SAAsB,iBAAiB,OAAgD;AAAA;AACrF,UAAM,eAAe,IAAI,aAAa;AACtC,UAAM,aAAa,WAAW;AAE9B,eAAW,QAAQ,OAAO;AACxB,YAAM,aAAa,QAAQ,KAAK,MAAM,KAAK,cAAc;AAAA,IAC3D;AAEA,UAAM,eAAuC,CAAC;AAC9C,UAAM,SAAS,MAAM,aAAa,OAAO,CAAO,KAAK,SAAS;AAC5D,mBAAa,GAAG,IAAI;AAAA,IACtB,EAAC;AAED,WAAO;AAAA,MACL,MAAM,OAAO;AAAA,MACb,MAAM;AAAA,IACR;AAAA,EACF;AAAA;;;ACjCA,SAAS,WAAAG,gBAAe;AACxB,YAAY,QAAQ;AACpB,YAAY,UAAU;AAItB,IAAMC,OAAMC,SAAQ;AAkBb,IAAM,+BAAN,MAAkE;AAAA,EAKvE,YAAY,SAA8C;AAF1D,SAAQ,aAAqC;AAG3C,SAAK,UAAU,QAAQ;AACvB,SAAK,YAAY,QAAQ;AAAA,EAC3B;AAAA,EAEQ,cAAsB;AAC5B,WAAY,UAAK,KAAK,SAAS,KAAK,SAAS;AAAA,EAC/C;AAAA,EAEc,WAA4C;AAAA;AACxD,UAAI,KAAK,YAAY;AACnB,eAAO,KAAK;AAAA,MACd;AAEA,YAAM,WAAW,KAAK,YAAY;AAElC,UAAI;AACF,cAAM,cAAc,MAAS,YAAS,UAAU,OAAO;AACvD,cAAM,OAAO,KAAK,MAAM,WAAW;AAEnC,YAAI,CAAC,QAAQ,CAAC,KAAK,QAAQ,CAAC,MAAM,QAAQ,KAAK,IAAI,KAAK,CAAC,KAAK,MAAM;AAClE,UAAAD,KAAI,KAAK,2CAA2C,QAAQ;AAC5D,iBAAO;AAAA,QACT;AAEA,YAAI,KAAK,KAAK,WAAW,GAAG;AAC1B,UAAAA,KAAI,MAAM,0BAA0B,QAAQ;AAC5C,iBAAO;AAAA,QACT;AAEA,aAAK,aAAa;AAClB,QAAAA,KAAI,MAAM,kCAAkC,UAAU,QAAQ,KAAK,KAAK,QAAQ,MAAM;AACtF,eAAO;AAAA,MACT,SAAS,OAAO;AACd,QAAAA,KAAI,MAAM,0CAA0C,UAAU,KAAK;AACnE,eAAO;AAAA,MACT;AAAA,IACF;AAAA;AAAA,EAEM,SAA0C;AAAA;AAC9C,YAAM,OAAO,MAAM,KAAK,SAAS;AACjC,aAAO;AAAA,IACT;AAAA;AAAA,EAEM,OAAO,MAAsC;AAAA;AACjD,YAAM,WAAW,KAAK,YAAY;AAElC,UAAI;AACF,cAAM,MAAW,aAAQ,QAAQ;AACjC,cAAS,SAAM,KAAK,EAAE,WAAW,KAAK,CAAC;AAEvC,cAAS,aAAU,UAAU,KAAK,UAAU,MAAM,MAAM,CAAC,GAAG,OAAO;AAEnE,QAAAA,KAAI,MAAM,+BAA+B,UAAU,QAAQ,KAAK,KAAK,QAAQ,MAAM;AAAA,MACrF,SAAS,OAAO;AACd,QAAAA,KAAI,MAAM,wCAAwC,UAAU,KAAK;AACjE,cAAM;AAAA,MACR;AAAA,IACF;AAAA;AACF;;;AC7EO,SAAS,yBAAyB,gBAA2D;AAClG,MAAI,eAAe,SAAS,aAAa;AACvC,WAAO,IAAI,6BAA6B,mBACnC,eAAe,OACnB;AAAA,EACH;AAEA,QAAM,IAAI,MAAM,0BAA0B,eAAe,IAAI,EAAE;AACjE;","names":["loggers","path","log","loggers","path","loggers","log","loggers"]}
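Note on the exporter surface added in this version: the sources embedded above introduce FileBasedSearchIndexExporter and createExporterFromConfig alongside the existing buildSearchIndex/SearchEngine exports. The TypeScript sketch below shows one way these pieces could be wired together to persist an index to disk at build time and restore a SearchEngine from it later. It is a sketch only; the file locations ('.peam', 'search-index.json') and the helper function names are illustrative assumptions, not defaults or APIs shipped by the package beyond the exports shown above.

import {
  buildSearchIndex,
  createExporterFromConfig,
  SearchEngine,
  type PageToIndex,
} from '@peam-ai/search';

// Illustrative paths only; the package defines no default baseDir/indexPath.
const exporter = createExporterFromConfig({
  type: 'fileBased',
  config: { baseDir: '.peam', indexPath: 'search-index.json' },
});

// Build time: index structured pages and persist the exported data as JSON.
async function persistIndex(pages: PageToIndex[]): Promise<void> {
  const indexData = await buildSearchIndex(pages);
  await exporter.export(indexData); // writes baseDir/indexPath, creating the directory if needed
}

// Runtime: reload the persisted index and hydrate a SearchEngine from it.
async function loadEngine(): Promise<SearchEngine | null> {
  const indexData = await exporter.import(); // null when the file is missing, invalid, or empty
  if (!indexData) {
    return null;
  }

  const engine = new SearchEngine();
  // SearchEngine.import takes a key -> data resolver plus the exported key list.
  await engine.import(async (key) => indexData.data[key] ?? '', indexData.keys);
  return engine;
}

Once loadEngine() resolves, engine.search(query) can run against the restored index without re-parsing or re-indexing the original pages.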
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@peam-ai/search",
3
- "version": "0.1.1",
3
+ "version": "0.1.2",
4
4
  "description": "Vector database and search functionality for Peam",
5
5
  "main": "./dist/index.js",
6
6
  "module": "./dist/index.mjs",
@@ -29,8 +29,8 @@
29
29
  },
30
30
  "dependencies": {
31
31
  "flexsearch": "^0.8.212",
32
- "@peam-ai/logger": "0.1.1",
33
- "@peam-ai/parser": "0.1.1"
32
+ "@peam-ai/logger": "0.1.2",
33
+ "@peam-ai/parser": "0.1.2"
34
34
  },
35
35
  "devDependencies": {
36
36
  "@types/node": "^22.10.2",