@tinacms/search 0.0.0-ee8d9a3-20250429131017 → 0.0.0-f1cec43-20251216232909

This diff shows the changes between publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.
@@ -15,6 +15,7 @@ export declare class SearchIndexer {
  private readonly schema;
  private readonly textIndexLength;
  constructor(options: SearchIndexOptions);
+ private createBatchProcessor;
  private makeIndexerCallback;
  indexContentByPaths(documentPaths: string[]): Promise<void>;
  indexAllContent(): Promise<{
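A hypothetical usage sketch of the indexer surface shown above, assuming `SearchIndexer` is exported from the package root; the shape of `SearchIndexOptions` and the behaviour of the new private `createBatchProcessor` are not visible in this diff, so the options value is left abstract.

```ts
import { SearchIndexer } from '@tinacms/search';

// The concrete SearchIndexOptions shape is not part of this diff, so it is left abstract.
declare const options: ConstructorParameters<typeof SearchIndexer>[0];

export async function reindex(changedPaths: string[]): Promise<void> {
  const indexer = new SearchIndexer(options);
  if (changedPaths.length > 0) {
    // Incremental: only the documents at the given paths are (re)indexed.
    await indexer.indexContentByPaths(changedPaths);
  } else {
    // Full rebuild; the hunk above truncates the summary object this resolves to.
    await indexer.indexAllContent();
  }
}
```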
@@ -1,3 +1,3 @@
  import { Collection, ObjectField } from '@tinacms/schema-tools';
- export declare const processDocumentForIndexing: (data: any, path: string, collection: Collection, textIndexLength: number, field?: ObjectField) => any;
+ export declare const processDocumentForIndexing: (data: Record<string, unknown>, path: string, collection: Collection, textIndexLength: number, field?: ObjectField) => Record<string, unknown>;
  export declare const lookupStopwords: (keys?: string[], defaultStopWords?: string[]) => string[];
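An illustrative call site for the tightened `processDocumentForIndexing` signature: the document and the return value are now `Record<string, unknown>` rather than `any`. The import path, collection value, file path and index length below are assumptions, not values taken from this diff.

```ts
import type { Collection } from '@tinacms/schema-tools';
// Assumed entry point; the declaration file above carries no path header in this diff.
import { processDocumentForIndexing } from '@tinacms/search';

declare const postsCollection: Collection; // assumed to come from the Tina schema config

const doc: Record<string, unknown> = { title: 'Hello world', body: 'First post' };
const indexed: Record<string, unknown> = processDocumentForIndexing(
  doc,
  'content/posts/hello.md', // hypothetical document path
  postsCollection,
  100 // hypothetical textIndexLength cap, in characters
);
```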
@@ -0,0 +1,22 @@
+ export interface PaginationOptions {
+ limit?: number;
+ cursor?: string;
+ }
+ export interface PageOptions {
+ PAGE?: {
+ SIZE: number;
+ NUMBER: number;
+ };
+ }
+ export interface PaginationCursors {
+ nextCursor: string | null;
+ prevCursor: string | null;
+ }
+ /**
+ * Converts limit/cursor pagination options to search-index PAGE format.
+ */
+ export declare function buildPageOptions(options: PaginationOptions): PageOptions;
+ /**
+ * Calculates pagination cursors based on total results and current options.
+ */
+ export declare function buildPaginationCursors(total: number, options: PaginationOptions): PaginationCursors;
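The two documented helpers above formalise the limit/cursor handling that previously lived in the client bundle (see the removed index-client.mjs near the end of this diff). Below is a self-contained sketch of that documented behaviour, assuming a cursor is a zero-based page number encoded as a string.

```ts
// Local copies of the interfaces declared above, so the sketch stands alone;
// whether the package re-exports them from its root is not shown in this diff.
interface PaginationOptions { limit?: number; cursor?: string }
interface PageOptions { PAGE?: { SIZE: number; NUMBER: number } }
interface PaginationCursors { nextCursor: string | null; prevCursor: string | null }

// limit -> PAGE.SIZE, cursor (stringified page number) -> PAGE.NUMBER.
function buildPageOptionsSketch(options: PaginationOptions): PageOptions {
  if (!options.limit) return {};
  return {
    PAGE: {
      SIZE: options.limit,
      NUMBER: options.cursor ? parseInt(options.cursor, 10) : 0,
    },
  };
}

// Cursors are null when there is no previous/next page of `limit`-sized results.
function buildPaginationCursorsSketch(
  total: number,
  options: PaginationOptions
): PaginationCursors {
  if (!options.limit) return { nextCursor: null, prevCursor: null };
  const page = options.cursor ? parseInt(options.cursor, 10) : 0;
  return {
    prevCursor: page > 0 ? (page - 1).toString() : null,
    nextCursor: total > (page + 1) * options.limit ? (page + 1).toString() : null,
  };
}

// e.g. buildPaginationCursorsSketch(25, { limit: 10, cursor: '1' })
//   -> { prevCursor: '0', nextCursor: '2' }
```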
package/dist/types.d.ts CHANGED
@@ -1,15 +1,34 @@
+ import type { FuzzySearchOptions, FuzzyMatch } from './fuzzy';
+ export interface SearchOptions {
+ cursor?: string;
+ limit?: number;
+ fuzzy?: boolean;
+ fuzzyOptions?: FuzzySearchOptions;
+ }
+ export interface SearchResult {
+ _id: string;
+ _match: Record<string, string[]>;
+ [key: string]: unknown;
+ }
+ export interface SearchQueryResponse {
+ results: SearchResult[];
+ total: number;
+ nextCursor: string | null;
+ prevCursor: string | null;
+ fuzzyMatches?: Record<string, FuzzyMatch[]>;
+ }
+ export interface IndexableDocument {
+ _id: string;
+ [key: string]: unknown;
+ }
+ export interface SearchIndexResult {
+ RESULT: SearchResult[];
+ RESULT_LENGTH: number;
+ }
  export type SearchClient = {
- query: (query: string, options?: {
- cursor?: string;
- limit?: number;
- }) => Promise<{
- results: any[];
- total: number;
- nextCursor: string | null;
- prevCursor: string | null;
- }>;
- put: (docs: any[]) => Promise<any>;
- del: (ids: string[]) => Promise<any>;
+ query: (query: string, options?: SearchOptions) => Promise<SearchQueryResponse>;
+ put: (docs: IndexableDocument[] | Record<string, unknown>[]) => Promise<void>;
+ del: (ids: string[]) => Promise<void>;
  onStartIndexing?: () => Promise<void>;
  onFinishIndexing?: () => Promise<void>;
  supportsClientSideIndexing?: () => boolean;
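The `SearchClient` contract is now fully typed. Below is a minimal in-memory stub that satisfies the new signatures, assuming these types are re-exported from the package entry (only dist/types.d.ts appears in this diff); its substring matching is purely illustrative and not the package's search behaviour.

```ts
import type {
  SearchClient,
  SearchQueryResponse,
  IndexableDocument,
} from '@tinacms/search';

const store = new Map<string, IndexableDocument>();
const noMatch: Record<string, string[]> = {}; // placeholder for per-field match info

const memoryClient: SearchClient = {
  query: async (query, options): Promise<SearchQueryResponse> => {
    const limit = options?.limit ?? 10;
    const hits = [...store.values()].filter((doc) =>
      JSON.stringify(doc).toLowerCase().includes(query.toLowerCase())
    );
    return {
      results: hits.slice(0, limit).map((doc) => ({ ...doc, _match: noMatch })),
      total: hits.length,
      nextCursor: hits.length > limit ? '1' : null,
      prevCursor: null,
    };
  },
  put: async (batch) => {
    for (const doc of batch) {
      const id = doc._id;
      if (typeof id === 'string') store.set(id, doc as IndexableDocument);
    }
  },
  del: async (ids) => {
    for (const id of ids) store.delete(id);
  },
  supportsClientSideIndexing: () => true,
};

export default memoryClient;
```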
package/package.json CHANGED
@@ -1,21 +1,20 @@
  {
  "name": "@tinacms/search",
- "version": "0.0.0-ee8d9a3-20250429131017",
+ "type": "module",
+ "version": "0.0.0-f1cec43-20251216232909",
  "main": "dist/index.js",
- "module": "dist/index-client.mjs",
- "typings": "dist/index.d.ts",
+ "types": "dist/index.d.ts",
  "files": [
- "package.json",
  "dist"
  ],
  "exports": {
  ".": {
- "import": "./dist/index.mjs",
- "require": "./dist/index.js"
+ "types": "./dist/index.d.ts",
+ "default": "./dist/index.js"
  },
- "./dist/index-client": {
- "import": "./dist/index-client.mjs",
- "require": "./dist/index-client.js"
+ "./index-client": {
+ "types": "./dist/index-client.d.ts",
+ "default": "./dist/index-client.js"
  }
  },
  "license": "SEE LICENSE IN LICENSE",
@@ -33,8 +32,8 @@
  "search-index": "4.0.0",
  "sqlite-level": "^1.2.1",
  "stopword": "^3.1.4",
- "@tinacms/graphql": "0.0.0-ee8d9a3-20250429131017",
- "@tinacms/schema-tools": "1.7.3"
+ "@tinacms/graphql": "2.0.2",
+ "@tinacms/schema-tools": "2.1.0"
  },
  "publishConfig": {
  "registry": "https://registry.npmjs.org"
@@ -53,14 +52,14 @@
  "jest-file-snapshot": "^0.7.0",
  "jest-matcher-utils": "^29.7.0",
  "typescript": "^5.7.3",
- "@tinacms/scripts": "1.3.4"
+ "@tinacms/scripts": "1.4.2"
  },
  "scripts": {
  "types": "pnpm tsc",
  "build": "tinacms-scripts build",
  "docs": "pnpm typedoc",
  "serve": "pnpm nodemon dist/server.js",
- "test": "jest",
- "test-watch": "jest --watch"
+ "test": "NODE_OPTIONS='--experimental-vm-modules' jest",
+ "test-watch": "NODE_OPTIONS='--experimental-vm-modules' jest --watch"
  }
  }
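The consumer-facing effect of the exports map and `"type": "module"` changes is sketched below; the imported names are assumptions, since the new index-client.d.ts contents are not part of this diff.

```ts
// Old deep subpath exposed by the previous exports map:
//   import { queryToSearchIndexQuery } from '@tinacms/search/dist/index-client';

// New subpath, resolved through the "types"/"default" conditions to dist/index-client.js:
import { queryToSearchIndexQuery } from '@tinacms/search/index-client';

// The root entry keeps its name but the package is now ESM, resolving to dist/index.js:
import type { SearchClient } from '@tinacms/search';
```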
@@ -1 +0,0 @@
- [source map for index-client.js (sources: src/indexer/utils.ts, src/index-client.ts); mappings and embedded source text not reproduced]
@@ -1,196 +0,0 @@
- import * as sw from "stopword";
- class StringBuilder {
- constructor(limit) {
- this.length = 0;
- this.buffer = [];
- this.limit = limit;
- }
- append(str) {
- if (this.length + str.length > this.limit) {
- return true;
- } else {
- this.buffer.push(str);
- this.length += str.length;
- if (this.length > this.limit) {
- return true;
- }
- return false;
- }
- }
- toString() {
- return this.buffer.join(" ");
- }
- }
- const extractText = (data, acc, indexableNodeTypes) => {
- var _a, _b;
- if (data) {
- if (indexableNodeTypes.indexOf(data.type) !== -1 && (data.text || data.value)) {
- const tokens = tokenizeString(data.text || data.value);
- for (const token of tokens) {
- if (acc.append(token)) {
- return;
- }
- }
- }
- (_b = (_a = data.children) == null ? void 0 : _a.forEach) == null ? void 0 : _b.call(
- _a,
- (child) => extractText(child, acc, indexableNodeTypes)
- );
- }
- };
- const relativePath = (path, collection) => {
- return path.replace(/\\/g, "/").replace(collection.path, "").replace(/^\/|\/$/g, "");
- };
- const tokenizeString = (str) => {
- return str.split(/[\s\.,]+/).map((s) => s.toLowerCase()).filter((s) => s);
- };
- const processTextFieldValue = (value, maxLen) => {
- const tokens = tokenizeString(value);
- const builder = new StringBuilder(maxLen);
- for (const part of tokens) {
- if (builder.append(part)) {
- break;
- }
- }
- return builder.toString();
- };
- const processDocumentForIndexing = (data, path, collection, textIndexLength, field) => {
- if (!field) {
- const relPath = relativePath(path, collection);
- data["_id"] = `${collection.name}:${relPath}`;
- data["_relativePath"] = relPath;
- }
- for (const f of (field == null ? void 0 : field.fields) || collection.fields || []) {
- if (!f.searchable) {
- delete data[f.name];
- continue;
- }
- const isList = f.list;
- if (data[f.name]) {
- if (f.type === "object") {
- if (isList) {
- data[f.name] = data[f.name].map(
- (obj) => processDocumentForIndexing(
- obj,
- path,
- collection,
- textIndexLength,
- f
- )
- );
- } else {
- data[f.name] = processDocumentForIndexing(
- data[f.name],
- path,
- collection,
- textIndexLength,
- f
- );
- }
- } else if (f.type === "string") {
- const fieldTextIndexLength = f.maxSearchIndexFieldLength || textIndexLength;
- if (isList) {
- data[f.name] = data[f.name].map(
- (value) => processTextFieldValue(value, fieldTextIndexLength)
- );
- } else {
- data[f.name] = processTextFieldValue(
- data[f.name],
- fieldTextIndexLength
- );
- }
- } else if (f.type === "rich-text") {
- const fieldTextIndexLength = f.maxSearchIndexFieldLength || textIndexLength;
- if (isList) {
- data[f.name] = data[f.name].map((value) => {
- const acc = new StringBuilder(fieldTextIndexLength);
- extractText(value, acc, ["text", "code_block", "html"]);
- return acc.toString();
- });
- } else {
- const acc = new StringBuilder(fieldTextIndexLength);
- extractText(data[f.name], acc, ["text", "code_block", "html"]);
- data[f.name] = acc.toString();
- }
- }
- }
- }
- return data;
- };
- const memo = {};
- const lookupStopwords = (keys, defaultStopWords = sw.eng) => {
- let stopwords = defaultStopWords;
- if (keys) {
- if (memo[keys.join(",")]) {
- return memo[keys.join(",")];
- }
- stopwords = [];
- for (const key of keys) {
- stopwords.push(...sw[key]);
- }
- memo[keys.join(",")] = stopwords;
- }
- return stopwords;
- };
- const queryToSearchIndexQuery = (query, stopwordLanguages) => {
- let q;
- const parts = query.split(" ");
- const stopwords = lookupStopwords(stopwordLanguages);
- if (parts.length === 1) {
- q = { AND: [parts[0]] };
- } else {
- q = {
- AND: parts.filter(
- (part) => part.toLowerCase() !== "and" && stopwords.indexOf(part.toLowerCase()) === -1
- )
- };
- }
- return q;
- };
- const optionsToSearchIndexOptions = (options) => {
- const opt = {};
- if (options == null ? void 0 : options.limit) {
- opt["PAGE"] = {
- SIZE: options.limit,
- NUMBER: (options == null ? void 0 : options.cursor) ? parseInt(options.cursor) : 0
- };
- }
- return opt;
- };
- const parseSearchIndexResponse = (data, options) => {
- const results = data["RESULT"];
- const total = data["RESULT_LENGTH"];
- if ((options == null ? void 0 : options.cursor) && (options == null ? void 0 : options.limit)) {
- const prevCursor = options.cursor === "0" ? null : (parseInt(options.cursor) - 1).toString();
- const nextCursor = total <= (parseInt(options.cursor) + 1) * options.limit ? null : (parseInt(options.cursor) + 1).toString();
- return {
- results,
- total,
- prevCursor,
- nextCursor
- };
- } else if (!(options == null ? void 0 : options.cursor) && (options == null ? void 0 : options.limit)) {
- const prevCursor = null;
- const nextCursor = total <= options.limit ? null : "1";
- return {
- results,
- total,
- prevCursor,
- nextCursor
- };
- } else {
- return {
- results,
- total,
- prevCursor: null,
- nextCursor: null
- };
- }
- };
- export {
- optionsToSearchIndexOptions,
- parseSearchIndexResponse,
- processDocumentForIndexing,
- queryToSearchIndexQuery
- };
- //# sourceMappingURL=index-client.mjs.map
@@ -1 +0,0 @@
- [source map for index-client.mjs (sources: src/indexer/utils.ts, src/index-client.ts); mappings and embedded source text not reproduced]