@openspecui/search 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,316 @@
1
+ import { a as SearchIndexDocument, i as SearchIndex, n as SearchDocumentKind, o as SearchProvider, r as SearchHit, s as SearchQuery, t as SearchDocument } from "./types-B4eONYNW.mjs";
2
+ import { z } from "zod";
3
+
4
+ //#region src/engine.d.ts
5
/** Lower-cases the input, collapses whitespace runs to single spaces, and trims. */
declare function normalizeText(input: string): string;
/** Splits a raw query into normalized, non-empty search terms. */
declare function splitTerms(query: string): string[];
/** Builds the in-memory index, pre-normalizing title/path/content per document. */
declare function buildSearchIndex(docs: SearchDocument[]): SearchIndex;
/** Extracts a short excerpt of `content` windowed around the earliest term match. */
declare function createSnippet(content: string, terms: string[]): string;
/** Runs `query` against `index`; hits are sorted by score, then recency. */
declare function searchIndex(index: SearchIndex, query: SearchQuery): SearchHit[];
10
+ //#endregion
11
+ //#region src/protocol.d.ts
12
+ declare const SearchDocumentKindSchema: z.ZodEnum<["spec", "change", "archive"]>;
13
+ declare const SearchDocumentSchema: z.ZodObject<{
14
+ id: z.ZodString;
15
+ kind: z.ZodEnum<["spec", "change", "archive"]>;
16
+ title: z.ZodString;
17
+ href: z.ZodString;
18
+ path: z.ZodString;
19
+ content: z.ZodString;
20
+ updatedAt: z.ZodNumber;
21
+ }, "strip", z.ZodTypeAny, {
22
+ id: string;
23
+ kind: "spec" | "change" | "archive";
24
+ title: string;
25
+ href: string;
26
+ path: string;
27
+ content: string;
28
+ updatedAt: number;
29
+ }, {
30
+ id: string;
31
+ kind: "spec" | "change" | "archive";
32
+ title: string;
33
+ href: string;
34
+ path: string;
35
+ content: string;
36
+ updatedAt: number;
37
+ }>;
38
+ declare const SearchQuerySchema: z.ZodObject<{
39
+ query: z.ZodString;
40
+ limit: z.ZodOptional<z.ZodNumber>;
41
+ }, "strip", z.ZodTypeAny, {
42
+ query: string;
43
+ limit?: number | undefined;
44
+ }, {
45
+ query: string;
46
+ limit?: number | undefined;
47
+ }>;
48
+ declare const SearchHitSchema: z.ZodObject<{
49
+ documentId: z.ZodString;
50
+ kind: z.ZodEnum<["spec", "change", "archive"]>;
51
+ title: z.ZodString;
52
+ href: z.ZodString;
53
+ path: z.ZodString;
54
+ score: z.ZodNumber;
55
+ snippet: z.ZodString;
56
+ updatedAt: z.ZodNumber;
57
+ }, "strip", z.ZodTypeAny, {
58
+ kind: "spec" | "change" | "archive";
59
+ title: string;
60
+ href: string;
61
+ path: string;
62
+ updatedAt: number;
63
+ documentId: string;
64
+ score: number;
65
+ snippet: string;
66
+ }, {
67
+ kind: "spec" | "change" | "archive";
68
+ title: string;
69
+ href: string;
70
+ path: string;
71
+ updatedAt: number;
72
+ documentId: string;
73
+ score: number;
74
+ snippet: string;
75
+ }>;
76
+ declare const SearchWorkerRequestSchema: z.ZodDiscriminatedUnion<"type", [z.ZodObject<{
77
+ id: z.ZodString;
78
+ type: z.ZodLiteral<"init">;
79
+ docs: z.ZodArray<z.ZodObject<{
80
+ id: z.ZodString;
81
+ kind: z.ZodEnum<["spec", "change", "archive"]>;
82
+ title: z.ZodString;
83
+ href: z.ZodString;
84
+ path: z.ZodString;
85
+ content: z.ZodString;
86
+ updatedAt: z.ZodNumber;
87
+ }, "strip", z.ZodTypeAny, {
88
+ id: string;
89
+ kind: "spec" | "change" | "archive";
90
+ title: string;
91
+ href: string;
92
+ path: string;
93
+ content: string;
94
+ updatedAt: number;
95
+ }, {
96
+ id: string;
97
+ kind: "spec" | "change" | "archive";
98
+ title: string;
99
+ href: string;
100
+ path: string;
101
+ content: string;
102
+ updatedAt: number;
103
+ }>, "many">;
104
+ }, "strip", z.ZodTypeAny, {
105
+ id: string;
106
+ type: "init";
107
+ docs: {
108
+ id: string;
109
+ kind: "spec" | "change" | "archive";
110
+ title: string;
111
+ href: string;
112
+ path: string;
113
+ content: string;
114
+ updatedAt: number;
115
+ }[];
116
+ }, {
117
+ id: string;
118
+ type: "init";
119
+ docs: {
120
+ id: string;
121
+ kind: "spec" | "change" | "archive";
122
+ title: string;
123
+ href: string;
124
+ path: string;
125
+ content: string;
126
+ updatedAt: number;
127
+ }[];
128
+ }>, z.ZodObject<{
129
+ id: z.ZodString;
130
+ type: z.ZodLiteral<"replaceAll">;
131
+ docs: z.ZodArray<z.ZodObject<{
132
+ id: z.ZodString;
133
+ kind: z.ZodEnum<["spec", "change", "archive"]>;
134
+ title: z.ZodString;
135
+ href: z.ZodString;
136
+ path: z.ZodString;
137
+ content: z.ZodString;
138
+ updatedAt: z.ZodNumber;
139
+ }, "strip", z.ZodTypeAny, {
140
+ id: string;
141
+ kind: "spec" | "change" | "archive";
142
+ title: string;
143
+ href: string;
144
+ path: string;
145
+ content: string;
146
+ updatedAt: number;
147
+ }, {
148
+ id: string;
149
+ kind: "spec" | "change" | "archive";
150
+ title: string;
151
+ href: string;
152
+ path: string;
153
+ content: string;
154
+ updatedAt: number;
155
+ }>, "many">;
156
+ }, "strip", z.ZodTypeAny, {
157
+ id: string;
158
+ type: "replaceAll";
159
+ docs: {
160
+ id: string;
161
+ kind: "spec" | "change" | "archive";
162
+ title: string;
163
+ href: string;
164
+ path: string;
165
+ content: string;
166
+ updatedAt: number;
167
+ }[];
168
+ }, {
169
+ id: string;
170
+ type: "replaceAll";
171
+ docs: {
172
+ id: string;
173
+ kind: "spec" | "change" | "archive";
174
+ title: string;
175
+ href: string;
176
+ path: string;
177
+ content: string;
178
+ updatedAt: number;
179
+ }[];
180
+ }>, z.ZodObject<{
181
+ id: z.ZodString;
182
+ type: z.ZodLiteral<"search">;
183
+ query: z.ZodObject<{
184
+ query: z.ZodString;
185
+ limit: z.ZodOptional<z.ZodNumber>;
186
+ }, "strip", z.ZodTypeAny, {
187
+ query: string;
188
+ limit?: number | undefined;
189
+ }, {
190
+ query: string;
191
+ limit?: number | undefined;
192
+ }>;
193
+ }, "strip", z.ZodTypeAny, {
194
+ id: string;
195
+ type: "search";
196
+ query: {
197
+ query: string;
198
+ limit?: number | undefined;
199
+ };
200
+ }, {
201
+ id: string;
202
+ type: "search";
203
+ query: {
204
+ query: string;
205
+ limit?: number | undefined;
206
+ };
207
+ }>, z.ZodObject<{
208
+ id: z.ZodString;
209
+ type: z.ZodLiteral<"dispose">;
210
+ }, "strip", z.ZodTypeAny, {
211
+ id: string;
212
+ type: "dispose";
213
+ }, {
214
+ id: string;
215
+ type: "dispose";
216
+ }>]>;
217
+ declare const SearchWorkerResponseSchema: z.ZodDiscriminatedUnion<"type", [z.ZodObject<{
218
+ id: z.ZodString;
219
+ type: z.ZodLiteral<"ok">;
220
+ }, "strip", z.ZodTypeAny, {
221
+ id: string;
222
+ type: "ok";
223
+ }, {
224
+ id: string;
225
+ type: "ok";
226
+ }>, z.ZodObject<{
227
+ id: z.ZodString;
228
+ type: z.ZodLiteral<"results">;
229
+ hits: z.ZodArray<z.ZodObject<{
230
+ documentId: z.ZodString;
231
+ kind: z.ZodEnum<["spec", "change", "archive"]>;
232
+ title: z.ZodString;
233
+ href: z.ZodString;
234
+ path: z.ZodString;
235
+ score: z.ZodNumber;
236
+ snippet: z.ZodString;
237
+ updatedAt: z.ZodNumber;
238
+ }, "strip", z.ZodTypeAny, {
239
+ kind: "spec" | "change" | "archive";
240
+ title: string;
241
+ href: string;
242
+ path: string;
243
+ updatedAt: number;
244
+ documentId: string;
245
+ score: number;
246
+ snippet: string;
247
+ }, {
248
+ kind: "spec" | "change" | "archive";
249
+ title: string;
250
+ href: string;
251
+ path: string;
252
+ updatedAt: number;
253
+ documentId: string;
254
+ score: number;
255
+ snippet: string;
256
+ }>, "many">;
257
+ }, "strip", z.ZodTypeAny, {
258
+ id: string;
259
+ type: "results";
260
+ hits: {
261
+ kind: "spec" | "change" | "archive";
262
+ title: string;
263
+ href: string;
264
+ path: string;
265
+ updatedAt: number;
266
+ documentId: string;
267
+ score: number;
268
+ snippet: string;
269
+ }[];
270
+ }, {
271
+ id: string;
272
+ type: "results";
273
+ hits: {
274
+ kind: "spec" | "change" | "archive";
275
+ title: string;
276
+ href: string;
277
+ path: string;
278
+ updatedAt: number;
279
+ documentId: string;
280
+ score: number;
281
+ snippet: string;
282
+ }[];
283
+ }>, z.ZodObject<{
284
+ id: z.ZodString;
285
+ type: z.ZodLiteral<"error">;
286
+ message: z.ZodString;
287
+ }, "strip", z.ZodTypeAny, {
288
+ id: string;
289
+ message: string;
290
+ type: "error";
291
+ }, {
292
+ id: string;
293
+ message: string;
294
+ type: "error";
295
+ }>]>;
296
/** Parsed document type derived from SearchDocumentSchema. */
type SearchDocumentInput = z.infer<typeof SearchDocumentSchema>;
/** Parsed query type derived from SearchQuerySchema. */
type SearchQueryInput = z.infer<typeof SearchQuerySchema>;
/** Validated hit type derived from SearchHitSchema. */
type SearchHitOutput = z.infer<typeof SearchHitSchema>;
/** Any request message accepted by a search worker. */
type SearchWorkerRequest = z.infer<typeof SearchWorkerRequestSchema>;
/** Any response message emitted by a search worker. */
type SearchWorkerResponse = z.infer<typeof SearchWorkerResponseSchema>;
301
+ //#endregion
302
+ //#region src/webworker-provider.d.ts
303
/**
 * SearchProvider backed by a browser Web Worker. The worker script is
 * generated at runtime and loaded through a Blob URL; requests and
 * responses are correlated by id through a map of pending promises.
 */
declare class WebWorkerSearchProvider implements SearchProvider {
  private worker;
  private workerUrl;
  private pending;
  /** Spawns the worker (if needed) and loads the initial document set. */
  init(docs: SearchDocument[]): Promise<void>;
  /** Replaces every indexed document with `docs`. */
  replaceAll(docs: SearchDocument[]): Promise<void>;
  /** Runs a query in the worker and resolves with schema-validated hits. */
  search(query: SearchQuery): Promise<SearchHit[]>;
  /** Terminates the worker, revokes the Blob URL, rejects pending calls. */
  dispose(): Promise<void>;
  private ensureWorker;
  private sendRequest;
  private failPending;
}
315
+ //#endregion
316
+ export { type SearchDocument, type SearchDocumentInput, type SearchDocumentKind, SearchDocumentKindSchema, SearchDocumentSchema, type SearchHit, type SearchHitOutput, SearchHitSchema, type SearchIndex, type SearchIndexDocument, type SearchProvider, type SearchQuery, type SearchQueryInput, SearchQuerySchema, type SearchWorkerRequest, SearchWorkerRequestSchema, type SearchWorkerResponse, SearchWorkerResponseSchema, WebWorkerSearchProvider, buildSearchIndex, createSnippet, normalizeText, searchIndex, splitTerms };
package/dist/index.mjs ADDED
@@ -0,0 +1,99 @@
1
+ import { a as SearchHitSchema, c as SearchWorkerResponseSchema, d as normalizeText, f as searchIndex, i as SearchDocumentSchema, l as buildSearchIndex, n as buildWebWorkerSource, o as SearchQuerySchema, p as splitTerms, r as SearchDocumentKindSchema, s as SearchWorkerRequestSchema, u as createSnippet } from "./worker-source-BxMlTiAB.mjs";
2
+
3
+ //#region src/webworker-provider.ts
4
// Monotonic counter guarantees ids are unique within this module; the
// random suffix keeps them unpredictable.
let requestSeq = 0;
// Returns a unique correlation id for a worker request. Math.random alone
// could (rarely) produce duplicate ids, which would make two pending
// requests share a Map key and leave one promise unsettled forever.
function requestId() {
  requestSeq += 1;
  return `${requestSeq.toString(36)}-${Math.random().toString(36).slice(2)}`;
}
7
// Browser SearchProvider that runs the search engine inside a Web Worker
// created from a generated Blob URL. Requests and responses are matched by
// their `id` through the `pending` map of promise callbacks.
var WebWorkerSearchProvider = class {
  worker = null;
  workerUrl = null;
  pending = /* @__PURE__ */ new Map();
  /** Starts the worker if needed and loads the initial document set. */
  async init(docs) {
    await this.ensureWorker();
    await this.sendRequest({
      id: requestId(),
      type: "init",
      docs
    });
  }
  /** Replaces the worker's entire document set with `docs`. */
  async replaceAll(docs) {
    await this.ensureWorker();
    await this.sendRequest({
      id: requestId(),
      type: "replaceAll",
      docs
    });
  }
  /** Executes `query` in the worker; resolves with hits ([] when none). */
  async search(query) {
    await this.ensureWorker();
    return await this.sendRequest({
      id: requestId(),
      type: "search",
      query
    }) ?? [];
  }
  /**
   * Shuts down: best-effort "dispose" message, terminates the worker,
   * revokes the Blob URL, and rejects every still-pending request.
   */
  async dispose() {
    if (this.worker) {
      await this.sendRequest({
        id: requestId(),
        type: "dispose"
      }).catch(() => {});
      this.worker.terminate();
      this.worker = null;
    }
    if (this.workerUrl) {
      URL.revokeObjectURL(this.workerUrl);
      this.workerUrl = null;
    }
    this.failPending(new Error("Worker disposed"));
  }
  /** Lazily creates the worker and wires up response/error handling. */
  async ensureWorker() {
    if (this.worker) return;
    const source = buildWebWorkerSource();
    const blob = new Blob([source], { type: "text/javascript" });
    this.workerUrl = URL.createObjectURL(blob);
    this.worker = new Worker(this.workerUrl, { type: "module" });
    this.worker.onmessage = (event) => {
      // Validate the payload before trusting it; a malformed message
      // aborts every in-flight request rather than leaving them hanging.
      const parsed = SearchWorkerResponseSchema.safeParse(event.data);
      if (!parsed.success) {
        this.failPending(new Error("Invalid worker response payload"));
        return;
      }
      const response = parsed.data;
      const pending = this.pending.get(response.id);
      if (!pending) return;
      this.pending.delete(response.id);
      if (response.type === "error") {
        pending.reject(new Error(response.message));
        return;
      }
      if (response.type === "results") {
        pending.resolve(response.hits.map((hit) => SearchHitSchema.parse(hit)));
        return;
      }
      pending.resolve(void 0);
    };
    this.worker.onerror = (event) => {
      // NOTE(review): the worker instance is kept after an error; if the
      // error was fatal, later requests may hang — confirm whether the
      // worker should be reset here.
      const message = event.message || "Web worker runtime error";
      this.failPending(new Error(message));
    };
  }
  /** Posts `payload` and resolves/rejects when its response arrives. */
  sendRequest(payload) {
    const worker = this.worker;
    if (!worker) return Promise.reject(new Error("Web worker is not initialized"));
    return new Promise((resolve, reject) => {
      this.pending.set(payload.id, {
        resolve,
        reject
      });
      try {
        worker.postMessage(payload);
      } catch (error) {
        // postMessage can throw synchronously (e.g. a non-cloneable
        // payload); without this cleanup the pending entry leaked and a
        // later failPending() would reject an already-settled promise.
        this.pending.delete(payload.id);
        reject(error);
      }
    });
  }
  /** Rejects every pending request with `error` and clears the map. */
  failPending(error) {
    this.pending.forEach(({ reject }) => reject(error));
    this.pending.clear();
  }
};
97
+
98
+ //#endregion
99
+ export { SearchDocumentKindSchema, SearchDocumentSchema, SearchHitSchema, SearchQuerySchema, SearchWorkerRequestSchema, SearchWorkerResponseSchema, WebWorkerSearchProvider, buildSearchIndex, createSnippet, normalizeText, searchIndex, splitTerms };
@@ -0,0 +1,16 @@
1
+ import { o as SearchProvider, r as SearchHit, s as SearchQuery, t as SearchDocument } from "./types-B4eONYNW.mjs";
2
+
3
+ //#region src/node-worker-provider.d.ts
4
/**
 * SearchProvider backed by a node:worker_threads Worker whose source is
 * generated at runtime and evaluated with `eval: true`; requests and
 * responses are correlated by id through a map of pending promises.
 */
declare class NodeWorkerSearchProvider implements SearchProvider {
  private worker;
  private pending;
  /** Spawns the worker (if needed) and loads the initial document set. */
  init(docs: SearchDocument[]): Promise<void>;
  /** Replaces every indexed document with `docs`. */
  replaceAll(docs: SearchDocument[]): Promise<void>;
  /** Runs a query in the worker and resolves with schema-validated hits. */
  search(query: SearchQuery): Promise<SearchHit[]>;
  /** Terminates the worker and rejects pending calls. */
  dispose(): Promise<void>;
  private ensureWorker;
  private sendRequest;
  private failPending;
}
15
+ //#endregion
16
+ export { NodeWorkerSearchProvider };
package/dist/node.mjs ADDED
@@ -0,0 +1,91 @@
1
+ import { a as SearchHitSchema, c as SearchWorkerResponseSchema, t as buildNodeWorkerSource } from "./worker-source-BxMlTiAB.mjs";
2
+ import { Worker } from "node:worker_threads";
3
+
4
+ //#region src/node-worker-provider.ts
5
// Monotonic counter guarantees ids are unique within this module; the
// random suffix keeps them unpredictable.
let requestCounter = 0;
// Returns a unique correlation id for a worker request. Math.random alone
// could (rarely) produce duplicate ids, which would make two pending
// requests share a Map key and leave one promise unsettled forever.
function requestId() {
  requestCounter += 1;
  return `${requestCounter.toString(36)}-${Math.random().toString(36).slice(2)}`;
}
8
// Node SearchProvider that runs the engine in a worker_threads Worker
// created with `eval: true` from generated source. Requests and responses
// are matched by id through the `pending` promise map.
var NodeWorkerSearchProvider = class {
  worker = null;
  pending = /* @__PURE__ */ new Map();
  /** Starts the worker if needed and loads the initial document set. */
  async init(docs) {
    await this.ensureWorker();
    await this.sendRequest({
      id: requestId(),
      type: "init",
      docs
    });
  }
  /** Replaces the worker's entire document set with `docs`. */
  async replaceAll(docs) {
    await this.ensureWorker();
    await this.sendRequest({
      id: requestId(),
      type: "replaceAll",
      docs
    });
  }
  /** Executes `query` in the worker; resolves with hits ([] when none). */
  async search(query) {
    await this.ensureWorker();
    return await this.sendRequest({
      id: requestId(),
      type: "search",
      query
    }) ?? [];
  }
  /**
   * Shuts down: best-effort "dispose" message, awaits worker termination,
   * and rejects every still-pending request.
   */
  async dispose() {
    if (this.worker) {
      await this.sendRequest({
        id: requestId(),
        type: "dispose"
      }).catch(() => {});
      await this.worker.terminate();
      this.worker = null;
    }
    this.failPending(new Error("Worker disposed"));
  }
  /** Lazily creates the worker and wires up response/error handling. */
  async ensureWorker() {
    if (this.worker) return;
    this.worker = new Worker(buildNodeWorkerSource(), { eval: true });
    this.worker.on("message", (payload) => {
      // Validate the payload before trusting it; a malformed message
      // aborts every in-flight request rather than leaving them hanging.
      const parsed = SearchWorkerResponseSchema.safeParse(payload);
      if (!parsed.success) {
        this.failPending(new Error("Invalid worker response payload"));
        return;
      }
      const response = parsed.data;
      const pending = this.pending.get(response.id);
      if (!pending) return;
      this.pending.delete(response.id);
      if (response.type === "error") {
        pending.reject(new Error(response.message));
        return;
      }
      if (response.type === "results") {
        pending.resolve(response.hits.map((hit) => SearchHitSchema.parse(hit)));
        return;
      }
      pending.resolve(void 0);
    });
    this.worker.on("error", (error) => {
      // NOTE(review): after a worker_threads "error" event the thread has
      // exited, but this.worker is kept, so later requests would hang —
      // confirm whether the worker should be reset to null here.
      this.failPending(error);
    });
  }
  /** Posts `payload` and resolves/rejects when its response arrives. */
  sendRequest(payload) {
    const worker = this.worker;
    if (!worker) return Promise.reject(new Error("Node worker is not initialized"));
    return new Promise((resolve, reject) => {
      this.pending.set(payload.id, {
        resolve,
        reject
      });
      try {
        worker.postMessage(payload);
      } catch (error) {
        // postMessage can throw synchronously (non-serializable payload);
        // clean up so the entry doesn't leak and receive a second
        // rejection from a later failPending().
        this.pending.delete(payload.id);
        reject(error);
      }
    });
  }
  /** Rejects every pending request with `error` and clears the map. */
  failPending(error) {
    this.pending.forEach(({ reject }) => reject(error));
    this.pending.clear();
  }
};
89
+
90
+ //#endregion
91
+ export { NodeWorkerSearchProvider };
@@ -0,0 +1,41 @@
1
+ //#region src/types.d.ts
2
/** Category of an indexed document. */
type SearchDocumentKind = 'spec' | 'change' | 'archive';
/** A raw document as supplied by callers for indexing. */
interface SearchDocument {
  id: string;
  kind: SearchDocumentKind;
  title: string;
  href: string;
  path: string;
  content: string;
  // Timestamp used as the tie-break sort key (newer first) for equal scores.
  updatedAt: number;
}
/** A search request; the engine clamps `limit` (default 50, max 200). */
interface SearchQuery {
  query: string;
  limit?: number;
}
/** A single scored result produced by the engine. */
interface SearchHit {
  documentId: string;
  kind: SearchDocumentKind;
  title: string;
  href: string;
  path: string;
  score: number;
  snippet: string;
  updatedAt: number;
}
/** Common contract implemented by the web- and node-worker providers. */
interface SearchProvider {
  init(docs: SearchDocument[]): Promise<void>;
  replaceAll(docs: SearchDocument[]): Promise<void>;
  search(query: SearchQuery): Promise<SearchHit[]>;
  dispose(): Promise<void>;
}
/** A document augmented with pre-normalized fields for fast matching. */
interface SearchIndexDocument extends SearchDocument {
  normalizedTitle: string;
  normalizedPath: string;
  normalizedContent: string;
}
/** In-memory index: just the normalized documents, scanned linearly. */
interface SearchIndex {
  documents: SearchIndexDocument[];
}
40
+ //#endregion
41
+ export { SearchIndexDocument as a, SearchIndex as i, SearchDocumentKind as n, SearchProvider as o, SearchHit as r, SearchQuery as s, SearchDocument as t };
@@ -0,0 +1,205 @@
1
+ import { z } from "zod";
2
+
3
+ //#region src/engine.ts
4
// Engine tuning constants.
const DEFAULT_LIMIT = 50; // hits returned when the query sets no limit
const MAX_LIMIT = 200; // hard cap on hits per query
const SNIPPET_SIZE = 180; // target snippet length, in characters
// Canonicalizes text for matching: lower-case, collapse every whitespace
// run to a single space, strip leading/trailing space.
function normalizeText(input) {
  const lowered = input.toLowerCase();
  const collapsed = lowered.replace(/\s+/g, " ");
  return collapsed.trim();
}
// Splits a raw query into normalized, non-empty terms.
function splitTerms(query) {
  const terms = [];
  for (const piece of normalizeText(query).split(" ")) {
    const term = piece.trim();
    if (term.length > 0) terms.push(term);
  }
  return terms;
}
// Copies a document and attaches the pre-normalized fields used for matching.
function toSearchIndexDocument(doc) {
  const indexed = Object.assign({}, doc);
  indexed.normalizedTitle = normalizeText(doc.title);
  indexed.normalizedPath = normalizeText(doc.path);
  indexed.normalizedContent = normalizeText(doc.content);
  return indexed;
}
// Builds the in-memory index: just the normalized documents.
function buildSearchIndex(docs) {
  const documents = docs.map((doc) => toSearchIndexDocument(doc));
  return { documents };
}
// Clamps a requested limit to [1, MAX_LIMIT]; non-numeric or NaN input
// falls back to DEFAULT_LIMIT.
function resolveLimit(limit) {
  if (typeof limit !== "number" || Number.isNaN(limit)) return DEFAULT_LIMIT;
  const whole = Math.trunc(limit);
  if (whole < 1) return 1;
  return whole > MAX_LIMIT ? MAX_LIMIT : whole;
}
28
// A document matches when every term appears in at least one of the
// pre-normalized title, path, or content fields (AND across terms).
function isDocumentMatch(doc, terms) {
  for (const term of terms) {
    const inTitle = doc.normalizedTitle.includes(term);
    const inPath = doc.normalizedPath.includes(term);
    const inContent = doc.normalizedContent.includes(term);
    if (!inTitle && !inPath && !inContent) return false;
  }
  return true;
}
31
// Relevance score for a matched document: a title hit weighs most (30),
// then a path hit (20); a content hit adds 8, plus 4 more when the match
// occurs near the top of the content (first 160 normalized characters).
function scoreDocument(doc, terms) {
  return terms.reduce((total, term) => {
    let termScore = 0;
    if (doc.normalizedTitle.includes(term)) termScore += 30;
    if (doc.normalizedPath.includes(term)) termScore += 20;
    const where = doc.normalizedContent.indexOf(term);
    if (where !== -1) termScore += where < 160 ? 12 : 8;
    return total + termScore;
  }, 0);
}
44
// Builds a short excerpt of `content` windowed around the earliest term
// match, adding "..." where the excerpt is cut on either side.
function createSnippet(content, terms) {
  const source = content.trim();
  if (source.length === 0) return "";
  // Find the earliest position (in normalized space) of any query term.
  const haystack = normalizeText(source);
  const positions = terms.map((term) => haystack.indexOf(term)).filter((idx) => idx >= 0);
  if (positions.length === 0) return source.slice(0, SNIPPET_SIZE);
  const matchIndex = Math.min(...positions);
  // Window the raw text around the match. The offset is computed on the
  // normalized string but applied to the raw one, so it is approximate
  // when normalization shifted character positions.
  const start = Math.max(0, matchIndex - Math.floor(SNIPPET_SIZE / 3));
  const end = Math.min(source.length, start + SNIPPET_SIZE);
  const head = start > 0 ? "..." : "";
  const tail = end < source.length ? "..." : "";
  return head + source.slice(start, end) + tail;
}
60
// Matches, scores and snippets every document for `query`, returning the
// best hits: score descending, ties broken by most recently updated, and
// the count clamped by resolveLimit().
function searchIndex(index, query) {
  const terms = splitTerms(query.query);
  if (!terms.length) return [];
  const hits = index.documents
    .filter((doc) => isDocumentMatch(doc, terms))
    .map((doc) => ({
      documentId: doc.id,
      kind: doc.kind,
      title: doc.title,
      href: doc.href,
      path: doc.path,
      score: scoreDocument(doc, terms),
      snippet: createSnippet(doc.content, terms),
      updatedAt: doc.updatedAt
    }));
  hits.sort((left, right) => right.score - left.score || right.updatedAt - left.updatedAt);
  return hits.slice(0, resolveLimit(query.limit));
}
83
+
84
+ //#endregion
85
+ //#region src/protocol.ts
86
// Runtime validators for messages crossing the worker boundary. They
// mirror the interfaces declared in src/types.d.ts.
const SearchDocumentKindSchema = z.enum([
  "spec",
  "change",
  "archive"
]);
// Document payload accepted by "init" / "replaceAll" requests.
const SearchDocumentSchema = z.object({
  id: z.string(),
  kind: SearchDocumentKindSchema,
  title: z.string(),
  href: z.string(),
  path: z.string(),
  content: z.string(),
  updatedAt: z.number()
});
// Query payload for a "search" request; limit must be a positive integer.
const SearchQuerySchema = z.object({
  query: z.string(),
  limit: z.number().int().positive().optional()
});
// A single scored result carried in a "results" response.
const SearchHitSchema = z.object({
  documentId: z.string(),
  kind: SearchDocumentKindSchema,
  title: z.string(),
  href: z.string(),
  path: z.string(),
  score: z.number(),
  snippet: z.string(),
  updatedAt: z.number()
});
// Requests sent to a worker, discriminated on `type`.
const SearchWorkerRequestSchema = z.discriminatedUnion("type", [
  z.object({
    id: z.string(),
    type: z.literal("init"),
    docs: z.array(SearchDocumentSchema)
  }),
  z.object({
    id: z.string(),
    type: z.literal("replaceAll"),
    docs: z.array(SearchDocumentSchema)
  }),
  z.object({
    id: z.string(),
    type: z.literal("search"),
    query: SearchQuerySchema
  }),
  z.object({
    id: z.string(),
    type: z.literal("dispose")
  })
]);
// Responses sent back from a worker, discriminated on `type`.
const SearchWorkerResponseSchema = z.discriminatedUnion("type", [
  z.object({
    id: z.string(),
    type: z.literal("ok")
  }),
  z.object({
    id: z.string(),
    type: z.literal("results"),
    hits: z.array(SearchHitSchema)
  }),
  z.object({
    id: z.string(),
    type: z.literal("error"),
    message: z.string()
  })
]);
151
+
152
+ //#endregion
153
+ //#region src/worker-source.ts
154
// Source text shared by the web and node workers. The engine functions are
// serialized with Function.prototype.toString() and re-declared inside the
// worker, so the generated worker has no imports of its own. The template
// body below is runtime code, so no comments may be added inside it.
const sharedRuntimeSource = String.raw`
const DEFAULT_LIMIT = 50;
const MAX_LIMIT = 200;
const SNIPPET_SIZE = 180;
const normalizeText = ${normalizeText.toString()};
const splitTerms = ${splitTerms.toString()};
const toSearchIndexDocument = ${toSearchIndexDocument.toString()};
const buildSearchIndex = ${buildSearchIndex.toString()};
const resolveLimit = ${resolveLimit.toString()};
const isDocumentMatch = ${isDocumentMatch.toString()};
const scoreDocument = ${scoreDocument.toString()};
const createSnippet = ${createSnippet.toString()};
const searchIndex = ${searchIndex.toString()};
let index = buildSearchIndex([]);

function handleMessage(payload) {
  try {
    if (!payload || typeof payload !== 'object') {
      throw new Error('Invalid worker request payload');
    }

    if (payload.type === 'init' || payload.type === 'replaceAll') {
      index = buildSearchIndex(Array.isArray(payload.docs) ? payload.docs : []);
      return { id: payload.id, type: 'ok' };
    }

    if (payload.type === 'search') {
      const hits = searchIndex(index, payload.query || { query: '' });
      return { id: payload.id, type: 'results', hits };
    }

    if (payload.type === 'dispose') {
      index = buildSearchIndex([]);
      return { id: payload.id, type: 'ok' };
    }

    throw new Error('Unsupported worker request type');
  } catch (error) {
    const message = error instanceof Error ? error.message : String(error);
    return { id: payload?.id ?? 'unknown', type: 'error', message };
  }
}
`;
197
// Completes the worker source for the browser: the generated module posts
// each handleMessage() result back via self.postMessage.
function buildWebWorkerSource() {
  return `${sharedRuntimeSource}\nself.onmessage = (event) => { self.postMessage(handleMessage(event.data)); };`;
}
200
// Completes the worker source for Node: the generated script resolves
// parentPort from node:worker_threads and replies to each message with the
// corresponding handleMessage() result.
function buildNodeWorkerSource() {
  return `${sharedRuntimeSource}\nconst { parentPort } = require('node:worker_threads');\nif (!parentPort) { throw new Error('Missing parentPort'); }\nparentPort.on('message', (payload) => { parentPort.postMessage(handleMessage(payload)); });`;
}
203
+
204
+ //#endregion
205
+ export { SearchHitSchema as a, SearchWorkerResponseSchema as c, normalizeText as d, searchIndex as f, SearchDocumentSchema as i, buildSearchIndex as l, buildWebWorkerSource as n, SearchQuerySchema as o, splitTerms as p, SearchDocumentKindSchema as r, SearchWorkerRequestSchema as s, buildNodeWorkerSource as t, createSnippet as u };
package/package.json ADDED
@@ -0,0 +1,39 @@
1
+ {
2
+ "name": "@openspecui/search",
3
+ "version": "1.1.0",
4
+ "description": "Shared search engine and worker providers for OpenSpecUI",
5
+ "type": "module",
6
+ "main": "./dist/index.mjs",
7
+ "types": "./dist/index.d.mts",
8
+ "exports": {
9
+ ".": {
10
+ "import": "./dist/index.mjs",
11
+ "types": "./dist/index.d.mts"
12
+ },
13
+ "./node": {
14
+ "import": "./dist/node.mjs",
15
+ "types": "./dist/node.d.mts"
16
+ }
17
+ },
18
+ "files": [
19
+ "dist"
20
+ ],
21
+ "dependencies": {
22
+ "zod": "^3.24.1"
23
+ },
24
+ "devDependencies": {
25
+ "tsdown": "^0.16.6",
26
+ "typescript": "^5.7.2",
27
+ "vitest": "^2.1.8"
28
+ },
29
+ "peerDependencies": {
30
+ "typescript": "^5.0.0"
31
+ },
32
+ "scripts": {
33
+ "build": "tsdown src/index.ts src/node.ts --format esm --dts",
34
+ "dev": "tsdown src/index.ts src/node.ts --format esm --dts --watch",
35
+ "typecheck": "tsc --noEmit",
36
+ "test": "vitest run",
37
+ "test:watch": "vitest"
38
+ }
39
+ }