@eclipsa/content 0.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +34 -0
- package/.turbo/turbo-test.log +13 -0
- package/.turbo/turbo-typecheck.log +1 -0
- package/dist/internal-h0upzIHm.mjs +644 -0
- package/dist/internal-h0upzIHm.mjs.map +1 -0
- package/dist/internal.d.mts +47 -0
- package/dist/internal.mjs +2 -0
- package/dist/mod-P8gKoDsz.d.mts +151 -0
- package/dist/mod.d.mts +2 -0
- package/dist/mod.mjs +34 -0
- package/dist/mod.mjs.map +1 -0
- package/dist/package.json +40 -0
- package/dist/types-rZ-wc23p.mjs +6 -0
- package/dist/types-rZ-wc23p.mjs.map +1 -0
- package/dist/virtual-runtime.d.ts +24 -0
- package/dist/vite.d.mts +7 -0
- package/dist/vite.mjs +195 -0
- package/dist/vite.mjs.map +1 -0
- package/highlight.ts +125 -0
- package/internal.test.ts +263 -0
- package/internal.ts +514 -0
- package/mod.ts +124 -0
- package/package.json +62 -0
- package/search.test.ts +56 -0
- package/search.ts +450 -0
- package/typecheck.ts +103 -0
- package/types.ts +172 -0
- package/virtual-runtime.d.ts +24 -0
- package/vite-config.test.ts +15 -0
- package/vite.config.ts +16 -0
- package/vite.test.ts +283 -0
- package/vite.ts +276 -0
package/search.test.ts
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
import { describe, expect, it } from 'vitest'
|
|
2
|
+
import {
|
|
3
|
+
buildContentSearchIndex,
|
|
4
|
+
resolveContentSearchOptions,
|
|
5
|
+
searchContentIndex,
|
|
6
|
+
} from './search.ts'
|
|
7
|
+
|
|
8
|
+
describe('@eclipsa/content search', () => {
|
|
9
|
+
it('resolves search options with defaults', () => {
|
|
10
|
+
expect(resolveContentSearchOptions(undefined)).toEqual({
|
|
11
|
+
enabled: true,
|
|
12
|
+
hotkey: '/',
|
|
13
|
+
limit: 10,
|
|
14
|
+
placeholder: 'Search docs...',
|
|
15
|
+
prefix: true,
|
|
16
|
+
})
|
|
17
|
+
expect(resolveContentSearchOptions(false).enabled).toBe(false)
|
|
18
|
+
})
|
|
19
|
+
|
|
20
|
+
it('ranks exact and prefix matches with snippets', () => {
|
|
21
|
+
const index = buildContentSearchIndex(
|
|
22
|
+
[
|
|
23
|
+
{
|
|
24
|
+
body: 'Build apps with eclipsa signals and resumable rendering.',
|
|
25
|
+
code: ['const count = useSignal(0)'],
|
|
26
|
+
collection: 'docs',
|
|
27
|
+
headings: ['Signals'],
|
|
28
|
+
id: 'materials/signal',
|
|
29
|
+
title: 'Signals',
|
|
30
|
+
url: '/docs/materials/signal',
|
|
31
|
+
},
|
|
32
|
+
{
|
|
33
|
+
body: 'Motion primitives for transitions.',
|
|
34
|
+
code: ['motion.div'],
|
|
35
|
+
collection: 'docs',
|
|
36
|
+
headings: ['Motion'],
|
|
37
|
+
id: 'integrations/motion',
|
|
38
|
+
title: 'Motion',
|
|
39
|
+
url: '/docs/integrations/motion',
|
|
40
|
+
},
|
|
41
|
+
],
|
|
42
|
+
resolveContentSearchOptions({
|
|
43
|
+
limit: 3,
|
|
44
|
+
}),
|
|
45
|
+
)
|
|
46
|
+
|
|
47
|
+
const exact = searchContentIndex(index, 'signals')
|
|
48
|
+
expect(exact[0]?.id).toBe('materials/signal')
|
|
49
|
+
expect(exact[0]?.snippet).toContain('signals')
|
|
50
|
+
|
|
51
|
+
const prefix = searchContentIndex(index, 'sig')
|
|
52
|
+
expect(prefix[0]?.id).toBe('materials/signal')
|
|
53
|
+
|
|
54
|
+
expect(searchContentIndex(index, ' ')).toEqual([])
|
|
55
|
+
})
|
|
56
|
+
})
|
package/search.ts
ADDED
|
@@ -0,0 +1,450 @@
|
|
|
1
|
+
import type {
|
|
2
|
+
ContentSearchDocument,
|
|
3
|
+
ContentSearchField,
|
|
4
|
+
ContentSearchIndex,
|
|
5
|
+
ContentSearchOptions,
|
|
6
|
+
ContentSearchQueryOptions,
|
|
7
|
+
ContentSearchResult,
|
|
8
|
+
ContentSearchPosting,
|
|
9
|
+
ResolvedContentSearchOptions,
|
|
10
|
+
} from './types.ts'
|
|
11
|
+
|
|
12
|
+
const DEFAULT_SEARCH_OPTIONS: ResolvedContentSearchOptions = {
|
|
13
|
+
enabled: true,
|
|
14
|
+
hotkey: '/',
|
|
15
|
+
limit: 10,
|
|
16
|
+
placeholder: 'Search docs...',
|
|
17
|
+
prefix: true,
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
const SEARCH_STOPWORDS = new Set([
|
|
21
|
+
'a',
|
|
22
|
+
'an',
|
|
23
|
+
'and',
|
|
24
|
+
'are',
|
|
25
|
+
'as',
|
|
26
|
+
'at',
|
|
27
|
+
'be',
|
|
28
|
+
'by',
|
|
29
|
+
'for',
|
|
30
|
+
'from',
|
|
31
|
+
'has',
|
|
32
|
+
'have',
|
|
33
|
+
'how',
|
|
34
|
+
'in',
|
|
35
|
+
'is',
|
|
36
|
+
'it',
|
|
37
|
+
'of',
|
|
38
|
+
'on',
|
|
39
|
+
'or',
|
|
40
|
+
'that',
|
|
41
|
+
'the',
|
|
42
|
+
'this',
|
|
43
|
+
'to',
|
|
44
|
+
'was',
|
|
45
|
+
'were',
|
|
46
|
+
'with',
|
|
47
|
+
])
|
|
48
|
+
|
|
49
|
+
const SEARCH_K1 = 1.2
|
|
50
|
+
const SEARCH_B = 0.75
|
|
51
|
+
|
|
52
|
+
const isCjkChar = (char: string) =>
|
|
53
|
+
/[\u3400-\u4dbf\u4e00-\u9fff\u3040-\u30ff\uac00-\ud7af]/u.test(char)
|
|
54
|
+
|
|
55
|
+
const tokenizeValue = (text: string, query: boolean) => {
|
|
56
|
+
const tokens: string[] = []
|
|
57
|
+
let current = ''
|
|
58
|
+
|
|
59
|
+
for (const char of text) {
|
|
60
|
+
if (isCjkChar(char)) {
|
|
61
|
+
if (current !== '') {
|
|
62
|
+
const token = current.toLowerCase()
|
|
63
|
+
if (query || (token.length >= 2 && !SEARCH_STOPWORDS.has(token))) {
|
|
64
|
+
tokens.push(token)
|
|
65
|
+
}
|
|
66
|
+
current = ''
|
|
67
|
+
}
|
|
68
|
+
tokens.push(char)
|
|
69
|
+
continue
|
|
70
|
+
}
|
|
71
|
+
if (/[\p{L}\p{N}_]/u.test(char)) {
|
|
72
|
+
current += char
|
|
73
|
+
continue
|
|
74
|
+
}
|
|
75
|
+
if (current !== '') {
|
|
76
|
+
const token = current.toLowerCase()
|
|
77
|
+
if (query || (token.length >= 2 && !SEARCH_STOPWORDS.has(token))) {
|
|
78
|
+
tokens.push(token)
|
|
79
|
+
}
|
|
80
|
+
current = ''
|
|
81
|
+
}
|
|
82
|
+
}
|
|
83
|
+
|
|
84
|
+
if (current !== '') {
|
|
85
|
+
const token = current.toLowerCase()
|
|
86
|
+
if (query || (token.length >= 2 && !SEARCH_STOPWORDS.has(token))) {
|
|
87
|
+
tokens.push(token)
|
|
88
|
+
}
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
return tokens
|
|
92
|
+
}
|
|
93
|
+
|
|
94
|
+
const tokenizeIndex = (text: string) => tokenizeValue(text, false)
|
|
95
|
+
|
|
96
|
+
const tokenizeQuery = (text: string) => tokenizeValue(text, true)
|
|
97
|
+
|
|
98
|
+
const getFieldBoost = (field: ContentSearchField) => {
|
|
99
|
+
switch (field) {
|
|
100
|
+
case 'title':
|
|
101
|
+
return 10
|
|
102
|
+
case 'heading':
|
|
103
|
+
return 5
|
|
104
|
+
case 'code':
|
|
105
|
+
return 0.5
|
|
106
|
+
case 'body':
|
|
107
|
+
default:
|
|
108
|
+
return 1
|
|
109
|
+
}
|
|
110
|
+
}
|
|
111
|
+
|
|
112
|
+
const addDocumentFieldTerms = (
|
|
113
|
+
map: Map<string, { field: ContentSearchField; tf: number }>,
|
|
114
|
+
field: ContentSearchField,
|
|
115
|
+
text: string,
|
|
116
|
+
) => {
|
|
117
|
+
for (const token of tokenizeIndex(text)) {
|
|
118
|
+
const existing = map.get(token)
|
|
119
|
+
if (existing) {
|
|
120
|
+
existing.tf += 1
|
|
121
|
+
continue
|
|
122
|
+
}
|
|
123
|
+
map.set(token, { field, tf: 1 })
|
|
124
|
+
}
|
|
125
|
+
}
|
|
126
|
+
|
|
127
|
+
const getSnippet = (body: string, matches: string[], maxLength = 150) => {
|
|
128
|
+
if (body === '') {
|
|
129
|
+
return ''
|
|
130
|
+
}
|
|
131
|
+
const lowerBody = body.toLowerCase()
|
|
132
|
+
let firstMatchIndex = -1
|
|
133
|
+
for (const match of matches) {
|
|
134
|
+
const index = lowerBody.indexOf(match.toLowerCase())
|
|
135
|
+
if (index !== -1 && (firstMatchIndex === -1 || index < firstMatchIndex)) {
|
|
136
|
+
firstMatchIndex = index
|
|
137
|
+
}
|
|
138
|
+
}
|
|
139
|
+
const start = Math.max(0, firstMatchIndex - 50)
|
|
140
|
+
const end = Math.min(body.length, start + maxLength)
|
|
141
|
+
let snippet = body.slice(start, end).trim()
|
|
142
|
+
if (start > 0) {
|
|
143
|
+
snippet = `...${snippet}`
|
|
144
|
+
}
|
|
145
|
+
if (end < body.length) {
|
|
146
|
+
snippet = `${snippet}...`
|
|
147
|
+
}
|
|
148
|
+
return snippet
|
|
149
|
+
}
|
|
150
|
+
|
|
151
|
+
export const resolveContentSearchOptions = (
|
|
152
|
+
options: boolean | ContentSearchOptions | undefined,
|
|
153
|
+
): ResolvedContentSearchOptions => {
|
|
154
|
+
if (options === false) {
|
|
155
|
+
return {
|
|
156
|
+
...DEFAULT_SEARCH_OPTIONS,
|
|
157
|
+
enabled: false,
|
|
158
|
+
}
|
|
159
|
+
}
|
|
160
|
+
const normalized = typeof options === 'object' ? options : {}
|
|
161
|
+
return {
|
|
162
|
+
enabled: normalized.enabled ?? true,
|
|
163
|
+
hotkey: normalized.hotkey ?? DEFAULT_SEARCH_OPTIONS.hotkey,
|
|
164
|
+
limit: normalized.limit ?? DEFAULT_SEARCH_OPTIONS.limit,
|
|
165
|
+
placeholder: normalized.placeholder ?? DEFAULT_SEARCH_OPTIONS.placeholder,
|
|
166
|
+
prefix: normalized.prefix ?? DEFAULT_SEARCH_OPTIONS.prefix,
|
|
167
|
+
}
|
|
168
|
+
}
|
|
169
|
+
|
|
170
|
+
export const buildContentSearchIndex = (
|
|
171
|
+
documents: ContentSearchDocument[],
|
|
172
|
+
options: ResolvedContentSearchOptions,
|
|
173
|
+
): ContentSearchIndex => {
|
|
174
|
+
const index: Record<string, ContentSearchPosting[]> = {}
|
|
175
|
+
const df: Record<string, number> = {}
|
|
176
|
+
let totalDocumentLength = 0
|
|
177
|
+
|
|
178
|
+
documents.forEach((document, docIdx) => {
|
|
179
|
+
const docTerms = new Map<string, { field: ContentSearchField; tf: number }>()
|
|
180
|
+
|
|
181
|
+
addDocumentFieldTerms(docTerms, 'title', document.title)
|
|
182
|
+
for (const heading of document.headings) {
|
|
183
|
+
addDocumentFieldTerms(docTerms, 'heading', heading)
|
|
184
|
+
}
|
|
185
|
+
addDocumentFieldTerms(docTerms, 'body', document.body)
|
|
186
|
+
for (const code of document.code) {
|
|
187
|
+
addDocumentFieldTerms(docTerms, 'code', code)
|
|
188
|
+
}
|
|
189
|
+
|
|
190
|
+
totalDocumentLength += tokenizeIndex(document.body).length
|
|
191
|
+
|
|
192
|
+
for (const [term, posting] of docTerms) {
|
|
193
|
+
df[term] = (df[term] ?? 0) + 1
|
|
194
|
+
const postings = index[term] ?? []
|
|
195
|
+
postings.push({
|
|
196
|
+
docIdx,
|
|
197
|
+
field: posting.field,
|
|
198
|
+
tf: posting.tf,
|
|
199
|
+
})
|
|
200
|
+
index[term] = postings
|
|
201
|
+
}
|
|
202
|
+
})
|
|
203
|
+
|
|
204
|
+
return {
|
|
205
|
+
avgDl: documents.length === 0 ? 0 : totalDocumentLength / documents.length,
|
|
206
|
+
df,
|
|
207
|
+
docCount: documents.length,
|
|
208
|
+
documents,
|
|
209
|
+
index,
|
|
210
|
+
options,
|
|
211
|
+
}
|
|
212
|
+
}
|
|
213
|
+
|
|
214
|
+
export const searchContentIndex = (
|
|
215
|
+
searchIndex: ContentSearchIndex,
|
|
216
|
+
query: string,
|
|
217
|
+
options: ContentSearchQueryOptions = {},
|
|
218
|
+
): ContentSearchResult[] => {
|
|
219
|
+
if (query.trim() === '' || searchIndex.docCount === 0) {
|
|
220
|
+
return []
|
|
221
|
+
}
|
|
222
|
+
|
|
223
|
+
const tokens = tokenizeQuery(query)
|
|
224
|
+
if (tokens.length === 0) {
|
|
225
|
+
return []
|
|
226
|
+
}
|
|
227
|
+
|
|
228
|
+
const limit = options.limit ?? searchIndex.options.limit
|
|
229
|
+
const prefix = options.prefix ?? searchIndex.options.prefix
|
|
230
|
+
const docScores = new Map<number, { matches: Set<string>; score: number }>()
|
|
231
|
+
|
|
232
|
+
tokens.forEach((token, index) => {
|
|
233
|
+
const isLastToken = index === tokens.length - 1
|
|
234
|
+
const matchingTerms =
|
|
235
|
+
prefix && isLastToken && token.length >= 2
|
|
236
|
+
? Object.keys(searchIndex.index).filter((term) => term.startsWith(token))
|
|
237
|
+
: searchIndex.index[token]
|
|
238
|
+
? [token]
|
|
239
|
+
: []
|
|
240
|
+
|
|
241
|
+
for (const term of matchingTerms) {
|
|
242
|
+
const postings = searchIndex.index[term] ?? []
|
|
243
|
+
const df = searchIndex.df[term] ?? 1
|
|
244
|
+
const idf = Math.log((searchIndex.docCount - df + 0.5) / (df + 0.5) + 1)
|
|
245
|
+
|
|
246
|
+
for (const posting of postings) {
|
|
247
|
+
const document = searchIndex.documents[posting.docIdx]
|
|
248
|
+
if (!document) {
|
|
249
|
+
continue
|
|
250
|
+
}
|
|
251
|
+
const docLength = Math.max(1, tokenizeIndex(document.body).length)
|
|
252
|
+
const score =
|
|
253
|
+
idf *
|
|
254
|
+
((posting.tf * (SEARCH_K1 + 1)) /
|
|
255
|
+
(posting.tf +
|
|
256
|
+
SEARCH_K1 *
|
|
257
|
+
(1 - SEARCH_B + (SEARCH_B * docLength) / Math.max(1, searchIndex.avgDl)))) *
|
|
258
|
+
getFieldBoost(posting.field)
|
|
259
|
+
|
|
260
|
+
const current = docScores.get(posting.docIdx) ?? {
|
|
261
|
+
matches: new Set<string>(),
|
|
262
|
+
score: 0,
|
|
263
|
+
}
|
|
264
|
+
current.score += score
|
|
265
|
+
current.matches.add(term)
|
|
266
|
+
docScores.set(posting.docIdx, current)
|
|
267
|
+
}
|
|
268
|
+
}
|
|
269
|
+
})
|
|
270
|
+
|
|
271
|
+
return [...docScores.entries()]
|
|
272
|
+
.map(([docIdx, value]) => {
|
|
273
|
+
const document = searchIndex.documents[docIdx]!
|
|
274
|
+
const matches = [...value.matches]
|
|
275
|
+
return {
|
|
276
|
+
collection: document.collection,
|
|
277
|
+
id: document.id,
|
|
278
|
+
matches,
|
|
279
|
+
score: value.score,
|
|
280
|
+
snippet: getSnippet(document.body, matches),
|
|
281
|
+
title: document.title,
|
|
282
|
+
url: document.url,
|
|
283
|
+
} satisfies ContentSearchResult
|
|
284
|
+
})
|
|
285
|
+
.sort((left, right) => right.score - left.score)
|
|
286
|
+
.slice(0, limit)
|
|
287
|
+
}
|
|
288
|
+
|
|
289
|
+
/**
 * Emits the source of the browser-side search runtime as a string, for use
 * as a virtual module.
 *
 * The generated module lazily fetches the JSON index from `assetPath`
 * (memoized in a module-level promise; fetch/parse failures resolve to
 * null, making `search` return []), embeds the resolved `options` via
 * JSON.stringify, and exports a `search(query, options)` function that
 * mirrors the build-time BM25 scoring in searchContentIndex, with k1=1.2
 * and b=0.75 inlined as literals.
 *
 * NOTE(review): the generated code approximates document length with a
 * whitespace split rather than the index-time tokenizer used by
 * searchContentIndex, so runtime scores can differ slightly from
 * build-time scores — presumably an accepted approximation; confirm.
 */
export const generateContentSearchRuntimeModule = (
  assetPath: string,
  options: ResolvedContentSearchOptions,
) => `let searchIndexPromise = null
const searchOptions = ${JSON.stringify(options)}

const loadSearchIndex = async () => {
  if (searchIndexPromise) {
    return searchIndexPromise
  }
  searchIndexPromise = fetch(${JSON.stringify(assetPath)})
    .then((response) => {
      if (!response.ok) {
        throw new Error('Failed to load search index.')
      }
      return response.json()
    })
    .catch(() => null)
  return searchIndexPromise
}

const isCjkChar = (char) => /[\\u3400-\\u4dbf\\u4e00-\\u9fff\\u3040-\\u30ff\\uac00-\\ud7af]/u.test(char)

const tokenizeQuery = (text) => {
  const tokens = []
  let current = ''
  for (const char of text) {
    if (isCjkChar(char)) {
      if (current !== '') {
        tokens.push(current.toLowerCase())
        current = ''
      }
      tokens.push(char)
      continue
    }
    if (/[\\p{L}\\p{N}_]/u.test(char)) {
      current += char
      continue
    }
    if (current !== '') {
      tokens.push(current.toLowerCase())
      current = ''
    }
  }
  if (current !== '') {
    tokens.push(current.toLowerCase())
  }
  return tokens
}

const getFieldBoost = (field) => {
  switch (field) {
    case 'title':
      return 10
    case 'heading':
      return 5
    case 'code':
      return 0.5
    case 'body':
    default:
      return 1
  }
}

const getSnippet = (body, matches, maxLength = 150) => {
  if (body === '') {
    return ''
  }
  const lowerBody = body.toLowerCase()
  let firstMatchIndex = -1
  for (const match of matches) {
    const index = lowerBody.indexOf(match.toLowerCase())
    if (index !== -1 && (firstMatchIndex === -1 || index < firstMatchIndex)) {
      firstMatchIndex = index
    }
  }
  const start = Math.max(0, firstMatchIndex - 50)
  const end = Math.min(body.length, start + maxLength)
  let snippet = body.slice(start, end).trim()
  if (start > 0) {
    snippet = '...' + snippet
  }
  if (end < body.length) {
    snippet = snippet + '...'
  }
  return snippet
}

export const search = async (
  query,
  options = {},
) => {
  const searchIndex = await loadSearchIndex()
  if (!searchIndex || query.trim() === '') {
    return []
  }
  const tokens = tokenizeQuery(query)
  if (tokens.length === 0) {
    return []
  }
  const limit = options.limit ?? searchOptions.limit
  const prefix = options.prefix ?? searchOptions.prefix
  const docScores = new Map()

  tokens.forEach((token, tokenIndex) => {
    const isLastToken = tokenIndex === tokens.length - 1
    const matchingTerms =
      prefix && isLastToken && token.length >= 2
        ? Object.keys(searchIndex.index).filter((term) => term.startsWith(token))
        : searchIndex.index[token]
          ? [token]
          : []

    for (const term of matchingTerms) {
      const postings = searchIndex.index[term] ?? []
      const df = searchIndex.df[term] ?? 1
      const idf = Math.log((searchIndex.docCount - df + 0.5) / (df + 0.5) + 1)

      for (const posting of postings) {
        const document = searchIndex.documents[posting.docIdx]
        if (!document) {
          continue
        }
        const docLength = Math.max(1, document.body.split(/\\s+/u).filter(Boolean).length)
        const score =
          idf *
          ((posting.tf * (1.2 + 1)) /
            (posting.tf + 1.2 * (1 - 0.75 + (0.75 * docLength) / Math.max(1, searchIndex.avgDl)))) *
          getFieldBoost(posting.field)

        const current = docScores.get(posting.docIdx) ?? {
          matches: new Set(),
          score: 0,
        }
        current.score += score
        current.matches.add(term)
        docScores.set(posting.docIdx, current)
      }
    }
  })

  return [...docScores.entries()]
    .map(([docIdx, value]) => {
      const document = searchIndex.documents[docIdx]
      const matches = [...value.matches]
      return {
        collection: document.collection,
        id: document.id,
        matches,
        score: value.score,
        snippet: getSnippet(document.body, matches),
        title: document.title,
        url: document.url,
      }
    })
    .sort((left, right) => right.score - left.score)
    .slice(0, limit)
}

export { searchOptions }
export default { search, searchOptions }
`
|
package/typecheck.ts
ADDED
|
@@ -0,0 +1,103 @@
|
|
|
1
|
+
import {
|
|
2
|
+
defineCollection,
|
|
3
|
+
getEntries,
|
|
4
|
+
getEntry,
|
|
5
|
+
getCollection,
|
|
6
|
+
glob,
|
|
7
|
+
type CollectionEntry,
|
|
8
|
+
} from './mod.ts'
|
|
9
|
+
import type { StandardSchemaV1 } from 'eclipsa'
|
|
10
|
+
|
|
11
|
+
// Compile-time strict equality check: resolves to `true` only when Left and
// Right are identical types (the conditional-generic-signature trick —
// assignability of the two function types requires exact type identity).
type Equal<Left, Right> =
  (<T>() => T extends Left ? 1 : 2) extends <T>() => T extends Right ? 1 : 2 ? true : false
// Fails to compile unless its argument is exactly `true`; pairs with Equal
// to turn type mismatches into compile errors.
type Expect<T extends true> = T
|
|
14
|
+
|
|
15
|
+
// Minimal hand-rolled StandardSchemaV1 whose input type ({ title }) differs
// from its output type ({ order, title }), so the assertions below can
// verify that entry data is typed with the schema's *output* type.
const schema = {
  '~standard': {
    // Phantom carrier for the schema's input/output types; never read at
    // runtime — the double cast only exists to shape the type.
    types: undefined as unknown as {
      input: {
        title: string
      }
      output: {
        order: number
        title: string
      }
    },
    // Passes the value through unchecked: this file exercises types only,
    // not runtime validation.
    validate(value: unknown) {
      return {
        value: value as {
          order: number
          title: string
        },
      }
    },
    vendor: 'typecheck',
    version: 1 as const,
  },
} satisfies StandardSchemaV1<
  {
    title: string
  },
  {
    order: number
    title: string
  }
>
|
|
46
|
+
|
|
47
|
+
// Representative collection definition exercising the loader, markdown,
// search, and schema options of defineCollection together.
const docs = defineCollection({
  loader: glob({
    base: './content/docs',
    pattern: '**/*.md',
  }),
  markdown: {
    highlight: {
      theme: 'github-dark',
    },
  },
  search: {
    hotkey: 'k',
    limit: 6,
    placeholder: 'Search docs',
  },
  schema,
})
|
|
64
|
+
|
|
65
|
+
// Phantom entry used purely for type-level inspection; never instantiated.
declare const entry: CollectionEntry<typeof docs>

type _Id = Expect<Equal<typeof entry.id, string>>
type _Collection = Expect<Equal<typeof entry.collection, string>>
// Entry data must carry the schema's *output* type (order added by validate).
type _Data = Expect<
  Equal<
    typeof entry.data,
    {
      order: number
      title: string
    }
  >
>

// Capture public API signatures via Parameters/ReturnType without calling
// anything at runtime.
declare const filter: Parameters<typeof getCollection<typeof docs>>[1]
declare const refsPromise: ReturnType<
  typeof getEntries<
    [
      {
        collection: typeof docs
        id: 'guide/start-here'
      },
    ]
  >
>
declare const entryPromise: ReturnType<typeof getEntry<typeof docs>>

// getCollection's optional filter must accept sync or async predicates.
type _Filter = Expect<
  Equal<
    typeof filter,
    ((entry: CollectionEntry<typeof docs>) => boolean | Promise<boolean>) | undefined
  >
>
// getEntry resolves to the entry or undefined when missing.
type _GetEntry = Expect<
  Equal<typeof entryPromise, Promise<CollectionEntry<typeof docs> | undefined>>
>
// getEntries preserves tuple arity, each element possibly undefined.
type _GetEntries = Expect<
  Equal<typeof refsPromise, Promise<[CollectionEntry<typeof docs> | undefined]>>
>